
Searched refs:layer (Results 1 – 25 of 87) sorted by relevance

/drivers/media/platform/s5p-tv/
mixer_video.c
161 struct mxr_layer *layer = video_drvdata(file); in mxr_querycap() local
163 mxr_dbg(layer->mdev, "%s:%d\n", __func__, __LINE__); in mxr_querycap()
166 strlcpy(cap->card, layer->vfd.name, sizeof(cap->card)); in mxr_querycap()
167 sprintf(cap->bus_info, "%d", layer->idx); in mxr_querycap()
192 static void mxr_layer_default_geo(struct mxr_layer *layer) in mxr_layer_default_geo() argument
194 struct mxr_device *mdev = layer->mdev; in mxr_layer_default_geo()
197 memset(&layer->geo, 0, sizeof(layer->geo)); in mxr_layer_default_geo()
201 layer->geo.dst.full_width = mbus_fmt.width; in mxr_layer_default_geo()
202 layer->geo.dst.full_height = mbus_fmt.height; in mxr_layer_default_geo()
203 layer->geo.dst.width = layer->geo.dst.full_width; in mxr_layer_default_geo()
[all …]
mixer_vp_layer.c
84 static void mxr_vp_layer_release(struct mxr_layer *layer) in mxr_vp_layer_release() argument
86 mxr_base_layer_unregister(layer); in mxr_vp_layer_release()
87 mxr_base_layer_release(layer); in mxr_vp_layer_release()
90 static void mxr_vp_buffer_set(struct mxr_layer *layer, in mxr_vp_buffer_set() argument
97 mxr_reg_vp_buffer(layer->mdev, luma_addr, chroma_addr); in mxr_vp_buffer_set()
101 if (layer->fmt->num_subframes == 2) { in mxr_vp_buffer_set()
108 &layer->fmt->plane[0], layer->geo.src.full_width, in mxr_vp_buffer_set()
109 layer->geo.src.full_height); in mxr_vp_buffer_set()
111 if (layer->fmt->cookie & VP_MODE_MEM_TILED) { in mxr_vp_buffer_set()
115 luma_addr[1] = luma_addr[0] + layer->geo.src.full_width; in mxr_vp_buffer_set()
[all …]
mixer_grp_layer.c
77 static void mxr_graph_layer_release(struct mxr_layer *layer) in mxr_graph_layer_release() argument
79 mxr_base_layer_unregister(layer); in mxr_graph_layer_release()
80 mxr_base_layer_release(layer); in mxr_graph_layer_release()
83 static void mxr_graph_buffer_set(struct mxr_layer *layer, in mxr_graph_buffer_set() argument
90 mxr_reg_graph_buffer(layer->mdev, layer->idx, addr); in mxr_graph_buffer_set()
93 static void mxr_graph_stream_set(struct mxr_layer *layer, int en) in mxr_graph_stream_set() argument
95 mxr_reg_graph_layer_stream(layer->mdev, layer->idx, en); in mxr_graph_stream_set()
98 static void mxr_graph_format_set(struct mxr_layer *layer) in mxr_graph_format_set() argument
100 mxr_reg_graph_format(layer->mdev, layer->idx, in mxr_graph_format_set()
101 layer->fmt, &layer->geo); in mxr_graph_format_set()
[all …]
mixer_reg.c
253 static void mxr_irq_layer_handle(struct mxr_layer *layer) in mxr_irq_layer_handle() argument
255 struct list_head *head = &layer->enq_list; in mxr_irq_layer_handle()
259 if (layer == NULL) in mxr_irq_layer_handle()
262 spin_lock(&layer->enq_slock); in mxr_irq_layer_handle()
263 if (layer->state == MXR_LAYER_IDLE) in mxr_irq_layer_handle()
266 done = layer->shadow_buf; in mxr_irq_layer_handle()
267 layer->shadow_buf = layer->update_buf; in mxr_irq_layer_handle()
270 if (layer->state != MXR_LAYER_STREAMING) in mxr_irq_layer_handle()
271 layer->update_buf = NULL; in mxr_irq_layer_handle()
276 layer->update_buf = next; in mxr_irq_layer_handle()
[all …]
mixer_drv.c
327 for (i = 0; i < ARRAY_SIZE(mdev->layer); ++i) in mxr_release_layers()
328 if (mdev->layer[i]) in mxr_release_layers()
329 mxr_layer_release(mdev->layer[i]); in mxr_release_layers()
335 mdev->layer[0] = mxr_graph_layer_create(mdev, 0); in mxr_acquire_layers()
336 mdev->layer[1] = mxr_graph_layer_create(mdev, 1); in mxr_acquire_layers()
337 mdev->layer[2] = mxr_vp_layer_create(mdev, 0); in mxr_acquire_layers()
339 if (!mdev->layer[0] || !mdev->layer[1] || !mdev->layer[2]) { in mxr_acquire_layers()
mixer.h
238 struct mxr_layer *layer[MXR_MAX_LAYERS]; member
305 void mxr_base_layer_release(struct mxr_layer *layer);
306 void mxr_layer_release(struct mxr_layer *layer);
308 int mxr_base_layer_register(struct mxr_layer *layer);
309 void mxr_base_layer_unregister(struct mxr_layer *layer);
/drivers/gpu/drm/atmel-hlcdc/
atmel_hlcdc_layer.c
45 atmel_hlcdc_layer_fb_flip_release_queue(struct atmel_hlcdc_layer *layer, in atmel_hlcdc_layer_fb_flip_release_queue() argument
53 for (i = 0; i < layer->max_planes; i++) { in atmel_hlcdc_layer_fb_flip_release_queue()
61 drm_flip_work_queue_task(&layer->gc, flip->task); in atmel_hlcdc_layer_fb_flip_release_queue()
62 drm_flip_work_commit(&layer->gc, layer->wq); in atmel_hlcdc_layer_fb_flip_release_queue()
65 static void atmel_hlcdc_layer_update_reset(struct atmel_hlcdc_layer *layer, in atmel_hlcdc_layer_update_reset() argument
68 struct atmel_hlcdc_layer_update *upd = &layer->update; in atmel_hlcdc_layer_update_reset()
75 bitmap_clear(slot->updated_configs, 0, layer->desc->nconfigs); in atmel_hlcdc_layer_update_reset()
77 sizeof(*slot->configs) * layer->desc->nconfigs); in atmel_hlcdc_layer_update_reset()
80 atmel_hlcdc_layer_fb_flip_release_queue(layer, slot->fb_flip); in atmel_hlcdc_layer_update_reset()
85 static void atmel_hlcdc_layer_update_apply(struct atmel_hlcdc_layer *layer) in atmel_hlcdc_layer_update_apply() argument
[all …]
atmel_hlcdc_plane.c
263 &plane->layer.desc->layout; in atmel_hlcdc_plane_update_pos_and_size()
266 atmel_hlcdc_layer_update_cfg(&plane->layer, in atmel_hlcdc_plane_update_pos_and_size()
273 atmel_hlcdc_layer_update_cfg(&plane->layer, in atmel_hlcdc_plane_update_pos_and_size()
280 atmel_hlcdc_layer_update_cfg(&plane->layer, in atmel_hlcdc_plane_update_pos_and_size()
299 atmel_hlcdc_layer_update_cfg(&plane->layer, in atmel_hlcdc_plane_update_pos_and_size()
322 atmel_hlcdc_layer_update_cfg(&plane->layer, in atmel_hlcdc_plane_update_pos_and_size()
336 atmel_hlcdc_layer_update_cfg(&plane->layer, 13, 0xffffffff, in atmel_hlcdc_plane_update_pos_and_size()
339 atmel_hlcdc_layer_update_cfg(&plane->layer, 13, 0xffffffff, 0); in atmel_hlcdc_plane_update_pos_and_size()
348 &plane->layer.desc->layout; in atmel_hlcdc_plane_update_general_settings()
362 atmel_hlcdc_layer_update_cfg(&plane->layer, in atmel_hlcdc_plane_update_general_settings()
[all …]
atmel_hlcdc_layer.h
371 void atmel_hlcdc_layer_irq(struct atmel_hlcdc_layer *layer);
374 struct atmel_hlcdc_layer *layer,
378 struct atmel_hlcdc_layer *layer);
380 void atmel_hlcdc_layer_disable(struct atmel_hlcdc_layer *layer);
382 int atmel_hlcdc_layer_update_start(struct atmel_hlcdc_layer *layer);
384 void atmel_hlcdc_layer_update_cfg(struct atmel_hlcdc_layer *layer, int cfg,
387 void atmel_hlcdc_layer_update_set_fb(struct atmel_hlcdc_layer *layer,
391 void atmel_hlcdc_layer_update_set_finished(struct atmel_hlcdc_layer *layer,
395 void atmel_hlcdc_layer_update_rollback(struct atmel_hlcdc_layer *layer);
397 void atmel_hlcdc_layer_update_commit(struct atmel_hlcdc_layer *layer);
atmel_hlcdc_dc.c
393 struct atmel_hlcdc_layer *layer = dc->layers[i]; in atmel_hlcdc_dc_irq_handler() local
395 if (!(ATMEL_HLCDC_LAYER_STATUS(i) & status) || !layer) in atmel_hlcdc_dc_irq_handler()
398 atmel_hlcdc_layer_irq(layer); in atmel_hlcdc_dc_irq_handler()
455 dc->layers[planes->primary->layer.desc->id] = in atmel_hlcdc_dc_modeset_init()
456 &planes->primary->layer; in atmel_hlcdc_dc_modeset_init()
459 dc->layers[planes->cursor->layer.desc->id] = in atmel_hlcdc_dc_modeset_init()
460 &planes->cursor->layer; in atmel_hlcdc_dc_modeset_init()
463 dc->layers[planes->overlays[i]->layer.desc->id] = in atmel_hlcdc_dc_modeset_init()
464 &planes->overlays[i]->layer; in atmel_hlcdc_dc_modeset_init()
atmel_hlcdc_dc.h
87 struct atmel_hlcdc_layer layer; member
100 return container_of(l, struct atmel_hlcdc_plane, layer); in atmel_hlcdc_layer_to_plane()
/drivers/media/platform/davinci/
vpbe_display.c
51 struct vpbe_layer *layer);
72 struct vpbe_layer *layer) in vpbe_isr_even_field() argument
74 if (layer->cur_frm == layer->next_frm) in vpbe_isr_even_field()
77 v4l2_get_timestamp(&layer->cur_frm->vb.timestamp); in vpbe_isr_even_field()
78 vb2_buffer_done(&layer->cur_frm->vb.vb2_buf, VB2_BUF_STATE_DONE); in vpbe_isr_even_field()
80 layer->cur_frm = layer->next_frm; in vpbe_isr_even_field()
84 struct vpbe_layer *layer) in vpbe_isr_odd_field() argument
90 if (list_empty(&layer->dma_queue) || in vpbe_isr_odd_field()
91 (layer->cur_frm != layer->next_frm)) { in vpbe_isr_odd_field()
101 layer->next_frm = list_entry(layer->dma_queue.next, in vpbe_isr_odd_field()
[all …]
vpbe_osd.c
117 #define is_osd_win(layer) (((layer) == WIN_OSD0) || ((layer) == WIN_OSD1)) argument
118 #define is_vid_win(layer) (((layer) == WIN_VID0) || ((layer) == WIN_VID1)) argument
424 static void _osd_set_zoom(struct osd_state *sd, enum osd_layer layer, in _osd_set_zoom() argument
430 switch (layer) { in _osd_set_zoom()
458 static void _osd_disable_layer(struct osd_state *sd, enum osd_layer layer) in _osd_disable_layer() argument
460 switch (layer) { in _osd_disable_layer()
478 static void osd_disable_layer(struct osd_state *sd, enum osd_layer layer) in osd_disable_layer() argument
481 struct osd_window_state *win = &osd->win[layer]; in osd_disable_layer()
492 _osd_disable_layer(sd, layer); in osd_disable_layer()
503 static void _osd_enable_layer(struct osd_state *sd, enum osd_layer layer) in _osd_enable_layer() argument
[all …]
/drivers/media/dvb-frontends/
mb86a20s.c
384 unsigned layer) in mb86a20s_get_modulation() argument
393 if (layer >= ARRAY_SIZE(reg)) in mb86a20s_get_modulation()
395 rc = mb86a20s_writereg(state, 0x6d, reg[layer]); in mb86a20s_get_modulation()
416 unsigned layer) in mb86a20s_get_fec() argument
426 if (layer >= ARRAY_SIZE(reg)) in mb86a20s_get_fec()
428 rc = mb86a20s_writereg(state, 0x6d, reg[layer]); in mb86a20s_get_fec()
451 unsigned layer) in mb86a20s_get_interleaving() argument
464 if (layer >= ARRAY_SIZE(reg)) in mb86a20s_get_interleaving()
466 rc = mb86a20s_writereg(state, 0x6d, reg[layer]); in mb86a20s_get_interleaving()
477 unsigned layer) in mb86a20s_get_segment_count() argument
[all …]
dib8000.c
1994 switch (c->layer[layer_index].modulation) { in dib8000_set_layer()
2010 switch (c->layer[layer_index].fec) { in dib8000_set_layer()
2029 time_intlv = fls(c->layer[layer_index].interleaving); in dib8000_set_layer()
2033 …dib8000_write_word(state, 2 + layer_index, (constellation << 10) | ((c->layer[layer_index].segment… in dib8000_set_layer()
2034 if (c->layer[layer_index].segment_count > 0) { in dib8000_set_layer()
2038 if (c->layer[layer_index].modulation == QAM_16 || c->layer[layer_index].modulation == QAM_64) in dib8000_set_layer()
2039 max_constellation = c->layer[layer_index].modulation; in dib8000_set_layer()
2042 if (c->layer[layer_index].modulation == QAM_64) in dib8000_set_layer()
2043 max_constellation = c->layer[layer_index].modulation; in dib8000_set_layer()
2199 if (c->layer[0].modulation == DQPSK) /* DQPSK */ in dib8000_small_fine_tune()
[all …]
tc90522.c
230 c->layer[0].fec = c->fec_inner; in tc90522s_get_frontend()
231 c->layer[0].modulation = c->modulation; in tc90522s_get_frontend()
232 c->layer[0].segment_count = val[3] & 0x3f; /* slots */ in tc90522s_get_frontend()
236 c->layer[1].fec = fec_conv_sat[v]; in tc90522s_get_frontend()
238 c->layer[1].segment_count = 0; in tc90522s_get_frontend()
240 c->layer[1].segment_count = val[4] & 0x3f; /* slots */ in tc90522s_get_frontend()
245 c->layer[1].modulation = QPSK; in tc90522s_get_frontend()
373 c->layer[0].segment_count = 0; in tc90522t_get_frontend()
376 c->layer[0].segment_count = v; in tc90522t_get_frontend()
377 c->layer[0].fec = fec_conv_ter[(val[1] & 0x1c) >> 2]; in tc90522t_get_frontend()
[all …]
/drivers/pci/pcie/aer/
aerdrv_errprint.c
168 int layer, agent; in aer_print_error() local
177 layer = AER_GET_LAYER_ERROR(info->severity, info->status); in aer_print_error()
182 aer_error_layer[layer], id, aer_agent_string[agent]); in aer_print_error()
225 int aer_severity, layer, agent, status_strs_size, tlp_header_valid = 0; in cper_print_aer() local
244 layer = AER_GET_LAYER_ERROR(aer_severity, status); in cper_print_aer()
250 aer_error_layer[layer], aer_agent_string[agent]); in cper_print_aer()
/drivers/staging/most/Documentation/
driver_usage.txt
10 data using a single medium (physical layer). Media currently in use are
26 The driver consists basically of three layers. The hardware layer, the
27 core layer and the application layer. The core layer consists of the core
33 system architecture. A module of the hardware layer is referred to as an
34 HDM (hardware dependent module). Each module of this layer handles exactly
36 USB, MediaLB, I2C). A module of the application layer is referred to as an
37 AIM (application interfacing module). The modules of this layer give access
49 The hardware layer contains so called hardware dependent modules (HDM). For each
69 The core layer contains the mostcore module only, which processes the driver
76 The application layer contains so called application interfacing modules (AIM).
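The driver_usage.txt hits above describe MOST's three-layer split: hardware-dependent modules (HDM) below, the mostcore routing core in the middle, and application interfacing modules (AIM) on top. As a rough illustration of that layering only, here is a minimal C sketch; every identifier in it is hypothetical and should not be mistaken for the actual mostcore interface.

    /*
     * Illustrative sketch only: the type and function names below are
     * hypothetical, not the real mostcore API; they merely mirror the
     * HDM / core / AIM split described in the excerpt above.
     */
    #include <stddef.h>

    /* Hardware layer: one HDM drives exactly one interface (USB, MediaLB, I2C). */
    struct hdm_ops {
            int (*configure_channel)(int channel_id);
            int (*enqueue)(int channel_id, void *buf, size_t len);
    };

    /* Application layer: an AIM exposes channel data to the kernel or user space
     * (character device, networking, sound, ...). */
    struct aim_ops {
            int (*rx_completion)(int channel_id, void *buf, size_t len);
            int (*tx_completion)(int channel_id);
    };

    /* Core layer: registers both sides and routes buffers between them,
     * so HDMs and AIMs never call each other directly. */
    int core_register_hdm(const struct hdm_ops *hdm);
    int core_register_aim(const struct aim_ops *aim);
    int core_link_channel(int channel_id, const struct hdm_ops *hdm,
                          const struct aim_ops *aim);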
/drivers/net/ethernet/stmicro/stmmac/
Kconfig
48 This selects the IPQ806x SoC glue layer support for the stmmac
68 This selects the Amlogic Meson SoC glue layer support for
80 This selects the Rockchip RK3288 SoC glue layer support for
91 This selects the Altera SOCFPGA SoC glue layer support
103 This selects STi SoC glue layer support for the stmmac
114 This selects Allwinner SoC glue layer support for the
/drivers/media/dvb-core/
dvb_frontend.c
1053 c->layer[i].fec = FEC_AUTO; in dvb_frontend_clear_cache()
1054 c->layer[i].modulation = QAM_AUTO; in dvb_frontend_clear_cache()
1055 c->layer[i].interleaving = 0; in dvb_frontend_clear_cache()
1056 c->layer[i].segment_count = 0; in dvb_frontend_clear_cache()
1470 tvp->u.data = c->layer[0].fec; in dtv_property_process_get()
1473 tvp->u.data = c->layer[0].modulation; in dtv_property_process_get()
1476 tvp->u.data = c->layer[0].segment_count; in dtv_property_process_get()
1479 tvp->u.data = c->layer[0].interleaving; in dtv_property_process_get()
1482 tvp->u.data = c->layer[1].fec; in dtv_property_process_get()
1485 tvp->u.data = c->layer[1].modulation; in dtv_property_process_get()
[all …]
dvb_filter.c
313 ai->layer = (headr[1] & 0x06) >> 1;
316 printk("Audiostream: Layer: %d", 4-ai->layer);
319 ai->bit_rate = bitrates[(3-ai->layer)][(headr[2] >> 4 )]*1000;
370 ai->layer = 0; // 0 for AC3 in dvb_filter_get_ac3info()
/drivers/staging/lustre/
TODO
5 * Clean up libcfs layer. Ideally we can remove include/linux/libcfs entirely.
6 * Clean up CLIO layer. Lustre client readahead/writeback control needs to better
/drivers/s390/net/
Kconfig
77 prompt "qeth layer 2 device support"
80 Select this option to be able to run qeth devices in layer 2 mode.
86 prompt "qeth layer 3 device support"
89 Select this option to be able to run qeth devices in layer 3 mode.
/drivers/ata/
Kconfig
559 PATA controllers via the new ATA layer.
568 PATA controllers via the new ATA layer.
577 controllers via the new ATA layer.
586 PATA controllers via the new ATA layer
620 controllers via the new ATA layer.
629 PATA controllers via the new ATA layer, including RAID
639 ATA layer.
740 PATA controllers via the new ATA layer
749 controllers via the new ATA layer. For the RDC 1010, you need to
777 HT1000 PATA controllers, via the new ATA layer.
[all …]
/drivers/edac/
edac_mc.c
272 struct edac_mc_layer *layer; in edac_mc_alloc() local
306 layer = edac_align_ptr(&ptr, sizeof(*layer), n_layers); in edac_mc_alloc()
332 layer = (struct edac_mc_layer *)(((char *)mci) + ((unsigned long)layer)); in edac_mc_alloc()
344 mci->layers = layer; in edac_mc_alloc()
345 memcpy(mci->layers, layers, sizeof(*layer) * n_layers); in edac_mc_alloc()
391 off = EDAC_DIMM_OFF(layer, n_layers, pos[0], pos[1], pos[2]); in edac_mc_alloc()
