Home
last modified time | relevance | path

Searched refs:xt (Results 1 – 15 of 15) sorted by relevance

/drivers/media/platform/
m2m-deinterlace.c:142 struct dma_interleaved_template *xt; member
235 ctx->xt->numf = s_height / 2; in deinterlace_issue_dma()
236 ctx->xt->sgl[0].size = s_width; in deinterlace_issue_dma()
237 ctx->xt->sgl[0].icg = s_width; in deinterlace_issue_dma()
238 ctx->xt->src_start = p_in; in deinterlace_issue_dma()
239 ctx->xt->dst_start = p_out; in deinterlace_issue_dma()
242 ctx->xt->numf = s_height / 2; in deinterlace_issue_dma()
243 ctx->xt->sgl[0].size = s_width; in deinterlace_issue_dma()
244 ctx->xt->sgl[0].icg = s_width; in deinterlace_issue_dma()
245 ctx->xt->src_start = p_in + s_size / 2; in deinterlace_issue_dma()
[all …]
/drivers/media/platform/omap/
omap_vout_vrfb.c:142 vout->vrfb_dma_tx.xt = kzalloc(xt_size, GFP_KERNEL); in omap_vout_setup_vrfb_bufs()
143 if (!vout->vrfb_dma_tx.xt) { in omap_vout_setup_vrfb_bufs()
187 kfree(vout->vrfb_dma_tx.xt); in omap_vout_release_vrfb()
236 struct dma_interleaved_template *xt = vout->vrfb_dma_tx.xt; in omap_vout_prepare_vrfb() local
256 xt->src_start = buf_phy_addr; in omap_vout_prepare_vrfb()
257 xt->dst_start = vout->vrfb_context[vb->index].paddr[0]; in omap_vout_prepare_vrfb()
259 xt->numf = vout->pix.height; in omap_vout_prepare_vrfb()
260 xt->frame_size = 1; in omap_vout_prepare_vrfb()
261 xt->sgl[0].size = vout->pix.width * vout->bpp; in omap_vout_prepare_vrfb()
262 xt->sgl[0].icg = dst_icg; in omap_vout_prepare_vrfb()
[all …]
omap_voutdef.h:87 struct dma_interleaved_template *xt; member
/drivers/dma/
dma-axi-dmac.c:611 struct dma_chan *c, struct dma_interleaved_template *xt, in axi_dmac_prep_interleaved() argument
618 if (xt->frame_size != 1) in axi_dmac_prep_interleaved()
621 if (xt->dir != chan->direction) in axi_dmac_prep_interleaved()
625 if (!xt->src_inc || !axi_dmac_check_addr(chan, xt->src_start)) in axi_dmac_prep_interleaved()
630 if (!xt->dst_inc || !axi_dmac_check_addr(chan, xt->dst_start)) in axi_dmac_prep_interleaved()
634 dst_icg = dmaengine_get_dst_icg(xt, &xt->sgl[0]); in axi_dmac_prep_interleaved()
635 src_icg = dmaengine_get_src_icg(xt, &xt->sgl[0]); in axi_dmac_prep_interleaved()
638 if (!axi_dmac_check_len(chan, xt->sgl[0].size) || in axi_dmac_prep_interleaved()
639 xt->numf == 0) in axi_dmac_prep_interleaved()
641 if (xt->sgl[0].size + dst_icg > chan->max_length || in axi_dmac_prep_interleaved()
[all …]
at_xdmac.c:852 struct dma_interleaved_template *xt, in at_xdmac_interleaved_queue_desc() argument
890 if (xt->src_inc) { in at_xdmac_interleaved_queue_desc()
891 if (xt->src_sgl) in at_xdmac_interleaved_queue_desc()
897 if (xt->dst_inc) { in at_xdmac_interleaved_queue_desc()
898 if (xt->dst_sgl) in at_xdmac_interleaved_queue_desc()
918 desc->lld.mbr_sus = dmaengine_get_src_icg(xt, chunk); in at_xdmac_interleaved_queue_desc()
919 desc->lld.mbr_dus = dmaengine_get_dst_icg(xt, chunk); in at_xdmac_interleaved_queue_desc()
941 struct dma_interleaved_template *xt, in at_xdmac_prep_interleaved() argument
951 if (!xt || !xt->numf || (xt->dir != DMA_MEM_TO_MEM)) in at_xdmac_prep_interleaved()
958 if ((xt->numf > 1) && (xt->frame_size > 1)) in at_xdmac_prep_interleaved()
[all …]
imx-dma.c:957 struct dma_chan *chan, struct dma_interleaved_template *xt, in imxdma_prep_dma_interleaved() argument
966 imxdmac->channel, (unsigned long long)xt->src_start, in imxdma_prep_dma_interleaved()
967 (unsigned long long) xt->dst_start, in imxdma_prep_dma_interleaved()
968 xt->src_sgl ? "true" : "false", xt->dst_sgl ? "true" : "false", in imxdma_prep_dma_interleaved()
969 xt->numf, xt->frame_size); in imxdma_prep_dma_interleaved()
975 if (xt->frame_size != 1 || xt->numf <= 0 || xt->dir != DMA_MEM_TO_MEM) in imxdma_prep_dma_interleaved()
981 desc->src = xt->src_start; in imxdma_prep_dma_interleaved()
982 desc->dest = xt->dst_start; in imxdma_prep_dma_interleaved()
983 desc->x = xt->sgl[0].size; in imxdma_prep_dma_interleaved()
984 desc->y = xt->numf; in imxdma_prep_dma_interleaved()
[all …]
at_hdmac.c:662 struct dma_interleaved_template *xt, in atc_prep_dma_interleaved() argument
675 if (unlikely(!xt || xt->numf != 1 || !xt->frame_size)) in atc_prep_dma_interleaved()
678 first = xt->sgl; in atc_prep_dma_interleaved()
682 __func__, &xt->src_start, &xt->dst_start, xt->numf, in atc_prep_dma_interleaved()
683 xt->frame_size, flags); in atc_prep_dma_interleaved()
691 for (i = 0; i < xt->frame_size; i++) { in atc_prep_dma_interleaved()
692 struct data_chunk *chunk = xt->sgl + i; in atc_prep_dma_interleaved()
694 if ((chunk->size != xt->sgl->size) || in atc_prep_dma_interleaved()
695 (dmaengine_get_dst_icg(xt, chunk) != dmaengine_get_dst_icg(xt, first)) || in atc_prep_dma_interleaved()
696 (dmaengine_get_src_icg(xt, chunk) != dmaengine_get_src_icg(xt, first))) { in atc_prep_dma_interleaved()
[all …]
sirf-dma.c:689 struct dma_chan *chan, struct dma_interleaved_template *xt, in sirfsoc_dma_prep_interleaved() argument
698 if ((xt->dir != DMA_MEM_TO_DEV) && (xt->dir != DMA_DEV_TO_MEM)) { in sirfsoc_dma_prep_interleaved()
726 if ((xt->frame_size == 1) && (xt->numf > 0)) { in sirfsoc_dma_prep_interleaved()
728 sdesc->xlen = xt->sgl[0].size / SIRFSOC_DMA_WORD_LEN; in sirfsoc_dma_prep_interleaved()
729 sdesc->width = (xt->sgl[0].size + xt->sgl[0].icg) / in sirfsoc_dma_prep_interleaved()
731 sdesc->ylen = xt->numf - 1; in sirfsoc_dma_prep_interleaved()
732 if (xt->dir == DMA_MEM_TO_DEV) { in sirfsoc_dma_prep_interleaved()
733 sdesc->addr = xt->src_start; in sirfsoc_dma_prep_interleaved()
736 sdesc->addr = xt->dst_start; in sirfsoc_dma_prep_interleaved()
/drivers/media/platform/xilinx/
xilinx-dma.c:354 dma->xt.dir = DMA_DEV_TO_MEM; in xvip_dma_buffer_queue()
355 dma->xt.src_sgl = false; in xvip_dma_buffer_queue()
356 dma->xt.dst_sgl = true; in xvip_dma_buffer_queue()
357 dma->xt.dst_start = addr; in xvip_dma_buffer_queue()
360 dma->xt.dir = DMA_MEM_TO_DEV; in xvip_dma_buffer_queue()
361 dma->xt.src_sgl = true; in xvip_dma_buffer_queue()
362 dma->xt.dst_sgl = false; in xvip_dma_buffer_queue()
363 dma->xt.src_start = addr; in xvip_dma_buffer_queue()
366 dma->xt.frame_size = 1; in xvip_dma_buffer_queue()
369 dma->xt.numf = dma->format.height; in xvip_dma_buffer_queue()
[all …]
xilinx-dma.h:94 struct dma_interleaved_template xt; member
/drivers/dma/ti/
omap-dma.c:1284 struct dma_chan *chan, struct dma_interleaved_template *xt, in omap_dma_prep_dma_interleaved() argument
1294 if (is_slave_direction(xt->dir)) in omap_dma_prep_dma_interleaved()
1297 if (xt->frame_size != 1 || xt->numf == 0) in omap_dma_prep_dma_interleaved()
1304 data_type = __ffs((xt->src_start | xt->dst_start | xt->sgl[0].size)); in omap_dma_prep_dma_interleaved()
1310 d->dev_addr = xt->src_start; in omap_dma_prep_dma_interleaved()
1312 sg->en = xt->sgl[0].size / BIT(data_type); in omap_dma_prep_dma_interleaved()
1313 sg->fn = xt->numf; in omap_dma_prep_dma_interleaved()
1314 sg->addr = xt->dst_start; in omap_dma_prep_dma_interleaved()
1318 src_icg = dmaengine_get_src_icg(xt, &xt->sgl[0]); in omap_dma_prep_dma_interleaved()
1319 dst_icg = dmaengine_get_dst_icg(xt, &xt->sgl[0]); in omap_dma_prep_dma_interleaved()
[all …]
edma.c:1280 struct dma_interleaved_template *xt, in edma_prep_dma_interleaved() argument
1291 if (is_slave_direction(xt->dir)) in edma_prep_dma_interleaved()
1294 if (xt->frame_size != 1 || xt->numf == 0) in edma_prep_dma_interleaved()
1297 if (xt->sgl[0].size > SZ_64K || xt->numf > SZ_64K) in edma_prep_dma_interleaved()
1300 src_icg = dmaengine_get_src_icg(xt, &xt->sgl[0]); in edma_prep_dma_interleaved()
1302 src_bidx = src_icg + xt->sgl[0].size; in edma_prep_dma_interleaved()
1303 } else if (xt->src_inc) { in edma_prep_dma_interleaved()
1304 src_bidx = xt->sgl[0].size; in edma_prep_dma_interleaved()
1311 dst_icg = dmaengine_get_dst_icg(xt, &xt->sgl[0]); in edma_prep_dma_interleaved()
1313 dst_bidx = dst_icg + xt->sgl[0].size; in edma_prep_dma_interleaved()
[all …]
/drivers/dma/xilinx/
xilinx_dpdma.c:697 struct dma_interleaved_template *xt) in xilinx_dpdma_chan_prep_interleaved_dma() argument
702 size_t hsize = xt->sgl[0].size; in xilinx_dpdma_chan_prep_interleaved_dma()
703 size_t stride = hsize + xt->sgl[0].icg; in xilinx_dpdma_chan_prep_interleaved_dma()
705 if (!IS_ALIGNED(xt->src_start, XILINX_DPDMA_ALIGN_BYTES)) { in xilinx_dpdma_chan_prep_interleaved_dma()
722 &xt->src_start, 1); in xilinx_dpdma_chan_prep_interleaved_dma()
726 hw_desc->xfer_size = hsize * xt->numf; in xilinx_dpdma_chan_prep_interleaved_dma()
1195 struct dma_interleaved_template *xt, in xilinx_dpdma_prep_interleaved_dma() argument
1201 if (xt->dir != DMA_MEM_TO_DEV) in xilinx_dpdma_prep_interleaved_dma()
1204 if (!xt->numf || !xt->sgl[0].size) in xilinx_dpdma_prep_interleaved_dma()
1210 desc = xilinx_dpdma_chan_prep_interleaved_dma(chan, xt); in xilinx_dpdma_prep_interleaved_dma()
xilinx_dma.c:1995 struct dma_interleaved_template *xt, in xilinx_vdma_dma_prep_interleaved() argument
2003 if (!is_slave_direction(xt->dir)) in xilinx_vdma_dma_prep_interleaved()
2006 if (!xt->numf || !xt->sgl[0].size) in xilinx_vdma_dma_prep_interleaved()
2009 if (xt->frame_size != 1) in xilinx_vdma_dma_prep_interleaved()
2028 hw->vsize = xt->numf; in xilinx_vdma_dma_prep_interleaved()
2029 hw->hsize = xt->sgl[0].size; in xilinx_vdma_dma_prep_interleaved()
2030 hw->stride = (xt->sgl[0].icg + xt->sgl[0].size) << in xilinx_vdma_dma_prep_interleaved()
2035 if (xt->dir != DMA_MEM_TO_DEV) { in xilinx_vdma_dma_prep_interleaved()
2037 hw->buf_addr = lower_32_bits(xt->dst_start); in xilinx_vdma_dma_prep_interleaved()
2038 hw->buf_addr_msb = upper_32_bits(xt->dst_start); in xilinx_vdma_dma_prep_interleaved()
[all …]
/drivers/gpu/drm/xlnx/
zynqmp_disp.c:122 struct dma_interleaved_template xt; member
1114 dma->xt.numf = height; in zynqmp_disp_layer_update()
1117 dma->xt.src_start = paddr; in zynqmp_disp_layer_update()
1118 dma->xt.frame_size = 1; in zynqmp_disp_layer_update()
1119 dma->xt.dir = DMA_MEM_TO_DEV; in zynqmp_disp_layer_update()
1120 dma->xt.src_sgl = true; in zynqmp_disp_layer_update()
1121 dma->xt.dst_sgl = false; in zynqmp_disp_layer_update()
1123 desc = dmaengine_prep_interleaved_dma(dma->chan, &dma->xt, in zynqmp_disp_layer_update()