Home
last modified time | relevance | path

Searched refs:xt (Results 1 – 15 of 15) sorted by relevance

/drivers/media/platform/
m2m-deinterlace.c:142 struct dma_interleaved_template *xt; member
235 ctx->xt->numf = s_height / 2; in deinterlace_issue_dma()
236 ctx->xt->sgl[0].size = s_width; in deinterlace_issue_dma()
237 ctx->xt->sgl[0].icg = s_width; in deinterlace_issue_dma()
238 ctx->xt->src_start = p_in; in deinterlace_issue_dma()
239 ctx->xt->dst_start = p_out; in deinterlace_issue_dma()
242 ctx->xt->numf = s_height / 2; in deinterlace_issue_dma()
243 ctx->xt->sgl[0].size = s_width; in deinterlace_issue_dma()
244 ctx->xt->sgl[0].icg = s_width; in deinterlace_issue_dma()
245 ctx->xt->src_start = p_in + s_size / 2; in deinterlace_issue_dma()
[all …]
/drivers/media/platform/omap/
omap_vout_vrfb.c:142 vout->vrfb_dma_tx.xt = kzalloc(xt_size, GFP_KERNEL); in omap_vout_setup_vrfb_bufs()
143 if (!vout->vrfb_dma_tx.xt) { in omap_vout_setup_vrfb_bufs()
187 kfree(vout->vrfb_dma_tx.xt); in omap_vout_release_vrfb()
236 struct dma_interleaved_template *xt = vout->vrfb_dma_tx.xt; in omap_vout_prepare_vrfb() local
256 xt->src_start = buf_phy_addr; in omap_vout_prepare_vrfb()
257 xt->dst_start = vout->vrfb_context[vb->index].paddr[0]; in omap_vout_prepare_vrfb()
259 xt->numf = vout->pix.height; in omap_vout_prepare_vrfb()
260 xt->frame_size = 1; in omap_vout_prepare_vrfb()
261 xt->sgl[0].size = vout->pix.width * vout->bpp; in omap_vout_prepare_vrfb()
262 xt->sgl[0].icg = dst_icg; in omap_vout_prepare_vrfb()
[all …]
omap_voutdef.h:87 struct dma_interleaved_template *xt; member
/drivers/dma/
dma-axi-dmac.c:611 struct dma_chan *c, struct dma_interleaved_template *xt, in axi_dmac_prep_interleaved() argument
618 if (xt->frame_size != 1) in axi_dmac_prep_interleaved()
621 if (xt->dir != chan->direction) in axi_dmac_prep_interleaved()
625 if (!xt->src_inc || !axi_dmac_check_addr(chan, xt->src_start)) in axi_dmac_prep_interleaved()
630 if (!xt->dst_inc || !axi_dmac_check_addr(chan, xt->dst_start)) in axi_dmac_prep_interleaved()
634 dst_icg = dmaengine_get_dst_icg(xt, &xt->sgl[0]); in axi_dmac_prep_interleaved()
635 src_icg = dmaengine_get_src_icg(xt, &xt->sgl[0]); in axi_dmac_prep_interleaved()
638 if (!axi_dmac_check_len(chan, xt->sgl[0].size) || in axi_dmac_prep_interleaved()
639 xt->numf == 0) in axi_dmac_prep_interleaved()
641 if (xt->sgl[0].size + dst_icg > chan->max_length || in axi_dmac_prep_interleaved()
[all …]
at_xdmac.c:906 struct dma_interleaved_template *xt, in at_xdmac_interleaved_queue_desc() argument
946 if (xt->src_inc) { in at_xdmac_interleaved_queue_desc()
947 if (xt->src_sgl) in at_xdmac_interleaved_queue_desc()
953 if (xt->dst_inc) { in at_xdmac_interleaved_queue_desc()
954 if (xt->dst_sgl) in at_xdmac_interleaved_queue_desc()
974 desc->lld.mbr_sus = dmaengine_get_src_icg(xt, chunk); in at_xdmac_interleaved_queue_desc()
975 desc->lld.mbr_dus = dmaengine_get_dst_icg(xt, chunk); in at_xdmac_interleaved_queue_desc()
997 struct dma_interleaved_template *xt, in at_xdmac_prep_interleaved() argument
1007 if (!xt || !xt->numf || (xt->dir != DMA_MEM_TO_MEM)) in at_xdmac_prep_interleaved()
1014 if ((xt->numf > 1) && (xt->frame_size > 1)) in at_xdmac_prep_interleaved()
[all …]
imx-dma.c:938 struct dma_chan *chan, struct dma_interleaved_template *xt, in imxdma_prep_dma_interleaved() argument
947 imxdmac->channel, (unsigned long long)xt->src_start, in imxdma_prep_dma_interleaved()
948 (unsigned long long) xt->dst_start, in imxdma_prep_dma_interleaved()
949 xt->src_sgl ? "true" : "false", xt->dst_sgl ? "true" : "false", in imxdma_prep_dma_interleaved()
950 xt->numf, xt->frame_size); in imxdma_prep_dma_interleaved()
956 if (xt->frame_size != 1 || xt->numf <= 0 || xt->dir != DMA_MEM_TO_MEM) in imxdma_prep_dma_interleaved()
962 desc->src = xt->src_start; in imxdma_prep_dma_interleaved()
963 desc->dest = xt->dst_start; in imxdma_prep_dma_interleaved()
964 desc->x = xt->sgl[0].size; in imxdma_prep_dma_interleaved()
965 desc->y = xt->numf; in imxdma_prep_dma_interleaved()
[all …]
at_hdmac.c:681 struct dma_interleaved_template *xt, in atc_prep_dma_interleaved() argument
694 if (unlikely(!xt || xt->numf != 1 || !xt->frame_size)) in atc_prep_dma_interleaved()
697 first = xt->sgl; in atc_prep_dma_interleaved()
701 __func__, &xt->src_start, &xt->dst_start, xt->numf, in atc_prep_dma_interleaved()
702 xt->frame_size, flags); in atc_prep_dma_interleaved()
710 for (i = 0; i < xt->frame_size; i++) { in atc_prep_dma_interleaved()
711 struct data_chunk *chunk = xt->sgl + i; in atc_prep_dma_interleaved()
713 if ((chunk->size != xt->sgl->size) || in atc_prep_dma_interleaved()
714 (dmaengine_get_dst_icg(xt, chunk) != dmaengine_get_dst_icg(xt, first)) || in atc_prep_dma_interleaved()
715 (dmaengine_get_src_icg(xt, chunk) != dmaengine_get_src_icg(xt, first))) { in atc_prep_dma_interleaved()
[all …]
/drivers/media/platform/xilinx/
xilinx-dma.c:353 dma->xt.dir = DMA_DEV_TO_MEM; in xvip_dma_buffer_queue()
354 dma->xt.src_sgl = false; in xvip_dma_buffer_queue()
355 dma->xt.dst_sgl = true; in xvip_dma_buffer_queue()
356 dma->xt.dst_start = addr; in xvip_dma_buffer_queue()
359 dma->xt.dir = DMA_MEM_TO_DEV; in xvip_dma_buffer_queue()
360 dma->xt.src_sgl = true; in xvip_dma_buffer_queue()
361 dma->xt.dst_sgl = false; in xvip_dma_buffer_queue()
362 dma->xt.src_start = addr; in xvip_dma_buffer_queue()
365 dma->xt.frame_size = 1; in xvip_dma_buffer_queue()
368 dma->xt.numf = dma->format.height; in xvip_dma_buffer_queue()
[all …]
xilinx-dma.h:94 struct dma_interleaved_template xt; member
/drivers/dma/ti/
omap-dma.c:1284 struct dma_chan *chan, struct dma_interleaved_template *xt, in omap_dma_prep_dma_interleaved() argument
1294 if (is_slave_direction(xt->dir)) in omap_dma_prep_dma_interleaved()
1297 if (xt->frame_size != 1 || xt->numf == 0) in omap_dma_prep_dma_interleaved()
1304 data_type = __ffs((xt->src_start | xt->dst_start | xt->sgl[0].size)); in omap_dma_prep_dma_interleaved()
1310 d->dev_addr = xt->src_start; in omap_dma_prep_dma_interleaved()
1312 sg->en = xt->sgl[0].size / BIT(data_type); in omap_dma_prep_dma_interleaved()
1313 sg->fn = xt->numf; in omap_dma_prep_dma_interleaved()
1314 sg->addr = xt->dst_start; in omap_dma_prep_dma_interleaved()
1318 src_icg = dmaengine_get_src_icg(xt, &xt->sgl[0]); in omap_dma_prep_dma_interleaved()
1319 dst_icg = dmaengine_get_dst_icg(xt, &xt->sgl[0]); in omap_dma_prep_dma_interleaved()
[all …]
edma.c:1280 struct dma_interleaved_template *xt, in edma_prep_dma_interleaved() argument
1291 if (is_slave_direction(xt->dir)) in edma_prep_dma_interleaved()
1294 if (xt->frame_size != 1 || xt->numf == 0) in edma_prep_dma_interleaved()
1297 if (xt->sgl[0].size > SZ_64K || xt->numf > SZ_64K) in edma_prep_dma_interleaved()
1300 src_icg = dmaengine_get_src_icg(xt, &xt->sgl[0]); in edma_prep_dma_interleaved()
1302 src_bidx = src_icg + xt->sgl[0].size; in edma_prep_dma_interleaved()
1303 } else if (xt->src_inc) { in edma_prep_dma_interleaved()
1304 src_bidx = xt->sgl[0].size; in edma_prep_dma_interleaved()
1311 dst_icg = dmaengine_get_dst_icg(xt, &xt->sgl[0]); in edma_prep_dma_interleaved()
1313 dst_bidx = dst_icg + xt->sgl[0].size; in edma_prep_dma_interleaved()
[all …]
/drivers/dma/xilinx/
xilinx_dpdma.c:684 struct dma_interleaved_template *xt) in xilinx_dpdma_chan_prep_interleaved_dma() argument
689 size_t hsize = xt->sgl[0].size; in xilinx_dpdma_chan_prep_interleaved_dma()
690 size_t stride = hsize + xt->sgl[0].icg; in xilinx_dpdma_chan_prep_interleaved_dma()
692 if (!IS_ALIGNED(xt->src_start, XILINX_DPDMA_ALIGN_BYTES)) { in xilinx_dpdma_chan_prep_interleaved_dma()
710 &xt->src_start, 1); in xilinx_dpdma_chan_prep_interleaved_dma()
714 hw_desc->xfer_size = hsize * xt->numf; in xilinx_dpdma_chan_prep_interleaved_dma()
1193 struct dma_interleaved_template *xt, in xilinx_dpdma_prep_interleaved_dma() argument
1199 if (xt->dir != DMA_MEM_TO_DEV) in xilinx_dpdma_prep_interleaved_dma()
1202 if (!xt->numf || !xt->sgl[0].size) in xilinx_dpdma_prep_interleaved_dma()
1208 desc = xilinx_dpdma_chan_prep_interleaved_dma(chan, xt); in xilinx_dpdma_prep_interleaved_dma()
xilinx_dma.c:2005 struct dma_interleaved_template *xt, in xilinx_vdma_dma_prep_interleaved() argument
2013 if (!is_slave_direction(xt->dir)) in xilinx_vdma_dma_prep_interleaved()
2016 if (!xt->numf || !xt->sgl[0].size) in xilinx_vdma_dma_prep_interleaved()
2019 if (xt->frame_size != 1) in xilinx_vdma_dma_prep_interleaved()
2038 hw->vsize = xt->numf; in xilinx_vdma_dma_prep_interleaved()
2039 hw->hsize = xt->sgl[0].size; in xilinx_vdma_dma_prep_interleaved()
2040 hw->stride = (xt->sgl[0].icg + xt->sgl[0].size) << in xilinx_vdma_dma_prep_interleaved()
2045 if (xt->dir != DMA_MEM_TO_DEV) { in xilinx_vdma_dma_prep_interleaved()
2047 hw->buf_addr = lower_32_bits(xt->dst_start); in xilinx_vdma_dma_prep_interleaved()
2048 hw->buf_addr_msb = upper_32_bits(xt->dst_start); in xilinx_vdma_dma_prep_interleaved()
[all …]
/drivers/gpu/drm/xlnx/
zynqmp_disp.c:121 struct dma_interleaved_template xt; member
1100 dma->xt.numf = height; in zynqmp_disp_layer_update()
1103 dma->xt.src_start = paddr; in zynqmp_disp_layer_update()
1104 dma->xt.frame_size = 1; in zynqmp_disp_layer_update()
1105 dma->xt.dir = DMA_MEM_TO_DEV; in zynqmp_disp_layer_update()
1106 dma->xt.src_sgl = true; in zynqmp_disp_layer_update()
1107 dma->xt.dst_sgl = false; in zynqmp_disp_layer_update()
1109 desc = dmaengine_prep_interleaved_dma(dma->chan, &dma->xt, in zynqmp_disp_layer_update()
/drivers/iommu/amd/
init.c:2081 union intcapxt xt; in intcapxt_unmask_irq() local
2083 xt.capxt = 0ULL; in intcapxt_unmask_irq()
2084 xt.dest_mode_logical = apic->dest_mode_logical; in intcapxt_unmask_irq()
2085 xt.vector = cfg->vector; in intcapxt_unmask_irq()
2086 xt.destid_0_23 = cfg->dest_apicid & GENMASK(23, 0); in intcapxt_unmask_irq()
2087 xt.destid_24_31 = cfg->dest_apicid >> 24; in intcapxt_unmask_irq()
2093 writeq(xt.capxt, iommu->mmio_base + MMIO_INTCAPXT_EVT_OFFSET); in intcapxt_unmask_irq()
2094 writeq(xt.capxt, iommu->mmio_base + MMIO_INTCAPXT_PPR_OFFSET); in intcapxt_unmask_irq()
2095 writeq(xt.capxt, iommu->mmio_base + MMIO_INTCAPXT_GALOG_OFFSET); in intcapxt_unmask_irq()