Searched refs:sgt (Results 1 – 25 of 70) sorted by relevance

/drivers/media/v4l2-core/
videobuf2-dma-contig.c
48 static unsigned long vb2_dc_get_contiguous_size(struct sg_table *sgt) in vb2_dc_get_contiguous_size() argument
51 dma_addr_t expected = sg_dma_address(sgt->sgl); in vb2_dc_get_contiguous_size()
55 for_each_sg(sgt->sgl, s, sgt->nents, i) { in vb2_dc_get_contiguous_size()
95 struct sg_table *sgt = buf->dma_sgt; in vb2_dc_prepare() local
98 if (!sgt || buf->db_attach) in vb2_dc_prepare()
101 dma_sync_sg_for_device(buf->dev, sgt->sgl, sgt->orig_nents, in vb2_dc_prepare()
108 struct sg_table *sgt = buf->dma_sgt; in vb2_dc_finish() local
111 if (!sgt || buf->db_attach) in vb2_dc_finish()
114 dma_sync_sg_for_cpu(buf->dev, sgt->sgl, sgt->orig_nents, buf->dma_dir); in vb2_dc_finish()
220 struct sg_table sgt; member
[all …]
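
The vb2_dc_get_contiguous_size() hits above (lines 48-55) walk a DMA-mapped table and measure how many bytes stay contiguous in bus-address space. A minimal sketch reconstructed from the visible lines; only the function name is taken from the source:

#include <linux/dma-mapping.h>
#include <linux/scatterlist.h>

/* Count the contiguous run at the start of a mapped sg_table. */
static unsigned long vb2_dc_get_contiguous_size(struct sg_table *sgt)
{
        struct scatterlist *s;
        dma_addr_t expected = sg_dma_address(sgt->sgl);
        unsigned long size = 0;
        unsigned int i;

        for_each_sg(sgt->sgl, s, sgt->nents, i) {
                /* Stop at the first entry that does not follow on
                 * directly from the previous one. */
                if (sg_dma_address(s) != expected)
                        break;
                expected = sg_dma_address(s) + sg_dma_len(s);
                size += sg_dma_len(s);
        }
        return size;
}

vb2-dma-contig uses the result to verify that an imported buffer is usable by the device as one contiguous block.
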
videobuf2-dma-sg.c
103 struct sg_table *sgt; in vb2_dma_sg_alloc() local
139 sgt = &buf->sg_table; in vb2_dma_sg_alloc()
144 sgt->nents = dma_map_sg_attrs(buf->dev, sgt->sgl, sgt->orig_nents, in vb2_dma_sg_alloc()
146 if (!sgt->nents) in vb2_dma_sg_alloc()
176 struct sg_table *sgt = &buf->sg_table; in vb2_dma_sg_put() local
182 dma_unmap_sg_attrs(buf->dev, sgt->sgl, sgt->orig_nents, in vb2_dma_sg_put()
198 struct sg_table *sgt = buf->dma_sgt; in vb2_dma_sg_prepare() local
204 dma_sync_sg_for_device(buf->dev, sgt->sgl, sgt->orig_nents, in vb2_dma_sg_prepare()
211 struct sg_table *sgt = buf->dma_sgt; in vb2_dma_sg_finish() local
217 dma_sync_sg_for_cpu(buf->dev, sgt->sgl, sgt->orig_nents, buf->dma_dir); in vb2_dma_sg_finish()
[all …]
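
videobuf2-dma-sg stores the return value of dma_map_sg_attrs() back into sgt->nents and treats zero as failure (lines 144-146), while unmap and the cache syncs use orig_nents, the entry count the table was built with. A sketch of that pairing; the attrs argument is truncated in the match above, so DMA_ATTR_SKIP_CPU_SYNC is an assumption here, consistent with the explicit syncs in the prepare/finish callbacks:

#include <linux/dma-mapping.h>
#include <linux/errno.h>
#include <linux/scatterlist.h>

static int my_map_sgt(struct device *dev, struct sg_table *sgt,
                      enum dma_data_direction dir)
{
        /* The IOMMU may coalesce entries: the mapped count can be
         * smaller than orig_nents, and 0 signals failure. */
        sgt->nents = dma_map_sg_attrs(dev, sgt->sgl, sgt->orig_nents,
                                      dir, DMA_ATTR_SKIP_CPU_SYNC);
        return sgt->nents ? 0 : -ENOMEM;
}

static void my_unmap_sgt(struct device *dev, struct sg_table *sgt,
                         enum dma_data_direction dir)
{
        /* Unmap with orig_nents, matching what was passed to the
         * map call, not the coalesced nents. */
        dma_unmap_sg_attrs(dev, sgt->sgl, sgt->orig_nents, dir,
                           DMA_ATTR_SKIP_CPU_SYNC);
}
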
videobuf2-vmalloc.c
206 struct sg_table sgt; member
216 struct sg_table *sgt; in vb2_vmalloc_dmabuf_ops_attach() local
226 sgt = &attach->sgt; in vb2_vmalloc_dmabuf_ops_attach()
227 ret = sg_alloc_table(sgt, num_pages, GFP_KERNEL); in vb2_vmalloc_dmabuf_ops_attach()
232 for_each_sg(sgt->sgl, sg, sgt->nents, i) { in vb2_vmalloc_dmabuf_ops_attach()
236 sg_free_table(sgt); in vb2_vmalloc_dmabuf_ops_attach()
253 struct sg_table *sgt; in vb2_vmalloc_dmabuf_ops_detach() local
258 sgt = &attach->sgt; in vb2_vmalloc_dmabuf_ops_detach()
262 dma_unmap_sg(db_attach->dev, sgt->sgl, sgt->orig_nents, in vb2_vmalloc_dmabuf_ops_detach()
264 sg_free_table(sgt); in vb2_vmalloc_dmabuf_ops_detach()
[all …]
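
The vmalloc attach path (lines 226-236) allocates one table entry per page and fills it in with for_each_sg(), freeing the table again if anything fails. A sketch, assuming the caller supplies the page array:

#include <linux/mm.h>
#include <linux/scatterlist.h>

/* Build a one-entry-per-page table, as the attach callback does. */
static int my_fill_sgt(struct sg_table *sgt, struct page **pages,
                       unsigned int num_pages)
{
        struct scatterlist *sg;
        unsigned int i;
        int ret;

        ret = sg_alloc_table(sgt, num_pages, GFP_KERNEL);
        if (ret)
                return ret;

        for_each_sg(sgt->sgl, sg, sgt->nents, i)
                sg_set_page(sg, pages[i], PAGE_SIZE, 0);

        return 0;
}

On any later error the table must be released with sg_free_table(), as line 236 does.
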
/drivers/gpu/drm/tegra/
gem.c
35 static dma_addr_t tegra_bo_pin(struct host1x_bo *bo, struct sg_table **sgt) in tegra_bo_pin() argument
42 static void tegra_bo_unpin(struct host1x_bo *bo, struct sg_table *sgt) in tegra_bo_unpin() argument
111 err = iommu_map_sg(tegra->domain, bo->paddr, bo->sgt->sgl, in tegra_bo_iommu_map()
112 bo->sgt->nents, prot); in tegra_bo_iommu_map()
175 sg_free_table(bo->sgt); in tegra_bo_free()
176 kfree(bo->sgt); in tegra_bo_free()
193 bo->sgt = drm_prime_pages_to_sg(bo->pages, bo->num_pages); in tegra_bo_get_pages()
194 if (IS_ERR(bo->sgt)) in tegra_bo_get_pages()
204 for_each_sg(bo->sgt->sgl, s, bo->sgt->nents, i) in tegra_bo_get_pages()
207 dma_sync_sg_for_device(drm->dev, bo->sgt->sgl, bo->sgt->nents, in tegra_bo_get_pages()
[all …]
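
tegra wraps its page array in an sg_table with drm_prime_pages_to_sg() and maps the whole list into the IOMMU domain in one call. A sketch of that flow, assuming the two-argument drm_prime_pages_to_sg() and the pre-GFP iommu_map_sg() signatures this tree uses; IOVA selection is left to the caller:

#include <drm/drm_prime.h>
#include <linux/err.h>
#include <linux/iommu.h>
#include <linux/scatterlist.h>
#include <linux/slab.h>

static struct sg_table *my_bo_iommu_map(struct iommu_domain *domain,
                                        unsigned long iova,
                                        struct page **pages,
                                        unsigned int num_pages)
{
        struct sg_table *sgt;
        size_t mapped;

        sgt = drm_prime_pages_to_sg(pages, num_pages);
        if (IS_ERR(sgt))
                return sgt;

        /* iommu_map_sg() returns the number of bytes mapped;
         * 0 means the mapping failed. */
        mapped = iommu_map_sg(domain, iova, sgt->sgl, sgt->nents,
                              IOMMU_READ | IOMMU_WRITE);
        if (!mapped) {
                sg_free_table(sgt);
                kfree(sgt);
                return ERR_PTR(-ENOMEM);
        }

        /* The caller keeps the table (bo->sgt in tegra) and frees
         * it on teardown, as tegra_bo_free() does above. */
        return sgt;
}
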
/drivers/gpu/drm/armada/
armada_gem.c
76 if (dobj->sgt) in armada_gem_free_object()
78 dobj->sgt, DMA_TO_DEVICE); in armada_gem_free_object()
429 struct sg_table *sgt; in armada_gem_prime_map_dma_buf() local
432 sgt = kmalloc(sizeof(*sgt), GFP_KERNEL); in armada_gem_prime_map_dma_buf()
433 if (!sgt) in armada_gem_prime_map_dma_buf()
441 if (sg_alloc_table(sgt, count, GFP_KERNEL)) in armada_gem_prime_map_dma_buf()
446 for_each_sg(sgt->sgl, sg, count, i) { in armada_gem_prime_map_dma_buf()
458 if (dma_map_sg(attach->dev, sgt->sgl, sgt->nents, dir) == 0) { in armada_gem_prime_map_dma_buf()
459 num = sgt->nents; in armada_gem_prime_map_dma_buf()
464 if (sg_alloc_table(sgt, 1, GFP_KERNEL)) in armada_gem_prime_map_dma_buf()
[all …]
/drivers/gpu/drm/udl/
udl_dmabuf.c
26 struct sg_table sgt; member
54 struct sg_table *sgt; in udl_detach_dma_buf() local
62 sgt = &udl_attach->sgt; in udl_detach_dma_buf()
65 dma_unmap_sg(attach->dev, sgt->sgl, sgt->nents, in udl_detach_dma_buf()
68 sg_free_table(sgt); in udl_detach_dma_buf()
80 struct sg_table *sgt = NULL; in udl_map_dma_buf() local
90 return &udl_attach->sgt; in udl_map_dma_buf()
107 sgt = &udl_attach->sgt; in udl_map_dma_buf()
109 ret = sg_alloc_table(sgt, obj->sg->orig_nents, GFP_KERNEL); in udl_map_dma_buf()
118 wr = sgt->sgl; in udl_map_dma_buf()
[all …]
/drivers/gpu/drm/
drm_prime.c
73 struct sg_table *sgt; member
196 struct sg_table *sgt; in drm_gem_map_detach() local
204 sgt = prime_attach->sgt; in drm_gem_map_detach()
205 if (sgt) { in drm_gem_map_detach()
207 dma_unmap_sg(attach->dev, sgt->sgl, sgt->nents, in drm_gem_map_detach()
209 sg_free_table(sgt); in drm_gem_map_detach()
212 kfree(sgt); in drm_gem_map_detach()
247 struct sg_table *sgt; in drm_gem_map_dma_buf() local
254 return prime_attach->sgt; in drm_gem_map_dma_buf()
263 sgt = obj->dev->driver->gem_prime_get_sg_table(obj); in drm_gem_map_dma_buf()
[all …]
drm_gem_cma_helper.c
192 drm_prime_gem_destroy(gem_obj, cma_obj->sgt); in drm_gem_cma_free_object()
402 struct sg_table *sgt; in drm_gem_cma_prime_get_sg_table() local
405 sgt = kzalloc(sizeof(*sgt), GFP_KERNEL); in drm_gem_cma_prime_get_sg_table()
406 if (!sgt) in drm_gem_cma_prime_get_sg_table()
409 ret = dma_get_sgtable(obj->dev->dev, sgt, cma_obj->vaddr, in drm_gem_cma_prime_get_sg_table()
414 return sgt; in drm_gem_cma_prime_get_sg_table()
417 kfree(sgt); in drm_gem_cma_prime_get_sg_table()
442 struct sg_table *sgt) in drm_gem_cma_prime_import_sg_table() argument
446 if (sgt->nents != 1) in drm_gem_cma_prime_import_sg_table()
454 cma_obj->paddr = sg_dma_address(sgt->sgl); in drm_gem_cma_prime_import_sg_table()
[all …]
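
For a CMA object no page walk is needed: dma_get_sgtable() derives the table straight from the coherent allocation, and for physically contiguous memory it produces a single entry, which is why the import path above rejects tables with sgt->nents != 1 and reads the bus address from sg_dma_address(sgt->sgl). A sketch of the export half:

#include <linux/dma-mapping.h>
#include <linux/err.h>
#include <linux/scatterlist.h>
#include <linux/slab.h>

static struct sg_table *my_cma_get_sgt(struct device *dev, void *vaddr,
                                       dma_addr_t paddr, size_t size)
{
        struct sg_table *sgt;
        int ret;

        sgt = kzalloc(sizeof(*sgt), GFP_KERNEL);
        if (!sgt)
                return ERR_PTR(-ENOMEM);

        /* Describe the coherent buffer as an sg_table; contiguous
         * memory yields a single entry. */
        ret = dma_get_sgtable(dev, sgt, vaddr, paddr, size);
        if (ret) {
                kfree(sgt);
                return ERR_PTR(ret);
        }
        return sgt;
}
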
/drivers/gpu/drm/etnaviv/
etnaviv_mmu.c
32 struct sg_table *sgt, unsigned len, int prot) in etnaviv_iommu_map() argument
40 if (!domain || !sgt) in etnaviv_iommu_map()
43 for_each_sg(sgt->sgl, sg, sgt->nents, i) { in etnaviv_iommu_map()
61 for_each_sg(sgt->sgl, sg, i, j) { in etnaviv_iommu_map()
71 struct sg_table *sgt, unsigned len) in etnaviv_iommu_unmap() argument
78 for_each_sg(sgt->sgl, sg, sgt->nents, i) { in etnaviv_iommu_unmap()
102 etnaviv_obj->sgt, etnaviv_obj->base.size); in etnaviv_iommu_remove_mapping()
210 struct sg_table *sgt = etnaviv_obj->sgt; in etnaviv_iommu_map_gem() local
220 sgt->nents == 1 && !(etnaviv_obj->flags & ETNA_BO_FORCE_MMU)) { in etnaviv_iommu_map_gem()
223 iova = sg_dma_address(sgt->sgl) - memory_base; in etnaviv_iommu_map_gem()
[all …]
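
etnaviv (and msm_iommu.c below, which has the same shape) maps entry by entry with iommu_map() inside for_each_sg(), and on failure re-walks only the first i entries to undo the partial mapping. A sketch of that loop; sg_phys() stands in for the driver's choice of source address, and the pre-GFP iommu_map() signature is assumed:

#include <linux/iommu.h>
#include <linux/scatterlist.h>

static int my_iommu_map_sgt(struct iommu_domain *domain, u32 iova,
                            struct sg_table *sgt, int prot)
{
        struct scatterlist *sg;
        u32 da = iova;
        int i, j, ret;

        for_each_sg(sgt->sgl, sg, sgt->nents, i) {
                ret = iommu_map(domain, da, sg_phys(sg), sg->length,
                                prot);
                if (ret)
                        goto fail;
                da += sg->length;
        }
        return 0;

fail:
        /* Unwind: the second for_each_sg() runs over i entries,
         * exactly those mapped before the failure. */
        da = iova;
        for_each_sg(sgt->sgl, sg, i, j) {
                iommu_unmap(domain, da, sg->length);
                da += sg->length;
        }
        return ret;
}
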
etnaviv_gem.c
28 struct sg_table *sgt = etnaviv_obj->sgt; in etnaviv_gem_scatter_map() local
35 dma_map_sg(dev->dev, sgt->sgl, sgt->nents, DMA_BIDIRECTIONAL); in etnaviv_gem_scatter_map()
41 struct sg_table *sgt = etnaviv_obj->sgt; in etnaviv_gem_scatterlist_unmap() local
59 dma_unmap_sg(dev->dev, sgt->sgl, sgt->nents, DMA_BIDIRECTIONAL); in etnaviv_gem_scatterlist_unmap()
80 if (etnaviv_obj->sgt) { in put_pages()
82 sg_free_table(etnaviv_obj->sgt); in put_pages()
83 kfree(etnaviv_obj->sgt); in put_pages()
84 etnaviv_obj->sgt = NULL; in put_pages()
106 if (!etnaviv_obj->sgt) { in etnaviv_gem_get_pages()
109 struct sg_table *sgt; in etnaviv_gem_get_pages() local
[all …]
etnaviv_gem_prime.c
27 BUG_ON(!etnaviv_obj->sgt); /* should have already pinned! */ in etnaviv_gem_prime_get_sg_table()
29 return etnaviv_obj->sgt; in etnaviv_gem_prime_get_sg_table()
77 drm_prime_gem_destroy(&etnaviv_obj->base, etnaviv_obj->sgt); in etnaviv_gem_prime_release()
101 struct dma_buf_attachment *attach, struct sg_table *sgt) in etnaviv_gem_prime_import_sg_table() argument
115 etnaviv_obj->sgt = sgt; in etnaviv_gem_prime_import_sg_table()
122 ret = drm_prime_sg_to_page_addr_arrays(sgt, etnaviv_obj->pages, in etnaviv_gem_prime_import_sg_table()
etnaviv_mmu.h
55 struct sg_table *sgt, unsigned len, int prot);
57 struct sg_table *sgt, unsigned len);
/drivers/gpu/drm/msm/
msm_iommu.c
49 struct sg_table *sgt, unsigned len, int prot) in msm_iommu_map() argument
58 if (!domain || !sgt) in msm_iommu_map()
61 for_each_sg(sgt->sgl, sg, sgt->nents, i) { in msm_iommu_map()
79 for_each_sg(sgt->sgl, sg, i, j) { in msm_iommu_map()
88 struct sg_table *sgt, unsigned len) in msm_iommu_unmap() argument
96 for_each_sg(sgt->sgl, sg, sgt->nents, i) { in msm_iommu_unmap()
msm_gem.c
96 msm_obj->sgt = drm_prime_pages_to_sg(p, npages); in get_pages()
97 if (IS_ERR(msm_obj->sgt)) { in get_pages()
98 void *ptr = ERR_CAST(msm_obj->sgt); in get_pages()
101 msm_obj->sgt = NULL; in get_pages()
109 dma_map_sg(dev->dev, msm_obj->sgt->sgl, in get_pages()
110 msm_obj->sgt->nents, DMA_BIDIRECTIONAL); in get_pages()
125 dma_unmap_sg(obj->dev->dev, msm_obj->sgt->sgl, in put_pages()
126 msm_obj->sgt->nents, DMA_BIDIRECTIONAL); in put_pages()
128 if (msm_obj->sgt) in put_pages()
129 sg_free_table(msm_obj->sgt); in put_pages()
[all …]
msm_mmu.h
26 int (*map)(struct msm_mmu *mmu, uint32_t iova, struct sg_table *sgt,
28 int (*unmap)(struct msm_mmu *mmu, uint32_t iova, struct sg_table *sgt,
/drivers/gpu/drm/vmwgfx/
vmwgfx_buffer.c
222 struct sg_table sgt; member
329 __sg_page_iter_start(&viter->iter, vsgt->sgt->sgl, in vmw_piter_start()
330 vsgt->sgt->orig_nents, p_offset); in vmw_piter_start()
349 dma_unmap_sg(dev, vmw_tt->sgt.sgl, vmw_tt->sgt.nents, in vmw_ttm_unmap_from_dma()
351 vmw_tt->sgt.nents = vmw_tt->sgt.orig_nents; in vmw_ttm_unmap_from_dma()
372 ret = dma_map_sg(dev, vmw_tt->sgt.sgl, vmw_tt->sgt.orig_nents, in vmw_ttm_map_for_dma()
377 vmw_tt->sgt.nents = ret; in vmw_ttm_map_for_dma()
410 vsgt->sgt = &vmw_tt->sgt; in vmw_ttm_map_dma()
425 ret = sg_alloc_table_from_pages(&vmw_tt->sgt, vsgt->pages, in vmw_ttm_map_dma()
433 if (vsgt->num_pages > vmw_tt->sgt.nents) { in vmw_ttm_map_dma()
[all …]
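
vmwgfx builds its table with sg_alloc_table_from_pages(), which coalesces physically contiguous pages into fewer, larger entries, then records the count returned by dma_map_sg() into sgt.nents (line 377 above). A sketch of that sequence:

#include <linux/dma-mapping.h>
#include <linux/mm.h>
#include <linux/scatterlist.h>

static int my_map_page_array(struct device *dev, struct sg_table *sgt,
                             struct page **pages,
                             unsigned int num_pages)
{
        int ret;

        ret = sg_alloc_table_from_pages(sgt, pages, num_pages, 0,
                                        (size_t)num_pages << PAGE_SHIFT,
                                        GFP_KERNEL);
        if (ret)
                return ret;

        ret = dma_map_sg(dev, sgt->sgl, sgt->orig_nents,
                         DMA_BIDIRECTIONAL);
        if (ret == 0) {
                sg_free_table(sgt);
                return -ENOMEM;
        }

        /* Keep the (possibly smaller) mapped count. The DMA API
         * requires unmapping with the count passed to dma_map_sg(),
         * which is why line 351 above restores nents afterwards. */
        sgt->nents = ret;
        return 0;
}
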
/drivers/spi/
spi-ep93xx.c
439 struct sg_table *sgt; in ep93xx_spi_dma_prepare() local
456 sgt = &espi->rx_sgt; in ep93xx_spi_dma_prepare()
463 sgt = &espi->tx_sgt; in ep93xx_spi_dma_prepare()
484 if (nents != sgt->nents) { in ep93xx_spi_dma_prepare()
485 sg_free_table(sgt); in ep93xx_spi_dma_prepare()
487 ret = sg_alloc_table(sgt, nents, GFP_KERNEL); in ep93xx_spi_dma_prepare()
493 for_each_sg(sgt->sgl, sg, sgt->nents, i) { in ep93xx_spi_dma_prepare()
513 nents = dma_map_sg(chan->device->dev, sgt->sgl, sgt->nents, dir); in ep93xx_spi_dma_prepare()
517 txd = dmaengine_prep_slave_sg(chan, sgt->sgl, nents, dir, DMA_CTRL_ACK); in ep93xx_spi_dma_prepare()
519 dma_unmap_sg(chan->device->dev, sgt->sgl, sgt->nents, dir); in ep93xx_spi_dma_prepare()
[all …]
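
The ep93xx driver shows the dmaengine half: the count returned by dma_map_sg(), not the table's own nents, is what gets handed to dmaengine_prep_slave_sg(), and a failed prep must unmap again (lines 513-519). A sketch with the direction conversion made explicit; names are hypothetical:

#include <linux/dma-mapping.h>
#include <linux/dmaengine.h>
#include <linux/err.h>
#include <linux/scatterlist.h>

static struct dma_async_tx_descriptor *
my_prep_sg(struct dma_chan *chan, struct sg_table *sgt,
           enum dma_transfer_direction dir)
{
        enum dma_data_direction map_dir =
                dir == DMA_MEM_TO_DEV ? DMA_TO_DEVICE : DMA_FROM_DEVICE;
        struct dma_async_tx_descriptor *txd;
        int nents;

        nents = dma_map_sg(chan->device->dev, sgt->sgl, sgt->nents,
                           map_dir);
        if (!nents)
                return ERR_PTR(-ENOMEM);

        /* Pass the mapped count, which may be smaller than the
         * number of entries in the table. */
        txd = dmaengine_prep_slave_sg(chan, sgt->sgl, nents, dir,
                                      DMA_CTRL_ACK);
        if (!txd) {
                dma_unmap_sg(chan->device->dev, sgt->sgl, sgt->nents,
                             map_dir);
                return ERR_PTR(-ENOMEM);
        }
        return txd;
}
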
spi-pxa2xx-dma.c
85 struct sg_table *sgt; in pxa2xx_spi_dma_prepare_one() local
108 sgt = &xfer->tx_sg; in pxa2xx_spi_dma_prepare_one()
115 sgt = &xfer->rx_sg; in pxa2xx_spi_dma_prepare_one()
125 return dmaengine_prep_slave_sg(chan, sgt->sgl, sgt->nents, dir, in pxa2xx_spi_dma_prepare_one()
/drivers/gpu/drm/exynos/
exynos_drm_gem.c
29 struct sg_table sgt; in exynos_drm_alloc_buf() local
76 ret = dma_get_sgtable_attrs(to_dma_dev(dev), &sgt, exynos_gem->cookie, in exynos_drm_alloc_buf()
84 if (drm_prime_sg_to_page_addr_arrays(&sgt, exynos_gem->pages, NULL, in exynos_drm_alloc_buf()
91 sg_free_table(&sgt); in exynos_drm_alloc_buf()
99 sg_free_table(&sgt); in exynos_drm_alloc_buf()
163 drm_prime_gem_destroy(obj, exynos_gem->sgt); in exynos_drm_gem_destroy()
557 struct sg_table *sgt) in exynos_drm_gem_prime_import_sg_table() argument
569 exynos_gem->dma_addr = sg_dma_address(sgt->sgl); in exynos_drm_gem_prime_import_sg_table()
578 ret = drm_prime_sg_to_page_addr_arrays(sgt, exynos_gem->pages, NULL, in exynos_drm_gem_prime_import_sg_table()
583 exynos_gem->sgt = sgt; in exynos_drm_gem_prime_import_sg_table()
[all …]
exynos_drm_g2d.c
209 struct sg_table *sgt; member
402 dma_unmap_sg(to_dma_dev(drm_dev), g2d_userptr->sgt->sgl, in g2d_userptr_put_dma_addr()
403 g2d_userptr->sgt->nents, DMA_BIDIRECTIONAL); in g2d_userptr_put_dma_addr()
418 sg_free_table(g2d_userptr->sgt); in g2d_userptr_put_dma_addr()
419 kfree(g2d_userptr->sgt); in g2d_userptr_put_dma_addr()
433 struct sg_table *sgt; in g2d_userptr_get_dma_addr() local
505 sgt = kzalloc(sizeof(*sgt), GFP_KERNEL); in g2d_userptr_get_dma_addr()
506 if (!sgt) { in g2d_userptr_get_dma_addr()
511 ret = sg_alloc_table_from_pages(sgt, in g2d_userptr_get_dma_addr()
519 g2d_userptr->sgt = sgt; in g2d_userptr_get_dma_addr()
[all …]
/drivers/gpu/drm/rockchip/
rockchip_drm_gem.c
253 struct sg_table *sgt; in rockchip_gem_prime_get_sg_table() local
256 sgt = kzalloc(sizeof(*sgt), GFP_KERNEL); in rockchip_gem_prime_get_sg_table()
257 if (!sgt) in rockchip_gem_prime_get_sg_table()
260 ret = dma_get_sgtable_attrs(drm->dev, sgt, rk_obj->kvaddr, in rockchip_gem_prime_get_sg_table()
265 kfree(sgt); in rockchip_gem_prime_get_sg_table()
269 return sgt; in rockchip_gem_prime_get_sg_table()
/drivers/gpu/drm/mediatek/
mtk_drm_gem.c
217 struct sg_table *sgt; in mtk_gem_prime_get_sg_table() local
220 sgt = kzalloc(sizeof(*sgt), GFP_KERNEL); in mtk_gem_prime_get_sg_table()
221 if (!sgt) in mtk_gem_prime_get_sg_table()
224 ret = dma_get_sgtable_attrs(priv->dma_dev, sgt, mtk_gem->cookie, in mtk_gem_prime_get_sg_table()
229 kfree(sgt); in mtk_gem_prime_get_sg_table()
233 return sgt; in mtk_gem_prime_get_sg_table()
/drivers/gpu/drm/omapdrm/
omap_gem_dmabuf.c
192 struct sg_table *sgt; in omap_gem_prime_import() local
213 sgt = dma_buf_map_attachment(attach, DMA_BIDIRECTIONAL); in omap_gem_prime_import()
214 if (IS_ERR(sgt)) { in omap_gem_prime_import()
215 ret = PTR_ERR(sgt); in omap_gem_prime_import()
219 obj = omap_gem_new_dmabuf(dev, dma_buf->size, sgt); in omap_gem_prime_import()
230 dma_buf_unmap_attachment(attach, sgt, DMA_BIDIRECTIONAL); in omap_gem_prime_import()
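
omap's import is the consumer side of the exporters above: attach to the dma-buf, map the attachment to borrow the exporter's sg_table, and unwind in reverse on any error. A sketch of the attach-and-map step; the omap-specific object creation is omitted:

#include <linux/dma-buf.h>
#include <linux/dma-mapping.h>
#include <linux/err.h>

static struct sg_table *my_import_sgt(struct device *dev,
                                      struct dma_buf *buf,
                                      struct dma_buf_attachment **out)
{
        struct dma_buf_attachment *attach;
        struct sg_table *sgt;

        attach = dma_buf_attach(buf, dev);
        if (IS_ERR(attach))
                return ERR_CAST(attach);

        /* The table stays owned by the exporter; give it back with
         * dma_buf_unmap_attachment() before detaching. */
        sgt = dma_buf_map_attachment(attach, DMA_BIDIRECTIONAL);
        if (IS_ERR(sgt)) {
                dma_buf_detach(buf, attach);
                return sgt;
        }

        *out = attach;
        return sgt;
}
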
/drivers/gpu/host1x/
job.c
185 struct sg_table *sgt; in pin_job() local
192 phys_addr = host1x_bo_pin(reloc->target.bo, &sgt); in pin_job()
198 job->unpins[job->num_unpins].sgt = sgt; in pin_job()
204 struct sg_table *sgt; in pin_job() local
211 phys_addr = host1x_bo_pin(g->bo, &sgt); in pin_job()
217 job->unpins[job->num_unpins].sgt = sgt; in pin_job()
577 host1x_bo_unpin(unpin->bo, unpin->sgt); in host1x_job_unpin()
/drivers/iommu/
dma-iommu.c
350 struct sg_table sgt; in iommu_dma_alloc() local
376 if (sg_alloc_table_from_pages(&sgt, pages, count, 0, size, GFP_KERNEL)) in iommu_dma_alloc()
385 sg_miter_start(&miter, sgt.sgl, sgt.orig_nents, SG_MITER_FROM_SG); in iommu_dma_alloc()
392 if (iommu_map_sg(domain, dma_addr, sgt.sgl, sgt.orig_nents, prot) in iommu_dma_alloc()
397 sg_free_table(&sgt); in iommu_dma_alloc()
401 sg_free_table(&sgt); in iommu_dma_alloc()
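
The sg_miter_start() call at line 385 iterates over the allocation page by page through temporary kernel mappings, which also works for highmem pages that have no permanent kernel address. A sketch of the same iterator doing a simple read-side walk; the checksum is only illustrative, not what dma-iommu does with it:

#include <linux/scatterlist.h>

static u32 my_sum_sgt(struct sg_table *sgt)
{
        struct sg_mapping_iter miter;
        u32 sum = 0;

        /* SG_MITER_FROM_SG marks a read from the pages; each
         * sg_miter_next() exposes one chunk as addr/length. */
        sg_miter_start(&miter, sgt->sgl, sgt->orig_nents,
                       SG_MITER_FROM_SG);
        while (sg_miter_next(&miter)) {
                const u8 *p = miter.addr;
                size_t i;

                for (i = 0; i < miter.length; i++)
                        sum += p[i];
        }
        sg_miter_stop(&miter);
        return sum;
}
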
