/kernel/linux/linux-5.10/drivers/media/common/videobuf2/ |
D | videobuf2-dma-contig.c |
    49   static unsigned long vb2_dc_get_contiguous_size(struct sg_table *sgt)   in vb2_dc_get_contiguous_size() argument
    52   dma_addr_t expected = sg_dma_address(sgt->sgl);   in vb2_dc_get_contiguous_size()
    56   for_each_sgtable_dma_sg(sgt, s, i) {   in vb2_dc_get_contiguous_size()
    96   struct sg_table *sgt = buf->dma_sgt;   in vb2_dc_prepare() local
    98   if (!sgt)   in vb2_dc_prepare()
    101  dma_sync_sgtable_for_device(buf->dev, sgt, buf->dma_dir);   in vb2_dc_prepare()
    107  struct sg_table *sgt = buf->dma_sgt;   in vb2_dc_finish() local
    109  if (!sgt)   in vb2_dc_finish()
    112  dma_sync_sgtable_for_cpu(buf->dev, sgt, buf->dma_dir);   in vb2_dc_finish()
    211  struct sg_table sgt;   member
    [all …]
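
The fragments at lines 49-56 outline a helper that measures how much of a DMA-mapped table is contiguous. A minimal sketch of that pattern, with the locals not shown in the listing filled in as assumptions:

    static unsigned long get_contiguous_size(struct sg_table *sgt)
    {
        dma_addr_t expected = sg_dma_address(sgt->sgl);
        struct scatterlist *s;
        unsigned long size = 0;
        unsigned int i;

        /* Walk the DMA segments (nents) and stop at the first gap. */
        for_each_sgtable_dma_sg(sgt, s, i) {
            if (sg_dma_address(s) != expected)
                break;
            expected += sg_dma_len(s);
            size += sg_dma_len(s);
        }
        return size;
    }

The caller compares the result against the buffer size to decide whether the mapping can be treated as a single contiguous DMA region.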
|
D | videobuf2-dma-sg.c |
    104  struct sg_table *sgt;   in vb2_dma_sg_alloc() local
    145  sgt = &buf->sg_table;   in vb2_dma_sg_alloc()
    150  if (dma_map_sgtable(buf->dev, sgt, buf->dma_dir,   in vb2_dma_sg_alloc()
    181  struct sg_table *sgt = &buf->sg_table;   in vb2_dma_sg_put() local
    187  dma_unmap_sgtable(buf->dev, sgt, buf->dma_dir,   in vb2_dma_sg_put()
    203  struct sg_table *sgt = buf->dma_sgt;   in vb2_dma_sg_prepare() local
    205  dma_sync_sgtable_for_device(buf->dev, sgt, buf->dma_dir);   in vb2_dma_sg_prepare()
    211  struct sg_table *sgt = buf->dma_sgt;   in vb2_dma_sg_finish() local
    213  dma_sync_sgtable_for_cpu(buf->dev, sgt, buf->dma_dir);   in vb2_dma_sg_finish()
    221  struct sg_table *sgt;   in vb2_dma_sg_get_userptr() local
    [all …]
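
The listing shows the table being mapped once in vb2_dma_sg_alloc(), unmapped in vb2_dma_sg_put(), and only cache-synced in the prepare()/finish() hooks. A condensed sketch of that lifecycle, assuming a hypothetical buffer struct that carries the same dev/dma_dir/sg_table fields the fragments use:

    #include <linux/dma-mapping.h>
    #include <linux/scatterlist.h>

    struct my_sg_buf {                  /* hypothetical, mirrors the fields above */
        struct device *dev;
        enum dma_data_direction dma_dir;
        struct sg_table sg_table;
    };

    static int my_buf_map(struct my_sg_buf *buf)
    {
        /* Map once; DMA_ATTR_SKIP_CPU_SYNC defers cache maintenance to the
         * explicit syncs done around each hardware operation. */
        return dma_map_sgtable(buf->dev, &buf->sg_table, buf->dma_dir,
                               DMA_ATTR_SKIP_CPU_SYNC);
    }

    static void my_buf_prepare(struct my_sg_buf *buf)
    {
        /* Hand the buffer to the device: clean CPU caches. */
        dma_sync_sgtable_for_device(buf->dev, &buf->sg_table, buf->dma_dir);
    }

    static void my_buf_finish(struct my_sg_buf *buf)
    {
        /* Take the buffer back for the CPU: invalidate stale cache lines. */
        dma_sync_sgtable_for_cpu(buf->dev, &buf->sg_table, buf->dma_dir);
    }

    static void my_buf_unmap(struct my_sg_buf *buf)
    {
        dma_unmap_sgtable(buf->dev, &buf->sg_table, buf->dma_dir,
                          DMA_ATTR_SKIP_CPU_SYNC);
    }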
|
D | videobuf2-vmalloc.c |
    206  struct sg_table sgt;   member
    216  struct sg_table *sgt;   in vb2_vmalloc_dmabuf_ops_attach() local
    226  sgt = &attach->sgt;   in vb2_vmalloc_dmabuf_ops_attach()
    227  ret = sg_alloc_table(sgt, num_pages, GFP_KERNEL);   in vb2_vmalloc_dmabuf_ops_attach()
    232  for_each_sgtable_sg(sgt, sg, i) {   in vb2_vmalloc_dmabuf_ops_attach()
    236  sg_free_table(sgt);   in vb2_vmalloc_dmabuf_ops_attach()
    253  struct sg_table *sgt;   in vb2_vmalloc_dmabuf_ops_detach() local
    258  sgt = &attach->sgt;   in vb2_vmalloc_dmabuf_ops_detach()
    262  dma_unmap_sgtable(db_attach->dev, sgt, attach->dma_dir, 0);   in vb2_vmalloc_dmabuf_ops_detach()
    263  sg_free_table(sgt);   in vb2_vmalloc_dmabuf_ops_detach()
    [all …]
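
Lines 226-236 suggest the dma-buf attach callback builds a per-attachment sg_table describing the exporter's vmalloc'ed pages, one PAGE_SIZE entry per page. A sketch of that table-building step (the helper name and num_pages parameter are assumptions):

    #include <linux/scatterlist.h>
    #include <linux/vmalloc.h>

    static int fill_sgt_from_vmalloc(struct sg_table *sgt, void *vaddr,
                                     unsigned int num_pages)
    {
        struct scatterlist *sg;
        unsigned int i;
        int ret;

        ret = sg_alloc_table(sgt, num_pages, GFP_KERNEL);
        if (ret)
            return ret;

        /* One table entry per page backing the vmalloc area. */
        for_each_sgtable_sg(sgt, sg, i) {
            struct page *page = vmalloc_to_page(vaddr);

            if (!page) {
                sg_free_table(sgt);
                return -ENOMEM;
            }
            sg_set_page(sg, page, PAGE_SIZE, 0);
            vaddr += PAGE_SIZE;
        }
        return 0;
    }

The matching detach path (lines 258-263) unmaps the table if it was mapped and releases it with sg_free_table().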
|
/kernel/linux/linux-5.10/drivers/gpu/drm/tegra/ |
D | gem.c |
    31   static int sg_alloc_table_from_sg(struct sg_table *sgt, struct scatterlist *sg,   in sg_alloc_table_from_sg() argument
    38   err = sg_alloc_table(sgt, nents, gfp_mask);   in sg_alloc_table_from_sg()
    42   dst = sgt->sgl;   in sg_alloc_table_from_sg()
    57   struct sg_table *sgt;   in tegra_bo_pin() local
    81   sgt = kzalloc(sizeof(*sgt), GFP_KERNEL);   in tegra_bo_pin()
    82   if (!sgt)   in tegra_bo_pin()
    90   err = sg_alloc_table_from_pages(sgt, obj->pages, obj->num_pages,   in tegra_bo_pin()
    94   } else if (obj->sgt) {   in tegra_bo_pin()
    101  err = sg_alloc_table_from_sg(sgt, obj->sgt->sgl,   in tegra_bo_pin()
    102  obj->sgt->orig_nents, GFP_KERNEL);   in tegra_bo_pin()
    [all …]
|
D | plane.c |
    73   copy->sgt[i] = NULL;   in tegra_plane_atomic_duplicate_state()
    120  struct sg_table *sgt;   in tegra_dc_pin() local
    127  sgt = host1x_bo_pin(dc->dev, &bo->base, phys);   in tegra_dc_pin()
    128  if (IS_ERR(sgt)) {   in tegra_dc_pin()
    129  err = PTR_ERR(sgt);   in tegra_dc_pin()
    133  if (sgt) {   in tegra_dc_pin()
    134  err = dma_map_sgtable(dc->dev, sgt, DMA_TO_DEVICE, 0);   in tegra_dc_pin()
    144  if (sgt->nents > 1) {   in tegra_dc_pin()
    149  state->iova[i] = sg_dma_address(sgt->sgl);   in tegra_dc_pin()
    150  state->sgt[i] = sgt;   in tegra_dc_pin()
    [all …]
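
Line 144 hints at the usual contiguity check: after dma_map_sgtable() succeeds, sgt->nents holds the number of DMA segments, and a scanout engine without an IOMMU can only consume a single one. A sketch of that check (function name and error code are assumptions):

    static int pin_contiguous(struct device *dev, struct sg_table *sgt,
                              dma_addr_t *iova)
    {
        int err;

        err = dma_map_sgtable(dev, sgt, DMA_TO_DEVICE, 0);
        if (err)
            return err;

        /* More than one DMA segment means the buffer is not contiguous
         * from the device's point of view. */
        if (sgt->nents > 1) {
            dma_unmap_sgtable(dev, sgt, DMA_TO_DEVICE, 0);
            return -EINVAL;
        }

        *iova = sg_dma_address(sgt->sgl);
        return 0;
    }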
|
/kernel/linux/linux-5.10/drivers/gpu/drm/i915/ |
D | i915_mm.c |
    36   struct sgt_iter sgt;   member
    56   return (r->sgt.dma + r->sgt.curr + r->iobase) >> PAGE_SHIFT;   in sgt_pfn()
    58   return r->sgt.pfn + (r->sgt.curr >> PAGE_SHIFT);   in sgt_pfn()
    65   if (GEM_WARN_ON(!r->sgt.pfn))   in remap_sg()
    73   r->sgt.curr += PAGE_SIZE;   in remap_sg()
    74   if (r->sgt.curr >= r->sgt.max)   in remap_sg()
    75   r->sgt = __sgt_iter(__sg_next(r->sgt.sgp), use_dma(r->iobase));   in remap_sg()
    132  .sgt = __sgt_iter(sgl, use_dma(iobase)),   in remap_io_sg()
|
/kernel/linux/linux-5.10/drivers/gpu/drm/armada/ |
D | armada_gem.c |
    66   if (dobj->sgt)   in armada_gem_free_object()
    68   dobj->sgt, DMA_TO_DEVICE);   in armada_gem_free_object()
    381  struct sg_table *sgt;   in armada_gem_prime_map_dma_buf() local
    384  sgt = kmalloc(sizeof(*sgt), GFP_KERNEL);   in armada_gem_prime_map_dma_buf()
    385  if (!sgt)   in armada_gem_prime_map_dma_buf()
    393  if (sg_alloc_table(sgt, count, GFP_KERNEL))   in armada_gem_prime_map_dma_buf()
    398  for_each_sgtable_sg(sgt, sg, i) {   in armada_gem_prime_map_dma_buf()
    408  if (dma_map_sgtable(attach->dev, sgt, dir, 0))   in armada_gem_prime_map_dma_buf()
    412  if (sg_alloc_table(sgt, 1, GFP_KERNEL))   in armada_gem_prime_map_dma_buf()
    415  sg_set_page(sgt->sgl, dobj->page, dobj->obj.size, 0);   in armada_gem_prime_map_dma_buf()
    [all …]
|
/kernel/linux/linux-5.10/drivers/hwtracing/intel_th/ |
D | msu-sink.c |
    51   static int msu_sink_alloc_window(void *data, struct sg_table **sgt, size_t size)   in msu_sink_alloc_window() argument
    64   ret = sg_alloc_table(*sgt, nents, GFP_KERNEL);   in msu_sink_alloc_window()
    68   priv->sgts[priv->nr_sgts++] = *sgt;   in msu_sink_alloc_window()
    70   for_each_sg((*sgt)->sgl, sg_ptr, nents, i) {   in msu_sink_alloc_window()
    84   static void msu_sink_free_window(void *data, struct sg_table *sgt)   in msu_sink_free_window() argument
    90   for_each_sg(sgt->sgl, sg_ptr, sgt->nents, i) {   in msu_sink_free_window()
    95   sg_free_table(sgt);   in msu_sink_free_window()
    99   static int msu_sink_ready(void *data, struct sg_table *sgt, size_t bytes)   in msu_sink_ready() argument
    103  intel_th_msc_window_unlock(priv->dev, sgt);   in msu_sink_ready()
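
Lines 51-70 suggest each sink window is an sg_table whose entries are backed by individually allocated coherent pages. A hedged sketch of that allocation pattern; the device pointer is an assumption and freeing a partially built window is left to the caller:

    #include <linux/dma-mapping.h>
    #include <linux/scatterlist.h>

    static int alloc_sg_window(struct device *dev, struct sg_table *sgt,
                               size_t size)
    {
        unsigned int nents = DIV_ROUND_UP(size, PAGE_SIZE);
        struct scatterlist *sg;
        int ret, i;

        ret = sg_alloc_table(sgt, nents, GFP_KERNEL);
        if (ret)
            return ret;

        /* Back every entry with its own coherent DMA page. */
        for_each_sg(sgt->sgl, sg, nents, i) {
            void *block = dma_alloc_coherent(dev, PAGE_SIZE,
                                             &sg_dma_address(sg),
                                             GFP_KERNEL);
            if (!block)
                return -ENOMEM;
            sg_set_buf(sg, block, PAGE_SIZE);
        }
        return nents;
    }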
|
/kernel/linux/linux-5.10/drivers/xen/ |
D | gntdev-dmabuf.c |
    51   struct sg_table *sgt;   member
    70   struct sg_table *sgt;   member
    203  struct sg_table *sgt;   in dmabuf_pages_to_sgt() local
    206  sgt = kmalloc(sizeof(*sgt), GFP_KERNEL);   in dmabuf_pages_to_sgt()
    207  if (!sgt) {   in dmabuf_pages_to_sgt()
    212  ret = sg_alloc_table_from_pages(sgt, pages, nr_pages, 0,   in dmabuf_pages_to_sgt()
    218  return sgt;   in dmabuf_pages_to_sgt()
    221  kfree(sgt);   in dmabuf_pages_to_sgt()
    246  struct sg_table *sgt = gntdev_dmabuf_attach->sgt;   in dmabuf_exp_ops_detach() local
    248  if (sgt) {   in dmabuf_exp_ops_detach()
    [all …]
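
Lines 203-221 show the common conversion from a pinned page array to a heap-allocated sg_table. A sketch of that conversion (the helper name is hypothetical):

    #include <linux/err.h>
    #include <linux/scatterlist.h>
    #include <linux/slab.h>

    static struct sg_table *pages_to_sgt(struct page **pages,
                                         unsigned int nr_pages)
    {
        struct sg_table *sgt;
        int ret;

        sgt = kmalloc(sizeof(*sgt), GFP_KERNEL);
        if (!sgt)
            return ERR_PTR(-ENOMEM);

        /* Physically adjacent pages are merged into larger entries. */
        ret = sg_alloc_table_from_pages(sgt, pages, nr_pages, 0,
                                        nr_pages << PAGE_SHIFT, GFP_KERNEL);
        if (ret) {
            kfree(sgt);
            return ERR_PTR(ret);
        }
        return sgt;
    }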
|
/kernel/linux/linux-5.10/drivers/staging/media/tegra-vde/ |
D | dmabuf-cache.c |
    24   struct sg_table *sgt;   member
    38   dma_buf_unmap_attachment(entry->a, entry->sgt, entry->dma_dir);   in tegra_vde_release_entry()
    69   struct sg_table *sgt;   in tegra_vde_dmabuf_cache_map() local
    90   *addrp = sg_dma_address(entry->sgt->sgl);   in tegra_vde_dmabuf_cache_map()
    102  sgt = dma_buf_map_attachment(attachment, dma_dir);   in tegra_vde_dmabuf_cache_map()
    103  if (IS_ERR(sgt)) {   in tegra_vde_dmabuf_cache_map()
    105  err = PTR_ERR(sgt);   in tegra_vde_dmabuf_cache_map()
    109  if (!vde->domain && sgt->nents > 1) {   in tegra_vde_dmabuf_cache_map()
    122  err = tegra_vde_iommu_map(vde, sgt, &iova, dmabuf->size);   in tegra_vde_dmabuf_cache_map()
    128  *addrp = sg_dma_address(sgt->sgl);   in tegra_vde_dmabuf_cache_map()
    [all …]
|
/kernel/linux/linux-5.10/net/ceph/ |
D | crypto.c |
    160  static int setup_sgtable(struct sg_table *sgt, struct scatterlist *prealloc_sg,   in setup_sgtable() argument
    172  memset(sgt, 0, sizeof(*sgt));   in setup_sgtable()
    182  ret = sg_alloc_table(sgt, chunk_cnt, GFP_NOFS);   in setup_sgtable()
    188  sgt->sgl = prealloc_sg;   in setup_sgtable()
    189  sgt->nents = sgt->orig_nents = 1;   in setup_sgtable()
    192  for_each_sg(sgt->sgl, sg, sgt->orig_nents, i) {   in setup_sgtable()
    212  static void teardown_sgtable(struct sg_table *sgt)   in teardown_sgtable() argument
    214  if (sgt->orig_nents > 1)   in teardown_sgtable()
    215  sg_free_table(sgt);   in teardown_sgtable()
    222  struct sg_table sgt;   in ceph_aes_crypt() local
    [all …]
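
Lines 182-192 and 212-215 point at a small-buffer fast path: when the data fits in a single chunk, a caller-preallocated scatterlist is used instead of calling sg_alloc_table(), so teardown only frees tables whose orig_nents is greater than one. A sketch of that fast path (filling the entries of the multi-chunk case is elided; buf must be a linearly mapped, non-vmalloc address):

    #include <linux/scatterlist.h>
    #include <linux/string.h>

    static int setup_sgt(struct sg_table *sgt, struct scatterlist *prealloc_sg,
                         void *buf, unsigned int len, unsigned int chunk_cnt)
    {
        memset(sgt, 0, sizeof(*sgt));

        if (chunk_cnt > 1) {
            int ret = sg_alloc_table(sgt, chunk_cnt, GFP_NOFS);

            if (ret)
                return ret;
            /* ... entries would be filled with for_each_sg() here ... */
        } else {
            sg_init_one(prealloc_sg, buf, len);
            sgt->sgl = prealloc_sg;
            sgt->nents = sgt->orig_nents = 1;
        }
        return 0;
    }

    static void teardown_sgt(struct sg_table *sgt)
    {
        /* Only the multi-chunk path allocated anything. */
        if (sgt->orig_nents > 1)
            sg_free_table(sgt);
    }

This avoids a memory allocation on the common small-request path.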
|
/kernel/linux/linux-5.10/drivers/gpu/drm/ |
D | drm_gem_shmem_helper.c |
    126  drm_prime_gem_destroy(obj, shmem->sgt);   in drm_gem_shmem_free_object()
    128  if (shmem->sgt) {   in drm_gem_shmem_free_object()
    129  dma_unmap_sgtable(obj->dev->dev, shmem->sgt,   in drm_gem_shmem_free_object()
    131  sg_free_table(shmem->sgt);   in drm_gem_shmem_free_object()
    132  kfree(shmem->sgt);   in drm_gem_shmem_free_object()
    428  dma_unmap_sgtable(obj->dev->dev, shmem->sgt, DMA_BIDIRECTIONAL, 0);   in drm_gem_shmem_purge_locked()
    429  sg_free_table(shmem->sgt);   in drm_gem_shmem_purge_locked()
    430  kfree(shmem->sgt);   in drm_gem_shmem_purge_locked()
    431  shmem->sgt = NULL;   in drm_gem_shmem_purge_locked()
    714  struct sg_table *sgt;   in drm_gem_shmem_get_pages_sgt() local
    [all …]
|
D | drm_prime.c |
    623  struct sg_table *sgt;   in drm_gem_map_dma_buf() local
    630  sgt = obj->funcs->get_sg_table(obj);   in drm_gem_map_dma_buf()
    632  sgt = obj->dev->driver->gem_prime_get_sg_table(obj);   in drm_gem_map_dma_buf()
    634  ret = dma_map_sgtable(attach->dev, sgt, dir,   in drm_gem_map_dma_buf()
    637  sg_free_table(sgt);   in drm_gem_map_dma_buf()
    638  kfree(sgt);   in drm_gem_map_dma_buf()
    639  sgt = ERR_PTR(ret);   in drm_gem_map_dma_buf()
    642  return sgt;   in drm_gem_map_dma_buf()
    655  struct sg_table *sgt,   in drm_gem_unmap_dma_buf() argument
    658  if (!sgt)   in drm_gem_unmap_dma_buf()
    [all …]
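
Lines 623-642 outline the exporter-side dma-buf map callback: ask the GEM driver for the CPU-side table, then map it for the importer's device, undoing everything on failure. A sketch of that flow, assuming the obj->funcs->get_sg_table() path shown at line 630:

    static struct sg_table *gem_map_dma_buf(struct dma_buf_attachment *attach,
                                            enum dma_data_direction dir)
    {
        struct drm_gem_object *obj = attach->dmabuf->priv;
        struct sg_table *sgt;
        int ret;

        sgt = obj->funcs->get_sg_table(obj);    /* driver builds the table */
        if (IS_ERR(sgt))
            return sgt;

        ret = dma_map_sgtable(attach->dev, sgt, dir, DMA_ATTR_SKIP_CPU_SYNC);
        if (ret) {
            sg_free_table(sgt);
            kfree(sgt);
            return ERR_PTR(ret);
        }
        return sgt;
    }

Mapping is done against attach->dev (the importer), not the exporter's device, because the table describes memory the importer's hardware will access.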
|
D | drm_gem_cma_helper.c |
    185  drm_prime_gem_destroy(gem_obj, cma_obj->sgt);   in drm_gem_cma_free_object()
    430  struct sg_table *sgt;   in drm_gem_cma_prime_get_sg_table() local
    433  sgt = kzalloc(sizeof(*sgt), GFP_KERNEL);   in drm_gem_cma_prime_get_sg_table()
    434  if (!sgt)   in drm_gem_cma_prime_get_sg_table()
    437  ret = dma_get_sgtable(obj->dev->dev, sgt, cma_obj->vaddr,   in drm_gem_cma_prime_get_sg_table()
    442  return sgt;   in drm_gem_cma_prime_get_sg_table()
    445  kfree(sgt);   in drm_gem_cma_prime_get_sg_table()
    470  struct sg_table *sgt)   in drm_gem_cma_prime_import_sg_table() argument
    475  if (drm_prime_get_contiguous_size(sgt) < attach->dmabuf->size)   in drm_gem_cma_prime_import_sg_table()
    483  cma_obj->paddr = sg_dma_address(sgt->sgl);   in drm_gem_cma_prime_import_sg_table()
    [all …]
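
Lines 430-445 show the CMA flavour: the buffer already has a kernel virtual address and a device address from the coherent allocator, and dma_get_sgtable() fills a (normally single-entry) table describing it. A sketch (the helper name is hypothetical):

    #include <linux/dma-mapping.h>
    #include <linux/err.h>
    #include <linux/slab.h>

    static struct sg_table *coherent_buf_to_sgt(struct device *dev, void *vaddr,
                                                dma_addr_t dma_addr, size_t size)
    {
        struct sg_table *sgt;
        int ret;

        sgt = kzalloc(sizeof(*sgt), GFP_KERNEL);
        if (!sgt)
            return ERR_PTR(-ENOMEM);

        ret = dma_get_sgtable(dev, sgt, vaddr, dma_addr, size);
        if (ret < 0) {
            kfree(sgt);
            return ERR_PTR(ret);
        }
        return sgt;
    }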
|
/kernel/linux/linux-5.10/drivers/gpu/drm/mediatek/ |
D | mtk_drm_gem.c |
    190  struct sg_table *sgt;   in mtk_gem_prime_get_sg_table() local
    193  sgt = kzalloc(sizeof(*sgt), GFP_KERNEL);   in mtk_gem_prime_get_sg_table()
    194  if (!sgt)   in mtk_gem_prime_get_sg_table()
    197  ret = dma_get_sgtable_attrs(priv->dma_dev, sgt, mtk_gem->cookie,   in mtk_gem_prime_get_sg_table()
    202  kfree(sgt);   in mtk_gem_prime_get_sg_table()
    206  return sgt;   in mtk_gem_prime_get_sg_table()
    233  struct sg_table *sgt;   in mtk_drm_gem_prime_vmap() local
    239  sgt = mtk_gem_prime_get_sg_table(obj);   in mtk_drm_gem_prime_vmap()
    240  if (IS_ERR(sgt))   in mtk_drm_gem_prime_vmap()
    248  drm_prime_sg_to_page_addr_arrays(sgt, mtk_gem->pages, NULL, npages);   in mtk_drm_gem_prime_vmap()
    [all …]
|
/kernel/linux/linux-5.10/drivers/gpu/host1x/ |
D | job.c |
    119  struct sg_table *sgt;   in pin_job() local
    144  sgt = host1x_bo_pin(dev, reloc->target.bo, phys);   in pin_job()
    145  if (IS_ERR(sgt)) {   in pin_job()
    146  err = PTR_ERR(sgt);   in pin_job()
    150  if (sgt) {   in pin_job()
    173  err = dma_map_sgtable(dev, sgt, dir, 0);   in pin_job()
    179  phys_addr = sg_dma_address(sgt->sgl);   in pin_job()
    184  job->unpins[job->num_unpins].sgt = sgt;   in pin_job()
    198  struct sg_table *sgt;   in pin_job() local
    222  sgt = host1x_bo_pin(host->dev, g->bo, phys);   in pin_job()
    [all …]
|
/kernel/linux/linux-5.10/include/linux/ |
D | scatterlist.h |
    157  #define for_each_sgtable_sg(sgt, sg, i) \   argument
    158  for_each_sg((sgt)->sgl, sg, (sgt)->orig_nents, i)
    165  #define for_each_sgtable_dma_sg(sgt, sg, i) \   argument
    166  for_each_sg((sgt)->sgl, sg, (sgt)->nents, i)
    294  struct scatterlist *__sg_alloc_table_from_pages(struct sg_table *sgt,
    299  int sg_alloc_table_from_pages(struct sg_table *sgt, struct page **pages,
    459  #define for_each_sgtable_page(sgt, piter, pgoffset) \   argument
    460  for_each_sg_page((sgt)->sgl, piter, (sgt)->orig_nents, pgoffset)
    473  #define for_each_sgtable_dma_page(sgt, dma_iter, pgoffset) \   argument
    474  for_each_sg_dma_page((sgt)->sgl, dma_iter, (sgt)->nents, pgoffset)
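
The two macro families differ only in the bound they iterate over: for_each_sgtable_sg() and for_each_sgtable_page() walk orig_nents (the CPU-side entries), while the _dma_ variants walk nents (the DMA segments written by dma_map_sgtable(), which may be fewer when an IOMMU merges adjacent entries). A sketch of which iterator belongs to which phase (the pr_debug lines are illustrative only):

    #include <linux/dma-mapping.h>
    #include <linux/scatterlist.h>

    static void walk_both_views(struct device *dev, struct sg_table *sgt)
    {
        struct scatterlist *sg;
        unsigned int i;

        /* CPU view: orig_nents entries, set up with sg_set_page() and co. */
        for_each_sgtable_sg(sgt, sg, i)
            pr_debug("entry %u: len=%u\n", i, sg->length);

        if (dma_map_sgtable(dev, sgt, DMA_BIDIRECTIONAL, 0))
            return;

        /* DMA view: nents segments, valid only while the mapping exists. */
        for_each_sgtable_dma_sg(sgt, sg, i)
            pr_debug("segment %u: dma=%pad len=%u\n", i,
                     &sg_dma_address(sg), sg_dma_len(sg));

        dma_unmap_sgtable(dev, sgt, DMA_BIDIRECTIONAL, 0);
    }

Using orig_nents for the DMA walk (or nents for the CPU walk) is the classic mistake these wrappers were added to prevent.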
|
D | intel_th.h |
    43   int (*alloc_window)(void *priv, struct sg_table **sgt,
    45   void (*free_window)(void *priv, struct sg_table *sgt);
    59   int (*ready)(void *priv, struct sg_table *sgt, size_t bytes);
    65   void intel_th_msc_window_unlock(struct device *dev, struct sg_table *sgt);
|
/kernel/linux/linux-5.10/drivers/gpu/drm/lima/ |
D | lima_gem.c |
    28   struct sg_table sgt;   in lima_heap_alloc() local
    66   ret = sg_alloc_table_from_pages(&sgt, pages, i, 0,   in lima_heap_alloc()
    71   if (bo->base.sgt) {   in lima_heap_alloc()
    72   dma_unmap_sgtable(dev, bo->base.sgt, DMA_BIDIRECTIONAL, 0);   in lima_heap_alloc()
    73   sg_free_table(bo->base.sgt);   in lima_heap_alloc()
    75   bo->base.sgt = kmalloc(sizeof(*bo->base.sgt), GFP_KERNEL);   in lima_heap_alloc()
    76   if (!bo->base.sgt) {   in lima_heap_alloc()
    77   sg_free_table(&sgt);   in lima_heap_alloc()
    82   ret = dma_map_sgtable(dev, &sgt, DMA_BIDIRECTIONAL, 0);   in lima_heap_alloc()
    84   sg_free_table(&sgt);   in lima_heap_alloc()
    [all …]
|
/kernel/linux/linux-5.10/drivers/gpu/drm/etnaviv/ |
D | etnaviv_gem.c |
    23   struct sg_table *sgt = etnaviv_obj->sgt;   in etnaviv_gem_scatter_map() local
    30   dma_map_sgtable(dev->dev, sgt, DMA_BIDIRECTIONAL, 0);   in etnaviv_gem_scatter_map()
    36   struct sg_table *sgt = etnaviv_obj->sgt;   in etnaviv_gem_scatterlist_unmap() local
    54   dma_unmap_sgtable(dev->dev, sgt, DMA_BIDIRECTIONAL, 0);   in etnaviv_gem_scatterlist_unmap()
    75   if (etnaviv_obj->sgt) {   in put_pages()
    77   sg_free_table(etnaviv_obj->sgt);   in put_pages()
    78   kfree(etnaviv_obj->sgt);   in put_pages()
    79   etnaviv_obj->sgt = NULL;   in put_pages()
    101  if (!etnaviv_obj->sgt) {   in etnaviv_gem_get_pages()
    104  struct sg_table *sgt;   in etnaviv_gem_get_pages() local
    [all …]
|
/kernel/linux/linux-5.10/drivers/gpu/drm/rockchip/ |
D | rockchip_drm_gem.c |
    39   ret = iommu_map_sgtable(private->domain, rk_obj->dma_addr, rk_obj->sgt,   in rockchip_gem_iommu_map()
    88   rk_obj->sgt = drm_prime_pages_to_sg(rk_obj->base.dev,   in rockchip_gem_get_pages()
    90   if (IS_ERR(rk_obj->sgt)) {   in rockchip_gem_get_pages()
    91   ret = PTR_ERR(rk_obj->sgt);   in rockchip_gem_get_pages()
    102  for_each_sgtable_sg(rk_obj->sgt, s, i)   in rockchip_gem_get_pages()
    105  dma_sync_sgtable_for_device(drm->dev, rk_obj->sgt, DMA_TO_DEVICE);   in rockchip_gem_get_pages()
    116  sg_free_table(rk_obj->sgt);   in rockchip_gem_put_pages()
    117  kfree(rk_obj->sgt);   in rockchip_gem_put_pages()
    350  dma_unmap_sgtable(drm->dev, rk_obj->sgt,   in rockchip_gem_free_object()
    353  drm_prime_gem_destroy(obj, rk_obj->sgt);   in rockchip_gem_free_object()
    [all …]
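
Line 39 shows the table being mapped straight into an IOMMU domain at a driver-chosen IOVA rather than through the DMA-mapping API. A sketch of that call; the protection flags and error handling are assumptions:

    #include <linux/iommu.h>
    #include <linux/scatterlist.h>

    static int map_into_domain(struct iommu_domain *domain, unsigned long iova,
                               struct sg_table *sgt, size_t size)
    {
        size_t mapped;

        mapped = iommu_map_sgtable(domain, iova, sgt,
                                   IOMMU_READ | IOMMU_WRITE);
        if (mapped < size)
            return -ENOMEM;    /* could not map the whole table */
        return 0;
    }

iommu_map_sgtable() works from the CPU-side entries (orig_nents), so the table does not need to have gone through dma_map_sgtable() first.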
|
/kernel/linux/linux-5.10/drivers/gpu/drm/exynos/ |
D | exynos_drm_gem.c |
    120  drm_prime_gem_destroy(obj, exynos_gem->sgt);   in exynos_drm_gem_destroy()
    406  struct sg_table *sgt;   in exynos_drm_gem_prime_get_sg_table() local
    409  sgt = kzalloc(sizeof(*sgt), GFP_KERNEL);   in exynos_drm_gem_prime_get_sg_table()
    410  if (!sgt)   in exynos_drm_gem_prime_get_sg_table()
    413  ret = dma_get_sgtable_attrs(to_dma_dev(drm_dev), sgt, exynos_gem->cookie,   in exynos_drm_gem_prime_get_sg_table()
    418  kfree(sgt);   in exynos_drm_gem_prime_get_sg_table()
    422  return sgt;   in exynos_drm_gem_prime_get_sg_table()
    428  struct sg_table *sgt)   in exynos_drm_gem_prime_import_sg_table() argument
    433  if (drm_prime_get_contiguous_size(sgt) < attach->dmabuf->size) {   in exynos_drm_gem_prime_import_sg_table()
    452  exynos_gem->dma_addr = sg_dma_address(sgt->sgl);   in exynos_drm_gem_prime_import_sg_table()
    [all …]
|
/kernel/linux/linux-5.10/drivers/spi/ |
D | internals.h |
    24   struct sg_table *sgt, void *buf, size_t len,
    27   struct sg_table *sgt, enum dma_data_direction dir);
    30   struct sg_table *sgt, void *buf, size_t len,   in spi_map_buf() argument
    37   struct device *dev, struct sg_table *sgt,   in spi_unmap_buf() argument
|
/kernel/linux/linux-5.10/drivers/fpga/ |
D | fpga-mgr.c |
    98   struct sg_table *sgt)   in fpga_mgr_write_init_sg() argument
    112  sg_miter_start(&miter, sgt->sgl, sgt->nents, SG_MITER_FROM_SG);   in fpga_mgr_write_init_sg()
    127  len = sg_copy_to_buffer(sgt->sgl, sgt->nents, buf,   in fpga_mgr_write_init_sg()
    176  struct sg_table *sgt)   in fpga_mgr_buf_load_sg() argument
    180  ret = fpga_mgr_write_init_sg(mgr, info, sgt);   in fpga_mgr_buf_load_sg()
    187  ret = mgr->mops->write_sg(mgr, sgt);   in fpga_mgr_buf_load_sg()
    191  sg_miter_start(&miter, sgt->sgl, sgt->nents, SG_MITER_FROM_SG);   in fpga_mgr_buf_load_sg()
    252  struct sg_table sgt;   in fpga_mgr_buf_load() local
    293  rc = sg_alloc_table_from_pages(&sgt, pages, index, offset_in_page(buf),   in fpga_mgr_buf_load()
    299  rc = fpga_mgr_buf_load_sg(mgr, info, &sgt);   in fpga_mgr_buf_load()
    [all …]
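
Lines 112 and 191 show CPU-side consumption of scatter-gather data through the mapping iterator, which kmaps each entry in turn so the driver can feed the bitstream to the hardware in chunks. A sketch of that pattern with a placeholder consumer callback:

    #include <linux/scatterlist.h>

    static int feed_sg_to_consumer(struct sg_table *sgt,
                                   int (*consume)(const void *buf, size_t len))
    {
        struct sg_mapping_iter miter;
        int ret = 0;

        sg_miter_start(&miter, sgt->sgl, sgt->nents, SG_MITER_FROM_SG);
        while (sg_miter_next(&miter)) {
            /* miter.addr/miter.length cover one mapped chunk at a time. */
            ret = consume(miter.addr, miter.length);
            if (ret)
                break;
        }
        sg_miter_stop(&miter);
        return ret;
    }

sg_copy_to_buffer() (line 127) is the bounce-buffer alternative when the consumer needs a single contiguous region instead of per-chunk access.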
|
/kernel/linux/linux-5.10/drivers/gpu/drm/amd/amdgpu/ |
D | amdgpu_dma_buf.c |
    293  struct sg_table *sgt;   in amdgpu_dma_buf_map() local
    318  sgt = drm_prime_pages_to_sg(obj->dev,   in amdgpu_dma_buf_map()
    321  if (IS_ERR(sgt))   in amdgpu_dma_buf_map()
    322  return sgt;   in amdgpu_dma_buf_map()
    324  if (dma_map_sgtable(attach->dev, sgt, dir,   in amdgpu_dma_buf_map()
    331  dir, &sgt);   in amdgpu_dma_buf_map()
    339  return sgt;   in amdgpu_dma_buf_map()
    342  sg_free_table(sgt);   in amdgpu_dma_buf_map()
    343  kfree(sgt);   in amdgpu_dma_buf_map()
    357  struct sg_table *sgt,   in amdgpu_dma_buf_unmap() argument
    [all …]
|