Searched refs:sgt (Results 1 – 25 of 149) sorted by relevance

/drivers/media/common/videobuf2/
videobuf2-dma-contig.c
51 static unsigned long vb2_dc_get_contiguous_size(struct sg_table *sgt) in vb2_dc_get_contiguous_size() argument
54 dma_addr_t expected = sg_dma_address(sgt->sgl); in vb2_dc_get_contiguous_size()
58 for_each_sgtable_dma_sg(sgt, s, i) { in vb2_dc_get_contiguous_size()
102 struct sg_table *sgt = buf->dma_sgt; in vb2_dc_prepare() local
104 if (!sgt) in vb2_dc_prepare()
107 dma_sync_sgtable_for_device(buf->dev, sgt, buf->dma_dir); in vb2_dc_prepare()
113 struct sg_table *sgt = buf->dma_sgt; in vb2_dc_finish() local
115 if (!sgt) in vb2_dc_finish()
118 dma_sync_sgtable_for_cpu(buf->dev, sgt, buf->dma_dir); in vb2_dc_finish()
219 struct sg_table sgt; member
[all …]
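
The vb2_dc_get_contiguous_size() hit above shows a recurring idiom: after a buffer is DMA-mapped, walk the table's DMA entries and measure how far the addresses run contiguously from the first segment. A minimal sketch of that idiom (the name and framing are illustrative, not the exact videobuf2 body):

#include <linux/scatterlist.h>
#include <linux/dma-mapping.h>

/* Count the bytes at the start of a mapped sg_table that form one
 * contiguous DMA region; stop at the first segment whose address
 * does not directly follow its predecessor. */
static unsigned long sgt_contiguous_size(struct sg_table *sgt)
{
        dma_addr_t expected = sg_dma_address(sgt->sgl);
        struct scatterlist *s;
        unsigned long size = 0;
        unsigned int i;

        for_each_sgtable_dma_sg(sgt, s, i) {
                if (sg_dma_address(s) != expected)
                        break;
                expected += sg_dma_len(s);
                size += sg_dma_len(s);
        }

        return size;
}
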
videobuf2-dma-sg.c
105 struct sg_table *sgt; in vb2_dma_sg_alloc() local
146 sgt = &buf->sg_table; in vb2_dma_sg_alloc()
151 if (dma_map_sgtable(buf->dev, sgt, buf->dma_dir, in vb2_dma_sg_alloc()
183 struct sg_table *sgt = &buf->sg_table; in vb2_dma_sg_put() local
189 dma_unmap_sgtable(buf->dev, sgt, buf->dma_dir, in vb2_dma_sg_put()
205 struct sg_table *sgt = buf->dma_sgt; in vb2_dma_sg_prepare() local
207 dma_sync_sgtable_for_device(buf->dev, sgt, buf->dma_dir); in vb2_dma_sg_prepare()
213 struct sg_table *sgt = buf->dma_sgt; in vb2_dma_sg_finish() local
215 dma_sync_sgtable_for_cpu(buf->dev, sgt, buf->dma_dir); in vb2_dma_sg_finish()
222 struct sg_table *sgt; in vb2_dma_sg_get_userptr() local
[all …]
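
Taken together, the videobuf2-dma-sg hits trace the whole streaming-DMA life cycle of a page-backed buffer: build the table once, map it once, sync around every CPU/device hand-off, and unmap/free on teardown. A hedged outline of that sequence (helper names are mine; error handling trimmed to the essentials):

#include <linux/scatterlist.h>
#include <linux/dma-mapping.h>

/* Build and map: mirrors the vb2_dma_sg_alloc() steps above. */
static int sgt_buffer_setup(struct device *dev, struct sg_table *sgt,
                            struct page **pages, unsigned int n_pages,
                            size_t size)
{
        int ret;

        /* Adjacent pages may be merged into fewer, larger segments. */
        ret = sg_alloc_table_from_pages(sgt, pages, n_pages, 0, size,
                                        GFP_KERNEL);
        if (ret)
                return ret;

        ret = dma_map_sgtable(dev, sgt, DMA_BIDIRECTIONAL, 0);
        if (ret)
                sg_free_table(sgt);
        return ret;
}

/* Hand the buffer to the device: mirrors vb2_dma_sg_prepare(). */
static void sgt_buffer_prepare(struct device *dev, struct sg_table *sgt)
{
        dma_sync_sgtable_for_device(dev, sgt, DMA_BIDIRECTIONAL);
}

/* Take it back for the CPU: mirrors vb2_dma_sg_finish(). */
static void sgt_buffer_finish(struct device *dev, struct sg_table *sgt)
{
        dma_sync_sgtable_for_cpu(dev, sgt, DMA_BIDIRECTIONAL);
}

/* Teardown: mirrors vb2_dma_sg_put(). */
static void sgt_buffer_teardown(struct device *dev, struct sg_table *sgt)
{
        dma_unmap_sgtable(dev, sgt, DMA_BIDIRECTIONAL, 0);
        sg_free_table(sgt);
}
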
videobuf2-vmalloc.c
204 struct sg_table sgt; member
214 struct sg_table *sgt; in vb2_vmalloc_dmabuf_ops_attach() local
224 sgt = &attach->sgt; in vb2_vmalloc_dmabuf_ops_attach()
225 ret = sg_alloc_table(sgt, num_pages, GFP_KERNEL); in vb2_vmalloc_dmabuf_ops_attach()
230 for_each_sgtable_sg(sgt, sg, i) { in vb2_vmalloc_dmabuf_ops_attach()
234 sg_free_table(sgt); in vb2_vmalloc_dmabuf_ops_attach()
251 struct sg_table *sgt; in vb2_vmalloc_dmabuf_ops_detach() local
256 sgt = &attach->sgt; in vb2_vmalloc_dmabuf_ops_detach()
260 dma_unmap_sgtable(db_attach->dev, sgt, attach->dma_dir, 0); in vb2_vmalloc_dmabuf_ops_detach()
261 sg_free_table(sgt); in vb2_vmalloc_dmabuf_ops_detach()
[all …]
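
vb2_vmalloc_dmabuf_ops_attach() shows the manual variant: vmalloc memory has no ready-made page array, so each page is looked up and slotted into a freshly allocated table with sg_set_page(). A sketch under that assumption (one PAGE_SIZE entry per page; names illustrative):

#include <linux/scatterlist.h>
#include <linux/vmalloc.h>

/* Build an sg_table over a vmalloc'ed area, one page per entry. */
static int sgt_from_vmalloc(struct sg_table *sgt, void *vaddr,
                            unsigned int num_pages)
{
        struct scatterlist *sg;
        unsigned int i;
        int ret;

        ret = sg_alloc_table(sgt, num_pages, GFP_KERNEL);
        if (ret)
                return ret;

        for_each_sgtable_sg(sgt, sg, i) {
                struct page *page = vmalloc_to_page(vaddr + i * PAGE_SIZE);

                if (!page) {
                        sg_free_table(sgt);
                        return -ENOMEM;
                }
                sg_set_page(sg, page, PAGE_SIZE, 0);
        }
        return 0;
}
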
/drivers/gpu/drm/tegra/
gem.c
31 static int sg_alloc_table_from_sg(struct sg_table *sgt, struct scatterlist *sg, in sg_alloc_table_from_sg() argument
38 err = sg_alloc_table(sgt, nents, gfp_mask); in sg_alloc_table_from_sg()
42 dst = sgt->sgl; in sg_alloc_table_from_sg()
57 struct sg_table *sgt; in tegra_bo_pin() local
81 sgt = kzalloc(sizeof(*sgt), GFP_KERNEL); in tegra_bo_pin()
82 if (!sgt) in tegra_bo_pin()
90 err = sg_alloc_table_from_pages(sgt, obj->pages, obj->num_pages, in tegra_bo_pin()
94 } else if (obj->sgt) { in tegra_bo_pin()
101 err = sg_alloc_table_from_sg(sgt, obj->sgt->sgl, in tegra_bo_pin()
102 obj->sgt->orig_nents, GFP_KERNEL); in tegra_bo_pin()
[all …]
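
The Tegra sg_alloc_table_from_sg() hit covers another construction path: cloning the page layout of an existing chain into a new table, for example when re-exporting an already pinned object. A hedged reconstruction of that shape (the kernel helper's exact body differs):

#include <linux/scatterlist.h>

/* Copy nents entries (page, length, offset) from an existing chain
 * into a freshly allocated table. */
static int sgt_clone_from_sg(struct sg_table *sgt, struct scatterlist *src,
                             unsigned int nents, gfp_t gfp_mask)
{
        struct scatterlist *dst;
        unsigned int i;
        int err;

        err = sg_alloc_table(sgt, nents, gfp_mask);
        if (err < 0)
                return err;

        dst = sgt->sgl;
        for (i = 0; i < nents && src; i++) {
                sg_set_page(dst, sg_page(src), src->length, src->offset);
                dst = sg_next(dst);
                src = sg_next(src);
        }
        return 0;
}
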
plane.c
77 copy->sgt[i] = NULL; in tegra_plane_atomic_duplicate_state()
148 struct sg_table *sgt; in tegra_dc_pin() local
163 sgt = host1x_bo_pin(dc->dev, &bo->base, phys); in tegra_dc_pin()
164 if (IS_ERR(sgt)) { in tegra_dc_pin()
165 err = PTR_ERR(sgt); in tegra_dc_pin()
169 if (sgt) { in tegra_dc_pin()
170 err = dma_map_sgtable(dc->dev, sgt, DMA_TO_DEVICE, 0); in tegra_dc_pin()
180 if (sgt->nents > 1) { in tegra_dc_pin()
185 state->iova[i] = sg_dma_address(sgt->sgl); in tegra_dc_pin()
186 state->sgt[i] = sgt; in tegra_dc_pin()
[all …]
uapi.c
20 if (mapping->sgt) in tegra_drm_mapping_release()
21 dma_unmap_sgtable(mapping->dev, mapping->sgt, mapping->direction, in tegra_drm_mapping_release()
24 host1x_bo_unpin(mapping->dev, mapping->bo, mapping->sgt); in tegra_drm_mapping_release()
211 mapping->sgt = host1x_bo_pin(mapping->dev, mapping->bo, NULL); in tegra_drm_ioctl_channel_map()
212 if (IS_ERR(mapping->sgt)) { in tegra_drm_ioctl_channel_map()
213 err = PTR_ERR(mapping->sgt); in tegra_drm_ioctl_channel_map()
217 err = dma_map_sgtable(mapping->dev, mapping->sgt, mapping->direction, in tegra_drm_ioctl_channel_map()
222 mapping->iova = sg_dma_address(mapping->sgt->sgl); in tegra_drm_ioctl_channel_map()
237 if (mapping->sgt) { in tegra_drm_ioctl_channel_map()
238 dma_unmap_sgtable(mapping->dev, mapping->sgt, mapping->direction, in tegra_drm_ioctl_channel_map()
[all …]
/drivers/gpu/drm/armada/
armada_gem.c
66 if (dobj->sgt) in armada_gem_free_object()
68 dobj->sgt, DMA_TO_DEVICE); in armada_gem_free_object()
390 struct sg_table *sgt; in armada_gem_prime_map_dma_buf() local
393 sgt = kmalloc(sizeof(*sgt), GFP_KERNEL); in armada_gem_prime_map_dma_buf()
394 if (!sgt) in armada_gem_prime_map_dma_buf()
402 if (sg_alloc_table(sgt, count, GFP_KERNEL)) in armada_gem_prime_map_dma_buf()
407 for_each_sgtable_sg(sgt, sg, i) { in armada_gem_prime_map_dma_buf()
417 if (dma_map_sgtable(attach->dev, sgt, dir, 0)) in armada_gem_prime_map_dma_buf()
421 if (sg_alloc_table(sgt, 1, GFP_KERNEL)) in armada_gem_prime_map_dma_buf()
424 sg_set_page(sgt->sgl, dobj->page, dobj->obj.size, 0); in armada_gem_prime_map_dma_buf()
[all …]
/drivers/xen/
gntdev-dmabuf.c
52 struct sg_table *sgt; member
71 struct sg_table *sgt; member
204 struct sg_table *sgt; in dmabuf_pages_to_sgt() local
207 sgt = kmalloc(sizeof(*sgt), GFP_KERNEL); in dmabuf_pages_to_sgt()
208 if (!sgt) { in dmabuf_pages_to_sgt()
213 ret = sg_alloc_table_from_pages(sgt, pages, nr_pages, 0, in dmabuf_pages_to_sgt()
219 return sgt; in dmabuf_pages_to_sgt()
222 kfree(sgt); in dmabuf_pages_to_sgt()
247 struct sg_table *sgt = gntdev_dmabuf_attach->sgt; in dmabuf_exp_ops_detach() local
249 if (sgt) { in dmabuf_exp_ops_detach()
[all …]
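
dmabuf_pages_to_sgt() above is the heap-allocated flavour of the same construction, returning the table ERR_PTR-style so callers can propagate the failure. A minimal sketch (assuming nr_pages full, page-aligned pages; names illustrative):

#include <linux/scatterlist.h>
#include <linux/slab.h>
#include <linux/err.h>

static struct sg_table *pages_to_sgt(struct page **pages,
                                     unsigned int nr_pages)
{
        struct sg_table *sgt;
        int ret;

        sgt = kmalloc(sizeof(*sgt), GFP_KERNEL);
        if (!sgt)
                return ERR_PTR(-ENOMEM);

        ret = sg_alloc_table_from_pages(sgt, pages, nr_pages, 0,
                                        (unsigned long)nr_pages << PAGE_SHIFT,
                                        GFP_KERNEL);
        if (ret) {
                kfree(sgt);
                return ERR_PTR(ret);
        }
        return sgt;
}
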
/drivers/gpu/drm/i915/
i915_mm.c
36 struct sgt_iter sgt; member
56 return (r->sgt.dma + r->sgt.curr + r->iobase) >> PAGE_SHIFT; in sgt_pfn()
58 return r->sgt.pfn + (r->sgt.curr >> PAGE_SHIFT); in sgt_pfn()
65 if (GEM_WARN_ON(!r->sgt.sgp)) in remap_sg()
73 r->sgt.curr += PAGE_SIZE; in remap_sg()
74 if (r->sgt.curr >= r->sgt.max) in remap_sg()
75 r->sgt = __sgt_iter(__sg_next(r->sgt.sgp), use_dma(r->iobase)); in remap_sg()
132 .sgt = __sgt_iter(sgl, use_dma(iobase)), in remap_io_sg()
/drivers/infiniband/core/
umem_dmabuf.c
14 struct sg_table *sgt; in ib_umem_dmabuf_map_pages() local
23 if (umem_dmabuf->sgt) in ib_umem_dmabuf_map_pages()
26 sgt = dma_buf_map_attachment(umem_dmabuf->attach, DMA_BIDIRECTIONAL); in ib_umem_dmabuf_map_pages()
27 if (IS_ERR(sgt)) in ib_umem_dmabuf_map_pages()
28 return PTR_ERR(sgt); in ib_umem_dmabuf_map_pages()
35 for_each_sgtable_dma_sg(sgt, sg, i) { in ib_umem_dmabuf_map_pages()
58 umem_dmabuf->umem.sgt_append.sgt.sgl = umem_dmabuf->first_sg; in ib_umem_dmabuf_map_pages()
59 umem_dmabuf->umem.sgt_append.sgt.nents = nmap; in ib_umem_dmabuf_map_pages()
60 umem_dmabuf->sgt = sgt; in ib_umem_dmabuf_map_pages()
80 if (!umem_dmabuf->sgt) in ib_umem_dmabuf_unmap_pages()
[all …]
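
The ib_umem_dmabuf_map_pages() hits show the importer side of dma-buf: the attachment is mapped as a whole and the device addresses are then read out of the returned table. A hedged sketch (attach is assumed to come from an earlier dma_buf_attach(); names illustrative):

#include <linux/dma-buf.h>
#include <linux/dma-mapping.h>
#include <linux/err.h>

static struct sg_table *importer_map(struct dma_buf_attachment *attach)
{
        struct sg_table *sgt;

        sgt = dma_buf_map_attachment(attach, DMA_BIDIRECTIONAL);
        if (IS_ERR(sgt))
                return sgt;     /* propagate the ERR_PTR as-is */

        /* sg_dma_address()/sg_dma_len() of each entry are valid now;
         * release with dma_buf_unmap_attachment() when finished. */
        return sgt;
}
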
/drivers/hwtracing/intel_th/
msu-sink.c
51 static int msu_sink_alloc_window(void *data, struct sg_table **sgt, size_t size) in msu_sink_alloc_window() argument
64 ret = sg_alloc_table(*sgt, nents, GFP_KERNEL); in msu_sink_alloc_window()
68 priv->sgts[priv->nr_sgts++] = *sgt; in msu_sink_alloc_window()
70 for_each_sg((*sgt)->sgl, sg_ptr, nents, i) { in msu_sink_alloc_window()
84 static void msu_sink_free_window(void *data, struct sg_table *sgt) in msu_sink_free_window() argument
90 for_each_sg(sgt->sgl, sg_ptr, sgt->nents, i) { in msu_sink_free_window()
95 sg_free_table(sgt); in msu_sink_free_window()
99 static int msu_sink_ready(void *data, struct sg_table *sgt, size_t bytes) in msu_sink_ready() argument
103 intel_th_msc_window_unlock(priv->dev, sgt); in msu_sink_ready()
/drivers/gpu/drm/mediatek/
mtk_drm_gem.c
181 struct sg_table *sgt; in mtk_gem_prime_get_sg_table() local
184 sgt = kzalloc(sizeof(*sgt), GFP_KERNEL); in mtk_gem_prime_get_sg_table()
185 if (!sgt) in mtk_gem_prime_get_sg_table()
188 ret = dma_get_sgtable_attrs(priv->dma_dev, sgt, mtk_gem->cookie, in mtk_gem_prime_get_sg_table()
193 kfree(sgt); in mtk_gem_prime_get_sg_table()
197 return sgt; in mtk_gem_prime_get_sg_table()
224 struct sg_table *sgt = NULL; in mtk_drm_gem_prime_vmap() local
230 sgt = mtk_gem_prime_get_sg_table(obj); in mtk_drm_gem_prime_vmap()
231 if (IS_ERR(sgt)) in mtk_drm_gem_prime_vmap()
232 return PTR_ERR(sgt); in mtk_drm_gem_prime_vmap()
[all …]
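
mtk_gem_prime_get_sg_table() demonstrates the coherent-allocation case: the buffer came from the DMA allocator, so dma_get_sgtable() reconstructs a table describing it instead of one being built from a page array. A sketch of that pattern (cookie/dma_addr are whatever dma_alloc_attrs() returned; names illustrative):

#include <linux/dma-mapping.h>
#include <linux/scatterlist.h>
#include <linux/slab.h>
#include <linux/err.h>

static struct sg_table *coherent_to_sgt(struct device *dev, void *cookie,
                                        dma_addr_t dma_addr, size_t size)
{
        struct sg_table *sgt;
        int ret;

        sgt = kzalloc(sizeof(*sgt), GFP_KERNEL);
        if (!sgt)
                return ERR_PTR(-ENOMEM);

        ret = dma_get_sgtable(dev, sgt, cookie, dma_addr, size);
        if (ret) {
                kfree(sgt);
                return ERR_PTR(ret);
        }
        return sgt;
}
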
/drivers/staging/media/tegra-vde/
dmabuf-cache.c
24 struct sg_table *sgt; member
38 dma_buf_unmap_attachment(entry->a, entry->sgt, entry->dma_dir); in tegra_vde_release_entry()
69 struct sg_table *sgt; in tegra_vde_dmabuf_cache_map() local
90 *addrp = sg_dma_address(entry->sgt->sgl); in tegra_vde_dmabuf_cache_map()
102 sgt = dma_buf_map_attachment(attachment, dma_dir); in tegra_vde_dmabuf_cache_map()
103 if (IS_ERR(sgt)) { in tegra_vde_dmabuf_cache_map()
105 err = PTR_ERR(sgt); in tegra_vde_dmabuf_cache_map()
109 if (!vde->domain && sgt->nents > 1) { in tegra_vde_dmabuf_cache_map()
122 err = tegra_vde_iommu_map(vde, sgt, &iova, dmabuf->size); in tegra_vde_dmabuf_cache_map()
128 *addrp = sg_dma_address(sgt->sgl); in tegra_vde_dmabuf_cache_map()
[all …]
/drivers/gpu/drm/
drm_prime.c
621 struct sg_table *sgt; in drm_gem_map_dma_buf() local
630 sgt = obj->funcs->get_sg_table(obj); in drm_gem_map_dma_buf()
631 if (IS_ERR(sgt)) in drm_gem_map_dma_buf()
632 return sgt; in drm_gem_map_dma_buf()
634 ret = dma_map_sgtable(attach->dev, sgt, dir, in drm_gem_map_dma_buf()
637 sg_free_table(sgt); in drm_gem_map_dma_buf()
638 kfree(sgt); in drm_gem_map_dma_buf()
639 sgt = ERR_PTR(ret); in drm_gem_map_dma_buf()
642 return sgt; in drm_gem_map_dma_buf()
655 struct sg_table *sgt, in drm_gem_unmap_dma_buf() argument
[all …]
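
drm_gem_map_dma_buf() is the exporter-side mirror image: fetch the object's table, map it for the importer's device, and tear the table down again if the mapping fails, since the caller only ever sees the ERR_PTR. A hedged sketch of that failure handling (the real helper also obtains the table via obj->funcs->get_sg_table()):

#include <linux/dma-buf.h>
#include <linux/dma-mapping.h>
#include <linux/scatterlist.h>
#include <linux/slab.h>
#include <linux/err.h>

/* Map a freshly built table for the importer; on failure the table
 * must be freed before the error is returned. */
static struct sg_table *exporter_map(struct dma_buf_attachment *attach,
                                     struct sg_table *sgt,
                                     enum dma_data_direction dir)
{
        int ret;

        ret = dma_map_sgtable(attach->dev, sgt, dir,
                              DMA_ATTR_SKIP_CPU_SYNC);
        if (ret) {
                sg_free_table(sgt);
                kfree(sgt);
                return ERR_PTR(ret);
        }
        return sgt;
}
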
drm_gem_shmem_helper.c
128 drm_prime_gem_destroy(obj, shmem->sgt); in drm_gem_shmem_free_object()
130 if (shmem->sgt) { in drm_gem_shmem_free_object()
131 dma_unmap_sgtable(obj->dev->dev, shmem->sgt, in drm_gem_shmem_free_object()
133 sg_free_table(shmem->sgt); in drm_gem_shmem_free_object()
134 kfree(shmem->sgt); in drm_gem_shmem_free_object()
440 dma_unmap_sgtable(obj->dev->dev, shmem->sgt, DMA_BIDIRECTIONAL, 0); in drm_gem_shmem_purge_locked()
441 sg_free_table(shmem->sgt); in drm_gem_shmem_purge_locked()
442 kfree(shmem->sgt); in drm_gem_shmem_purge_locked()
443 shmem->sgt = NULL; in drm_gem_shmem_purge_locked()
694 struct sg_table *sgt; in drm_gem_shmem_get_pages_sgt() local
[all …]
drm_gem_cma_helper.c
211 drm_prime_gem_destroy(gem_obj, cma_obj->sgt); in drm_gem_cma_free_object()
405 struct sg_table *sgt; in drm_gem_cma_get_sg_table() local
408 sgt = kzalloc(sizeof(*sgt), GFP_KERNEL); in drm_gem_cma_get_sg_table()
409 if (!sgt) in drm_gem_cma_get_sg_table()
412 ret = dma_get_sgtable(obj->dev->dev, sgt, cma_obj->vaddr, in drm_gem_cma_get_sg_table()
417 return sgt; in drm_gem_cma_get_sg_table()
420 kfree(sgt); in drm_gem_cma_get_sg_table()
445 struct sg_table *sgt) in drm_gem_cma_prime_import_sg_table() argument
450 if (drm_prime_get_contiguous_size(sgt) < attach->dmabuf->size) in drm_gem_cma_prime_import_sg_table()
458 cma_obj->paddr = sg_dma_address(sgt->sgl); in drm_gem_cma_prime_import_sg_table()
[all …]
/drivers/gpu/drm/lima/
lima_gem.c
28 struct sg_table sgt; in lima_heap_alloc() local
66 ret = sg_alloc_table_from_pages(&sgt, pages, i, 0, in lima_heap_alloc()
71 if (bo->base.sgt) { in lima_heap_alloc()
72 dma_unmap_sgtable(dev, bo->base.sgt, DMA_BIDIRECTIONAL, 0); in lima_heap_alloc()
73 sg_free_table(bo->base.sgt); in lima_heap_alloc()
75 bo->base.sgt = kmalloc(sizeof(*bo->base.sgt), GFP_KERNEL); in lima_heap_alloc()
76 if (!bo->base.sgt) { in lima_heap_alloc()
77 sg_free_table(&sgt); in lima_heap_alloc()
82 ret = dma_map_sgtable(dev, &sgt, DMA_BIDIRECTIONAL, 0); in lima_heap_alloc()
84 sg_free_table(&sgt); in lima_heap_alloc()
[all …]
/drivers/gpu/drm/rockchip/
rockchip_drm_gem.c
40 ret = iommu_map_sgtable(private->domain, rk_obj->dma_addr, rk_obj->sgt, in rockchip_gem_iommu_map()
89 rk_obj->sgt = drm_prime_pages_to_sg(rk_obj->base.dev, in rockchip_gem_get_pages()
91 if (IS_ERR(rk_obj->sgt)) { in rockchip_gem_get_pages()
92 ret = PTR_ERR(rk_obj->sgt); in rockchip_gem_get_pages()
103 for_each_sgtable_sg(rk_obj->sgt, s, i) in rockchip_gem_get_pages()
106 dma_sync_sgtable_for_device(drm->dev, rk_obj->sgt, DMA_TO_DEVICE); in rockchip_gem_get_pages()
117 sg_free_table(rk_obj->sgt); in rockchip_gem_put_pages()
118 kfree(rk_obj->sgt); in rockchip_gem_put_pages()
361 dma_unmap_sgtable(drm->dev, rk_obj->sgt, in rockchip_gem_free_object()
364 drm_prime_gem_destroy(obj, rk_obj->sgt); in rockchip_gem_free_object()
[all …]
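
rockchip_gem_iommu_map() adds the IOMMU-domain variant: instead of dma_map_sgtable(), the pinned table is mapped at a driver-chosen IOVA with iommu_map_sgtable(). A sketch, hedged on the return convention (recent kernels report the number of bytes mapped; treat a short map as failure, as the rockchip code does):

#include <linux/iommu.h>
#include <linux/scatterlist.h>

static int map_into_domain(struct iommu_domain *domain, unsigned long iova,
                           struct sg_table *sgt, size_t size)
{
        ssize_t mapped;

        mapped = iommu_map_sgtable(domain, iova, sgt,
                                   IOMMU_READ | IOMMU_WRITE);
        if (mapped < (ssize_t)size) {
                /* Partial map: undo before reporting failure. */
                if (mapped > 0)
                        iommu_unmap(domain, iova, mapped);
                return -ENOMEM;
        }
        return 0;
}
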
/drivers/gpu/drm/amd/amdgpu/
amdgpu_dma_buf.c
160 struct sg_table *sgt; in amdgpu_dma_buf_map() local
185 sgt = drm_prime_pages_to_sg(obj->dev, in amdgpu_dma_buf_map()
188 if (IS_ERR(sgt)) in amdgpu_dma_buf_map()
189 return sgt; in amdgpu_dma_buf_map()
191 if (dma_map_sgtable(attach->dev, sgt, dir, in amdgpu_dma_buf_map()
199 dir, &sgt); in amdgpu_dma_buf_map()
207 return sgt; in amdgpu_dma_buf_map()
210 sg_free_table(sgt); in amdgpu_dma_buf_map()
211 kfree(sgt); in amdgpu_dma_buf_map()
225 struct sg_table *sgt, in amdgpu_dma_buf_unmap() argument
[all …]
/drivers/gpu/host1x/
job.c
141 struct sg_table *sgt; in pin_job() local
175 sgt = host1x_bo_pin(dev, reloc->target.bo, phys); in pin_job()
176 if (IS_ERR(sgt)) { in pin_job()
177 err = PTR_ERR(sgt); in pin_job()
181 if (sgt) { in pin_job()
204 err = dma_map_sgtable(dev, sgt, dir, 0); in pin_job()
210 phys_addr = sg_dma_address(sgt->sgl); in pin_job()
215 job->unpins[job->num_unpins].sgt = sgt; in pin_job()
256 sgt = host1x_bo_pin(host->dev, g->bo, phys); in pin_job()
257 if (IS_ERR(sgt)) { in pin_job()
[all …]
/drivers/gpu/drm/etnaviv/
etnaviv_gem.c
23 struct sg_table *sgt = etnaviv_obj->sgt; in etnaviv_gem_scatter_map() local
30 dma_map_sgtable(dev->dev, sgt, DMA_BIDIRECTIONAL, 0); in etnaviv_gem_scatter_map()
36 struct sg_table *sgt = etnaviv_obj->sgt; in etnaviv_gem_scatterlist_unmap() local
54 dma_unmap_sgtable(dev->dev, sgt, DMA_BIDIRECTIONAL, 0); in etnaviv_gem_scatterlist_unmap()
75 if (etnaviv_obj->sgt) { in put_pages()
77 sg_free_table(etnaviv_obj->sgt); in put_pages()
78 kfree(etnaviv_obj->sgt); in put_pages()
79 etnaviv_obj->sgt = NULL; in put_pages()
101 if (!etnaviv_obj->sgt) { in etnaviv_gem_get_pages()
104 struct sg_table *sgt; in etnaviv_gem_get_pages() local
[all …]
/drivers/fpga/
fpga-mgr.c
87 struct sg_table *sgt) in fpga_mgr_write_sg() argument
90 return mgr->mops->write_sg(mgr, sgt); in fpga_mgr_write_sg()
168 struct sg_table *sgt) in fpga_mgr_write_init_sg() argument
182 sg_miter_start(&miter, sgt->sgl, sgt->nents, SG_MITER_FROM_SG); in fpga_mgr_write_init_sg()
197 len = sg_copy_to_buffer(sgt->sgl, sgt->nents, buf, in fpga_mgr_write_init_sg()
225 struct sg_table *sgt) in fpga_mgr_buf_load_sg() argument
229 ret = fpga_mgr_write_init_sg(mgr, info, sgt); in fpga_mgr_buf_load_sg()
236 ret = fpga_mgr_write_sg(mgr, sgt); in fpga_mgr_buf_load_sg()
240 sg_miter_start(&miter, sgt->sgl, sgt->nents, SG_MITER_FROM_SG); in fpga_mgr_buf_load_sg()
301 struct sg_table sgt; in fpga_mgr_buf_load() local
[all …]
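
fpga_mgr_write_init_sg() reads the image header straight out of the sg list with the CPU-side mapping iterator, falling back to sg_copy_to_buffer() when the first segment is too short. A sketch of the iterator idiom (names illustrative):

#include <linux/scatterlist.h>
#include <linux/string.h>
#include <linux/minmax.h>

/* Copy up to buflen leading bytes of an sg list into buf using the
 * CPU-side mapping iterator; returns the number of bytes copied. */
static size_t sgt_peek(struct sg_table *sgt, void *buf, size_t buflen)
{
        struct sg_mapping_iter miter;
        size_t done = 0;

        sg_miter_start(&miter, sgt->sgl, sgt->nents, SG_MITER_FROM_SG);
        while (done < buflen && sg_miter_next(&miter)) {
                size_t len = min(miter.length, buflen - done);

                memcpy(buf + done, miter.addr, len);
                done += len;
        }
        sg_miter_stop(&miter);

        return done;
}

When a plain bounded copy is all that is needed, sg_copy_to_buffer(sgt->sgl, sgt->nents, buf, buflen) wraps the same walk, which is the fallback visible in the hits above.
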
/drivers/gpu/drm/exynos/
exynos_drm_gem.c
120 drm_prime_gem_destroy(obj, exynos_gem->sgt); in exynos_drm_gem_destroy()
419 struct sg_table *sgt; in exynos_drm_gem_prime_get_sg_table() local
422 sgt = kzalloc(sizeof(*sgt), GFP_KERNEL); in exynos_drm_gem_prime_get_sg_table()
423 if (!sgt) in exynos_drm_gem_prime_get_sg_table()
426 ret = dma_get_sgtable_attrs(to_dma_dev(drm_dev), sgt, exynos_gem->cookie, in exynos_drm_gem_prime_get_sg_table()
431 kfree(sgt); in exynos_drm_gem_prime_get_sg_table()
435 return sgt; in exynos_drm_gem_prime_get_sg_table()
441 struct sg_table *sgt) in exynos_drm_gem_prime_import_sg_table() argument
446 if (drm_prime_get_contiguous_size(sgt) < attach->dmabuf->size) { in exynos_drm_gem_prime_import_sg_table()
465 exynos_gem->dma_addr = sg_dma_address(sgt->sgl); in exynos_drm_gem_prime_import_sg_table()
[all …]
/drivers/spi/
spi-ep93xx.c
281 struct sg_table *sgt; in ep93xx_spi_dma_prepare() local
298 sgt = &espi->rx_sgt; in ep93xx_spi_dma_prepare()
305 sgt = &espi->tx_sgt; in ep93xx_spi_dma_prepare()
326 if (nents != sgt->nents) { in ep93xx_spi_dma_prepare()
327 sg_free_table(sgt); in ep93xx_spi_dma_prepare()
329 ret = sg_alloc_table(sgt, nents, GFP_KERNEL); in ep93xx_spi_dma_prepare()
335 for_each_sg(sgt->sgl, sg, sgt->nents, i) { in ep93xx_spi_dma_prepare()
355 nents = dma_map_sg(chan->device->dev, sgt->sgl, sgt->nents, dir); in ep93xx_spi_dma_prepare()
359 txd = dmaengine_prep_slave_sg(chan, sgt->sgl, nents, conf.direction, in ep93xx_spi_dma_prepare()
362 dma_unmap_sg(chan->device->dev, sgt->sgl, sgt->nents, dir); in ep93xx_spi_dma_prepare()
[all …]
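
The ep93xx SPI hits show the older open-coded flavour: dma_map_sg() instead of dma_map_sgtable(), with the subtlety that the *mapped* entry count (which may shrink after merging) is what gets handed to the dmaengine, while unmapping still uses the original nents. A hedged sketch of that pattern (flag choice illustrative):

#include <linux/dmaengine.h>
#include <linux/dma-mapping.h>
#include <linux/scatterlist.h>

static struct dma_async_tx_descriptor *
prep_sg_transfer(struct dma_chan *chan, struct sg_table *sgt,
                 enum dma_data_direction dir,
                 enum dma_transfer_direction xfer_dir)
{
        struct dma_async_tx_descriptor *txd;
        int nents;

        nents = dma_map_sg(chan->device->dev, sgt->sgl, sgt->nents, dir);
        if (!nents)
                return NULL;

        /* Pass the mapped count, not the original sgt->nents. */
        txd = dmaengine_prep_slave_sg(chan, sgt->sgl, nents, xfer_dir,
                                      DMA_CTRL_ACK);
        if (!txd)
                dma_unmap_sg(chan->device->dev, sgt->sgl, sgt->nents, dir);

        return txd;
}
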
/drivers/gpu/drm/vmwgfx/
vmwgfx_ttm_buffer.c
176 struct sg_table sgt; member
246 __sg_page_iter_start(&viter->iter.base, vsgt->sgt->sgl, in vmw_piter_start()
247 vsgt->sgt->orig_nents, p_offset); in vmw_piter_start()
266 dma_unmap_sgtable(dev, &vmw_tt->sgt, DMA_BIDIRECTIONAL, 0); in vmw_ttm_unmap_from_dma()
267 vmw_tt->sgt.nents = vmw_tt->sgt.orig_nents; in vmw_ttm_unmap_from_dma()
287 return dma_map_sgtable(dev, &vmw_tt->sgt, DMA_BIDIRECTIONAL, 0); in vmw_ttm_map_for_dma()
322 vsgt->sgt = &vmw_tt->sgt; in vmw_ttm_map_dma()
337 &vmw_tt->sgt, vsgt->pages, vsgt->num_pages, 0, in vmw_ttm_map_dma()
343 if (vsgt->num_pages > vmw_tt->sgt.orig_nents) { in vmw_ttm_map_dma()
346 vmw_tt->sgt.orig_nents); in vmw_ttm_map_dma()
[all …]
