/drivers/spi/ |
D | spi-pxa2xx-dma.c |
     27   int i, nents, len = drv_data->len;  in pxa2xx_spi_map_dma_buffer() local
     57   nents = DIV_ROUND_UP(len, SZ_2K);  in pxa2xx_spi_map_dma_buffer()
     58   if (nents != sgt->nents) {  in pxa2xx_spi_map_dma_buffer()
     62   ret = sg_alloc_table(sgt, nents, GFP_ATOMIC);  in pxa2xx_spi_map_dma_buffer()
     68   for_each_sg(sgt->sgl, sg, sgt->nents, i) {  in pxa2xx_spi_map_dma_buffer()
     80   nents = dma_map_sg(dmadev, sgt->sgl, sgt->nents, dir);  in pxa2xx_spi_map_dma_buffer()
     81   if (!nents)  in pxa2xx_spi_map_dma_buffer()
     84   return nents;  in pxa2xx_spi_map_dma_buffer()
    101   dma_unmap_sg(dmadev, sgt->sgl, sgt->nents, dir);  in pxa2xx_spi_unmap_dma_buffer()
    183   int nents, ret;  in pxa2xx_spi_dma_prepare_one() local
    [all …]
|
D | spi-ep93xx.c |
    557   int i, ret, nents;  in ep93xx_spi_dma_prepare() local
    597   nents = DIV_ROUND_UP(len, PAGE_SIZE);  in ep93xx_spi_dma_prepare()
    598   if (nents != sgt->nents) {  in ep93xx_spi_dma_prepare()
    601   ret = sg_alloc_table(sgt, nents, GFP_KERNEL);  in ep93xx_spi_dma_prepare()
    607   for_each_sg(sgt->sgl, sg, sgt->nents, i) {  in ep93xx_spi_dma_prepare()
    627   nents = dma_map_sg(chan->device->dev, sgt->sgl, sgt->nents, dir);  in ep93xx_spi_dma_prepare()
    628   if (!nents)  in ep93xx_spi_dma_prepare()
    631   txd = dmaengine_prep_slave_sg(chan, sgt->sgl, nents, dir, DMA_CTRL_ACK);  in ep93xx_spi_dma_prepare()
    633   dma_unmap_sg(chan->device->dev, sgt->sgl, sgt->nents, dir);  in ep93xx_spi_dma_prepare()
    661   dma_unmap_sg(chan->device->dev, sgt->sgl, sgt->nents, dir);  in ep93xx_spi_dma_finish()
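Both SPI entries above share one shape: size the table with DIV_ROUND_UP (2 KiB segments for pxa2xx, PAGE_SIZE for ep93xx), reallocate it only when the entry count changes, fill it with for_each_sg(), then hand sgt->nents to dma_map_sg() and keep its return value as the mapped count. A minimal sketch of that shape, assuming a 2 KiB segment limit; my_map_dma_buffer() and its parameters are illustrative, not taken from either driver:

#include <linux/dma-mapping.h>
#include <linux/kernel.h>
#include <linux/scatterlist.h>
#include <linux/sizes.h>

static int my_map_dma_buffer(struct device *dev, struct sg_table *sgt,
                             void *buf, size_t len,
                             enum dma_data_direction dir)
{
        struct scatterlist *sg;
        int i, nents, ret;

        /* One entry per 2 KiB segment, rounding the tail segment up. */
        nents = DIV_ROUND_UP(len, SZ_2K);

        /* Reallocate the table only when the entry count changes. */
        if (nents != sgt->nents) {
                sg_free_table(sgt);
                ret = sg_alloc_table(sgt, nents, GFP_KERNEL);
                if (ret)
                        return ret;
        }

        /* Point one scatterlist entry at each segment of the buffer. */
        for_each_sg(sgt->sgl, sg, sgt->nents, i) {
                size_t seg = min_t(size_t, len, SZ_2K);

                sg_set_buf(sg, buf, seg);
                buf += seg;
                len -= seg;
        }

        /* dma_map_sg() may coalesce entries; 0 means failure. */
        nents = dma_map_sg(dev, sgt->sgl, sgt->nents, dir);
        if (!nents)
                return -ENOMEM;

        return nents;   /* mapped count, possibly < sgt->nents */
}

The mapped count, not sgt->nents, is what later feeds the DMA engine; spi-ep93xx.c does exactly that at line 631 with dmaengine_prep_slave_sg().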
|
/drivers/crypto/caam/ |
D | sg_sw_sec4.h |
     89   unsigned int nents, enum dma_data_direction dir,  in dma_map_sg_chained() argument
     94   for (i = 0; i < nents; i++) {  in dma_map_sg_chained()
     99   dma_map_sg(dev, sg, nents, dir);  in dma_map_sg_chained()
    101   return nents;  in dma_map_sg_chained()
    105   unsigned int nents, enum dma_data_direction dir,  in dma_unmap_sg_chained() argument
    110   for (i = 0; i < nents; i++) {  in dma_unmap_sg_chained()
    115   dma_unmap_sg(dev, sg, nents, dir);  in dma_unmap_sg_chained()
    117   return nents;  in dma_unmap_sg_chained()
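The caam helpers above exist because a chained scatterlist cannot always be handed to dma_map_sg() in one call on every platform, so when the caller flags the list as chained each entry is mapped individually. A sketch of that split, mirroring the shape of dma_map_sg_chained() above; my_map_sg_chained() is an illustrative name:

#include <linux/dma-mapping.h>
#include <linux/scatterlist.h>

static int my_map_sg_chained(struct device *dev, struct scatterlist *sg,
                             unsigned int nents,
                             enum dma_data_direction dir, bool chained)
{
        if (chained) {
                unsigned int i;

                /* Map one entry at a time, following the chain links. */
                for (i = 0; i < nents; i++) {
                        dma_map_sg(dev, sg, 1, dir);
                        sg = sg_next(sg);
                }
        } else {
                /* A flat list can be mapped in a single call. */
                dma_map_sg(dev, sg, nents, dir);
        }
        return nents;
}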
|
/drivers/parisc/ |
D | ccio-rm-dma.c |
    115   static int ccio_map_sg(struct pci_dev *dev, struct scatterlist *sglist, int nents, int direction)  in ccio_map_sg() argument
    117   int tmp = nents;  in ccio_map_sg()
    120   while (nents) {  in ccio_map_sg()
    123   nents--;  in ccio_map_sg()
    131   static void ccio_unmap_sg(struct pci_dev *dev, struct scatterlist *sglist, int nents, int direction)  in ccio_unmap_sg() argument
    134   while (nents) {  in ccio_unmap_sg()
    136   nents--;  in ccio_unmap_sg()
|
D | iommu-helpers.h |
     14   iommu_fill_pdir(struct ioc *ioc, struct scatterlist *startsg, int nents,  in iommu_fill_pdir() argument
     29   while (nents-- > 0) {  in iommu_fill_pdir()
     33   DBG_RUN_SG(" %d : %08lx/%05x %08lx/%05x\n", nents,  in iommu_fill_pdir()
    101   struct scatterlist *startsg, int nents,  in iommu_coalesce_chunks() argument
    109   while (nents > 0) {  in iommu_coalesce_chunks()
    126   while(--nents > 0) {  in iommu_coalesce_chunks()
|
D | ccio-dma.c |
    901   ccio_map_sg(struct device *dev, struct scatterlist *sglist, int nents,  in ccio_map_sg() argument
    914   DBG_RUN_SG("%s() START %d entries\n", __func__, nents);  in ccio_map_sg()
    917   if (nents == 1) {  in ccio_map_sg()
    925   for(i = 0; i < nents; i++)  in ccio_map_sg()
    942   coalesced = iommu_coalesce_chunks(ioc, dev, sglist, nents, ccio_alloc_range);  in ccio_map_sg()
    952   filled = iommu_fill_pdir(ioc, sglist, nents, hint, ccio_io_pdir_entry);  in ccio_map_sg()
    978   ccio_unmap_sg(struct device *dev, struct scatterlist *sglist, int nents,  in ccio_unmap_sg() argument
    987   __func__, nents, sg_virt_addr(sglist), sglist->length);  in ccio_unmap_sg()
    993   while(sg_dma_len(sglist) && nents--) {  in ccio_unmap_sg()
   1003   DBG_RUN_SG("%s() DONE (nents %d)\n", __func__, nents);  in ccio_unmap_sg()
|
D | sba_iommu.c |
    274   sba_dump_sg( struct ioc *ioc, struct scatterlist *startsg, int nents)  in sba_dump_sg() argument
    276   while (nents-- > 0) {  in sba_dump_sg()
    278   nents,  in sba_dump_sg()
    935   sba_map_sg(struct device *dev, struct scatterlist *sglist, int nents,  in sba_map_sg() argument
    942   DBG_RUN_SG("%s() START %d entries\n", __func__, nents);  in sba_map_sg()
    947   if (nents == 1) {  in sba_map_sg()
    960   sba_dump_sg(ioc, sglist, nents);  in sba_map_sg()
    977   coalesced = iommu_coalesce_chunks(ioc, dev, sglist, nents, sba_alloc_range);  in sba_map_sg()
    987   filled = iommu_fill_pdir(ioc, sglist, nents, 0, sba_io_pdir_entry);  in sba_map_sg()
    996   sba_dump_sg(ioc, sglist, nents);  in sba_map_sg()
    [all …]
|
/drivers/infiniband/core/ |
D | umem.c |
     57   chunk->nents, DMA_BIDIRECTIONAL);  in __ib_umem_release()
     58   for (i = 0; i < chunk->nents; ++i) {  in __ib_umem_release()
    174   chunk->nents = min_t(int, ret, IB_UMEM_MAX_PAGE_CHUNK);  in ib_umem_get()
    175   sg_init_table(chunk->page_list, chunk->nents);  in ib_umem_get()
    176   for (i = 0; i < chunk->nents; ++i) {  in ib_umem_get()
    185   chunk->nents,  in ib_umem_get()
    189   for (i = 0; i < chunk->nents; ++i)  in ib_umem_get()
    197   ret -= chunk->nents;  in ib_umem_get()
    198   off += chunk->nents;  in ib_umem_get()
|
/drivers/mmc/core/ |
D | sdio_ops.c |
    129   unsigned int nents, left_size, i;  in mmc_io_rw_extended() local
    160   nents = (left_size - 1) / seg_size + 1;  in mmc_io_rw_extended()
    161   if (nents > 1) {  in mmc_io_rw_extended()
    162   if (sg_alloc_table(&sgtable, nents, GFP_KERNEL))  in mmc_io_rw_extended()
    166   data.sg_len = nents;  in mmc_io_rw_extended()
    185   if (nents > 1)  in mmc_io_rw_extended()
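The sizing expression on line 160 above, (left_size - 1) / seg_size + 1, is the open-coded form of DIV_ROUND_UP(left_size, seg_size), and the table is only allocated when the transfer actually needs more than one segment. A small sketch of that decision, with illustrative names:

#include <linux/kernel.h>
#include <linux/scatterlist.h>

static int my_setup_sg(struct sg_table *sgt, struct scatterlist *one,
                       void *buf, unsigned int left_size,
                       unsigned int seg_size)
{
        /* Same count as (left_size - 1) / seg_size + 1 above. */
        unsigned int nents = DIV_ROUND_UP(left_size, seg_size);

        if (nents > 1)
                return sg_alloc_table(sgt, nents, GFP_KERNEL);

        /* A single segment fits in one caller-provided entry. */
        sg_init_one(one, buf, left_size);
        return 0;
}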
|
/drivers/infiniband/hw/qib/ |
D | qib_dma.c |
     95   int nents, enum dma_data_direction direction)  in qib_map_sg() argument
    100   int ret = nents;  in qib_map_sg()
    104   for_each_sg(sgl, sg, nents, i) {  in qib_map_sg()
    116   struct scatterlist *sg, int nents,  in qib_unmap_sg() argument
|
/drivers/infiniband/hw/ipath/ |
D | ipath_dma.c |
    102   int nents, enum dma_data_direction direction)  in ipath_map_sg() argument
    107   int ret = nents;  in ipath_map_sg()
    111   for_each_sg(sgl, sg, nents, i) {  in ipath_map_sg()
    123   struct scatterlist *sg, int nents,  in ipath_unmap_sg() argument
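qib_dma.c and ipath_dma.c implement software DMA ops for InfiniBand hardware that never does real DMA: ret starts at nents and the loop merely checks that each page has a kernel virtual address, returning 0 if one does not (e.g. unmapped highmem). A sketch of that validation loop, with my_soft_map_sg() as an illustrative name:

#include <linux/mm.h>
#include <linux/scatterlist.h>

static int my_soft_map_sg(struct scatterlist *sgl, int nents)
{
        struct scatterlist *sg;
        int i, ret = nents;

        /* No IOMMU and no bounce buffers: just check that every page
         * has a kernel virtual address the driver can use directly. */
        for_each_sg(sgl, sg, nents, i) {
                if (!page_address(sg_page(sg))) {
                        ret = 0;        /* unmapped highmem page */
                        break;
                }
        }
        return ret;     /* nents on success, 0 on failure */
}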
|
/drivers/gpu/drm/exynos/ |
D | exynos_drm_dmabuf.c |
     53   dma_unmap_sg(attach->dev, sgt->sgl, sgt->nents,  in exynos_gem_detach_dma_buf()
     72   int nents, ret;  in exynos_gem_map_dma_buf() local
    105   nents = dma_map_sg(attach->dev, sgt->sgl, sgt->orig_nents, dir);  in exynos_gem_map_dma_buf()
    106   if (!nents) {  in exynos_gem_map_dma_buf()
    272   if (sgt->nents == 1) {  in exynos_dmabuf_prime_import()
|
D | exynos_drm_gem.c |
     97   for_each_sg(buf->sgt->sgl, sgl, buf->sgt->nents, i) {  in exynos_drm_gem_map_buf()
    622   int nents;  in exynos_gem_map_sgt_with_dma() local
    626   nents = dma_map_sg(drm_dev->dev, sgt->sgl, sgt->nents, dir);  in exynos_gem_map_sgt_with_dma()
    627   if (!nents) {  in exynos_gem_map_sgt_with_dma()
    630   return nents;  in exynos_gem_map_sgt_with_dma()
    641   dma_unmap_sg(drm_dev->dev, sgt->sgl, sgt->nents, dir);  in exynos_gem_unmap_sgt_from_dma()
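The exynos entries highlight the orig_nents/nents split in struct sg_table: dma_map_sg() should be fed the allocated count (sgt->orig_nents, as on line 105 above) and returns the count actually usable for DMA, which an IOMMU may have coalesced to something smaller. A minimal sketch of a mapping helper that keeps the two straight; the names are illustrative:

#include <linux/dma-mapping.h>
#include <linux/scatterlist.h>

static int my_map_sgt(struct device *dev, struct sg_table *sgt,
                      enum dma_data_direction dir)
{
        /* Map all allocated entries; the return value is how many
         * entries the device should iterate (0 on failure). */
        int nents = dma_map_sg(dev, sgt->sgl, sgt->orig_nents, dir);

        if (!nents)
                return -EIO;

        sgt->nents = nents;     /* count valid for sg_dma_*() walks */
        return 0;
}

Unmapping, by contrast, must pass the same count that was originally handed to dma_map_sg(), not the possibly smaller value it returned.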
|
/drivers/gpu/drm/i915/ |
D | i915_gem_dmabuf.c |
     55   ret = sg_alloc_table(st, obj->pages->nents, GFP_KERNEL);  in i915_gem_map_dma_buf()
     64   for (i = 0; i < obj->pages->nents; i++) {  in i915_gem_map_dma_buf()
     70   if (!dma_map_sg(attachment->dev, st->sgl, st->nents, dir)) {  in i915_gem_map_dma_buf()
     88   dma_unmap_sg(attachment->dev, sg->sgl, sg->nents, dir);  in i915_gem_unmap_dma_buf()
    132   for_each_sg_page(obj->pages->sgl, &sg_iter, obj->pages->nents, 0)  in i915_gem_dmabuf_vmap()
|
/drivers/staging/android/ion/ |
D | ion_heap.c |
     48   for_each_sg(table->sgl, sg, table->nents, i) {  in ion_heap_map_kernel()
     81   for_each_sg(table->sgl, sg, table->nents, i) {  in ion_heap_map_user()
    118   static int ion_heap_sglist_zero(struct scatterlist *sgl, unsigned int nents,  in ion_heap_sglist_zero() argument
    126   for_each_sg_page(sgl, &piter, nents, 0) {  in ion_heap_sglist_zero()
    151   return ion_heap_sglist_zero(table->sgl, table->nents, pgprot);  in ion_heap_buffer_zero()
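ion_heap_sglist_zero() above clears a buffer by page rather than by segment: for_each_sg_page() visits every page behind the first nents entries, whatever their individual sizes. A simplified sketch of that walk, using kmap_atomic() where the real helper batches pages into vmap() with a caller-supplied pgprot_t:

#include <linux/highmem.h>
#include <linux/scatterlist.h>
#include <linux/string.h>

static void my_sglist_zero(struct scatterlist *sgl, unsigned int nents)
{
        struct sg_page_iter piter;

        /* Visit each page of the first nents entries, regardless of
         * how many pages any single entry spans. */
        for_each_sg_page(sgl, &piter, nents, 0) {
                void *vaddr = kmap_atomic(sg_page_iter_page(&piter));

                memset(vaddr, 0, PAGE_SIZE);
                kunmap_atomic(vaddr);
        }
}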
|
/drivers/video/adf/ |
D | adf_memblock.c |
     31   int nents, ret;  in adf_memblock_map() local
     43   nents = dma_map_sg(attach->dev, table->sgl, 1, direction);  in adf_memblock_map()
     44   if (!nents) {  in adf_memblock_map()
|
/drivers/misc/ |
D | tifm_core.c |
    296   int tifm_map_sg(struct tifm_dev *sock, struct scatterlist *sg, int nents,  in tifm_map_sg() argument
    299   return pci_map_sg(to_pci_dev(sock->dev.parent), sg, nents, direction);  in tifm_map_sg()
    303   void tifm_unmap_sg(struct tifm_dev *sock, struct scatterlist *sg, int nents,  in tifm_unmap_sg() argument
    306   pci_unmap_sg(to_pci_dev(sock->dev.parent), sg, nents, direction);  in tifm_unmap_sg()
|
/drivers/scsi/ |
D | scsi_lib.c |
    609   static inline unsigned int scsi_sgtable_index(unsigned short nents)  in scsi_sgtable_index() argument
    613   BUG_ON(nents > SCSI_MAX_SG_SEGMENTS);  in scsi_sgtable_index()
    615   if (nents <= 8)  in scsi_sgtable_index()
    618   index = get_count_order(nents) - 3;  in scsi_sgtable_index()
    623   static void scsi_sg_free(struct scatterlist *sgl, unsigned int nents)  in scsi_sg_free() argument
    627   sgp = scsi_sg_pools + scsi_sgtable_index(nents);  in scsi_sg_free()
    631   static struct scatterlist *scsi_sg_alloc(unsigned int nents, gfp_t gfp_mask)  in scsi_sg_alloc() argument
    635   sgp = scsi_sg_pools + scsi_sgtable_index(nents);  in scsi_sg_alloc()
    639   static int scsi_alloc_sgtable(struct scsi_data_buffer *sdb, int nents,  in scsi_alloc_sgtable() argument
    644   BUG_ON(!nents);  in scsi_alloc_sgtable()
    [all …]
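scsi_sgtable_index() above picks a mempool by rounding nents up to a power of two: the pools hold 8, 16, 32, ... entries up to SCSI_MAX_SG_SEGMENTS, so everything up to 8 shares pool 0 and larger requests land at get_count_order(nents) - 3. A sketch of just the index math (the pool array itself is omitted):

#include <linux/bitops.h>

static unsigned int my_sgtable_index(unsigned short nents)
{
        if (nents <= 8)
                return 0;       /* smallest pool covers 1..8 entries */

        /* get_count_order() rounds up to the next power of two, so
         * 9..16 -> 1, 17..32 -> 2, and so on. */
        return get_count_order(nents) - 3;
}

scsi_sg_alloc() and scsi_sg_free() then index scsi_sg_pools with this value, as the lines above show.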
|
/drivers/scsi/libfc/ |
D | fc_libfc.c |
    113   u32 *nents, size_t *offset,  in fc_copy_buffer_to_sglist() argument
    128   if (!(*nents))  in fc_copy_buffer_to_sglist()
    130   --(*nents);  in fc_copy_buffer_to_sglist()
|
/drivers/dma/ |
D | coh901318_lli.c |
    233   struct scatterlist *sgl, unsigned int nents,  in coh901318_lli_fill_sg() argument
    258   for_each_sg(sgl, sg, nents, i) {  in coh901318_lli_fill_sg()
    265   } else if (i == nents - 1)  in coh901318_lli_fill_sg()
|
/drivers/iommu/ |
D | omap-iovmm.c |
     78   if (!sgt || !sgt->nents)  in sgtable_offset()
     93   for_each_sg(sgt->sgl, sg, sgt->nents, i) {  in sgtable_len()
    222   for_each_sg(sgt->sgl, sg, sgt->nents, i) {  in vmap_sg()
    426   for_each_sg(sgt->sgl, sg, sgt->nents, i) {  in sgtable_fill_vmalloc()
    466   for_each_sg(sgt->sgl, sg, sgt->nents, i) {  in map_iovm_area()
    520   for_each_sg(sgt->sgl, sg, sgt->nents, i) {  in unmap_iovm_area()
|
/drivers/crypto/ |
D | picoxcell_crypto.c |
    295   unsigned nents, mapped_ents;  in spacc_sg_to_ddt() local
    300   nents = sg_count(payload, nbytes);  in spacc_sg_to_ddt()
    301   mapped_ents = dma_map_sg(engine->dev, payload, nents, dir);  in spacc_sg_to_ddt()
    317   dma_unmap_sg(engine->dev, payload, nents, dir);  in spacc_sg_to_ddt()
    328   unsigned nents = sg_count(areq->src, areq->cryptlen);  in spacc_aead_make_ddts() local
    350   src_ents = dma_map_sg(engine->dev, areq->src, nents,  in spacc_aead_make_ddts()
    352   dst_ents = dma_map_sg(engine->dev, areq->dst, nents,  in spacc_aead_make_ddts()
    355   src_ents = dma_map_sg(engine->dev, areq->src, nents,  in spacc_aead_make_ddts()
    412   unsigned nents = sg_count(areq->src, areq->cryptlen);  in spacc_aead_free_ddts() local
    415   dma_unmap_sg(engine->dev, areq->src, nents, DMA_TO_DEVICE);  in spacc_aead_free_ddts()
    [all …]
|
/drivers/media/v4l2-core/ |
D | videobuf2-dma-contig.c |
     77   for_each_sg(sgt->sgl, s, sgt->nents, i) {  in vb2_dc_get_contiguous_size()
    120   dma_sync_sg_for_device(buf->dev, sgt->sgl, sgt->nents, buf->dma_dir);  in vb2_dc_prepare()
    132   dma_sync_sg_for_cpu(buf->dev, sgt->sgl, sgt->nents, buf->dma_dir);  in vb2_dc_finish()
    573   sgt->nents = dma_map_sg(buf->dev, sgt->sgl, sgt->orig_nents,  in vb2_dc_get_userptr()
    575   if (sgt->nents <= 0) {  in vb2_dc_get_userptr()
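vb2_dc_get_contiguous_size() (line 77 above) decides whether a mapped scatterlist is usable as one contiguous DMA region: it walks the sgt->nents mapped entries and stops at the first entry whose DMA address does not follow directly on from the previous one. A sketch close to that walk; the function name is illustrative:

#include <linux/dma-mapping.h>
#include <linux/scatterlist.h>

static unsigned long my_contiguous_size(struct sg_table *sgt)
{
        struct scatterlist *s;
        dma_addr_t expected = sg_dma_address(sgt->sgl);
        unsigned long size = 0;
        unsigned int i;

        for_each_sg(sgt->sgl, s, sgt->nents, i) {
                if (sg_dma_address(s) != expected)
                        break;  /* first gap ends the contiguous run */
                expected = sg_dma_address(s) + sg_dma_len(s);
                size += sg_dma_len(s);
        }
        return size;
}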
|
/drivers/media/common/saa7146/ |
D | saa7146_core.c |
    191   pt->nents = pages;  in saa7146_vmalloc_build_pgtable()
    192   slen = pci_map_sg(pci,pt->slist,pt->nents,PCI_DMA_FROMDEVICE);  in saa7146_vmalloc_build_pgtable()
    202   pci_unmap_sg(pci, pt->slist, pt->nents, PCI_DMA_FROMDEVICE);  in saa7146_vmalloc_build_pgtable()
    216   pci_unmap_sg(pci, pt->slist, pt->nents, PCI_DMA_FROMDEVICE);  in saa7146_vfree_destroy_pgtable()
|
/drivers/hsi/ |
D | hsi.c |
    302   struct hsi_msg *hsi_alloc_msg(unsigned int nents, gfp_t flags)  in hsi_alloc_msg() argument
    311   if (!nents)  in hsi_alloc_msg()
    314   err = sg_alloc_table(&msg->sgt, nents, flags);  in hsi_alloc_msg()
|