/drivers/infiniband/sw/rdmavt/

D | dma.c
    107  int nents, enum dma_data_direction direction)  in rvt_map_sg() argument
    112  int ret = nents;  in rvt_map_sg()
    117  for_each_sg(sgl, sg, nents, i) {  in rvt_map_sg()
    132  struct scatterlist *sg, int nents,  in rvt_unmap_sg() argument
    139  int nents, enum dma_data_direction direction,  in rvt_map_sg_attrs() argument
    142  return rvt_map_sg(dev, sgl, nents, direction);  in rvt_map_sg_attrs()
    146  struct scatterlist *sg, int nents,  in rvt_unmap_sg_attrs() argument
    150  return rvt_unmap_sg(dev, sg, nents, direction);  in rvt_unmap_sg_attrs()

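Note: the rdmavt hits above, and the matching rxe, hfi1, and qib entries below, all follow the same soft-device pattern: these drivers have no real DMA engine, so "mapping" a scatterlist just records each entry's kernel virtual address. A minimal sketch of that pattern, under a hypothetical demo_map_sg name (not any driver's actual code):

#include <linux/scatterlist.h>
#include <linux/dma-mapping.h>

static int demo_map_sg(struct scatterlist *sgl, int nents,
		       enum dma_data_direction direction)
{
	struct scatterlist *sg;
	int ret = nents;	/* on success, every entry counts as mapped */
	int i;

	if (WARN_ON(direction == DMA_NONE))
		return 0;	/* 0 from a map_sg hook signals failure */

	for_each_sg(sgl, sg, nents, i) {
		u64 addr = (u64)(uintptr_t)sg_virt(sg);

		if (!addr) {	/* e.g. a page with no kernel mapping */
			ret = 0;
			break;
		}
		/* the CPU address doubles as the "DMA" address */
		sg->dma_address = (dma_addr_t)addr;
	}
	return ret;
}
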
/drivers/infiniband/sw/rxe/

D | rxe_dma.c
    89  int nents, enum dma_data_direction direction)  in rxe_map_sg() argument
    94  int ret = nents;  in rxe_map_sg()
    98  for_each_sg(sgl, sg, nents, i) {  in rxe_map_sg()
    114  struct scatterlist *sg, int nents,  in rxe_unmap_sg() argument
    121  int nents, enum dma_data_direction direction,  in rxe_map_sg_attrs() argument
    124  return rxe_map_sg(dev, sgl, nents, direction);  in rxe_map_sg_attrs()
    128  struct scatterlist *sg, int nents,  in rxe_unmap_sg_attrs() argument
    132  rxe_unmap_sg(dev, sg, nents, direction);  in rxe_unmap_sg_attrs()

/drivers/parisc/

D | ccio-rm-dma.c
    115  static int ccio_map_sg(struct pci_dev *dev, struct scatterlist *sglist, int nents, int direction)  in ccio_map_sg() argument
    117  int tmp = nents;  in ccio_map_sg()
    120  while (nents) {  in ccio_map_sg()
    123  nents--;  in ccio_map_sg()
    131  static void ccio_unmap_sg(struct pci_dev *dev, struct scatterlist *sglist, int nents, int direction)  in ccio_unmap_sg() argument
    134  while (nents) {  in ccio_unmap_sg()
    136  nents--;  in ccio_unmap_sg()

D | iommu-helpers.h
    14  iommu_fill_pdir(struct ioc *ioc, struct scatterlist *startsg, int nents,  in iommu_fill_pdir() argument
    29  while (nents-- > 0) {  in iommu_fill_pdir()
    33  DBG_RUN_SG(" %d : %08lx/%05x %p/%05x\n", nents,  in iommu_fill_pdir()
    101  struct scatterlist *startsg, int nents,  in iommu_coalesce_chunks() argument
    113  while (nents > 0) {  in iommu_coalesce_chunks()
    130  while(--nents > 0) {  in iommu_coalesce_chunks()

D | ccio-dma.c
    915  ccio_map_sg(struct device *dev, struct scatterlist *sglist, int nents,  in ccio_map_sg() argument
    930  DBG_RUN_SG("%s() START %d entries\n", __func__, nents);  in ccio_map_sg()
    933  if (nents == 1) {  in ccio_map_sg()
    941  for(i = 0; i < nents; i++)  in ccio_map_sg()
    958  coalesced = iommu_coalesce_chunks(ioc, dev, sglist, nents, ccio_alloc_range);  in ccio_map_sg()
    968  filled = iommu_fill_pdir(ioc, sglist, nents, hint, ccio_io_pdir_entry);  in ccio_map_sg()
    994  ccio_unmap_sg(struct device *dev, struct scatterlist *sglist, int nents,  in ccio_unmap_sg() argument
    1007  __func__, nents, sg_virt(sglist), sglist->length);  in ccio_unmap_sg()
    1013  while(sg_dma_len(sglist) && nents--) {  in ccio_unmap_sg()
    1023  DBG_RUN_SG("%s() DONE (nents %d)\n", __func__, nents);  in ccio_unmap_sg()

D | sba_iommu.c
    274  sba_dump_sg( struct ioc *ioc, struct scatterlist *startsg, int nents)  in sba_dump_sg() argument
    276  while (nents-- > 0) {  in sba_dump_sg()
    278  nents,  in sba_dump_sg()
    953  sba_map_sg(struct device *dev, struct scatterlist *sglist, int nents,  in sba_map_sg() argument
    960  DBG_RUN_SG("%s() START %d entries\n", __func__, nents);  in sba_map_sg()
    967  if (nents == 1) {  in sba_map_sg()
    979  sba_dump_sg(ioc, sglist, nents);  in sba_map_sg()
    996  coalesced = iommu_coalesce_chunks(ioc, dev, sglist, nents, sba_alloc_range);  in sba_map_sg()
    1006  filled = iommu_fill_pdir(ioc, sglist, nents, 0, sba_io_pdir_entry);  in sba_map_sg()
    1015  sba_dump_sg(ioc, sglist, nents);  in sba_map_sg()
    [all …]

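ccio_map_sg() and sba_map_sg() above share a two-pass structure: a single-entry fast path, a coalescing pass that merges virtually contiguous entries and reserves IOVA per chunk (iommu_coalesce_chunks with a ccio/sba alloc_range callback), then a pass that fills the I/O pdir page by page (iommu_fill_pdir). A heavily simplified sketch; all demo_* helpers are hypothetical stand-ins for those internals:

#include <linux/scatterlist.h>

/* hypothetical stand-ins for the ccio/sba internals */
static dma_addr_t demo_map_single(struct device *dev, void *va, size_t len);
static int demo_coalesce_chunks(struct device *dev, struct scatterlist *sg,
				int nents);
static int demo_fill_pdir(struct device *dev, struct scatterlist *sg,
			  int nents);

static int demo_iommu_map_sg(struct device *dev, struct scatterlist *sglist,
			     int nents)
{
	int coalesced, filled;

	if (nents == 1) {
		/* fast path: one entry maps like dma_map_single() */
		sg_dma_address(sglist) = demo_map_single(dev, sg_virt(sglist),
							 sglist->length);
		sg_dma_len(sglist) = sglist->length;
		return 1;
	}

	/* pass 1: merge contiguous entries, reserving IOVA per chunk */
	coalesced = demo_coalesce_chunks(dev, sglist, nents);

	/* pass 2: write one pdir entry per page of every chunk */
	filled = demo_fill_pdir(dev, sglist, nents);

	BUG_ON(coalesced != filled);	/* the two passes must agree */
	return filled;	/* number of DMA chunks the device iterates */
}
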
/drivers/target/iscsi/cxgbit/

D | cxgbit_ddp.c
    156  unsigned int nents)  in cxgbit_ddp_sgl_check() argument
    158  unsigned int last_sgidx = nents - 1;  in cxgbit_ddp_sgl_check()
    161  for (i = 0; i < nents; i++, sg = sg_next(sg)) {  in cxgbit_ddp_sgl_check()
    180  unsigned int sgcnt = ttinfo->nents;  in cxgbit_ddp_reserve()
    187  xferlen, ttinfo->nents);  in cxgbit_ddp_reserve()
    249  ttinfo->nents = cmd->se_cmd.t_data_nents;  in cxgbit_get_r2t_ttt()
    254  csk, cmd, cmd->se_cmd.data_length, ttinfo->nents);  in cxgbit_get_r2t_ttt()
    257  ttinfo->nents = 0;  in cxgbit_get_r2t_ttt()
    281  ttinfo->nents, DMA_FROM_DEVICE);  in cxgbit_release_cmd()

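cxgbit_ddp_sgl_check() above walks the list once to decide whether it is usable for direct data placement. The sketch below captures the general shape of such a check; the exact alignment rules are assumptions for illustration, not cxgbit's: interior entries must start on a page boundary and cover whole pages.

#include <linux/scatterlist.h>

static bool demo_sgl_is_ddp_friendly(struct scatterlist *sg,
				     unsigned int nents)
{
	unsigned int last_sgidx = nents - 1;
	unsigned int i;

	for (i = 0; i < nents; i++, sg = sg_next(sg)) {
		/* entries after the first must start on a page boundary */
		if (i && sg->offset)
			return false;
		/* entries before the last must cover whole pages */
		if (i != last_sgidx && (sg->length & ~PAGE_MASK))
			return false;
	}
	return true;
}
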
/drivers/mmc/core/

D | sdio_ops.c
    126  unsigned int nents, left_size, i;  in mmc_io_rw_extended() local
    155  nents = (left_size - 1) / seg_size + 1;  in mmc_io_rw_extended()
    156  if (nents > 1) {  in mmc_io_rw_extended()
    157  if (sg_alloc_table(&sgtable, nents, GFP_KERNEL))  in mmc_io_rw_extended()
    161  data.sg_len = nents;  in mmc_io_rw_extended()
    180  if (nents > 1)  in mmc_io_rw_extended()

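The mmc_io_rw_extended() hits show a transfer larger than the host's seg_size being split into an sg table with (left_size - 1) / seg_size + 1 entries, i.e. DIV_ROUND_UP(left_size, seg_size). A minimal sketch of that splitting; the function name and buffer parameters are assumptions:

#include <linux/scatterlist.h>

static int demo_fill_sgtable(struct sg_table *sgt, void *buf,
			     unsigned int left_size, unsigned int seg_size)
{
	unsigned int nents = DIV_ROUND_UP(left_size, seg_size);
	struct scatterlist *sg;
	unsigned int i;

	if (sg_alloc_table(sgt, nents, GFP_KERNEL))
		return -ENOMEM;

	for_each_sg(sgt->sgl, sg, sgt->nents, i) {
		unsigned int len = min(left_size, seg_size);

		/* point this entry at the next seg_size-sized slice */
		sg_set_buf(sg, buf, len);
		buf += len;
		left_size -= len;
	}
	return 0;	/* caller does sg_free_table() when done */
}
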
/drivers/infiniband/hw/hfi1/

D | dma.c
    109  int nents, enum dma_data_direction direction)  in hfi1_map_sg() argument
    114  int ret = nents;  in hfi1_map_sg()
    119  for_each_sg(sgl, sg, nents, i) {  in hfi1_map_sg()
    134  struct scatterlist *sg, int nents,  in hfi1_unmap_sg() argument

/drivers/infiniband/hw/qib/

D | qib_dma.c
    95  int nents, enum dma_data_direction direction)  in qib_map_sg() argument
    100  int ret = nents;  in qib_map_sg()
    104  for_each_sg(sgl, sg, nents, i) {  in qib_map_sg()
    120  struct scatterlist *sg, int nents,  in qib_unmap_sg() argument

/drivers/infiniband/hw/usnic/

D | usnic_uiom.c
    87  for_each_sg(chunk->page_list, sg, chunk->nents, i) {  in usnic_uiom_put_pages()
    166  chunk->nents = min_t(int, ret, USNIC_UIOM_PAGE_CHUNK);  in usnic_uiom_get_pages()
    167  sg_init_table(chunk->page_list, chunk->nents);  in usnic_uiom_get_pages()
    168  for_each_sg(chunk->page_list, sg, chunk->nents, i) {  in usnic_uiom_get_pages()
    175  cur_base += chunk->nents * PAGE_SIZE;  in usnic_uiom_get_pages()
    176  ret -= chunk->nents;  in usnic_uiom_get_pages()
    177  off += chunk->nents;  in usnic_uiom_get_pages()
    264  for (i = 0; i < chunk->nents; i++, va += PAGE_SIZE) {  in usnic_uiom_map_sorted_intervals()
    315  if (i == chunk->nents) {  in usnic_uiom_map_sorted_intervals()

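usnic_uiom_get_pages() above packs pinned user pages into fixed-size chunks, each initialised with sg_init_table() and filled one page per entry. A sketch of that chunk-filling step; the chunk size and names are assumptions:

#include <linux/scatterlist.h>

#define DEMO_PAGE_CHUNK 64	/* assumed chunk capacity */

static void demo_fill_chunk(struct scatterlist *page_list,
			    struct page **pages, int npages)
{
	struct scatterlist *sg;
	int nents = min_t(int, npages, DEMO_PAGE_CHUNK);
	int i;

	sg_init_table(page_list, nents);
	for_each_sg(page_list, sg, nents, i)
		sg_set_page(sg, pages[i], PAGE_SIZE, 0); /* one full page per entry */
}
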
/drivers/spi/

D | spi-ep93xx.c
    443  int i, ret, nents;  in ep93xx_spi_dma_prepare() local
    483  nents = DIV_ROUND_UP(len, PAGE_SIZE);  in ep93xx_spi_dma_prepare()
    484  if (nents != sgt->nents) {  in ep93xx_spi_dma_prepare()
    487  ret = sg_alloc_table(sgt, nents, GFP_KERNEL);  in ep93xx_spi_dma_prepare()
    493  for_each_sg(sgt->sgl, sg, sgt->nents, i) {  in ep93xx_spi_dma_prepare()
    513  nents = dma_map_sg(chan->device->dev, sgt->sgl, sgt->nents, dir);  in ep93xx_spi_dma_prepare()
    514  if (!nents)  in ep93xx_spi_dma_prepare()
    517  txd = dmaengine_prep_slave_sg(chan, sgt->sgl, nents, dir, DMA_CTRL_ACK);  in ep93xx_spi_dma_prepare()
    519  dma_unmap_sg(chan->device->dev, sgt->sgl, sgt->nents, dir);  in ep93xx_spi_dma_prepare()
    547  dma_unmap_sg(chan->device->dev, sgt->sgl, sgt->nents, dir);  in ep93xx_spi_dma_finish()

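The spi-ep93xx hits show the canonical map/prep/unmap sequence: dma_map_sg() may coalesce entries, so its (possibly smaller) return value is what gets passed to dmaengine_prep_slave_sg(), while dma_unmap_sg() must be called with the original entry count. A sketch, assuming the caller owns the channel and table:

#include <linux/dmaengine.h>
#include <linux/dma-mapping.h>
#include <linux/scatterlist.h>

static struct dma_async_tx_descriptor *
demo_prep_sg(struct dma_chan *chan, struct sg_table *sgt,
	     enum dma_transfer_direction dir)
{
	enum dma_data_direction map_dir =
		(dir == DMA_MEM_TO_DEV) ? DMA_TO_DEVICE : DMA_FROM_DEVICE;
	struct dma_async_tx_descriptor *txd;
	int nents;

	nents = dma_map_sg(chan->device->dev, sgt->sgl, sgt->nents, map_dir);
	if (!nents)
		return NULL;	/* mapping failed */

	/* hand the engine only the entries dma_map_sg() produced */
	txd = dmaengine_prep_slave_sg(chan, sgt->sgl, nents, dir, DMA_CTRL_ACK);
	if (!txd)	/* unmap with the original nents, not the mapped count */
		dma_unmap_sg(chan->device->dev, sgt->sgl, sgt->nents, map_dir);
	return txd;
}
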
D | spi-bcm2835.c
    251  unsigned int nents;  in bcm2835_spi_prepare_sg() local
    261  nents = tfr->tx_sg.nents;  in bcm2835_spi_prepare_sg()
    268  nents = tfr->rx_sg.nents;  in bcm2835_spi_prepare_sg()
    273  desc = dmaengine_prep_slave_sg(chan, sgl, nents, dir, flags);  in bcm2835_spi_prepare_sg()
    295  for_each_sg(sgt->sgl, sgl, (int)sgt->nents - 1, i) {  in bcm2835_check_sg_length()

/drivers/iommu/

D | dma-iommu.c
    469  static int __finalise_sg(struct device *dev, struct scatterlist *sg, int nents,  in __finalise_sg() argument
    477  for_each_sg(sg, s, nents, i) {  in __finalise_sg()
    522  static void __invalidate_sg(struct scatterlist *sg, int nents)  in __invalidate_sg() argument
    527  for_each_sg(sg, s, nents, i) {  in __invalidate_sg()
    545  int nents, int prot)  in iommu_dma_map_sg() argument
    562  for_each_sg(sg, s, nents, i) {  in iommu_dma_map_sg()
    604  if (iommu_map_sg(domain, dma_addr, sg, nents, prot) < iova_len)  in iommu_dma_map_sg()
    607  return __finalise_sg(dev, sg, nents, dma_addr);  in iommu_dma_map_sg()
    612  __invalidate_sg(sg, nents);  in iommu_dma_map_sg()
    616  void iommu_dma_unmap_sg(struct device *dev, struct scatterlist *sg, int nents,  in iommu_dma_unmap_sg() argument

/drivers/crypto/

D | mxc-scc.c
    213  int nents;  in mxc_scc_ablkcipher_req_init() local
    215  nents = sg_nents_for_len(req->src, req->nbytes);  in mxc_scc_ablkcipher_req_init()
    216  if (nents < 0) {  in mxc_scc_ablkcipher_req_init()
    218  return nents;  in mxc_scc_ablkcipher_req_init()
    220  ctx->src_nents = nents;  in mxc_scc_ablkcipher_req_init()
    222  nents = sg_nents_for_len(req->dst, req->nbytes);  in mxc_scc_ablkcipher_req_init()
    223  if (nents < 0) {  in mxc_scc_ablkcipher_req_init()
    225  return nents;  in mxc_scc_ablkcipher_req_init()
    227  ctx->dst_nents = nents;  in mxc_scc_ablkcipher_req_init()

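The mxc-scc hits show the standard sg_nents_for_len() idiom: it returns how many entries are needed to cover a byte length, or a negative errno if the list is too short, and that errno is propagated as-is. A minimal sketch with assumed out-parameters in place of the driver's ctx:

#include <linux/scatterlist.h>

static int demo_count_req_nents(struct scatterlist *src,
				struct scatterlist *dst,
				unsigned int nbytes,
				int *src_nents, int *dst_nents)
{
	int nents;

	nents = sg_nents_for_len(src, nbytes);
	if (nents < 0)
		return nents;	/* list shorter than nbytes */
	*src_nents = nents;

	nents = sg_nents_for_len(dst, nbytes);
	if (nents < 0)
		return nents;
	*dst_nents = nents;

	return 0;
}
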
D | atmel-aes.c
    145  int nents;  member
    556  int nents;  in atmel_aes_check_aligned() local
    561  for (nents = 0; sg; sg = sg_next(sg), ++nents) {  in atmel_aes_check_aligned()
    569  dma->nents = nents+1;  in atmel_aes_check_aligned()
    587  int nents = dma->nents;  in atmel_aes_restore_sg() local
    592  while (--nents > 0 && sg)  in atmel_aes_restore_sg()
    628  dd->src.nents = 1;  in atmel_aes_map()
    634  dd->dst.nents = 1;  in atmel_aes_map()
    643  dd->src.sg_len = dma_map_sg(dd->dev, dd->src.sg, dd->src.nents,  in atmel_aes_map()
    649  dd->src.sg_len = dma_map_sg(dd->dev, dd->src.sg, dd->src.nents,  in atmel_aes_map()
    [all …]

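atmel_aes_check_aligned() above counts entries by hand with sg_next() while checking the engine's alignment constraints; for plain counting, sg_nents_for_len() (see the mxc-scc entry) is the generic helper. A sketch of the manual walk, with an assumed 32-bit alignment rule standing in for the hardware's real one:

#include <linux/scatterlist.h>

static int demo_count_aligned_nents(struct scatterlist *sg, size_t len)
{
	int nents;

	for (nents = 0; sg && len; sg = sg_next(sg), ++nents) {
		/* assumed constraint: the engine wants 32-bit alignment */
		if (!IS_ALIGNED(sg->offset, sizeof(u32)))
			return -EINVAL;
		len -= min_t(size_t, len, sg->length);
	}
	return len ? -EINVAL : nents;	/* list too short for len? */
}
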
/drivers/usb/storage/

D | protocol.c
    144  unsigned int nents = scsi_sg_count(srb);  in usb_stor_access_xfer_buf() local
    147  nents = sg_nents(sg);  in usb_stor_access_xfer_buf()
    151  sg_miter_start(&miter, sg, nents, dir == FROM_XFER_BUF ?  in usb_stor_access_xfer_buf()

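usb_stor_access_xfer_buf() uses the sg mapping iterator: sg_miter walks the list page by page so PIO code can memcpy into or out of it, with the direction flag chosen much like the FROM_XFER_BUF test above. A sketch of the read-from-sg direction; the buffer parameters are assumptions:

#include <linux/scatterlist.h>
#include <linux/highmem.h>

static size_t demo_copy_from_sg(struct scatterlist *sg, unsigned int nents,
				void *buf, size_t buflen)
{
	struct sg_miter miter;
	size_t copied = 0;

	sg_miter_start(&miter, sg, nents, SG_MITER_ATOMIC | SG_MITER_FROM_SG);
	while (copied < buflen && sg_miter_next(&miter)) {
		size_t len = min(miter.length, buflen - copied);

		/* miter.addr is a kernel mapping of the current page */
		memcpy(buf + copied, miter.addr, len);
		copied += len;
	}
	sg_miter_stop(&miter);	/* drops the last page mapping */
	return copied;
}
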
/drivers/misc/mic/host/

D | mic_boot.c
    200  int nents, enum dma_data_direction dir,  in __mic_dma_map_sg() argument
    209  ret = dma_map_sg(&mdev->pdev->dev, sg, nents, dir);  in __mic_dma_map_sg()
    213  for_each_sg(sg, s, nents, i) {  in __mic_dma_map_sg()
    219  return nents;  in __mic_dma_map_sg()
    225  dma_unmap_sg(&mdev->pdev->dev, sg, nents, dir);  in __mic_dma_map_sg()
    230  struct scatterlist *sg, int nents,  in __mic_dma_unmap_sg() argument
    240  for_each_sg(sg, s, nents, i) {  in __mic_dma_unmap_sg()
    245  dma_unmap_sg(&mdev->pdev->dev, sg, nents, dir);  in __mic_dma_unmap_sg()

/drivers/crypto/qce/

D | dma.c
    83  int nents, unsigned long flags,  in qce_dma_prep_sg() argument
    90  if (!sg || !nents)  in qce_dma_prep_sg()
    93  desc = dmaengine_prep_slave_sg(chan, sg, nents, dir, flags);  in qce_dma_prep_sg()

/drivers/gpu/drm/udl/

D | udl_dmabuf.c
    65  dma_unmap_sg(attach->dev, sgt->sgl, sgt->nents,  in udl_detach_dma_buf()
    83  int nents, ret;  in udl_map_dma_buf() local
    126  nents = dma_map_sg(attach->dev, sgt->sgl, sgt->orig_nents, dir);  in udl_map_dma_buf()
    127  if (!nents) {  in udl_map_dma_buf()

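The udl hits illustrate the sg_table convention around nents vs. orig_nents: orig_nents is what sg_alloc_table() created and is what dma_map_sg() takes, while the (possibly smaller, after coalescing) count dma_map_sg() returns is what the device should iterate. A minimal sketch of recording that split:

#include <linux/dma-mapping.h>
#include <linux/scatterlist.h>

static int demo_map_sgt(struct device *dev, struct sg_table *sgt,
			enum dma_data_direction dir)
{
	int nents;

	nents = dma_map_sg(dev, sgt->sgl, sgt->orig_nents, dir);
	if (!nents)
		return -ENOMEM;		/* nothing was mapped */

	sgt->nents = nents;		/* remember the mapped count */
	return 0;
}
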
/drivers/staging/android/ion/

D | ion_heap.c
    48  for_each_sg(table->sgl, sg, table->nents, i) {  in ion_heap_map_kernel()
    81  for_each_sg(table->sgl, sg, table->nents, i) {  in ion_heap_map_user()
    118  static int ion_heap_sglist_zero(struct scatterlist *sgl, unsigned int nents,  in ion_heap_sglist_zero() argument
    126  for_each_sg_page(sgl, &piter, nents, 0) {  in ion_heap_sglist_zero()
    151  return ion_heap_sglist_zero(table->sgl, table->nents, pgprot);  in ion_heap_buffer_zero()

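ion_heap_sglist_zero() above uses for_each_sg_page(), which visits every page backing the list regardless of how large each entry is; that suits whole-buffer operations like zeroing. A simplified sketch (the real ion code batches pages and honours a pgprot; both are omitted here):

#include <linux/scatterlist.h>
#include <linux/highmem.h>

static void demo_sglist_zero(struct scatterlist *sgl, unsigned int nents)
{
	struct sg_page_iter piter;

	for_each_sg_page(sgl, &piter, nents, 0) {
		void *vaddr = kmap_atomic(sg_page_iter_page(&piter));

		memset(vaddr, 0, PAGE_SIZE);	/* clear the whole page */
		kunmap_atomic(vaddr);
	}
}
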
/drivers/dma/hsu/

D | hsu.c
    80  count = desc->nents - desc->active;  in hsu_dma_chan_start()
    222  } else if (desc->active < desc->nents) {  in hsu_dma_do_irq()
    236  static struct hsu_dma_desc *hsu_dma_alloc_desc(unsigned int nents)  in hsu_dma_alloc_desc() argument
    244  desc->sg = kcalloc(nents, sizeof(*desc->sg), GFP_NOWAIT);  in hsu_dma_alloc_desc()
    282  desc->nents = sg_len;  in hsu_dma_prep_slave_sg()
    307  for (i = desc->active; i < desc->nents; i++)  in hsu_dma_active_desc_size()

/drivers/pci/host/

D | vmd.c
    334  static int vmd_map_sg(struct device *dev, struct scatterlist *sg, int nents,  in vmd_map_sg() argument
    337  return vmd_dma_ops(dev)->map_sg(to_vmd_dev(dev), sg, nents, dir, attrs);  in vmd_map_sg()
    340  static void vmd_unmap_sg(struct device *dev, struct scatterlist *sg, int nents,  in vmd_unmap_sg() argument
    343  vmd_dma_ops(dev)->unmap_sg(to_vmd_dev(dev), sg, nents, dir, attrs);  in vmd_unmap_sg()
    360  int nents, enum dma_data_direction dir)  in vmd_sync_sg_for_cpu() argument
    362  vmd_dma_ops(dev)->sync_sg_for_cpu(to_vmd_dev(dev), sg, nents, dir);  in vmd_sync_sg_for_cpu()
    366  int nents, enum dma_data_direction dir)  in vmd_sync_sg_for_device() argument
    368  vmd_dma_ops(dev)->sync_sg_for_device(to_vmd_dev(dev), sg, nents, dir);  in vmd_sync_sg_for_device()

/drivers/gpu/drm/i915/

D | i915_gem_dmabuf.c
    64  ret = sg_alloc_table(st, obj->pages->nents, GFP_KERNEL);  in i915_gem_map_dma_buf()
    70  for (i = 0; i < obj->pages->nents; i++) {  in i915_gem_map_dma_buf()
    76  if (!dma_map_sg(attachment->dev, st->sgl, st->nents, dir)) {  in i915_gem_map_dma_buf()
    102  dma_unmap_sg(attachment->dev, sg->sgl, sg->nents, dir);  in i915_gem_unmap_dma_buf()

/drivers/misc/

D | tifm_core.c
    298  int tifm_map_sg(struct tifm_dev *sock, struct scatterlist *sg, int nents,  in tifm_map_sg() argument
    301  return pci_map_sg(to_pci_dev(sock->dev.parent), sg, nents, direction);  in tifm_map_sg()
    305  void tifm_unmap_sg(struct tifm_dev *sock, struct scatterlist *sg, int nents,  in tifm_unmap_sg() argument
    308  pci_unmap_sg(to_pci_dev(sock->dev.parent), sg, nents, direction);  in tifm_unmap_sg()