/drivers/crypto/ccree/ |
D | cc_buffer_mgr.c
     35  int nents[MAX_NUM_OF_BUFFERS_IN_MLLI];  member
     88  unsigned int nents = 0;  in cc_get_sgl_nents()  local
     91  nents++;  in cc_get_sgl_nents()
     98  dev_dbg(dev, "nents %d last bytes %d\n", nents, *lbytes);  in cc_get_sgl_nents()
     99  return nents;  in cc_get_sgl_nents()
    115  u32 nents;  in cc_copy_sg_portion()  local
    117  nents = sg_nents_for_len(sg, end);  in cc_copy_sg_portion()
    118  sg_copy_buffer(sg, nents, (void *)dest, (end - to_skip + 1), to_skip,  in cc_copy_sg_portion()
    256  sgl_data->nents[index] = 1;  in cc_add_buffer_entry()
    269  unsigned int nents, struct scatterlist *sgl,  in cc_add_sg_entry()  argument
    [all …]
|
/drivers/hwtracing/intel_th/ |
D | msu-sink.c
     54  unsigned int nents;  in msu_sink_alloc_window()  local
     62  nents = DIV_ROUND_UP(size, PAGE_SIZE);  in msu_sink_alloc_window()
     64  ret = sg_alloc_table(*sgt, nents, GFP_KERNEL);  in msu_sink_alloc_window()
     70  for_each_sg((*sgt)->sgl, sg_ptr, nents, i) {  in msu_sink_alloc_window()
     77  return nents;  in msu_sink_alloc_window()
     87  for_each_sg(sgt->sgl, sg_ptr, sgt->nents, i) {  in msu_sink_free_window()
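The msu-sink hits show the common sizing-and-fill sequence: nents derived with DIV_ROUND_UP(), the table allocated with sg_alloc_table(), and the entries walked with for_each_sg(). A minimal sketch of that sequence, assuming the caller already holds a page array; demo_fill_table() and its parameters are illustrative names, not taken from msu-sink.c:

/* Sketch only: size an sg_table from a byte length, then fill it. */
#include <linux/kernel.h>
#include <linux/scatterlist.h>

static int demo_fill_table(struct sg_table *sgt, struct page **pages,
                           size_t size)
{
        unsigned int nents = DIV_ROUND_UP(size, PAGE_SIZE);
        struct scatterlist *sg;
        unsigned int i;
        int ret;

        ret = sg_alloc_table(sgt, nents, GFP_KERNEL);
        if (ret)
                return ret;

        /* One page per entry; only the last entry can be short. */
        for_each_sg(sgt->sgl, sg, nents, i) {
                unsigned int len = min_t(size_t, size, PAGE_SIZE);

                sg_set_page(sg, pages[i], len, 0);
                size -= len;
        }

        return 0;
}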
|
/drivers/parisc/ |
D | iommu-helpers.h
     15  iommu_fill_pdir(struct ioc *ioc, struct scatterlist *startsg, int nents,  in iommu_fill_pdir()  argument
     30  while (nents-- > 0) {  in iommu_fill_pdir()
     34  DBG_RUN_SG(" %d : %08lx/%05x %p/%05x\n", nents,  in iommu_fill_pdir()
    102  struct scatterlist *startsg, int nents,  in iommu_coalesce_chunks()  argument
    114  while (nents > 0) {  in iommu_coalesce_chunks()
    131  while(--nents > 0) {  in iommu_coalesce_chunks()
|
/drivers/crypto/mediatek/ |
D | mtk-aes.c
    194  int nents;  in mtk_aes_check_aligned()  local
    199  for (nents = 0; sg; sg = sg_next(sg), ++nents) {  in mtk_aes_check_aligned()
    207  dma->nents = nents + 1;  in mtk_aes_check_aligned()
    232  int nents = dma->nents;  in mtk_aes_restore_sg()  local
    237  while (--nents > 0 && sg)  in mtk_aes_restore_sg()
    283  int nents;  in mtk_aes_xmit()  local
    286  for (nents = 0; nents < slen; ++nents, ssg = sg_next(ssg)) {  in mtk_aes_xmit()
    291  if (nents == 0) {  in mtk_aes_xmit()
    306  for (nents = 0; nents < dlen; ++nents, dsg = sg_next(dsg)) {  in mtk_aes_xmit()
    311  if (nents == 0)  in mtk_aes_xmit()
    [all …]
|
/drivers/crypto/cavium/nitrox/ |
D | nitrox_req.h
    547  static inline void *alloc_req_buf(int nents, int extralen, gfp_t gfp)  in alloc_req_buf()  argument
    551  size = sizeof(struct scatterlist) * nents;  in alloc_req_buf()
    613  int nents, int ivsize)  in alloc_src_req_buf()  argument
    617  nkreq->src = alloc_req_buf(nents, ivsize, creq->gfp);  in alloc_src_req_buf()
    635  int nents, int ivsize,  in nitrox_creq_set_src_sg()  argument
    644  sg_init_table(sg, nents);  in nitrox_creq_set_src_sg()
    659  int nents)  in alloc_dst_req_buf()  argument
    664  nkreq->dst = alloc_req_buf(nents, extralen, creq->gfp);  in alloc_dst_req_buf()
    693  int nents, int ivsize,  in nitrox_creq_set_dst_sg()  argument
    702  sg_init_table(sg, nents);  in nitrox_creq_set_dst_sg()
|
D | nitrox_reqmgr.c
    161  int i, nents, ret = 0;  in dma_map_inbufs()  local
    163  nents = dma_map_sg(dev, req->src, sg_nents(req->src),  in dma_map_inbufs()
    165  if (!nents)  in dma_map_inbufs()
    168  for_each_sg(req->src, sg, nents, i)  in dma_map_inbufs()
    172  sr->in.sgmap_cnt = nents;  in dma_map_inbufs()
    180  dma_unmap_sg(dev, req->src, nents, DMA_BIDIRECTIONAL);  in dma_map_inbufs()
    189  int nents, ret = 0;  in dma_map_outbufs()  local
    191  nents = dma_map_sg(dev, req->dst, sg_nents(req->dst),  in dma_map_outbufs()
    193  if (!nents)  in dma_map_outbufs()
    197  sr->out.sgmap_cnt = nents;  in dma_map_outbufs()
    [all …]
|
D | nitrox_aead.c
     81  int nents = sg_nents_for_len(src, buflen);  in alloc_src_sglist()  local
     84  if (nents < 0)  in alloc_src_sglist()
     85  return nents;  in alloc_src_sglist()
     88  nents += 1;  in alloc_src_sglist()
     90  ret = alloc_src_req_buf(nkreq, nents, ivsize);  in alloc_src_sglist()
     95  nitrox_creq_set_src_sg(nkreq, nents, ivsize, src, buflen);  in alloc_src_sglist()
    103  int nents = sg_nents_for_len(dst, buflen);  in alloc_dst_sglist()  local
    106  if (nents < 0)  in alloc_dst_sglist()
    107  return nents;  in alloc_dst_sglist()
    110  nents += 3;  in alloc_dst_sglist()
    [all …]
|
D | nitrox_skcipher.c
    140  int nents = sg_nents(skreq->src) + 1;  in alloc_src_sglist()  local
    144  ret = alloc_src_req_buf(nkreq, nents, ivsize);  in alloc_src_sglist()
    149  nitrox_creq_set_src_sg(nkreq, nents, ivsize, skreq->src,  in alloc_src_sglist()
    158  int nents = sg_nents(skreq->dst) + 3;  in alloc_dst_sglist()  local
    164  ret = alloc_dst_req_buf(nkreq, nents);  in alloc_dst_sglist()
    170  nitrox_creq_set_dst_sg(nkreq, nents, ivsize, skreq->dst,  in alloc_dst_sglist()
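Both nitrox_aead.c and nitrox_skcipher.c size their request buffers from a scatterlist entry count plus a few extra slots. A minimal sketch of the counting step, following the sg_nents_for_len() error convention visible above; demo_count_src_ents() is an illustrative name, not a nitrox helper:

/* Sketch only: count the entries needed to cover buflen, plus one spare. */
#include <linux/scatterlist.h>

static int demo_count_src_ents(struct scatterlist *src, unsigned int buflen)
{
        int nents = sg_nents_for_len(src, buflen);

        if (nents < 0)
                return nents;   /* negative errno: list shorter than buflen */

        return nents + 1;       /* one extra slot, e.g. for an IV entry */
}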
|
/drivers/target/iscsi/cxgbit/ |
D | cxgbit_ddp.c
    153  unsigned int nents)  in cxgbit_ddp_sgl_check()  argument
    155  unsigned int last_sgidx = nents - 1;  in cxgbit_ddp_sgl_check()
    158  for (i = 0; i < nents; i++, sg = sg_next(sg)) {  in cxgbit_ddp_sgl_check()
    177  unsigned int sgcnt = ttinfo->nents;  in cxgbit_ddp_reserve()
    184  xferlen, ttinfo->nents);  in cxgbit_ddp_reserve()
    246  ttinfo->nents = cmd->se_cmd.t_data_nents;  in cxgbit_get_r2t_ttt()
    251  csk, cmd, cmd->se_cmd.data_length, ttinfo->nents);  in cxgbit_get_r2t_ttt()
    254  ttinfo->nents = 0;  in cxgbit_get_r2t_ttt()
    286  ttinfo->nents, DMA_FROM_DEVICE);  in cxgbit_unmap_cmd()
|
/drivers/mmc/core/ |
D | sdio_ops.c
    122  unsigned int nents, left_size, i;  in mmc_io_rw_extended()  local
    151  nents = DIV_ROUND_UP(left_size, seg_size);  in mmc_io_rw_extended()
    152  if (nents > 1) {  in mmc_io_rw_extended()
    153  if (sg_alloc_table(&sgtable, nents, GFP_KERNEL))  in mmc_io_rw_extended()
    157  data.sg_len = nents;  in mmc_io_rw_extended()
    175  if (nents > 1)  in mmc_io_rw_extended()
|
/drivers/spi/ |
D | spi-ep93xx.c
    281  int i, ret, nents;  in ep93xx_spi_dma_prepare()  local
    321  nents = DIV_ROUND_UP(len, PAGE_SIZE);  in ep93xx_spi_dma_prepare()
    322  if (nents != sgt->nents) {  in ep93xx_spi_dma_prepare()
    325  ret = sg_alloc_table(sgt, nents, GFP_KERNEL);  in ep93xx_spi_dma_prepare()
    331  for_each_sg(sgt->sgl, sg, sgt->nents, i) {  in ep93xx_spi_dma_prepare()
    351  nents = dma_map_sg(chan->device->dev, sgt->sgl, sgt->nents, dir);  in ep93xx_spi_dma_prepare()
    352  if (!nents)  in ep93xx_spi_dma_prepare()
    355  txd = dmaengine_prep_slave_sg(chan, sgt->sgl, nents, conf.direction,  in ep93xx_spi_dma_prepare()
    358  dma_unmap_sg(chan->device->dev, sgt->sgl, sgt->nents, dir);  in ep93xx_spi_dma_prepare()
    387  dma_unmap_sg(chan->device->dev, sgt->sgl, sgt->nents, dir);  in ep93xx_spi_dma_finish()
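The spi-ep93xx hits show the usual map-then-prep sequence: dma_map_sg() may coalesce entries and returns the mapped count (0 on failure), that count is what the dmaengine descriptor is built from, and the error path unmaps with the original sgt->nents. A minimal sketch of that sequence under those assumptions; demo_prep() and its parameters are illustrative, not the ep93xx code:

/* Sketch only: map an sg_table, build a slave-sg descriptor, unmap on error. */
#include <linux/dma-mapping.h>
#include <linux/dmaengine.h>
#include <linux/scatterlist.h>

static struct dma_async_tx_descriptor *
demo_prep(struct dma_chan *chan, struct sg_table *sgt,
          enum dma_data_direction dir, enum dma_transfer_direction xfer_dir)
{
        struct dma_async_tx_descriptor *txd;
        int nents;

        /* Mapped count may be smaller than sgt->nents; 0 means failure. */
        nents = dma_map_sg(chan->device->dev, sgt->sgl, sgt->nents, dir);
        if (!nents)
                return NULL;

        txd = dmaengine_prep_slave_sg(chan, sgt->sgl, nents, xfer_dir,
                                      DMA_CTRL_ACK);
        if (!txd)
                /* Unmap with the same nents that was passed to dma_map_sg(). */
                dma_unmap_sg(chan->device->dev, sgt->sgl, sgt->nents, dir);

        return txd;
}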
|
/drivers/gpu/drm/i915/ |
D | i915_scatterlist.c
     15  if (orig_st->nents == orig_st->orig_nents)  in i915_sg_trim()
     18  if (sg_alloc_table(&new_st, orig_st->nents, GFP_KERNEL | __GFP_NOWARN))  in i915_sg_trim()
     22  for_each_sg(orig_st->sgl, sg, orig_st->nents, i) {  in i915_sg_trim()
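i915_sg_trim() relies on the sg_table bookkeeping visible above: orig_nents is the number of entries allocated for the table, while nents counts the entries actually populated, so when the two differ the used entries can be copied into a smaller table. A rough sketch of that idea, not the i915 code itself; demo_sg_trim() is an illustrative name:

/* Sketch only: shrink a partially used sg_table down to its populated entries. */
#include <linux/scatterlist.h>

static int demo_sg_trim(struct sg_table *orig)
{
        struct sg_table new_st;
        struct scatterlist *src, *dst;
        unsigned int i;

        if (orig->nents == orig->orig_nents)
                return 0;                       /* nothing to trim */

        if (sg_alloc_table(&new_st, orig->nents, GFP_KERNEL))
                return -ENOMEM;

        dst = new_st.sgl;
        for_each_sg(orig->sgl, src, orig->nents, i) {
                sg_set_page(dst, sg_page(src), src->length, src->offset);
                dst = sg_next(dst);
        }

        sg_free_table(orig);
        *orig = new_st;
        return 0;
}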
|
/drivers/infiniband/hw/usnic/ |
D | usnic_uiom.c
     75  for_each_sg(chunk->page_list, sg, chunk->nents, i) {  in usnic_uiom_put_pages()
    165  chunk->nents = min_t(int, ret, USNIC_UIOM_PAGE_CHUNK);  in usnic_uiom_get_pages()
    166  sg_init_table(chunk->page_list, chunk->nents);  in usnic_uiom_get_pages()
    167  for_each_sg(chunk->page_list, sg, chunk->nents, i) {  in usnic_uiom_get_pages()
    174  cur_base += chunk->nents * PAGE_SIZE;  in usnic_uiom_get_pages()
    175  ret -= chunk->nents;  in usnic_uiom_get_pages()
    176  off += chunk->nents;  in usnic_uiom_get_pages()
    264  for (i = 0; i < chunk->nents; i++, va += PAGE_SIZE) {  in usnic_uiom_map_sorted_intervals()
    315  if (i == chunk->nents) {  in usnic_uiom_map_sorted_intervals()
|
/drivers/gpu/drm/i915/selftests/ |
D | scatterlist.c
     51  for_each_sg(pt->st.sgl, sg, pt->st.nents, n) {  in expect_pfn_sg()
     53  unsigned int npages = npages_fn(n, pt->st.nents, rnd);  in expect_pfn_sg()
     89  for_each_sg_page(pt->st.sgl, &sgiter, pt->st.nents, 0) {  in expect_pfn_sg_page_iter()
    259  pt->st.nents = n;  in alloc_table()
    346  pt.st.nents != prime) {  in igt_sg_trim()
    348  pt.st.nents, pt.st.orig_nents, prime);  in igt_sg_trim()
|
/drivers/pci/ |
D | p2pdma.c
    742  unsigned int *nents, u32 length)  in pci_p2pmem_alloc_sgl()  argument
    758  *nents = 1;  in pci_p2pmem_alloc_sgl()
    816  struct device *dev, struct scatterlist *sg, int nents)  in __pci_p2pdma_map_sg()  argument
    832  for_each_sg(sg, s, nents, i) {  in __pci_p2pdma_map_sg()
    839  return nents;  in __pci_p2pdma_map_sg()
    856  int nents, enum dma_data_direction dir, unsigned long attrs)  in pci_p2pdma_map_sg_attrs()  argument
    869  return dma_map_sg_attrs(dev, sg, nents, dir, attrs);  in pci_p2pdma_map_sg_attrs()
    871  return __pci_p2pdma_map_sg(p2p_pgmap, dev, sg, nents);  in pci_p2pdma_map_sg_attrs()
    889  int nents, enum dma_data_direction dir, unsigned long attrs)  in pci_p2pdma_unmap_sg_attrs()  argument
    904  dma_unmap_sg_attrs(dev, sg, nents, dir, attrs);  in pci_p2pdma_unmap_sg_attrs()
|
/drivers/staging/media/ipu3/ |
D | ipu3-dmamap.c
    199  int nents, struct imgu_css_map *map)  in imgu_dmamap_map_sg()  argument
    207  for_each_sg(sglist, sg, nents, i) {  in imgu_dmamap_map_sg()
    211  if (i != nents - 1 && !PAGE_ALIGNED(sg->length))  in imgu_dmamap_map_sg()
    219  nents, size >> shift);  in imgu_dmamap_map_sg()
    230  sglist, nents) < size)  in imgu_dmamap_map_sg()
|
/drivers/staging/android/ion/ |
D | ion_dma_buf.c
     26  ret = sg_alloc_table(new_table, table->nents, GFP_KERNEL);  in dup_sg_table()
     33  for_each_sg(table->sgl, sg, table->nents, i) {  in dup_sg_table()
    112  if (!dma_map_sg(attachment->dev, table->sgl, table->nents, direction))  in ion_map_dma_buf()
    129  dma_unmap_sg(attachment->dev, table->sgl, table->nents, direction);  in ion_unmap_dma_buf()
    168  dma_sync_sg_for_cpu(a->dev, a->table->sgl, a->table->nents,  in ion_dma_buf_begin_cpu_access()
    210  dma_sync_sg_for_device(a->dev, a->table->sgl, a->table->nents,  in ion_dma_buf_end_cpu_access()
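ion_dma_buf.c brackets kernel CPU access with the two sync helpers seen above, passing the same nents that was used for the mapping. A minimal sketch of that bracket, assuming the table has already been mapped with dma_map_sg(); the demo_* names are illustrative, not ion callbacks:

/* Sketch only: hand a mapped scatterlist to the CPU and back to the device. */
#include <linux/dma-mapping.h>
#include <linux/scatterlist.h>

static void demo_begin_cpu_access(struct device *dev, struct sg_table *table,
                                  enum dma_data_direction dir)
{
        /* Make device writes visible before the CPU touches the pages. */
        dma_sync_sg_for_cpu(dev, table->sgl, table->nents, dir);
}

static void demo_end_cpu_access(struct device *dev, struct sg_table *table,
                                enum dma_data_direction dir)
{
        /* Hand the pages back to the device after the CPU is done. */
        dma_sync_sg_for_device(dev, table->sgl, table->nents, dir);
}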
|
/drivers/usb/storage/ |
D | protocol.c
    128  unsigned int nents = scsi_sg_count(srb);  in usb_stor_access_xfer_buf()  local
    131  nents = sg_nents(sg);  in usb_stor_access_xfer_buf()
    135  sg_miter_start(&miter, sg, nents, dir == FROM_XFER_BUF ?  in usb_stor_access_xfer_buf()
|
/drivers/misc/mic/host/ |
D | mic_boot.c
    188  int nents, enum dma_data_direction dir,  in __mic_dma_map_sg()  argument
    197  ret = dma_map_sg(&mdev->pdev->dev, sg, nents, dir);  in __mic_dma_map_sg()
    201  for_each_sg(sg, s, nents, i) {  in __mic_dma_map_sg()
    207  return nents;  in __mic_dma_map_sg()
    213  dma_unmap_sg(&mdev->pdev->dev, sg, nents, dir);  in __mic_dma_map_sg()
    218  struct scatterlist *sg, int nents,  in __mic_dma_unmap_sg()  argument
    228  for_each_sg(sg, s, nents, i) {  in __mic_dma_unmap_sg()
    233  dma_unmap_sg(&mdev->pdev->dev, sg, nents, dir);  in __mic_dma_unmap_sg()
|
/drivers/crypto/qce/ |
D | dma.c
     75  int nents, unsigned long flags,  in qce_dma_prep_sg()  argument
     82  if (!sg || !nents)  in qce_dma_prep_sg()
     85  desc = dmaengine_prep_slave_sg(chan, sg, nents, dir, flags);  in qce_dma_prep_sg()
|
/drivers/gpu/drm/tegra/ |
D | gem.c
    139  bo->sgt->nents, prot);  in tegra_bo_iommu_map()
    206  dma_unmap_sg(drm->dev, bo->sgt->sgl, bo->sgt->nents,  in tegra_bo_free()
    232  err = dma_map_sg(drm->dev, bo->sgt->sgl, bo->sgt->nents,  in tegra_bo_get_pages()
    363  if (bo->sgt->nents > 1) {  in tegra_bo_import()
    520  if (dma_map_sg(attach->dev, sgt->sgl, sgt->nents, dir) == 0)  in tegra_gem_prime_map_dma_buf()
    546  dma_unmap_sg(attach->dev, sgt->sgl, sgt->nents, dir);  in tegra_gem_prime_unmap_dma_buf()
    565  dma_sync_sg_for_cpu(drm->dev, bo->sgt->sgl, bo->sgt->nents,  in tegra_gem_prime_begin_cpu_access()
    579  dma_sync_sg_for_device(drm->dev, bo->sgt->sgl, bo->sgt->nents,  in tegra_gem_prime_end_cpu_access()
|
/drivers/crypto/bcm/ |
D | util.c
     54  unsigned int nents = sg_nents(src);  in sg_copy_part_to_buf()  local
     56  copied = sg_pcopy_to_buffer(src, nents, dest, len, skip);  in sg_copy_part_to_buf()
     60  flow_log("sg with %u entries and skip %u\n", nents, skip);  in sg_copy_part_to_buf()
     73  unsigned int nents = sg_nents(dest);  in sg_copy_part_from_buf()  local
     75  copied = sg_pcopy_from_buffer(dest, nents, src, len, skip);  in sg_copy_part_from_buf()
     79  flow_log("sg with %u entries and skip %u\n", nents, skip);  in sg_copy_part_from_buf()
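The two helpers in bcm/util.c are thin wrappers around the library partial-copy routines, which take the entry count from sg_nents() plus a byte offset (skip) into the list and return how many bytes were actually copied. A minimal sketch of the to-buffer direction; demo_copy_part_to_buf() is an illustrative name, not the bcm helper:

/* Sketch only: copy part of a scatterlist into a flat buffer. */
#include <linux/scatterlist.h>

static size_t demo_copy_part_to_buf(struct scatterlist *src, void *dest,
                                     size_t len, unsigned int skip)
{
        unsigned int nents = sg_nents(src);

        /* Returns the number of bytes copied, which may be less than len. */
        return sg_pcopy_to_buffer(src, nents, dest, len, skip);
}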
|
/drivers/iommu/ |
D | dma-iommu.c
    735  static int __finalise_sg(struct device *dev, struct scatterlist *sg, int nents,  in __finalise_sg()  argument
    743  for_each_sg(sg, s, nents, i) {  in __finalise_sg()
    788  static void __invalidate_sg(struct scatterlist *sg, int nents)  in __invalidate_sg()  argument
    793  for_each_sg(sg, s, nents, i) {  in __invalidate_sg()
    811  int nents, enum dma_data_direction dir, unsigned long attrs)  in iommu_dma_map_sg()  argument
    824  iommu_dma_sync_sg_for_device(dev, sg, nents, dir);  in iommu_dma_map_sg()
    832  for_each_sg(sg, s, nents, i) {  in iommu_dma_map_sg()
    873  if (iommu_map_sg(domain, iova, sg, nents, prot) < iova_len)  in iommu_dma_map_sg()
    876  return __finalise_sg(dev, sg, nents, iova);  in iommu_dma_map_sg()
    881  __invalidate_sg(sg, nents);  in iommu_dma_map_sg()
    [all …]
|
/drivers/gpu/drm/udl/ |
D | udl_dmabuf.c
     54  dma_unmap_sg(attach->dev, sgt->sgl, sgt->nents,  in udl_detach_dma_buf()
     73  int nents, ret;  in udl_map_dma_buf()  local
    116  nents = dma_map_sg(attach->dev, sgt->sgl, sgt->orig_nents, dir);  in udl_map_dma_buf()
    117  if (!nents) {  in udl_map_dma_buf()
|
/drivers/gpu/drm/rockchip/ |
D | rockchip_drm_gem.c
     39  rk_obj->sgt->nents, prot);  in rockchip_gem_iommu_map()
    100  for_each_sg(rk_obj->sgt->sgl, s, rk_obj->sgt->nents, i)  in rockchip_gem_get_pages()
    103  dma_sync_sg_for_device(drm->dev, rk_obj->sgt->sgl, rk_obj->sgt->nents,  in rockchip_gem_get_pages()
    353  rk_obj->sgt->nents, DMA_BIDIRECTIONAL);  in rockchip_gem_free_object()
    495  int count = dma_map_sg(drm->dev, sg->sgl, sg->nents,  in rockchip_gem_dma_map_sg()
    502  dma_unmap_sg(drm->dev, sg->sgl, sg->nents,  in rockchip_gem_dma_map_sg()
|