
Searched refs:sgl (Results 1 – 25 of 426) sorted by relevance


/drivers/misc/genwqe/
card_utils.c
291 int genwqe_alloc_sync_sgl(struct genwqe_dev *cd, struct genwqe_sgl *sgl, in genwqe_alloc_sync_sgl() argument
297 sgl->fpage_offs = offset_in_page((unsigned long)user_addr); in genwqe_alloc_sync_sgl()
298 sgl->fpage_size = min_t(size_t, PAGE_SIZE-sgl->fpage_offs, user_size); in genwqe_alloc_sync_sgl()
299 sgl->nr_pages = DIV_ROUND_UP(sgl->fpage_offs + user_size, PAGE_SIZE); in genwqe_alloc_sync_sgl()
300 sgl->lpage_size = (user_size - sgl->fpage_size) % PAGE_SIZE; in genwqe_alloc_sync_sgl()
303 __func__, user_addr, user_size, sgl->nr_pages, in genwqe_alloc_sync_sgl()
304 sgl->fpage_offs, sgl->fpage_size, sgl->lpage_size); in genwqe_alloc_sync_sgl()
306 sgl->user_addr = user_addr; in genwqe_alloc_sync_sgl()
307 sgl->user_size = user_size; in genwqe_alloc_sync_sgl()
308 sgl->write = write; in genwqe_alloc_sync_sgl()
[all …]
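
A note on the genwqe hits: before building its hardware SGL, the driver works out how the user buffer straddles pages. The first and last pages may be partial, everything between is whole. A minimal sketch of that arithmetic under the usual kernel headers (<linux/mm.h>, <linux/kernel.h>); the struct name here is hypothetical (the driver keeps these fields in struct genwqe_sgl):

    struct user_buf_layout {                /* hypothetical */
            unsigned long fpage_offs;       /* offset into the first page */
            size_t fpage_size;              /* bytes used in the first page */
            size_t lpage_size;              /* bytes used in the last (partial) page */
            unsigned long nr_pages;         /* total pages spanned */
    };

    static void compute_user_buf_layout(struct user_buf_layout *l,
                                        unsigned long user_addr, size_t user_size)
    {
            l->fpage_offs = offset_in_page(user_addr);
            l->fpage_size = min_t(size_t, PAGE_SIZE - l->fpage_offs, user_size);
            l->nr_pages   = DIV_ROUND_UP(l->fpage_offs + user_size, PAGE_SIZE);
            l->lpage_size = (user_size - l->fpage_size) % PAGE_SIZE;
    }
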
/drivers/crypto/hisilicon/
sgl.c
37 struct hisi_acc_hw_sgl *sgl; member
92 block[i].sgl = dma_alloc_coherent(dev, block_size, in hisi_acc_create_sgl_pool()
95 if (!block[i].sgl) { in hisi_acc_create_sgl_pool()
104 block[i].sgl = dma_alloc_coherent(dev, remain_sgl * sgl_size, in hisi_acc_create_sgl_pool()
107 if (!block[i].sgl) { in hisi_acc_create_sgl_pool()
125 dma_free_coherent(dev, block_size, block[j].sgl, in hisi_acc_create_sgl_pool()
151 dma_free_coherent(dev, block[i].size, block[i].sgl, in hisi_acc_free_sgl_pool()
172 return (void *)block[block_index].sgl + pool->sgl_size * offset; in acc_get_sgl()
175 static void sg_map_to_hw_sg(struct scatterlist *sgl, in sg_map_to_hw_sg() argument
178 hw_sge->buf = sg_dma_address(sgl); in sg_map_to_hw_sg()
[all …]
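
The hisi_acc pool above is allocated as a series of DMA-coherent blocks, and the blocks already allocated are freed if a later allocation fails. A stripped-down sketch of that allocate-or-unwind shape (struct and function names are illustrative, not the driver's):

    #include <linux/dma-mapping.h>

    struct sgl_block {
            void *sgl;
            dma_addr_t dma;
            size_t size;
    };

    static int alloc_sgl_blocks(struct device *dev, struct sgl_block *block,
                                int nblocks, size_t block_size)
    {
            int i, j;

            for (i = 0; i < nblocks; i++) {
                    block[i].sgl = dma_alloc_coherent(dev, block_size,
                                                      &block[i].dma, GFP_KERNEL);
                    if (!block[i].sgl)
                            goto err_free;
                    block[i].size = block_size;
            }
            return 0;

    err_free:
            for (j = 0; j < i; j++)
                    dma_free_coherent(dev, block[j].size, block[j].sgl,
                                      block[j].dma);
            return -ENOMEM;
    }
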
/drivers/target/iscsi/cxgbit/
cxgbit_ddp.c
133 struct scatterlist *sg = ttinfo->sgl; in cxgbit_ddp_set_map()
176 struct scatterlist *sgl = ttinfo->sgl; in cxgbit_ddp_reserve() local
178 unsigned int sg_offset = sgl->offset; in cxgbit_ddp_reserve()
188 if (cxgbit_ddp_sgl_check(sgl, sgcnt) < 0) in cxgbit_ddp_reserve()
191 ttinfo->nr_pages = (xferlen + sgl->offset + in cxgbit_ddp_reserve()
203 sgl->offset = 0; in cxgbit_ddp_reserve()
204 ret = dma_map_sg(&ppm->pdev->dev, sgl, sgcnt, DMA_FROM_DEVICE); in cxgbit_ddp_reserve()
205 sgl->offset = sg_offset; in cxgbit_ddp_reserve()
212 cxgbi_ppm_make_ppod_hdr(ppm, ttinfo->tag, csk->tid, sgl->offset, in cxgbit_ddp_reserve()
218 dma_unmap_sg(&ppm->pdev->dev, sgl, sgcnt, DMA_FROM_DEVICE); in cxgbit_ddp_reserve()
[all …]
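
cxgbit_ddp_reserve() above maps the list from a page-aligned start: it saves the first entry's byte offset, clears it across dma_map_sg(), then restores it so the rest of the code still sees the real offset (which also goes into the ppod header). A sketch of just that save/clear/restore step:

    static int map_from_page_start(struct device *dev, struct scatterlist *sgl,
                                   unsigned int sgcnt)
    {
            unsigned int sg_offset = sgl->offset;
            int ret;

            sgl->offset = 0;                /* map from the page boundary */
            ret = dma_map_sg(dev, sgl, sgcnt, DMA_FROM_DEVICE);
            sgl->offset = sg_offset;        /* restore the caller's view */

            return ret ? 0 : -EINVAL;
    }
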
/drivers/crypto/intel/qat/qat_common/
qat_bl.c
51 struct scatterlist *sgl, in __qat_bl_sgl_to_bufl() argument
62 int n = sg_nents(sgl); in __qat_bl_sgl_to_bufl()
89 bufl_dma_dir = sgl != sglout ? DMA_TO_DEVICE : DMA_BIDIRECTIONAL; in __qat_bl_sgl_to_bufl()
96 for_each_sg(sgl, sg, n, i) { in __qat_bl_sgl_to_bufl()
126 if (sgl != sglout) { in __qat_bl_sgl_to_bufl()
215 n = sg_nents(sgl); in __qat_bl_sgl_to_bufl()
230 struct scatterlist *sgl, in qat_bl_sgl_to_bufl() argument
248 return __qat_bl_sgl_to_bufl(accel_dev, sgl, sglout, buf, in qat_bl_sgl_to_bufl()
267 struct scatterlist *sgl, in qat_bl_sgl_map() argument
277 n = sg_nents(sgl); in qat_bl_sgl_map()
[all …]
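
__qat_bl_sgl_to_bufl() above derives the DMA direction from whether the operation is in-place: a distinct source list is only read by the device (DMA_TO_DEVICE), while src == dst must be DMA_BIDIRECTIONAL. A simplified sketch of the per-entry mapping (the real code fills a firmware buffer list; the error unwind is omitted here):

    static int map_src_entries(struct device *dev, struct scatterlist *sgl,
                               struct scatterlist *sglout, dma_addr_t *addrs)
    {
            enum dma_data_direction dir =
                    sgl != sglout ? DMA_TO_DEVICE : DMA_BIDIRECTIONAL;
            struct scatterlist *sg;
            int i, n = sg_nents(sgl);

            for_each_sg(sgl, sg, n, i) {
                    addrs[i] = dma_map_single(dev, sg_virt(sg), sg->length, dir);
                    if (dma_mapping_error(dev, addrs[i]))
                            return -ENOMEM; /* caller must unmap 0..i-1 */
            }
            return 0;
    }
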
/drivers/scsi/lpfc/
lpfc_scsi.c
103 struct sli4_sge *sgl = (struct sli4_sge *)lpfc_cmd->dma_sgl; in lpfc_sli4_set_rsp_sgl_last() local
104 if (sgl) { in lpfc_sli4_set_rsp_sgl_last()
105 sgl += 1; in lpfc_sli4_set_rsp_sgl_last()
106 sgl->word2 = le32_to_cpu(sgl->word2); in lpfc_sli4_set_rsp_sgl_last()
107 bf_set(lpfc_sli4_sge_last, sgl, 1); in lpfc_sli4_set_rsp_sgl_last()
108 sgl->word2 = cpu_to_le32(sgl->word2); in lpfc_sli4_set_rsp_sgl_last()
601 struct sli4_sge *sgl; in lpfc_get_scsi_buf_s4() local
652 sgl = (struct sli4_sge *)lpfc_cmd->dma_sgl; in lpfc_get_scsi_buf_s4()
654 sgl->addr_hi = cpu_to_le32(putPaddrHigh(pdma_phys_fcp_cmd)); in lpfc_get_scsi_buf_s4()
655 sgl->addr_lo = cpu_to_le32(putPaddrLow(pdma_phys_fcp_cmd)); in lpfc_get_scsi_buf_s4()
[all …]
lpfc_nvme.c
830 struct sli4_sge *sgl; in lpfc_nvme_adj_fcp_sgls() local
849 sgl = lpfc_ncmd->dma_sgl; in lpfc_nvme_adj_fcp_sgls()
850 sgl->sge_len = cpu_to_le32(nCmd->cmdlen); in lpfc_nvme_adj_fcp_sgls()
852 sgl->addr_hi = 0; in lpfc_nvme_adj_fcp_sgls()
853 sgl->addr_lo = 0; in lpfc_nvme_adj_fcp_sgls()
892 sgl->addr_hi = cpu_to_le32(putPaddrHigh(nCmd->cmddma)); in lpfc_nvme_adj_fcp_sgls()
893 sgl->addr_lo = cpu_to_le32(putPaddrLow(nCmd->cmddma)); in lpfc_nvme_adj_fcp_sgls()
898 wqe->generic.bde.addrHigh = sgl->addr_hi; in lpfc_nvme_adj_fcp_sgls()
899 wqe->generic.bde.addrLow = sgl->addr_lo; in lpfc_nvme_adj_fcp_sgls()
906 sgl++; in lpfc_nvme_adj_fcp_sgls()
[all …]
lpfc_nvmet.c
2704 struct sli4_sge *sgl; in lpfc_nvmet_prep_fcp_wqe() local
2774 sgl = (struct sli4_sge *)ctxp->ctxbuf->sglq->sgl; in lpfc_nvmet_prep_fcp_wqe()
2824 sgl->addr_hi = 0; in lpfc_nvmet_prep_fcp_wqe()
2825 sgl->addr_lo = 0; in lpfc_nvmet_prep_fcp_wqe()
2826 sgl->word2 = 0; in lpfc_nvmet_prep_fcp_wqe()
2827 bf_set(lpfc_sli4_sge_type, sgl, LPFC_SGE_TYPE_SKIP); in lpfc_nvmet_prep_fcp_wqe()
2828 sgl->word2 = cpu_to_le32(sgl->word2); in lpfc_nvmet_prep_fcp_wqe()
2829 sgl->sge_len = 0; in lpfc_nvmet_prep_fcp_wqe()
2830 sgl++; in lpfc_nvmet_prep_fcp_wqe()
2831 sgl->addr_hi = 0; in lpfc_nvmet_prep_fcp_wqe()
[all …]
/drivers/spi/
spi-bcm2835.c
493 if (bs->tx_buf && !sg_is_last(&tfr->tx_sg.sgl[0])) in bcm2835_spi_transfer_prologue()
494 bs->tx_prologue = sg_dma_len(&tfr->tx_sg.sgl[0]) & 3; in bcm2835_spi_transfer_prologue()
496 if (bs->rx_buf && !sg_is_last(&tfr->rx_sg.sgl[0])) { in bcm2835_spi_transfer_prologue()
497 bs->rx_prologue = sg_dma_len(&tfr->rx_sg.sgl[0]) & 3; in bcm2835_spi_transfer_prologue()
500 if (!bs->tx_buf || sg_is_last(&tfr->tx_sg.sgl[0])) { in bcm2835_spi_transfer_prologue()
505 !(sg_dma_len(&tfr->tx_sg.sgl[0]) & ~3); in bcm2835_spi_transfer_prologue()
527 sg_dma_address(&tfr->rx_sg.sgl[0]), in bcm2835_spi_transfer_prologue()
530 sg_dma_address(&tfr->rx_sg.sgl[0]) += bs->rx_prologue; in bcm2835_spi_transfer_prologue()
531 sg_dma_len(&tfr->rx_sg.sgl[0]) -= bs->rx_prologue; in bcm2835_spi_transfer_prologue()
553 sg_dma_address(&tfr->tx_sg.sgl[0]) += bs->tx_prologue; in bcm2835_spi_transfer_prologue()
[all …]
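
The bcm2835 hits handle a controller quirk: DMA transfer lengths must be multiples of 4 bytes, so the remainder of the first segment's length modulo 4 (the "prologue") is transferred without DMA and the first DMA segment is advanced past it. A sketch of the carve-off; the driver restores these fields again after the transfer:

    static void carve_rx_prologue(struct sg_table *rx_sg, unsigned int rx_prologue)
    {
            /* rx_prologue bytes were already transferred without DMA */
            sg_dma_address(rx_sg->sgl) += rx_prologue;
            sg_dma_len(rx_sg->sgl)     -= rx_prologue;
    }
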
/drivers/media/platform/
m2m-deinterlace.c
236 ctx->xt->sgl[0].size = s_width; in deinterlace_issue_dma()
237 ctx->xt->sgl[0].icg = s_width; in deinterlace_issue_dma()
243 ctx->xt->sgl[0].size = s_width; in deinterlace_issue_dma()
244 ctx->xt->sgl[0].icg = s_width; in deinterlace_issue_dma()
250 ctx->xt->sgl[0].size = s_width / 2; in deinterlace_issue_dma()
251 ctx->xt->sgl[0].icg = s_width / 2; in deinterlace_issue_dma()
257 ctx->xt->sgl[0].size = s_width / 2; in deinterlace_issue_dma()
258 ctx->xt->sgl[0].icg = s_width / 2; in deinterlace_issue_dma()
264 ctx->xt->sgl[0].size = s_width / 2; in deinterlace_issue_dma()
265 ctx->xt->sgl[0].icg = s_width / 2; in deinterlace_issue_dma()
[all …]
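
deinterlace_issue_dma() above programs an interleaved-DMA template in which each chunk's size equals its inter-chunk gap (icg): copy one line, skip one line, i.e. extract a single field of an interlaced frame. A sketch of such a template (struct dma_interleaved_template and struct data_chunk are the real dmaengine types; s_width/s_height follow the hits):

    #include <linux/dmaengine.h>
    #include <linux/slab.h>

    static struct dma_interleaved_template *alloc_field_template(size_t s_width,
                                                                 size_t s_height)
    {
            struct dma_interleaved_template *xt;

            xt = kzalloc(struct_size(xt, sgl, 1), GFP_KERNEL);
            if (!xt)
                    return NULL;

            xt->dir = DMA_MEM_TO_MEM;
            xt->numf = s_height / 2;        /* one "frame" per line of the field */
            xt->frame_size = 1;             /* one chunk per frame */
            xt->sgl[0].size = s_width;      /* copy one line... */
            xt->sgl[0].icg = s_width;       /* ...then skip the other field's line */

            return xt;
    }
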
/drivers/xen/
swiotlb-xen.c
311 xen_swiotlb_unmap_sg(struct device *hwdev, struct scatterlist *sgl, int nelems, in xen_swiotlb_unmap_sg() argument
319 for_each_sg(sgl, sg, nelems, i) in xen_swiotlb_unmap_sg()
326 xen_swiotlb_map_sg(struct device *dev, struct scatterlist *sgl, int nelems, in xen_swiotlb_map_sg() argument
334 for_each_sg(sgl, sg, nelems, i) { in xen_swiotlb_map_sg()
344 xen_swiotlb_unmap_sg(dev, sgl, i, dir, attrs | DMA_ATTR_SKIP_CPU_SYNC); in xen_swiotlb_map_sg()
345 sg_dma_len(sgl) = 0; in xen_swiotlb_map_sg()
350 xen_swiotlb_sync_sg_for_cpu(struct device *dev, struct scatterlist *sgl, in xen_swiotlb_sync_sg_for_cpu() argument
356 for_each_sg(sgl, sg, nelems, i) { in xen_swiotlb_sync_sg_for_cpu()
363 xen_swiotlb_sync_sg_for_device(struct device *dev, struct scatterlist *sgl, in xen_swiotlb_sync_sg_for_device() argument
369 for_each_sg(sgl, sg, nelems, i) { in xen_swiotlb_sync_sg_for_device()
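
xen_swiotlb_map_sg() above is the canonical shape of a .map_sg implementation: map entry by entry, and on failure unmap the i entries already mapped with DMA_ATTR_SKIP_CPU_SYNC (there is no device data worth syncing back) and zero the first entry's DMA length to flag the error. A sketch, with map_one()/unmap_all() as hypothetical stand-ins for the backend:

    static int sketch_map_sg(struct device *dev, struct scatterlist *sgl,
                             int nelems, enum dma_data_direction dir,
                             unsigned long attrs)
    {
            struct scatterlist *sg;
            int i;

            for_each_sg(sgl, sg, nelems, i) {
                    sg->dma_address = map_one(dev, sg_phys(sg), sg->length,
                                              dir, attrs);  /* hypothetical */
                    if (sg->dma_address == DMA_MAPPING_ERROR)
                            goto out_unmap;
                    sg_dma_len(sg) = sg->length;
            }
            return nelems;

    out_unmap:
            unmap_all(dev, sgl, i, dir, attrs | DMA_ATTR_SKIP_CPU_SYNC);
            sg_dma_len(sgl) = 0;
            return 0;
    }
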
/drivers/vfio/pci/pds/
cmds.c
206 struct pds_lm_sg_elem *sgl, *sge; in pds_vfio_dma_map_lm_file() local
225 sgl = kzalloc(sgl_size, GFP_KERNEL); in pds_vfio_dma_map_lm_file()
226 if (!sgl) { in pds_vfio_dma_map_lm_file()
232 sge = sgl; in pds_vfio_dma_map_lm_file()
240 sgl_addr = dma_map_single(dev, sgl, sgl_size, DMA_TO_DEVICE); in pds_vfio_dma_map_lm_file()
246 lm_file->sgl = sgl; in pds_vfio_dma_map_lm_file()
252 kfree(sgl); in pds_vfio_dma_map_lm_file()
267 if (lm_file->sgl) { in pds_vfio_dma_unmap_lm_file()
269 lm_file->num_sge * sizeof(*lm_file->sgl), in pds_vfio_dma_unmap_lm_file()
271 kfree(lm_file->sgl); in pds_vfio_dma_unmap_lm_file()
[all …]
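
pds_vfio_dma_map_lm_file() above uses a two-level pattern: build the device-format SG element array in ordinary kernel memory, then dma_map_single() the array itself so the device can fetch it. A sketch (pds_lm_sg_elem is the device layout from the hits; filling the elements is elided):

    static int map_sg_elem_array(struct device *dev, unsigned int num_sge,
                                 struct pds_lm_sg_elem **out_sgl,
                                 dma_addr_t *out_addr)
    {
            size_t sgl_size = num_sge * sizeof(**out_sgl);
            struct pds_lm_sg_elem *sgl;
            dma_addr_t sgl_addr;

            sgl = kzalloc(sgl_size, GFP_KERNEL);
            if (!sgl)
                    return -ENOMEM;

            /* ... fill each element's address/length ... */

            sgl_addr = dma_map_single(dev, sgl, sgl_size, DMA_TO_DEVICE);
            if (dma_mapping_error(dev, sgl_addr)) {
                    kfree(sgl);
                    return -EIO;
            }

            *out_sgl = sgl;
            *out_addr = sgl_addr;
            return 0;
    }
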
dirty.c
111 kfree(bmp_info->sgl); in __pds_vfio_dirty_free_sgl()
114 bmp_info->sgl = NULL; in __pds_vfio_dirty_free_sgl()
120 if (pds_vfio->dirty.host_seq.sgl) in pds_vfio_dirty_free_sgl()
122 if (pds_vfio->dirty.host_ack.sgl) in pds_vfio_dirty_free_sgl()
132 struct pds_lm_sg_elem *sgl; in __pds_vfio_dirty_alloc_sgl() local
140 sgl = kzalloc(sgl_size, GFP_KERNEL); in __pds_vfio_dirty_alloc_sgl()
141 if (!sgl) in __pds_vfio_dirty_alloc_sgl()
144 sgl_addr = dma_map_single(pdsc_dev, sgl, sgl_size, DMA_BIDIRECTIONAL); in __pds_vfio_dirty_alloc_sgl()
146 kfree(sgl); in __pds_vfio_dirty_alloc_sgl()
150 bmp_info->sgl = sgl; in __pds_vfio_dirty_alloc_sgl()
[all …]
/drivers/gpu/drm/nouveau/nvkm/core/
firmware.c
124 .sgl = &fw->mem.sgl, in nvkm_firmware_mem_map()
136 return sg_dma_len(&nvkm_firmware_mem(memory)->mem.sgl); in nvkm_firmware_mem_size()
190 dma_free_noncoherent(fw->device->dev, sg_dma_len(&fw->mem.sgl), in nvkm_firmware_dtor()
227 sg_init_one(&fw->mem.sgl, fw->img, len); in nvkm_firmware_ctor()
228 sg_dma_address(&fw->mem.sgl) = fw->phys; in nvkm_firmware_ctor()
229 sg_dma_len(&fw->mem.sgl) = len; in nvkm_firmware_ctor()
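
nvkm_firmware_ctor() above wraps one contiguous, already-mapped buffer in a single scatterlist entry, filling the DMA fields by hand because the mapping was made elsewhere. Sketch:

    static void fill_one_entry_sgl(struct scatterlist *sg, void *cpu_addr,
                                   dma_addr_t dma_addr, unsigned int len)
    {
            sg_init_one(sg, cpu_addr, len); /* CPU-side page/offset/length */
            sg_dma_address(sg) = dma_addr;  /* device address, mapped already */
            sg_dma_len(sg) = len;
    }
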
/drivers/net/ethernet/intel/ixgbe/
ixgbe_fcoe.c
29 ddp->sgl = NULL; in ixgbe_fcoe_clear_ddp()
110 if (ddp->sgl) in ixgbe_fcoe_ddp_put()
111 dma_unmap_sg(&adapter->pdev->dev, ddp->sgl, ddp->sgc, in ixgbe_fcoe_ddp_put()
134 struct scatterlist *sgl, unsigned int sgc, in ixgbe_fcoe_ddp_setup() argument
153 if (!netdev || !sgl) in ixgbe_fcoe_ddp_setup()
169 if (ddp->sgl) { in ixgbe_fcoe_ddp_setup()
171 xid, ddp->sgl, ddp->sgc); in ixgbe_fcoe_ddp_setup()
189 dmacount = dma_map_sg(&adapter->pdev->dev, sgl, sgc, DMA_FROM_DEVICE); in ixgbe_fcoe_ddp_setup()
202 ddp->sgl = sgl; in ixgbe_fcoe_ddp_setup()
206 for_each_sg(sgl, sg, dmacount, i) { in ixgbe_fcoe_ddp_setup()
[all …]
/drivers/media/platform/rockchip/rga/
rga-buf.c
118 struct scatterlist *sgl; in rga_buf_map() local
131 for_each_sg(sgt->sgl, sgl, sgt->nents, i) { in rga_buf_map()
132 len = sg_dma_len(sgl) >> PAGE_SHIFT; in rga_buf_map()
133 address = sg_phys(sgl); in rga_buf_map()
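
rga_buf_map() above walks the mapped table to fill the device's private page table: each segment contributes sg_dma_len() >> PAGE_SHIFT pages, starting at the address the hit takes with sg_phys(). A sketch of the walk (the device page-table write is elided):

    static void walk_mapped_table(struct sg_table *sgt)
    {
            struct scatterlist *sg;
            unsigned int i;

            for_each_sg(sgt->sgl, sg, sgt->nents, i) {
                    unsigned int npages = sg_dma_len(sg) >> PAGE_SHIFT;
                    phys_addr_t address = sg_phys(sg);

                    /* ... write npages page-table entries starting at address ... */
            }
    }
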
/drivers/scsi/esas2r/
esas2r_io.c
224 struct esas2r_mem_desc *sgl; in esas2r_build_sg_list_sge() local
231 sgl = esas2r_alloc_sgl(a); in esas2r_build_sg_list_sge()
233 if (unlikely(sgl == NULL)) in esas2r_build_sg_list_sge()
244 memcpy(sgl->virt_addr, sgc->sge.a64.last, sgelen); in esas2r_build_sg_list_sge()
248 (struct atto_vda_sge *)((u8 *)sgl->virt_addr + in esas2r_build_sg_list_sge()
253 (struct atto_vda_sge *)((u8 *)sgl->virt_addr in esas2r_build_sg_list_sge()
260 cpu_to_le64(sgl->phys_addr); in esas2r_build_sg_list_sge()
302 list_add(&sgl->next_desc, &rq->sg_table_head); in esas2r_build_sg_list_sge()
376 struct esas2r_mem_desc *sgl; in esas2r_build_prd_iblk() local
449 sgl = esas2r_alloc_sgl(a); in esas2r_build_prd_iblk()
[all …]
/drivers/gpu/drm/i915/
i915_scatterlist.h
31 } __sgt_iter(struct scatterlist *sgl, bool dma) { in __sgt_iter() argument
32 struct sgt_iter s = { .sgp = sgl }; in __sgt_iter()
90 for ((__iter) = __sgt_iter((__sgt)->sgl, true); \
102 for ((__iter) = __sgt_iter((__sgt)->sgl, false); \
215 rsgt->table.sgl = NULL; in __i915_refct_sgt_init()
/drivers/dma/
milbeaut-hdmac.c
61 struct scatterlist *sgl; member
122 sg = &md->sgl[md->sg_cur]; in milbeaut_chan_start()
256 milbeaut_hdmac_prep_slave_sg(struct dma_chan *chan, struct scatterlist *sgl, in milbeaut_hdmac_prep_slave_sg() argument
272 md->sgl = kcalloc(sg_len, sizeof(*sgl), GFP_NOWAIT); in milbeaut_hdmac_prep_slave_sg()
273 if (!md->sgl) { in milbeaut_hdmac_prep_slave_sg()
279 md->sgl[i] = sgl[i]; in milbeaut_hdmac_prep_slave_sg()
350 sg = &md->sgl[md->sg_cur]; in milbeaut_hdmac_tx_status()
372 txstate->residue += sg_dma_len(&md->sgl[i]); in milbeaut_hdmac_tx_status()
398 kfree(md->sgl); in milbeaut_hdmac_desc_free()
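
milbeaut_hdmac_prep_slave_sg() above keeps a private copy of the caller's scatterlist: the dmaengine API does not guarantee the caller's sgl outlives the prep call, and prep callbacks may run in atomic context, hence GFP_NOWAIT. Sketch:

    static struct scatterlist *copy_sgl(struct scatterlist *sgl,
                                        unsigned int sg_len)
    {
            struct scatterlist *copy;
            unsigned int i;

            copy = kcalloc(sg_len, sizeof(*copy), GFP_NOWAIT);
            if (!copy)
                    return NULL;

            for (i = 0; i < sg_len; i++)
                    copy[i] = sgl[i];       /* flat copy; assumes an unchained list */

            return copy;
    }
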
altera-msgdma.c
388 msgdma_prep_slave_sg(struct dma_chan *dchan, struct scatterlist *sgl, in msgdma_prep_slave_sg() argument
404 for_each_sg(sgl, sg, sg_len, i) in msgdma_prep_slave_sg()
416 avail = sg_dma_len(sgl); in msgdma_prep_slave_sg()
427 dma_src = sg_dma_address(sgl) + sg_dma_len(sgl) - avail; in msgdma_prep_slave_sg()
432 dma_dst = sg_dma_address(sgl) + sg_dma_len(sgl) - avail; in msgdma_prep_slave_sg()
447 sgl = sg_next(sgl); in msgdma_prep_slave_sg()
448 if (sgl == NULL) in msgdma_prep_slave_sg()
451 avail = sg_dma_len(sgl); in msgdma_prep_slave_sg()
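
msgdma_prep_slave_sg() above consumes the list in hardware-sized chunks, tracking the unconsumed part of the current entry in avail and deriving each chunk's address as sg_dma_address() + sg_dma_len() - avail. A sketch of that loop (MAX_CHUNK and the descriptor-queue step are illustrative):

    #include <linux/sizes.h>

    #define MAX_CHUNK       SZ_64K          /* illustrative hardware limit */

    static int queue_chunks(struct scatterlist *sgl, u32 total)
    {
            u32 avail = sg_dma_len(sgl);

            while (total) {
                    u32 len = min_t(u32, avail, min_t(u32, total, MAX_CHUNK));
                    dma_addr_t addr = sg_dma_address(sgl) +
                                      sg_dma_len(sgl) - avail;

                    /* ... queue one hardware descriptor for (addr, len) ... */

                    total -= len;
                    avail -= len;
                    if (!avail) {
                            sgl = sg_next(sgl);
                            if (!sgl)
                                    break;
                            avail = sg_dma_len(sgl);
                    }
            }
            return total ? -EINVAL : 0;
    }
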
/drivers/scsi/bnx2fc/
bnx2fc_hwi.c
1496 struct fcoe_ext_mul_sges_ctx *sgl; in bnx2fc_init_seq_cleanup_task() local
1543 task->txwr_only.sgl_ctx.sgl.mul_sgl.cur_sge_addr.lo = in bnx2fc_init_seq_cleanup_task()
1545 task->txwr_only.sgl_ctx.sgl.mul_sgl.cur_sge_addr.hi = in bnx2fc_init_seq_cleanup_task()
1547 task->txwr_only.sgl_ctx.sgl.mul_sgl.sgl_size = in bnx2fc_init_seq_cleanup_task()
1549 task->txwr_only.sgl_ctx.sgl.mul_sgl.cur_sge_off = in bnx2fc_init_seq_cleanup_task()
1551 task->txwr_only.sgl_ctx.sgl.mul_sgl.cur_sge_idx = i; in bnx2fc_init_seq_cleanup_task()
1555 sgl = &task->rxwr_only.union_ctx.read_info.sgl_ctx.sgl; in bnx2fc_init_seq_cleanup_task()
1556 sgl->mul_sgl.cur_sge_addr.lo = (u32)phys_addr; in bnx2fc_init_seq_cleanup_task()
1557 sgl->mul_sgl.cur_sge_addr.hi = (u32)((u64)phys_addr >> 32); in bnx2fc_init_seq_cleanup_task()
1558 sgl->mul_sgl.sgl_size = bd_count; in bnx2fc_init_seq_cleanup_task()
[all …]
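
Several firmware SGL formats in these results (bnx2fc here, qedf below) split a DMA address into 32-bit halves by hand with casts and shifts. The kernel's upper_32_bits()/lower_32_bits() helpers do the same thing more readably; a sketch with a hypothetical element layout:

    struct hw_sge {                         /* hypothetical device layout */
            u32 addr_lo;
            u32 addr_hi;
            u32 len;
    };

    static void set_hw_sge(struct hw_sge *sge, dma_addr_t addr, u32 len)
    {
            sge->addr_lo = lower_32_bits(addr);
            sge->addr_hi = upper_32_bits(addr);
            sge->len = len;
    }
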
/drivers/dma/sh/
shdma-base.c
561 struct scatterlist *sgl, unsigned int sg_len, dma_addr_t *addr, in shdma_prep_sg() argument
571 for_each_sg(sgl, sg, sg_len, i) in shdma_prep_sg()
588 for_each_sg(sgl, sg, sg_len, i) { in shdma_prep_sg()
662 struct dma_chan *chan, struct scatterlist *sgl, unsigned int sg_len, in shdma_prep_slave_sg() argument
685 return shdma_prep_sg(schan, sgl, sg_len, &slave_addr, in shdma_prep_slave_sg()
703 struct scatterlist *sgl; in shdma_prep_dma_cyclic() local
731 sgl = kmalloc_array(sg_len, sizeof(*sgl), GFP_KERNEL); in shdma_prep_dma_cyclic()
732 if (!sgl) in shdma_prep_dma_cyclic()
735 sg_init_table(sgl, sg_len); in shdma_prep_dma_cyclic()
740 sg_set_page(&sgl[i], pfn_to_page(PFN_DOWN(src)), period_len, in shdma_prep_dma_cyclic()
[all …]
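
shdma_prep_dma_cyclic() above has no native cyclic mode to lean on, so it synthesizes a scatterlist with one period-sized entry per period and feeds it to the slave-sg path. A sketch of building such a list for a DMA-contiguous buffer (as in the hits, this assumes the DMA address is usable with pfn_to_page(), i.e. a 1:1 mapping):

    static struct scatterlist *build_cyclic_sgl(dma_addr_t buf_addr,
                                                unsigned int sg_len,
                                                size_t period_len)
    {
            struct scatterlist *sgl;
            unsigned int i;

            sgl = kmalloc_array(sg_len, sizeof(*sgl), GFP_KERNEL);
            if (!sgl)
                    return NULL;

            sg_init_table(sgl, sg_len);
            for (i = 0; i < sg_len; i++) {
                    dma_addr_t src = buf_addr + period_len * i;

                    sg_set_page(&sgl[i], pfn_to_page(PFN_DOWN(src)),
                                period_len, offset_in_page(src));
                    sg_dma_address(&sgl[i]) = src;
                    sg_dma_len(&sgl[i]) = period_len;
            }
            return sgl;
    }
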
/drivers/scsi/elx/efct/
efct_scsi.c
202 struct efct_scsi_sgl *sgl, u32 sgl_count, in efct_scsi_build_sgls() argument
218 rc = efct_hw_io_add_sge(hw, hio, sgl[i].addr, sgl[i].len); in efct_scsi_build_sgls()
240 n_sge = (hio->sgl == &hio->def_sgl) ? hio->n_sge : hio->def_sgl_count; in efc_log_sgl()
303 io->sgl, io->sgl_count, io->hio_type); in efct_scsi_io_dispatch_hw_io()
619 struct efct_scsi_sgl *sgl, u32 sgl_count, u64 xwire_len, in efct_scsi_xfer_data() argument
672 struct efct_scsi_sgl *sgl_ptr = &io->sgl[sgl_count - 1]; in efct_scsi_xfer_data()
712 struct efct_scsi_sgl *sgl, u32 sgl_count, u64 len, in efct_scsi_send_rd_data() argument
715 return efct_scsi_xfer_data(io, flags, sgl, sgl_count, in efct_scsi_send_rd_data()
722 struct efct_scsi_sgl *sgl, u32 sgl_count, u64 len, in efct_scsi_recv_wr_data() argument
725 return efct_scsi_xfer_data(io, flags, sgl, sgl_count, len, in efct_scsi_recv_wr_data()
[all …]
/drivers/scsi/qedf/
drv_scsi_fw_funcs.c
34 val = cpu_to_le32(sgl_task_params->sgl[sge_index].sge_addr.lo); in init_scsi_sgl_context()
36 val = cpu_to_le32(sgl_task_params->sgl[sge_index].sge_addr.hi); in init_scsi_sgl_context()
38 val = cpu_to_le32(sgl_task_params->sgl[sge_index].sge_len); in init_scsi_sgl_context()
/drivers/gpu/drm/i915/gem/
i915_gem_phys.c
56 sg = st->sgl; in i915_gem_object_get_pages_phys()
103 dma_addr_t dma = sg_dma_address(pages->sgl); in i915_gem_object_put_pages_phys()
104 void *vaddr = sg_page(pages->sgl); in i915_gem_object_put_pages_phys()
147 void *vaddr = sg_page(obj->mm.pages->sgl) + args->offset; in i915_gem_object_pwrite_phys()
178 void *vaddr = sg_page(obj->mm.pages->sgl) + args->offset; in i915_gem_object_pread_phys()
/drivers/gpu/drm/virtio/
virtgpu_vram.c
104 sg_set_page(sgt->sgl, NULL, vram->vram_node.size, 0); in virtio_gpu_vram_map_dma_buf()
105 sg_dma_address(sgt->sgl) = addr; in virtio_gpu_vram_map_dma_buf()
106 sg_dma_len(sgt->sgl) = vram->vram_node.size; in virtio_gpu_vram_map_dma_buf()
120 dma_unmap_resource(dev, sg_dma_address(sgt->sgl), in virtio_gpu_vram_unmap_dma_buf()
121 sg_dma_len(sgt->sgl), dir, in virtio_gpu_vram_unmap_dma_buf()
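
virtio_gpu_vram_map_dma_buf() above exports a VRAM region that has no struct pages: the range is mapped with dma_map_resource() and wrapped in a one-entry table where only the DMA fields are meaningful (the page pointer stays NULL). Sketch:

    static int map_vram_region(struct device *dev, struct sg_table *sgt,
                               phys_addr_t phys, size_t size,
                               enum dma_data_direction dir)
    {
            dma_addr_t addr;
            int ret;

            ret = sg_alloc_table(sgt, 1, GFP_KERNEL);
            if (ret)
                    return ret;

            addr = dma_map_resource(dev, phys, size, dir, 0);
            if (dma_mapping_error(dev, addr)) {
                    sg_free_table(sgt);
                    return -ENOMEM;
            }

            /* no backing pages: only the dma_* fields of the entry are valid */
            sg_set_page(sgt->sgl, NULL, size, 0);
            sg_dma_address(sgt->sgl) = addr;
            sg_dma_len(sgt->sgl) = size;
            return 0;
    }
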
