
Searched refs:dma_addrs (Results 1 – 6 of 6) sorted by relevance

/drivers/gpu/drm/nouveau/
nouveau_dmem.c
379 dma_addr_t *dma_addrs; in nouveau_dmem_evict_chunk() local
384 dma_addrs = kcalloc(npages, sizeof(*dma_addrs), GFP_KERNEL); in nouveau_dmem_evict_chunk()
402 &dma_addrs[i]); in nouveau_dmem_evict_chunk()
413 dma_unmap_page(chunk->drm->dev->dev, dma_addrs[i], PAGE_SIZE, DMA_BIDIRECTIONAL); in nouveau_dmem_evict_chunk()
414 kfree(dma_addrs); in nouveau_dmem_evict_chunk()
666 dma_addr_t *dma_addrs, u64 *pfns) in nouveau_dmem_migrate_chunk() argument
673 args->src[i], dma_addrs + nr_dma, pfns + i); in nouveau_dmem_migrate_chunk()
674 if (!dma_mapping_error(drm->dev->dev, dma_addrs[nr_dma])) in nouveau_dmem_migrate_chunk()
685 dma_unmap_page(drm->dev->dev, dma_addrs[nr_dma], PAGE_SIZE, in nouveau_dmem_migrate_chunk()
700 dma_addr_t *dma_addrs; in nouveau_dmem_migrate_vma() local
[all …]
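
The nouveau hits above all follow one pattern: a dma_addr_t array sized to the page count is kcalloc()'d, each page is mapped (with dma_mapping_error() checked on the migrate path), and teardown walks the array calling dma_unmap_page() before kfree(). Below is a minimal sketch of that pattern; the function names and the single map/unmap helpers are placeholders, not nouveau's own code.

#include <linux/dma-mapping.h>
#include <linux/mm.h>
#include <linux/slab.h>

/*
 * Map an array of pages for bidirectional DMA, recording each handle in a
 * heap-allocated dma_addr_t array so every page can be unmapped later.
 * Hypothetical helper mirroring the nouveau_dmem_evict_chunk() bookkeeping.
 */
static dma_addr_t *map_pages_for_dma(struct device *dev, struct page **pages,
				     unsigned long npages)
{
	dma_addr_t *dma_addrs;
	unsigned long i;

	dma_addrs = kcalloc(npages, sizeof(*dma_addrs), GFP_KERNEL);
	if (!dma_addrs)
		return NULL;

	for (i = 0; i < npages; i++) {
		dma_addrs[i] = dma_map_page(dev, pages[i], 0, PAGE_SIZE,
					    DMA_BIDIRECTIONAL);
		if (dma_mapping_error(dev, dma_addrs[i]))
			goto err_unmap;
	}
	return dma_addrs;

err_unmap:
	/* Unwind only the pages that were successfully mapped. */
	while (i--)
		dma_unmap_page(dev, dma_addrs[i], PAGE_SIZE, DMA_BIDIRECTIONAL);
	kfree(dma_addrs);
	return NULL;
}

/* Teardown: unmap every entry, then free the bookkeeping array. */
static void unmap_pages_for_dma(struct device *dev, dma_addr_t *dma_addrs,
				unsigned long npages)
{
	unsigned long i;

	for (i = 0; i < npages; i++)
		dma_unmap_page(dev, dma_addrs[i], PAGE_SIZE, DMA_BIDIRECTIONAL);
	kfree(dma_addrs);
}
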
/drivers/gpu/drm/nouveau/nvkm/subdev/instmem/
gk20a.c
85 dma_addr_t *dma_addrs; member
338 dma_unmap_page(dev, node->dma_addrs[i], PAGE_SIZE, in gk20a_instobj_dtor_iommu()
436 sizeof(*node->dma_addrs)) * npages), GFP_KERNEL))) in gk20a_instobj_ctor_iommu()
439 node->dma_addrs = (void *)(node->pages + npages); in gk20a_instobj_ctor_iommu()
460 node->dma_addrs[i] = dma_adr; in gk20a_instobj_ctor_iommu()
477 ret = iommu_map(imem->domain, offset, node->dma_addrs[i], in gk20a_instobj_ctor_iommu()
503 dma_addr_t dma_addr = node->dma_addrs[i]; in gk20a_instobj_ctor_iommu()
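
gk20a keeps the struct page pointers and their DMA addresses in a single allocation: the hits at lines 436 and 439 size one buffer for both arrays and point node->dma_addrs just past node->pages. Each page is then mapped, its address stored, and iommu_map()'d into a contiguous IOVA range (line 477); the dtor unmaps with dma_unmap_page(). A hedged sketch of the combined-allocation layout follows, with hypothetical names, an assumed DMA direction, and the iommu_map() step left as a comment because its signature differs between kernel versions.

#include <linux/dma-mapping.h>
#include <linux/mm.h>
#include <linux/slab.h>

/* Hypothetical node, mirroring gk20a_instobj's pages/dma_addrs pairing. */
struct instobj_node {
	struct page **pages;
	dma_addr_t *dma_addrs;
};

static int node_map_pages(struct device *dev, struct instobj_node *node,
			  struct page **src, unsigned long npages)
{
	unsigned long i;

	/* One allocation holds both arrays; dma_addrs sits right after pages. */
	node->pages = kzalloc((sizeof(*node->pages) +
			       sizeof(*node->dma_addrs)) * npages, GFP_KERNEL);
	if (!node->pages)
		return -ENOMEM;
	node->dma_addrs = (void *)(node->pages + npages);

	for (i = 0; i < npages; i++) {
		dma_addr_t dma_adr = dma_map_page(dev, src[i], 0, PAGE_SIZE,
						  DMA_BIDIRECTIONAL);

		if (dma_mapping_error(dev, dma_adr))
			goto err;
		node->pages[i] = src[i];
		node->dma_addrs[i] = dma_adr;
		/*
		 * gk20a additionally iommu_map()s each dma_addrs[i] at
		 * offset + i * PAGE_SIZE so the GPU sees one contiguous
		 * range; omitted here to keep the sketch version-neutral.
		 */
	}
	return 0;

err:
	while (i--)
		dma_unmap_page(dev, node->dma_addrs[i], PAGE_SIZE,
			       DMA_BIDIRECTIONAL);
	kfree(node->pages);
	return -ENOMEM;
}
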
/drivers/gpu/drm/omapdrm/
omap_gem.c
90 dma_addr_t *dma_addrs; member
280 omap_obj->dma_addrs = addrs; in omap_gem_attach_pages()
303 if (omap_obj->dma_addrs[i]) in omap_gem_detach_pages()
304 dma_unmap_page(obj->dev->dev, omap_obj->dma_addrs[i], in omap_gem_detach_pages()
308 kfree(omap_obj->dma_addrs); in omap_gem_detach_pages()
309 omap_obj->dma_addrs = NULL; in omap_gem_detach_pages()
709 if (omap_obj->dma_addrs[pgoff]) { in omap_gem_cpu_sync_page()
710 dma_unmap_page(dev->dev, omap_obj->dma_addrs[pgoff], in omap_gem_cpu_sync_page()
712 omap_obj->dma_addrs[pgoff] = 0; in omap_gem_cpu_sync_page()
730 if (!omap_obj->dma_addrs[i]) { in omap_gem_dma_sync_buffer()
[all …]
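
omap_gem treats a zero entry in omap_obj->dma_addrs as "this page is not currently mapped": detach and cpu_sync unmap a page only if its slot is non-zero and then clear it, while the dma_sync path maps only pages whose slot is still zero. A rough sketch of that per-page sentinel scheme is below; the struct, function names, and the DMA_BIDIRECTIONAL direction are assumptions, not taken from the hits above.

#include <linux/dma-mapping.h>
#include <linux/mm.h>

/* Hypothetical object: dma_addrs[i] == 0 means page i is not mapped. */
struct gem_pages {
	struct page **pages;
	dma_addr_t *dma_addrs;
	unsigned int npages;
};

/* Give the CPU back ownership of one page: unmap it and clear its slot. */
static void gem_cpu_sync_page(struct device *dev, struct gem_pages *obj,
			      unsigned int pgoff)
{
	if (obj->dma_addrs[pgoff]) {
		dma_unmap_page(dev, obj->dma_addrs[pgoff], PAGE_SIZE,
			       DMA_BIDIRECTIONAL);
		obj->dma_addrs[pgoff] = 0;
	}
}

/* (Re)map any page whose slot is still zero before handing it to the device. */
static int gem_dma_sync_buffer(struct device *dev, struct gem_pages *obj)
{
	unsigned int i;

	for (i = 0; i < obj->npages; i++) {
		if (!obj->dma_addrs[i]) {
			dma_addr_t addr = dma_map_page(dev, obj->pages[i], 0,
						       PAGE_SIZE,
						       DMA_BIDIRECTIONAL);
			if (dma_mapping_error(dev, addr))
				return -ENOMEM;
			obj->dma_addrs[i] = addr;
		}
	}
	return 0;
}
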
/drivers/vfio/pci/mlx5/
cmd.c
244 *mtt++ = cpu_to_be64(recv_buf->dma_addrs[i]); in _create_mkey()
982 recv_buf->dma_addrs = kvcalloc(recv_buf->npages, in register_dma_recv_pages()
983 sizeof(*recv_buf->dma_addrs), in register_dma_recv_pages()
985 if (!recv_buf->dma_addrs) in register_dma_recv_pages()
989 recv_buf->dma_addrs[i] = dma_map_page(mdev->device, in register_dma_recv_pages()
993 if (dma_mapping_error(mdev->device, recv_buf->dma_addrs[i])) in register_dma_recv_pages()
1000 dma_unmap_single(mdev->device, recv_buf->dma_addrs[j], in register_dma_recv_pages()
1003 kvfree(recv_buf->dma_addrs); in register_dma_recv_pages()
1013 dma_unmap_single(mdev->device, recv_buf->dma_addrs[i], in unregister_dma_recv_pages()
1016 kvfree(recv_buf->dma_addrs); in unregister_dma_recv_pages()
cmd.h
61 dma_addr_t *dma_addrs; member
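
The mlx5 migration code keeps a kvcalloc()'d dma_addrs array in its receive buffer (cmd.h line 61), fills it with dma_map_page() results while checking dma_mapping_error(), unwinds any already-mapped pages on failure, and later writes each address as a big-endian MTT entry when creating the mkey (cmd.c line 244). Below is a hedged sketch of that register/unregister pair; the struct, field names, and DMA_FROM_DEVICE direction are assumptions for a receive buffer, not read from the hits.

#include <asm/byteorder.h>
#include <linux/dma-mapping.h>
#include <linux/mm.h>
#include <linux/slab.h>
#include <linux/types.h>

/* Hypothetical receive buffer, mirroring mlx5vf's recv_buf bookkeeping. */
struct recv_buf {
	struct page **page_list;
	dma_addr_t *dma_addrs;
	u32 npages;
};

static int register_dma_recv_pages(struct device *dev, struct recv_buf *rb)
{
	u32 i, j;

	rb->dma_addrs = kvcalloc(rb->npages, sizeof(*rb->dma_addrs),
				 GFP_KERNEL);
	if (!rb->dma_addrs)
		return -ENOMEM;

	for (i = 0; i < rb->npages; i++) {
		rb->dma_addrs[i] = dma_map_page(dev, rb->page_list[i], 0,
						PAGE_SIZE, DMA_FROM_DEVICE);
		if (dma_mapping_error(dev, rb->dma_addrs[i]))
			goto err_unwind;
	}
	return 0;

err_unwind:
	/* Unmap only the pages that made it through dma_map_page(). */
	for (j = 0; j < i; j++)
		dma_unmap_page(dev, rb->dma_addrs[j], PAGE_SIZE,
			       DMA_FROM_DEVICE);
	kvfree(rb->dma_addrs);
	rb->dma_addrs = NULL;
	return -ENOMEM;
}

static void unregister_dma_recv_pages(struct device *dev, struct recv_buf *rb)
{
	u32 i;

	for (i = 0; i < rb->npages; i++)
		dma_unmap_page(dev, rb->dma_addrs[i], PAGE_SIZE,
			       DMA_FROM_DEVICE);
	kvfree(rb->dma_addrs);
	rb->dma_addrs = NULL;
}

/* When building the device's translation table, addresses go in big-endian. */
static void fill_mtt(__be64 *mtt, const struct recv_buf *rb)
{
	u32 i;

	for (i = 0; i < rb->npages; i++)
		*mtt++ = cpu_to_be64(rb->dma_addrs[i]);
}
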
/drivers/net/ethernet/mediatek/
mtk_star_emac.c
243 dma_addr_t dma_addrs[MTK_STAR_RING_NUM_DESCS]; member
325 desc_data->dma_addr = ring->dma_addrs[ring->tail]; in mtk_star_ring_pop_tail()
328 ring->dma_addrs[ring->tail] = 0; in mtk_star_ring_pop_tail()
351 ring->dma_addrs[ring->head] = desc_data->dma_addr; in mtk_star_ring_push_head()
720 ring->dma_addrs[i] = dma_addr; in mtk_star_prepare_rx_skbs()
735 if (!ring->dma_addrs[i]) in mtk_star_ring_free_skbs()
738 desc_data.dma_addr = ring->dma_addrs[i]; in mtk_star_ring_free_skbs()
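
mtk_star_emac keeps a fixed-size shadow array of DMA addresses alongside its descriptor ring: push_head records the mapping at ring->head, pop_tail reads and zeroes the entry at ring->tail, and the free path skips slots that are still zero. A small hedged sketch of that shadow-array idea follows; the ring size, names, and the PAGE_SIZE/DMA_BIDIRECTIONAL unmap parameters are placeholders rather than the driver's values.

#include <linux/dma-mapping.h>
#include <linux/mm.h>
#include <linux/types.h>

#define RING_NUM_DESCS 128	/* placeholder for MTK_STAR_RING_NUM_DESCS */

/*
 * Shadow bookkeeping next to the hardware descriptor ring: the hardware only
 * sees the descriptors, while the driver keeps the dma_addr_t it needs for
 * dma_unmap_page() in a parallel array indexed the same way.
 */
struct tiny_ring {
	dma_addr_t dma_addrs[RING_NUM_DESCS];
	unsigned int head;
	unsigned int tail;
};

static void ring_push_head(struct tiny_ring *ring, dma_addr_t dma_addr)
{
	ring->dma_addrs[ring->head] = dma_addr;
	ring->head = (ring->head + 1) % RING_NUM_DESCS;
}

/* Return the mapping for the completed slot and mark it free (zero). */
static dma_addr_t ring_pop_tail(struct tiny_ring *ring)
{
	dma_addr_t dma_addr = ring->dma_addrs[ring->tail];

	ring->dma_addrs[ring->tail] = 0;
	ring->tail = (ring->tail + 1) % RING_NUM_DESCS;
	return dma_addr;
}

static void ring_free_all(struct device *dev, struct tiny_ring *ring)
{
	unsigned int i;

	for (i = 0; i < RING_NUM_DESCS; i++) {
		if (!ring->dma_addrs[i])
			continue;	/* never filled, or already popped */
		dma_unmap_page(dev, ring->dma_addrs[i], PAGE_SIZE,
			       DMA_BIDIRECTIONAL);
		ring->dma_addrs[i] = 0;
	}
}
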