
Searched refs:dma_handle (Results 1 – 19 of 19) sorted by relevance
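Every hit below is an architecture back end that either produces a dma_handle (the device-visible bus address written through the dma_addr_t * out-parameter of an alloc/map routine) or consumes one (the matching unmap/free routine takes the same handle back). For orientation, here is a minimal consumer-side sketch of that contract using the generic dma_alloc_coherent()/dma_free_coherent() API these back ends sit behind; the pdev device and the 4096-byte size are illustrative assumptions, mirroring the platform_resource_setup_memory() hit from arch/sh/mm/consistent.c below, and the function itself is not from the tree:

    #include <linux/dma-mapping.h>
    #include <linux/platform_device.h>

    /*
     * Illustrative sketch only: allocate a coherent buffer, receive its
     * bus address in dma_handle, program that address into the device,
     * then free the buffer with the same handle.
     */
    static int example_coherent_buffer(struct platform_device *pdev)
    {
            dma_addr_t dma_handle;
            void *cpu_addr;

            cpu_addr = dma_alloc_coherent(&pdev->dev, 4096, &dma_handle,
                                          GFP_KERNEL);
            if (!cpu_addr)
                    return -ENOMEM;

            /* ... hand dma_handle to the device, touch cpu_addr from the CPU ... */

            dma_free_coherent(&pdev->dev, 4096, cpu_addr, dma_handle);
            return 0;
    }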

/arch/powerpc/kernel/
dma-iommu.c  27 bool arch_dma_unmap_page_direct(struct device *dev, dma_addr_t dma_handle) in arch_dma_unmap_page_direct() argument
32 return is_direct_handle(dev, dma_handle); in arch_dma_unmap_page_direct()
79 dma_addr_t *dma_handle, gfp_t flag, in dma_iommu_alloc_coherent() argument
83 dma_handle, dev->coherent_dma_mask, flag, in dma_iommu_alloc_coherent()
88 void *vaddr, dma_addr_t dma_handle, in dma_iommu_free_coherent() argument
91 iommu_free_coherent(get_iommu_table_base(dev), size, vaddr, dma_handle); in dma_iommu_free_coherent()
109 static void dma_iommu_unmap_page(struct device *dev, dma_addr_t dma_handle, in dma_iommu_unmap_page() argument
113 iommu_unmap_page(get_iommu_table_base(dev), dma_handle, size, direction, in dma_iommu_unmap_page()
iommu.c  627 dma_addr_t dma_handle = sg->dma_address; in ppc_iommu_unmap_sg() local
631 npages = iommu_num_pages(dma_handle, sg->dma_length, in ppc_iommu_unmap_sg()
633 __iommu_free(tbl, dma_handle, npages); in ppc_iommu_unmap_sg()
857 dma_addr_t dma_handle = DMA_MAPPING_ERROR; in iommu_map_page() local
874 dma_handle = iommu_alloc(dev, tbl, vaddr, npages, direction, in iommu_map_page()
877 if (dma_handle == DMA_MAPPING_ERROR) { in iommu_map_page()
885 dma_handle |= (uaddr & ~IOMMU_PAGE_MASK(tbl)); in iommu_map_page()
888 return dma_handle; in iommu_map_page()
891 void iommu_unmap_page(struct iommu_table *tbl, dma_addr_t dma_handle, in iommu_unmap_page() argument
900 npages = iommu_num_pages(dma_handle, size, in iommu_unmap_page()
[all …]
/arch/sh/mm/
consistent.c  41 dma_addr_t dma_handle; in platform_resource_setup_memory() local
54 buf = dma_alloc_coherent(&pdev->dev, memsize, &dma_handle, GFP_KERNEL); in platform_resource_setup_memory()
61 r->start = dma_handle; in platform_resource_setup_memory()
/arch/m68k/kernel/
dma.c  37 void *arch_dma_alloc(struct device *dev, size_t size, dma_addr_t *dma_handle, in arch_dma_alloc() argument
48 *dma_handle = virt_to_phys(ret); in arch_dma_alloc()
54 dma_addr_t dma_handle, unsigned long attrs) in arch_dma_free() argument
/arch/x86/include/asm/xen/
swiotlb-xen.h  8 dma_addr_t *dma_handle);
/arch/mips/jazz/
jazzdma.c  492 dma_addr_t *dma_handle, gfp_t gfp, unsigned long attrs) in jazz_dma_alloc() argument
506 *dma_handle = vdma_alloc(virt_to_phys(ret), size); in jazz_dma_alloc()
507 if (*dma_handle == DMA_MAPPING_ERROR) in jazz_dma_alloc()
518 dma_addr_t dma_handle, unsigned long attrs) in jazz_dma_free() argument
520 vdma_free(dma_handle); in jazz_dma_free()
/arch/powerpc/include/asm/
iommu.h  263 size_t size, dma_addr_t *dma_handle,
266 void *vaddr, dma_addr_t dma_handle);
272 extern void iommu_unmap_page(struct iommu_table *tbl, dma_addr_t dma_handle,
/arch/parisc/kernel/
pci-dma.c  402 dma_addr_t *dma_handle, gfp_t gfp, unsigned long attrs) in arch_dma_alloc() argument
418 *dma_handle = (dma_addr_t) paddr; in arch_dma_alloc()
424 dma_addr_t dma_handle, unsigned long attrs) in arch_dma_free() argument
435 free_pages((unsigned long)__va(dma_handle), order); in arch_dma_free()
/arch/sparc/kernel/
iommu.c  625 dma_addr_t dma_handle = sg->dma_address; in dma_4u_unmap_sg() local
633 npages = iommu_num_pages(dma_handle, len, IO_PAGE_SIZE); in dma_4u_unmap_sg()
635 entry = ((dma_handle - iommu->tbl.table_map_base) in dma_4u_unmap_sg()
639 dma_handle &= IO_PAGE_MASK; in dma_4u_unmap_sg()
641 strbuf_flush(strbuf, iommu, dma_handle, ctx, in dma_4u_unmap_sg()
647 iommu_tbl_range_free(&iommu->tbl, dma_handle, npages, in dma_4u_unmap_sg()
pci_sun4v.c  647 dma_addr_t dma_handle = sg->dma_address; in dma_4v_unmap_sg() local
655 npages = iommu_num_pages(dma_handle, len, IO_PAGE_SIZE); in dma_4v_unmap_sg()
657 if (dma_handle <= DMA_BIT_MASK(32)) { in dma_4v_unmap_sg()
664 entry = ((dma_handle - tbl->table_map_base) >> shift); in dma_4v_unmap_sg()
665 dma_4v_iommu_demap(dev, devhandle, dma_handle, iotsb_num, in dma_4v_unmap_sg()
667 iommu_tbl_range_free(tbl, dma_handle, npages, in dma_4v_unmap_sg()
/arch/powerpc/platforms/pseries/
ibmebus.c  69 dma_addr_t *dma_handle, in ibmebus_alloc_coherent() argument
76 *dma_handle = (dma_addr_t)mem; in ibmebus_alloc_coherent()
83 dma_addr_t dma_handle, in ibmebus_free_coherent() argument
vio.c  483 dma_addr_t *dma_handle, gfp_t flag, in vio_dma_iommu_alloc_coherent() argument
495 dma_handle, dev->coherent_dma_mask, flag, in vio_dma_iommu_alloc_coherent()
506 void *vaddr, dma_addr_t dma_handle, in vio_dma_iommu_free_coherent() argument
511 iommu_free_coherent(get_iommu_table_base(dev), size, vaddr, dma_handle); in vio_dma_iommu_free_coherent()
539 static void vio_dma_iommu_unmap_page(struct device *dev, dma_addr_t dma_handle, in vio_dma_iommu_unmap_page() argument
547 iommu_unmap_page(tbl, dma_handle, size, direction, attrs); in vio_dma_iommu_unmap_page()
/arch/powerpc/platforms/ps3/
system-bus.c  507 dma_addr_t *dma_handle, gfp_t flag, in ps3_alloc_coherent() argument
524 result = ps3_dma_map(dev->d_region, virt_addr, size, dma_handle, in ps3_alloc_coherent()
540 dma_handle = NULL; in ps3_alloc_coherent()
545 dma_addr_t dma_handle, unsigned long attrs) in ps3_free_coherent() argument
549 ps3_dma_unmap(dev->d_region, dma_handle, size); in ps3_free_coherent()
/arch/s390/pci/
pci_dma.c  418 dma_addr_t *dma_handle, gfp_t flag, in s390_dma_alloc() argument
439 if (dma_handle) in s390_dma_alloc()
440 *dma_handle = map; in s390_dma_alloc()
445 void *vaddr, dma_addr_t dma_handle, in s390_dma_free() argument
452 s390_dma_unmap_pages(dev, dma_handle, size, DMA_BIDIRECTIONAL, 0); in s390_dma_free()
/arch/sparc/mm/
io-unit.c  217 dma_addr_t *dma_handle, gfp_t gfp, unsigned long attrs) in iounit_alloc() argument
236 *dma_handle = addr; in iounit_alloc()
iommu.c  314 dma_addr_t *dma_handle, gfp_t gfp, unsigned long attrs) in sbus_iommu_alloc() argument
387 *dma_handle = iommu->start + (ioptex << PAGE_SHIFT); in sbus_iommu_alloc()
/arch/ia64/hp/common/
sba_iommu.c  1118 sba_alloc_coherent(struct device *dev, size_t size, dma_addr_t *dma_handle, in sba_alloc_coherent() argument
1138 *dma_handle = page_to_phys(page); in sba_alloc_coherent()
1145 if (likely((*dma_handle & ~dev->coherent_dma_mask) == 0)) { in sba_alloc_coherent()
1147 dev->coherent_dma_mask, *dma_handle); in sba_alloc_coherent()
1157 *dma_handle = sba_map_page(&ioc->sac_only_dev->dev, page, 0, size, in sba_alloc_coherent()
1159 if (dma_mapping_error(dev, *dma_handle)) in sba_alloc_coherent()
1175 dma_addr_t dma_handle, unsigned long attrs) in sba_free_coherent() argument
1177 sba_unmap_page(dev, dma_handle, size, 0, 0); in sba_free_coherent()
/arch/arm/mm/
dma-mapping.c  1470 static void arm_iommu_unmap_resource(struct device *dev, dma_addr_t dma_handle, in arm_iommu_unmap_resource() argument
1475 dma_addr_t iova = dma_handle & PAGE_MASK; in arm_iommu_unmap_resource()
1476 unsigned int offset = dma_handle & ~PAGE_MASK; in arm_iommu_unmap_resource()
1810 void *arch_dma_alloc(struct device *dev, size_t size, dma_addr_t *dma_handle, in arch_dma_alloc() argument
1813 return __dma_alloc(dev, size, dma_handle, gfp, in arch_dma_alloc()
1819 dma_addr_t dma_handle, unsigned long attrs) in arch_dma_free() argument
1821 __arm_dma_free(dev, size, cpu_addr, dma_handle, attrs, false); in arch_dma_free()
/arch/x86/xen/
mmu_pv.c  2323 dma_addr_t *dma_handle) in xen_create_contiguous_region() argument
2354 *dma_handle = virt_to_machine(vstart).maddr; in xen_create_contiguous_region()