Lines matching refs:iova (ARM IOMMU dma-mapping helpers)
1148 dma_addr_t iova; in __alloc_iova() local
1192 iova = mapping->base + (mapping_size * i); in __alloc_iova()
1193 iova += start << PAGE_SHIFT; in __alloc_iova()
1195 return iova; in __alloc_iova()
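
The __alloc_iova() matches above only show how the returned bus address is assembled from mapping->base, the extension index and the bit offset. Below is a minimal hedged sketch of the surrounding allocator, assuming it scans per-extension bitmaps with bitmap_find_next_zero_area(); the bitmaps/nr_bitmaps/bits fields and the bitmap scan (and the locking, omitted here) are assumptions, only the arithmetic of lines 1192-1195 comes from the matches. Kernel context (linux/bitmap.h, linux/iommu.h, asm/dma-iommu.h) is assumed for this and the later sketches.

/*
 * Hedged sketch of __alloc_iova(): claim a run of page-sized bits in one
 * of the per-extension bitmaps, then turn (extension index, bit offset)
 * into a bus address.
 */
static dma_addr_t __alloc_iova_sketch(struct dma_iommu_mapping *mapping,
                                      size_t size)
{
        unsigned int count = PAGE_ALIGN(size) >> PAGE_SHIFT;
        size_t mapping_size = mapping->bits << PAGE_SHIFT;      /* assumed field */
        unsigned int start, i;
        dma_addr_t iova;

        for (i = 0; i < mapping->nr_bitmaps; i++) {             /* assumed field */
                start = bitmap_find_next_zero_area(mapping->bitmaps[i],
                                                   mapping->bits, 0, count, 0);
                if (start > mapping->bits)
                        continue;
                bitmap_set(mapping->bitmaps[i], start, count);

                /* extension base plus page offset, as on lines 1192-1195 */
                iova = mapping->base + (mapping_size * i);
                iova += start << PAGE_SHIFT;
                return iova;
        }
        return DMA_MAPPING_ERROR;
}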
1354 dma_addr_t dma_addr, iova; in __iommu_create_mapping() local
1361 iova = dma_addr; in __iommu_create_mapping()
1374 ret = iommu_map(mapping->domain, iova, phys, len, in __iommu_create_mapping()
1378 iova += len; in __iommu_create_mapping()
1383 iommu_unmap(mapping->domain, dma_addr, iova-dma_addr); in __iommu_create_mapping()
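
The __iommu_create_mapping() matches show the iova bookkeeping: start at the allocated dma_addr, advance iova by each mapped length, and on failure unmap exactly iova - dma_addr bytes. A hedged sketch of how that loop might look; the page-array walk, the contiguity coalescing and the prot value are assumptions, only the iova arithmetic and the unwind of line 1383 are taken from the matches.

/*
 * Hedged sketch: map an array of pages at a freshly allocated iova,
 * coalescing physically contiguous runs, and unwind whatever was mapped
 * so far if iommu_map() fails.
 */
static dma_addr_t __iommu_create_mapping_sketch(struct dma_iommu_mapping *mapping,
                                                struct page **pages, size_t size,
                                                int prot)
{
        unsigned int count = PAGE_ALIGN(size) >> PAGE_SHIFT;
        dma_addr_t dma_addr, iova;
        unsigned int i;
        int ret;

        dma_addr = __alloc_iova(mapping, size);
        if (dma_addr == DMA_MAPPING_ERROR)
                return dma_addr;

        iova = dma_addr;
        for (i = 0; i < count; ) {
                /* assumed: coalesce physically contiguous pages into one run */
                unsigned int next = i + 1;
                phys_addr_t phys = page_to_phys(pages[i]);
                size_t len;

                while (next < count &&
                       page_to_phys(pages[next]) == phys + (next - i) * PAGE_SIZE)
                        next++;

                len = (next - i) << PAGE_SHIFT;
                ret = iommu_map(mapping->domain, iova, phys, len, prot);
                if (ret < 0)
                        goto fail;
                iova += len;
                i = next;
        }
        return dma_addr;

fail:
        /* as on line 1383: unmap only what was mapped before the failure */
        iommu_unmap(mapping->domain, dma_addr, iova - dma_addr);
        __free_iova(mapping, dma_addr, size);
        return DMA_MAPPING_ERROR;
}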
1388 static int __iommu_remove_mapping(struct device *dev, dma_addr_t iova, size_t size) in __iommu_remove_mapping() argument
1396 size = PAGE_ALIGN((iova & ~PAGE_MASK) + size); in __iommu_remove_mapping()
1397 iova &= PAGE_MASK; in __iommu_remove_mapping()
1399 iommu_unmap(mapping->domain, iova, size); in __iommu_remove_mapping()
1400 __free_iova(mapping, iova, size); in __iommu_remove_mapping()
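
__iommu_remove_mapping() is almost fully recoverable from its matches: the sub-page offset is folded into the length, both ends are rounded out to page boundaries, and the range is unmapped and returned to the allocator. A sketch assembling those lines; only the to_dma_iommu_mapping() lookup and the return value are assumptions.

/*
 * Hedged sketch of __iommu_remove_mapping(): round (iova, size) out to
 * whole pages, tear down the IOMMU entries, then free the iova range.
 */
static int __iommu_remove_mapping_sketch(struct device *dev, dma_addr_t iova,
                                         size_t size)
{
        struct dma_iommu_mapping *mapping = to_dma_iommu_mapping(dev); /* assumed */

        /* fold the offset within the first page into the length (line 1396) */
        size = PAGE_ALIGN((iova & ~PAGE_MASK) + size);
        iova &= PAGE_MASK;

        iommu_unmap(mapping->domain, iova, size);
        __free_iova(mapping, iova, size);
        return 0;
}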
1621 dma_addr_t iova, iova_base; in __map_sg_chunk() local
1630 iova_base = iova = __alloc_iova(mapping, size); in __map_sg_chunk()
1631 if (iova == DMA_MAPPING_ERROR) in __map_sg_chunk()
1643 ret = iommu_map(mapping->domain, iova, phys, len, prot); in __map_sg_chunk()
1647 iova += len; in __map_sg_chunk()
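
In __map_sg_chunk() the matches show one iova window (iova_base) reserved for a whole scatterlist chunk, with iova advancing as each segment is mapped into it back to back. A hedged sketch under the assumption that the caller sizes the chunk so the page-aligned segment lengths sum to size; the scatterlist walk, the prot value and the failure unwind are assumptions.

/*
 * Hedged sketch of the scatterlist-chunk path: allocate one iova window
 * for the chunk, map each segment's physical range consecutively inside
 * it, and unwind on failure.
 */
static int __map_sg_chunk_sketch(struct dma_iommu_mapping *mapping,
                                 struct scatterlist *sg, size_t size,
                                 dma_addr_t *handle, int prot)
{
        size_t left = PAGE_ALIGN(size);
        dma_addr_t iova, iova_base;
        struct scatterlist *s;
        int ret;

        iova_base = iova = __alloc_iova(mapping, size);
        if (iova == DMA_MAPPING_ERROR)
                return -ENOMEM;

        for (s = sg; left; s = sg_next(s)) {            /* assumed walk */
                phys_addr_t phys = page_to_phys(sg_page(s));
                unsigned int len = PAGE_ALIGN(s->offset + s->length);

                ret = iommu_map(mapping->domain, iova, phys, len, prot);
                if (ret < 0)
                        goto fail;

                iova += len;
                left -= len;
        }

        *handle = iova_base;
        return 0;

fail:
        /* assumed unwind: drop what was mapped, then free the whole window */
        iommu_unmap(mapping->domain, iova_base, iova - iova_base);
        __free_iova(mapping, iova_base, size);
        return ret;
}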
1894 dma_addr_t iova = handle & PAGE_MASK; in arm_coherent_iommu_unmap_page() local
1898 if (!iova) in arm_coherent_iommu_unmap_page()
1901 iommu_unmap(mapping->domain, iova, len); in arm_coherent_iommu_unmap_page()
1902 __free_iova(mapping, iova, len); in arm_coherent_iommu_unmap_page()
1918 dma_addr_t iova = handle & PAGE_MASK; in arm_iommu_unmap_page() local
1919 struct page *page = phys_to_page(iommu_iova_to_phys(mapping->domain, iova)); in arm_iommu_unmap_page()
1923 if (!iova) in arm_iommu_unmap_page()
1929 iommu_unmap(mapping->domain, iova, len); in arm_iommu_unmap_page()
1930 __free_iova(mapping, iova, len); in arm_iommu_unmap_page()
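
The two unmap_page groups above differ only in the struct page lookup: the coherent variant just masks the handle, bails out on a zero iova and unmaps, while arm_iommu_unmap_page() first resolves the page through iommu_iova_to_phys() so the CPU caches can be maintained before the translation disappears. A combined hedged sketch; __dma_page_dev_to_cpu(), the offset/len handling and the DMA-direction plumbing are assumptions about code not shown in the matches.

/*
 * Hedged sketch of the unmap-page pattern: mask the handle down to a
 * page-aligned iova, optionally do CPU cache maintenance on the backing
 * page, then iommu_unmap() and __free_iova() the range.
 */
static void arm_iommu_unmap_page_sketch(struct device *dev, dma_addr_t handle,
                                        size_t size, enum dma_data_direction dir,
                                        bool coherent)
{
        struct dma_iommu_mapping *mapping = to_dma_iommu_mapping(dev); /* assumed */
        dma_addr_t iova = handle & PAGE_MASK;
        unsigned int offset = handle & ~PAGE_MASK;
        size_t len = PAGE_ALIGN(offset + size);

        if (!iova)
                return;

        if (!coherent) {
                /* recover the page so the CPU caches can be cleaned/invalidated */
                struct page *page =
                        phys_to_page(iommu_iova_to_phys(mapping->domain, iova));

                __dma_page_dev_to_cpu(page, offset, size, dir); /* assumed helper */
        }

        iommu_unmap(mapping->domain, iova, len);
        __free_iova(mapping, iova, len);
}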
1979 dma_addr_t iova = dma_handle & PAGE_MASK; in arm_iommu_unmap_resource() local
1983 if (!iova) in arm_iommu_unmap_resource()
1986 iommu_unmap(mapping->domain, iova, len); in arm_iommu_unmap_resource()
1987 __free_iova(mapping, iova, len); in arm_iommu_unmap_resource()
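
arm_iommu_unmap_resource() repeats the same mask, zero-iova check, iommu_unmap() and __free_iova() sequence, but its matches show no iommu_iova_to_phys() lookup, presumably because an MMIO resource has no backing struct page and therefore no CPU cache maintenance to perform.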
1994 dma_addr_t iova = handle & PAGE_MASK; in arm_iommu_sync_single_for_cpu() local
1995 struct page *page = phys_to_page(iommu_iova_to_phys(mapping->domain, iova)); in arm_iommu_sync_single_for_cpu()
1998 if (!iova) in arm_iommu_sync_single_for_cpu()
2008 dma_addr_t iova = handle & PAGE_MASK; in arm_iommu_sync_single_for_device() local
2009 struct page *page = phys_to_page(iommu_iova_to_phys(mapping->domain, iova)); in arm_iommu_sync_single_for_device()
2012 if (!iova) in arm_iommu_sync_single_for_device()
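
The sync_single matches mirror the unmap path's page lookup but, as expected for a sync, stop at CPU cache maintenance and never touch the IOMMU mapping or the iova allocator. A hedged sketch covering both directions; the __dma_page_cpu_to_dev()/__dma_page_dev_to_cpu() helpers and the offset handling are assumptions.

/*
 * Hedged sketch of the sync_single pattern: resolve the handle back to
 * its struct page and hand it to the appropriate cache-maintenance
 * helper, leaving the IOMMU translation in place.
 */
static void arm_iommu_sync_single_sketch(struct device *dev, dma_addr_t handle,
                                         size_t size, enum dma_data_direction dir,
                                         bool for_device)
{
        struct dma_iommu_mapping *mapping = to_dma_iommu_mapping(dev); /* assumed */
        dma_addr_t iova = handle & PAGE_MASK;
        unsigned int offset = handle & ~PAGE_MASK;
        struct page *page;

        if (!iova)
                return;

        page = phys_to_page(iommu_iova_to_phys(mapping->domain, iova));

        if (for_device)
                __dma_page_cpu_to_dev(page, offset, size, dir); /* assumed helper */
        else
                __dma_page_dev_to_cpu(page, offset, size, dir); /* assumed helper */
}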