/drivers/iommu/

D | tegra-gart.c
    60  #define for_each_gart_pte(gart, iova) \   (argument)
    66  unsigned long iova, unsigned long pte) in gart_set_pte()
    73  unsigned long iova) in gart_read_pte()
    85  unsigned long iova; in do_gart_setup()   (local)
    95  unsigned long iova, size_t bytes) in gart_iova_range_invalid()
    101  static inline bool gart_pte_valid(struct gart_device *gart, unsigned long iova) in gart_pte_valid()
    167  static inline int __gart_iommu_map(struct gart_device *gart, unsigned long iova, in __gart_iommu_map()
    180  static int gart_iommu_map(struct iommu_domain *domain, unsigned long iova, in gart_iommu_map()
    197  unsigned long iova) in __gart_iommu_unmap()
    209  static size_t gart_iommu_unmap(struct iommu_domain *domain, unsigned long iova, in gart_iommu_unmap()
    [all …]

D | iova.c
    235  iova_insert_rbtree(struct rb_root *root, struct iova *iova, in iova_insert_rbtree()
    395  static void free_iova_mem(struct iova *iova) in free_iova_mem()
    497  struct iova *iova = to_iova(node); in private_find_iova()   (local)
    510  static void remove_iova(struct iova_domain *iovad, struct iova *iova) in remove_iova()
    527  struct iova *iova; in find_iova()   (local)
    544  __free_iova(struct iova_domain *iovad, struct iova *iova) in __free_iova()
    566  struct iova *iova; in free_iova()   (local)
    784  struct iova *iova, *tmp; in put_iova_domain()   (local)
    800  struct iova *iova = to_iova(node); in __is_range_overlap()   (local)
    810  struct iova *iova; in alloc_and_init_iova()   (local)
    [all …]

D | tegra-smmu.c
    156  static unsigned int iova_pd_index(unsigned long iova) in iova_pd_index()
    161  static unsigned int iova_pt_index(unsigned long iova) in iova_pt_index()
    223  unsigned long iova) in smmu_flush_tlb_section()
    238  unsigned long iova) in smmu_flush_tlb_group()
    535  static void tegra_smmu_set_pde(struct tegra_smmu_as *as, unsigned long iova, in tegra_smmu_set_pde()
    556  static u32 *tegra_smmu_pte_offset(struct page *pt_page, unsigned long iova) in tegra_smmu_pte_offset()
    563  static u32 *tegra_smmu_pte_lookup(struct tegra_smmu_as *as, unsigned long iova, in tegra_smmu_pte_lookup()
    581  static u32 *as_get_pte(struct tegra_smmu_as *as, dma_addr_t iova, in as_get_pte()
    619  static void tegra_smmu_pte_get_use(struct tegra_smmu_as *as, unsigned long iova) in tegra_smmu_pte_get_use()
    626  static void tegra_smmu_pte_put_use(struct tegra_smmu_as *as, unsigned long iova) in tegra_smmu_pte_put_use()
    [all …]

D | exynos-iommu.c
    97  #define section_offs(iova) (iova & (SECT_SIZE - 1))   (argument)
    99  #define lpage_offs(iova) (iova & (LPAGE_SIZE - 1))   (argument)
    101  #define spage_offs(iova) (iova & (SPAGE_SIZE - 1))   (argument)
    106  static u32 lv1ent_offset(sysmmu_iova_t iova) in lv1ent_offset()
    111  static u32 lv2ent_offset(sysmmu_iova_t iova) in lv2ent_offset()
    181  static sysmmu_pte_t *section_entry(sysmmu_pte_t *pgtable, sysmmu_iova_t iova) in section_entry()
    186  static sysmmu_pte_t *page_entry(sysmmu_pte_t *sent, sysmmu_iova_t iova) in page_entry()
    314  sysmmu_iova_t iova, unsigned int num_inv) in __sysmmu_tlb_invalidate_entry()
    513  sysmmu_iova_t iova) in sysmmu_tlb_invalidate_flpdcache()
    533  sysmmu_iova_t iova, size_t size) in sysmmu_tlb_invalidate_entry()
    [all …]

D | io-pgtable-arm-v7s.c
    432  unsigned long iova, phys_addr_t paddr, int prot, in arm_v7s_init_pte()
    497  static int __arm_v7s_map(struct arm_v7s_io_pgtable *data, unsigned long iova, in __arm_v7s_map()
    544  static int arm_v7s_map_pages(struct io_pgtable_ops *ops, unsigned long iova, in arm_v7s_map_pages()
    579  static int arm_v7s_map(struct io_pgtable_ops *ops, unsigned long iova, in arm_v7s_map()
    603  unsigned long iova, int idx, int lvl, in arm_v7s_split_cont()
    630  unsigned long iova, size_t size, in arm_v7s_split_blk_unmap()
    675  unsigned long iova, size_t size, int lvl, in __arm_v7s_unmap()
    745  static size_t arm_v7s_unmap_pages(struct io_pgtable_ops *ops, unsigned long iova, in arm_v7s_unmap_pages()
    767  static size_t arm_v7s_unmap(struct io_pgtable_ops *ops, unsigned long iova, in arm_v7s_unmap()
    774  unsigned long iova) in arm_v7s_iova_to_phys()
    [all …]

D | rockchip-iommu.c
    318  static u32 rk_iova_dte_index(dma_addr_t iova) in rk_iova_dte_index()
    323  static u32 rk_iova_pte_index(dma_addr_t iova) in rk_iova_pte_index()
    328  static u32 rk_iova_page_offset(dma_addr_t iova) in rk_iova_page_offset()
    365  dma_addr_t iova; in rk_iommu_zap_lines()   (local)
    531  static void log_iova(struct rk_iommu *iommu, int index, dma_addr_t iova) in log_iova()
    583  dma_addr_t iova; in rk_iommu_irq()   (local)
    648  dma_addr_t iova) in rk_iommu_iova_to_phys()
    676  dma_addr_t iova, size_t size) in rk_iommu_zap_iova()
    705  dma_addr_t iova, size_t size) in rk_iommu_zap_iova_first_last()
    714  dma_addr_t iova) in rk_dte_get_page_table()
    [all …]

D | io-pgtable-arm.c
    288  unsigned long iova, phys_addr_t paddr, in arm_lpae_init_pte()
    351  static int __arm_lpae_map(struct arm_lpae_io_pgtable *data, unsigned long iova, in __arm_lpae_map()
    491  static int arm_lpae_map_pages(struct io_pgtable_ops *ops, unsigned long iova, in arm_lpae_map_pages()
    526  static int arm_lpae_map(struct io_pgtable_ops *ops, unsigned long iova, in arm_lpae_map()
    574  unsigned long iova, size_t size, in arm_lpae_split_blk_unmap()
    634  unsigned long iova, size_t size, size_t pgcount, in __arm_lpae_unmap()
    691  static size_t arm_lpae_unmap_pages(struct io_pgtable_ops *ops, unsigned long iova, in arm_lpae_unmap_pages()
    712  static size_t arm_lpae_unmap(struct io_pgtable_ops *ops, unsigned long iova, in arm_lpae_unmap()
    719  unsigned long iova) in arm_lpae_iova_to_phys()
    1213  static void __init dummy_tlb_flush(unsigned long iova, size_t size, in dummy_tlb_flush()
    [all …]

D | sun50i-iommu.c
    163  static u32 sun50i_iova_get_dte_index(dma_addr_t iova) in sun50i_iova_get_dte_index()
    168  static u32 sun50i_iova_get_pte_index(dma_addr_t iova) in sun50i_iova_get_pte_index()
    173  static u32 sun50i_iova_get_page_offset(dma_addr_t iova) in sun50i_iova_get_page_offset()
    483  dma_addr_t iova, gfp_t gfp) in sun50i_dte_get_page_table()
    521  static int sun50i_iommu_map(struct iommu_domain *domain, unsigned long iova, in sun50i_iommu_map()
    554  static size_t sun50i_iommu_unmap(struct iommu_domain *domain, unsigned long iova, in sun50i_iommu_unmap()
    579  dma_addr_t iova) in sun50i_iommu_iova_to_phys()
    779  unsigned master, phys_addr_t iova, in sun50i_iommu_report_fault()
    795  phys_addr_t iova; in sun50i_iommu_handle_pt_irq()   (local)
    818  phys_addr_t iova; in sun50i_iommu_handle_perm_irq()   (local)

D | msm_iommu.c
    138  static void __flush_iotlb_range(unsigned long iova, size_t size, in __flush_iotlb_range()
    170  static void __flush_iotlb_walk(unsigned long iova, size_t size, in __flush_iotlb_walk()
    177  unsigned long iova, size_t granule, void *cookie) in __flush_iotlb_page()
    477  static int msm_iommu_map(struct iommu_domain *domain, unsigned long iova, in msm_iommu_map()
    491  static void msm_iommu_sync_map(struct iommu_domain *domain, unsigned long iova, in msm_iommu_sync_map()
    499  static size_t msm_iommu_unmap(struct iommu_domain *domain, unsigned long iova, in msm_iommu_unmap()

D | iommu.c
    2412  phys_addr_t iommu_iova_to_phys(struct iommu_domain *domain, dma_addr_t iova) in iommu_iova_to_phys()
    2424  static size_t iommu_pgsize(struct iommu_domain *domain, unsigned long iova, in iommu_pgsize()
    2478  static int __iommu_map_pages(struct iommu_domain *domain, unsigned long iova, in __iommu_map_pages()
    2502  static int __iommu_map(struct iommu_domain *domain, unsigned long iova, in __iommu_map()
    2562  static int _iommu_map(struct iommu_domain *domain, unsigned long iova, in _iommu_map()
    2575  int iommu_map(struct iommu_domain *domain, unsigned long iova, in iommu_map()
    2583  int iommu_map_atomic(struct iommu_domain *domain, unsigned long iova, in iommu_map_atomic()
    2591  unsigned long iova, size_t size, in __iommu_unmap_pages()
    2604  unsigned long iova, size_t size, in __iommu_unmap()
    2658  unsigned long iova, size_t size) in iommu_unmap()
    [all …]

/drivers/vdpa/vdpa_user/

D | iova_domain.c
    104  u64 iova, u64 size, u64 paddr) in vduse_domain_map_bounce_page()
    124  u64 iova, u64 size) in vduse_domain_unmap_bounce_page()
    162  dma_addr_t iova, size_t size, in vduse_domain_bounce()
    190  vduse_domain_get_coherent_page(struct vduse_iova_domain *domain, u64 iova) in vduse_domain_get_coherent_page()
    211  vduse_domain_get_bounce_page(struct vduse_iova_domain *domain, u64 iova) in vduse_domain_get_bounce_page()
    309  dma_addr_t iova, size_t size) in vduse_domain_free_iova()
    325  dma_addr_t iova = vduse_domain_alloc_iova(iovad, size, limit); in vduse_domain_map_page()   (local)
    364  dma_addr_t iova = vduse_domain_alloc_iova(iovad, size, limit); in vduse_domain_alloc_coherent()   (local)
    422  unsigned long iova = vmf->pgoff << PAGE_SHIFT; in vduse_domain_mmap_fault()   (local)

/drivers/gpu/drm/msm/

D | msm_iommu.c
    32  static int msm_iommu_pagetable_unmap(struct msm_mmu *mmu, u64 iova, in msm_iommu_pagetable_unmap()
    51  static int msm_iommu_pagetable_map(struct msm_mmu *mmu, u64 iova, in msm_iommu_pagetable_map()
    129  static void msm_iommu_tlb_flush_walk(unsigned long iova, size_t size, in msm_iommu_tlb_flush_walk()
    135  unsigned long iova, size_t granule, void *cookie) in msm_iommu_tlb_add_page()
    229  unsigned long iova, int flags, void *arg) in msm_fault_handler()
    261  static int msm_iommu_map(struct msm_mmu *mmu, uint64_t iova, in msm_iommu_map()
    277  static int msm_iommu_unmap(struct msm_mmu *mmu, uint64_t iova, size_t len) in msm_iommu_unmap()

D | msm_gem.c
    379  struct msm_gem_address_space *aspace, uint64_t *iova, in get_iova_locked()
    446  struct msm_gem_address_space *aspace, uint64_t *iova, in get_and_pin_iova_range_locked()
    471  struct msm_gem_address_space *aspace, uint64_t *iova, in msm_gem_get_and_pin_iova_range()
    484  struct msm_gem_address_space *aspace, uint64_t *iova) in msm_gem_get_and_pin_iova_locked()
    491  struct msm_gem_address_space *aspace, uint64_t *iova) in msm_gem_get_and_pin_iova()
    501  struct msm_gem_address_space *aspace, uint64_t *iova) in msm_gem_get_iova()
    1295  struct drm_gem_object **bo, uint64_t *iova) in msm_gem_kernel_new()

/drivers/fpga/

D | dfl-afu-dma-region.c
    125  u64 iova, u64 size) in dma_region_check_iova()
    245  afu_dma_region_find(struct dfl_feature_platform_data *pdata, u64 iova, u64 size) in afu_dma_region_find()
    285  afu_dma_region_find_iova(struct dfl_feature_platform_data *pdata, u64 iova) in afu_dma_region_find_iova()
    302  u64 user_addr, u64 length, u64 *iova) in afu_dma_map_region()
    380  int afu_dma_unmap_region(struct dfl_feature_platform_data *pdata, u64 iova) in afu_dma_unmap_region()

/drivers/vfio/

D | vfio_iommu_type1.c
    93  dma_addr_t iova; /* Device address */   (member)
    133  dma_addr_t iova; /* Device address */   (member)
    140  dma_addr_t iova;   (member)
    332  static struct vfio_pfn *vfio_find_vpfn(struct vfio_dma *dma, dma_addr_t iova) in vfio_find_vpfn()
    376  static int vfio_add_to_pfn_list(struct vfio_dma *dma, dma_addr_t iova, in vfio_add_to_pfn_list()
    400  unsigned long iova) in vfio_iova_get_vfio_pfn()
    682  dma_addr_t iova = vaddr - dma->vaddr + dma->iova; in vfio_pin_pages_remote()   (local)
    785  static long vfio_unpin_pages_remote(struct vfio_dma *dma, dma_addr_t iova, in vfio_unpin_pages_remote()
    840  static int vfio_unpin_page_external(struct vfio_dma *dma, dma_addr_t iova, in vfio_unpin_page_external()
    869  dma_addr_t iova; in vfio_iommu_type1_pin_pages()   (local)
    [all …]

/drivers/staging/media/ipu3/

D | ipu3-mmu.c
    154  static inline void address_to_pte_idx(unsigned long iova, u32 *l1pt_idx, in address_to_pte_idx()
    210  static int __imgu_mmu_map(struct imgu_mmu *mmu, unsigned long iova, in __imgu_mmu_map()
    251  int imgu_mmu_map(struct imgu_mmu_info *info, unsigned long iova, in imgu_mmu_map()
    299  size_t imgu_mmu_map_sg(struct imgu_mmu_info *info, unsigned long iova, in imgu_mmu_map_sg()
    339  unsigned long iova, size_t size) in __imgu_mmu_unmap()
    379  size_t imgu_mmu_unmap(struct imgu_mmu_info *info, unsigned long iova, in imgu_mmu_unmap()

D | ipu3-dmamap.c
    102  struct iova *iova; in imgu_dmamap_alloc()   (local)
    153  struct iova *iova; in imgu_dmamap_unmap()   (local)
    189  struct iova *iova; in imgu_dmamap_map_sg()   (local)

/drivers/staging/media/tegra-vde/

D | iommu.c
    24  struct iova *iova; in tegra_vde_iommu_map()   (local)
    51  void tegra_vde_iommu_unmap(struct tegra_vde *vde, struct iova *iova) in tegra_vde_iommu_unmap()
    64  struct iova *iova; in tegra_vde_iommu_init()   (local)

/drivers/infiniband/sw/rxe/

D | rxe_mr.c
    25  int mr_check_range(struct rxe_mr *mr, u64 iova, size_t length) in mr_check_range()
    112  int rxe_mr_init_user(struct rxe_pd *pd, u64 start, u64 length, u64 iova, in rxe_mr_init_user()
    218  static void lookup_iova(struct rxe_mr *mr, u64 iova, int *m_out, int *n_out, in lookup_iova()
    254  void *iova_to_vaddr(struct rxe_mr *mr, u64 iova, int length) in iova_to_vaddr()
    294  int rxe_mr_copy(struct rxe_mr *mr, u64 iova, void *addr, int length, in rxe_mr_copy()
    384  u64 iova; in copy_data()   (local)

/drivers/gpu/drm/panfrost/

D | panfrost_mmu.c
    61  u64 iova, u64 size) in lock_region()
    81  u64 iova, u64 size, u32 op) in mmu_hw_do_operation_locked()
    98  u64 iova, u64 size, u32 op) in mmu_hw_do_operation()
    245  u64 iova, u64 size) in panfrost_mmu_flush_range()
    260  u64 iova, int prot, struct sg_table *sgt) in mmu_map_sg()
    320  u64 iova = mapping->mmnode.start << PAGE_SHIFT; in panfrost_mmu_unmap()   (local)
    356  static void mmu_tlb_flush_walk(unsigned long iova, size_t size, size_t granule, in mmu_tlb_flush_walk()

/drivers/gpu/drm/etnaviv/

D | etnaviv_mmu.c
    17  unsigned long iova, size_t size) in etnaviv_context_unmap()
    40  unsigned long iova, phys_addr_t paddr, in etnaviv_context_map()
    72  static int etnaviv_iommu_map(struct etnaviv_iommu_context *context, u32 iova, in etnaviv_iommu_map()
    102  static void etnaviv_iommu_unmap(struct etnaviv_iommu_context *context, u32 iova, in etnaviv_iommu_unmap()
    243  u32 iova; in etnaviv_iommu_map_gem()   (local)

/drivers/iommu/amd/

D | io_pgtable.c
    30  static void v1_tlb_flush_walk(unsigned long iova, size_t size, in v1_tlb_flush_walk()
    36  unsigned long iova, size_t granule, in v1_tlb_add_page()
    391  static int iommu_v1_map_page(struct io_pgtable_ops *ops, unsigned long iova, in iommu_v1_map_page()
    458  unsigned long iova, in iommu_v1_unmap_page()
    490  static phys_addr_t iommu_v1_iova_to_phys(struct io_pgtable_ops *ops, unsigned long iova) in iommu_v1_iova_to_phys()

/drivers/iommu/arm/arm-smmu/

D | arm-smmu.c
    279  static void arm_smmu_tlb_inv_range_s1(unsigned long iova, size_t size, in arm_smmu_tlb_inv_range_s1()
    307  static void arm_smmu_tlb_inv_range_s2(unsigned long iova, size_t size, in arm_smmu_tlb_inv_range_s2()
    327  static void arm_smmu_tlb_inv_walk_s1(unsigned long iova, size_t size, in arm_smmu_tlb_inv_walk_s1()
    343  unsigned long iova, size_t granule, in arm_smmu_tlb_add_page_s1()
    350  static void arm_smmu_tlb_inv_walk_s2(unsigned long iova, size_t size, in arm_smmu_tlb_inv_walk_s2()
    359  unsigned long iova, size_t granule, in arm_smmu_tlb_add_page_s2()
    366  static void arm_smmu_tlb_inv_walk_s2_v1(unsigned long iova, size_t size, in arm_smmu_tlb_inv_walk_s2_v1()
    379  unsigned long iova, size_t granule, in arm_smmu_tlb_add_page_s2_v1()
    412  unsigned long iova; in arm_smmu_context_fault()   (local)
    1199  static int arm_smmu_map_pages(struct iommu_domain *domain, unsigned long iova, in arm_smmu_map_pages()
    [all …]

D | qcom_iommu.c
    157  static void qcom_iommu_tlb_inv_range_nosync(unsigned long iova, size_t size, in qcom_iommu_tlb_inv_range_nosync()
    179  static void qcom_iommu_tlb_flush_walk(unsigned long iova, size_t size, in qcom_iommu_tlb_flush_walk()
    187  unsigned long iova, size_t granule, in qcom_iommu_tlb_add_page()
    203  u64 iova; in qcom_iommu_fault()   (local)
    424  static int qcom_iommu_map(struct iommu_domain *domain, unsigned long iova, in qcom_iommu_map()
    441  static size_t qcom_iommu_unmap(struct iommu_domain *domain, unsigned long iova, in qcom_iommu_unmap()
    486  dma_addr_t iova) in qcom_iommu_iova_to_phys()

/drivers/s390/cio/

D | vfio_ccw_cp.c
    59  static int pfn_array_alloc(struct pfn_array *pa, u64 iova, unsigned int len) in pfn_array_alloc()
    134  static bool pfn_array_iova_pinned(struct pfn_array *pa, unsigned long iova) in pfn_array_iova_pinned()
    194  void *to, u64 iova, in copy_from_iova()
    371  static int ccwchain_calc_length(u64 iova, struct channel_program *cp) in ccwchain_calc_length()
    513  u64 iova; in ccwchain_fetch_direct()   (local)
    860  bool cp_iova_pinned(struct channel_program *cp, u64 iova) in cp_iova_pinned()