Searched refs:iovad (Results 1 – 7 of 7) sorted by relevance
/drivers/iommu/ |
D | iova.c |
    27    static bool iova_rcache_insert(struct iova_domain *iovad,
    30    static unsigned long iova_rcache_get(struct iova_domain *iovad,
    33    static void init_iova_rcaches(struct iova_domain *iovad);
    34    static void free_iova_rcaches(struct iova_domain *iovad);
    35    static void fq_destroy_all_entries(struct iova_domain *iovad);
    39    init_iova_domain(struct iova_domain *iovad, unsigned long granule,    in init_iova_domain() argument
    49    spin_lock_init(&iovad->iova_rbtree_lock);    in init_iova_domain()
    50    iovad->rbroot = RB_ROOT;    in init_iova_domain()
    51    iovad->cached32_node = NULL;    in init_iova_domain()
    52    iovad->granule = granule;    in init_iova_domain()
    [all …]
D | dma-iommu.c |
    51    struct iova_domain iovad;    member
    62    return cookie->iovad.granule;    in cookie_msi_granule()
    151   if (cookie->type == IOMMU_DMA_IOVA_COOKIE && cookie->iovad.granule)    in iommu_put_dma_cookie()
    152   put_iova_domain(&cookie->iovad);    in iommu_put_dma_cookie()
    204   struct iova_domain *iovad = &cookie->iovad;    in cookie_init_hw_msi_region() local
    208   start -= iova_offset(iovad, start);    in cookie_init_hw_msi_region()
    209   num_pages = iova_align(iovad, end - start) >> iova_shift(iovad);    in cookie_init_hw_msi_region()
    220   start += iovad->granule;    in cookie_init_hw_msi_region()
    230   struct iova_domain *iovad = &cookie->iovad;    in iova_reserve_iommu_regions() local
    243   lo = iova_pfn(iovad, region->start);    in iova_reserve_iommu_regions()
    [all …]
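Most of the dma-iommu.c hits are the granule arithmetic from include/linux/iova.h: iova_offset() yields the sub-granule offset of an address, iova_align() rounds a length up to whole IOVA pages, and iova_shift()/iova_pfn() convert between bus addresses and IOVA page frame numbers. A minimal sketch of that pattern, rounding a byte range out to granule boundaries before reserving it; the wrapper name reserve_byte_range is hypothetical, while the helpers and reserve_iova() are the real API:

#include <linux/errno.h>
#include <linux/iova.h>

/* Hypothetical helper: keep [start, start + len) away from the allocator. */
static int reserve_byte_range(struct iova_domain *iovad,
			      dma_addr_t start, size_t len)
{
	/* Pull 'start' back to a granule boundary and grow 'len' to match. */
	len   += iova_offset(iovad, start);
	start -= iova_offset(iovad, start);

	/* Round the length up to whole IOVA pages. */
	len = iova_align(iovad, len);

	/* reserve_iova() takes an inclusive PFN range. */
	if (!reserve_iova(iovad, iova_pfn(iovad, start),
			  iova_pfn(iovad, start + len - 1)))
		return -ENOMEM;
	return 0;
}

cookie_init_hw_msi_region() and iova_reserve_iommu_regions() in the hits above do essentially this to keep MSI windows and reserved regions out of the IOVA space handed to devices.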
D | intel-iommu.c |
    397   struct iova_domain iovad;  /* iova's that belong to this domain */    member
    1624  static void iommu_flush_iova(struct iova_domain *iovad)    in iommu_flush_iova() argument
    1629  domain = container_of(iovad, struct dmar_domain, iovad);    in iommu_flush_iova()
    1934  copy_reserved_iova(&reserved_iova_list, &domain->iovad);    in domain_reserve_special_ranges()
    1958  init_iova_domain(&domain->iovad, VTD_PAGE_SIZE, IOVA_START_PFN,    in domain_init()
    1961  err = init_iova_flush_queue(&domain->iovad,    in domain_init()
    2023  put_iova_domain(&domain->iovad);    in domain_exit()
    2666  if (!reserve_iova(&domain->iovad, dma_to_mm_pfn(first_vpfn),    in iommu_domain_identity_map()
    3497  iova_pfn = alloc_iova_fast(&domain->iovad, nrpages,    in intel_alloc_iova()
    3502  iova_pfn = alloc_iova_fast(&domain->iovad, nrpages, IOVA_PFN(dma_mask));    in intel_alloc_iova()
    [all …]
D | amd_iommu.c |
    117   static void iova_domain_flush_tlb(struct iova_domain *iovad);
    127   struct iova_domain iovad;    member
    1585  pfn = alloc_iova_fast(&dma_dom->iovad, pages,    in dma_ops_alloc_iova()
    1589  pfn = alloc_iova_fast(&dma_dom->iovad, pages, IOVA_PFN(dma_mask));    in dma_ops_alloc_iova()
    1601  free_iova_fast(&dma_dom->iovad, address, pages);    in dma_ops_free_iova()
    1775  static void iova_domain_flush_tlb(struct iova_domain *iovad)    in iova_domain_flush_tlb() argument
    1779  dom = container_of(iovad, struct dma_ops_domain, iovad);    in iova_domain_flush_tlb()
    1795  put_iova_domain(&dom->iovad);    in dma_ops_domain_free()
    1827  init_iova_domain(&dma_dom->iovad, PAGE_SIZE,    in dma_ops_domain_alloc()
    1830  if (init_iova_flush_queue(&dma_dom->iovad, iova_domain_flush_tlb, NULL))    in dma_ops_domain_alloc()
    [all …]
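The intel-iommu.c and amd_iommu.c hits share one shape: the iova_domain is embedded in a driver-private domain structure, container_of() recovers the owner inside the flush callback handed to init_iova_flush_queue(), and an allocation is first attempted below 4 GiB before retrying with the device's full DMA mask. A rough sketch of that shape with illustrative names, not the drivers' own; alloc_iova_fast() takes three arguments in this tree (later kernels add a flush_rcache flag), and IOVA_PFN() is a per-driver macro rather than part of iova.h:

#include <linux/dma-mapping.h>
#include <linux/iova.h>

/* Convenience macro defined locally by both drivers in the hits above. */
#define IOVA_PFN(addr)	((addr) >> PAGE_SHIFT)

struct my_dma_domain {				/* illustrative, not a real kernel struct */
	/* ... driver-specific state ... */
	struct iova_domain iovad;		/* embedded, as in dmar_domain / dma_ops_domain */
};

/* Flush-queue callback: recover the owning domain from the embedded iovad. */
static void my_domain_flush_tlb(struct iova_domain *iovad)
{
	struct my_dma_domain *dom = container_of(iovad, struct my_dma_domain, iovad);

	/* ... flush the hardware TLB for 'dom' before its IOVAs are recycled ... */
	(void)dom;
}

static unsigned long my_alloc_iova(struct my_dma_domain *dom,
				   unsigned long pages, u64 dma_mask)
{
	unsigned long pfn = 0;

	/* Prefer the 32-bit window so 64-bit-capable devices leave it free... */
	if (dma_mask > DMA_BIT_MASK(32))
		pfn = alloc_iova_fast(&dom->iovad, pages,
				      IOVA_PFN(DMA_BIT_MASK(32)));

	/* ...and only fall back to the device's full mask when that fails. */
	if (!pfn)
		pfn = alloc_iova_fast(&dom->iovad, pages, IOVA_PFN(dma_mask));

	return pfn;				/* 0 means allocation failed */
}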
/drivers/misc/mic/scif/ |
D | scif_rma.h |
    102   struct iova_domain iovad;    member
D | scif_rma.c |
    42    init_iova_domain(&rma->iovad, PAGE_SIZE, SCIF_IOVA_START_PFN,    in scif_rma_ep_init()
    1020  iova_ptr = reserve_iova(&ep->rma_info.iovad, page_index,    in scif_get_window_offset()
    1025  iova_ptr = alloc_iova(&ep->rma_info.iovad, num_pages,    in scif_get_window_offset()
    1048  free_iova(&ep->rma_info.iovad, offset >> PAGE_SHIFT);    in scif_free_window_offset()
D | scif_epd.c |
    107   put_iova_domain(&ep->rma_info.iovad);    in scif_cleanup_zombie_epd()
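The SCIF endpoints are the one non-IOMMU user in this result set: they borrow the iova allocator purely as a range allocator for RMA window offsets. A sketch of that whole lifecycle under illustrative names, following the calls visible in scif_rma.c and scif_epd.c; note that init_iova_domain() in this tree still takes a fourth, upper-limit PFN argument, which later kernels dropped:

#include <linux/dma-mapping.h>
#include <linux/iova.h>

/* Illustrative limits; SCIF uses SCIF_IOVA_START_PFN and a 64-bit end PFN. */
#define MY_START_PFN	1UL
#define MY_END_PFN	(DMA_BIT_MASK(64) >> PAGE_SHIFT)

static struct iova_domain my_iovad;

static void my_offsets_init(void)
{
	/* One IOVA page per PAGE_SIZE bytes of window space. */
	init_iova_domain(&my_iovad, PAGE_SIZE, MY_START_PFN, MY_END_PFN);
}

/* Grab 'num_pages' pages, either at a caller-fixed offset or anywhere. */
static struct iova *my_offset_get(unsigned long num_pages, unsigned long fixed_pfn)
{
	if (fixed_pfn)
		return reserve_iova(&my_iovad, fixed_pfn,
				    fixed_pfn + num_pages - 1);
	return alloc_iova(&my_iovad, num_pages, MY_END_PFN,
			  true /* size-aligned */);
}

static void my_offset_put(unsigned long pfn)
{
	free_iova(&my_iovad, pfn);
}

static void my_offsets_exit(void)
{
	put_iova_domain(&my_iovad);	/* releases every remaining iova node */
}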