Home
last modified time | relevance | path

Searched refs:iovad (Results 1 – 7 of 7) sorted by relevance

/drivers/iommu/
iova.c:18 static bool iova_rcache_insert(struct iova_domain *iovad,
21 static unsigned long iova_rcache_get(struct iova_domain *iovad,
24 static void init_iova_rcaches(struct iova_domain *iovad);
25 static void free_iova_rcaches(struct iova_domain *iovad);
26 static void fq_destroy_all_entries(struct iova_domain *iovad);
30 init_iova_domain(struct iova_domain *iovad, unsigned long granule, in init_iova_domain() argument
40 spin_lock_init(&iovad->iova_rbtree_lock); in init_iova_domain()
41 iovad->rbroot = RB_ROOT; in init_iova_domain()
42 iovad->cached_node = &iovad->anchor.node; in init_iova_domain()
43 iovad->cached32_node = &iovad->anchor.node; in init_iova_domain()
[all …]
dma-iommu.c:42 struct iova_domain iovad; member
55 return cookie->iovad.granule; in cookie_msi_granule()
138 if (cookie->type == IOMMU_DMA_IOVA_COOKIE && cookie->iovad.granule) in iommu_put_dma_cookie()
139 put_iova_domain(&cookie->iovad); in iommu_put_dma_cookie()
172 struct iova_domain *iovad = &cookie->iovad; in cookie_init_hw_msi_region() local
176 start -= iova_offset(iovad, start); in cookie_init_hw_msi_region()
177 num_pages = iova_align(iovad, end - start) >> iova_shift(iovad); in cookie_init_hw_msi_region()
188 start += iovad->granule; in cookie_init_hw_msi_region()
195 struct iova_domain *iovad) in iova_reserve_pci_windows() argument
206 lo = iova_pfn(iovad, window->res->start - window->offset); in iova_reserve_pci_windows()
[all …]
intel-iommu.c:1532 static void iommu_flush_iova(struct iova_domain *iovad) in iommu_flush_iova() argument
1537 domain = container_of(iovad, struct dmar_domain, iovad); in iommu_flush_iova()
1830 copy_reserved_iova(&reserved_iova_list, &domain->iovad); in domain_reserve_special_ranges()
1854 init_iova_domain(&domain->iovad, VTD_PAGE_SIZE, IOVA_START_PFN); in domain_init()
1856 err = init_iova_flush_queue(&domain->iovad, in domain_init()
1911 put_iova_domain(&domain->iovad); in domain_exit()
2675 if (!reserve_iova(&domain->iovad, dma_to_mm_pfn(first_vpfn), in iommu_domain_identity_map()
3396 iova_pfn = alloc_iova_fast(&domain->iovad, nrpages, in intel_alloc_iova()
3401 iova_pfn = alloc_iova_fast(&domain->iovad, nrpages, in intel_alloc_iova()
3555 free_iova_fast(&domain->iovad, iova_pfn, dma_to_mm_pfn(size)); in __intel_map_single()
[all …]
amd_iommu.c:105 static void iova_domain_flush_tlb(struct iova_domain *iovad);
115 struct iova_domain iovad; member
1774 pfn = alloc_iova_fast(&dma_dom->iovad, pages, in dma_ops_alloc_iova()
1778 pfn = alloc_iova_fast(&dma_dom->iovad, pages, in dma_ops_alloc_iova()
1791 free_iova_fast(&dma_dom->iovad, address, pages); in dma_ops_free_iova()
1880 static void iova_domain_flush_tlb(struct iova_domain *iovad) in iova_domain_flush_tlb() argument
1884 dom = container_of(iovad, struct dma_ops_domain, iovad); in iova_domain_flush_tlb()
1898 put_iova_domain(&dom->iovad); in dma_ops_domain_free()
1930 init_iova_domain(&dma_dom->iovad, PAGE_SIZE, IOVA_START_PFN); in dma_ops_domain_alloc()
1932 if (init_iova_flush_queue(&dma_dom->iovad, iova_domain_flush_tlb, NULL)) in dma_ops_domain_alloc()
[all …]
/drivers/misc/mic/scif/
scif_rma.h:102 struct iova_domain iovad; member
scif_rma.c:33 init_iova_domain(&rma->iovad, PAGE_SIZE, SCIF_IOVA_START_PFN); in scif_rma_ep_init()
1001 iova_ptr = reserve_iova(&ep->rma_info.iovad, page_index, in scif_get_window_offset()
1006 iova_ptr = alloc_iova(&ep->rma_info.iovad, num_pages, in scif_get_window_offset()
1029 free_iova(&ep->rma_info.iovad, offset >> PAGE_SHIFT); in scif_free_window_offset()
scif_epd.c:98 put_iova_domain(&ep->rma_info.iovad); in scif_cleanup_zombie_epd()