
Searched refs: iovad (Results 1 – 6 of 6) sorted by relevance

/kernel/linux/linux-5.10/drivers/iommu/
iova.c
18 static bool iova_rcache_insert(struct iova_domain *iovad,
21 static unsigned long iova_rcache_get(struct iova_domain *iovad,
24 static void init_iova_rcaches(struct iova_domain *iovad);
25 static void free_iova_rcaches(struct iova_domain *iovad);
26 static void fq_destroy_all_entries(struct iova_domain *iovad);
30 init_iova_domain(struct iova_domain *iovad, unsigned long granule, in init_iova_domain() argument
40 spin_lock_init(&iovad->iova_rbtree_lock); in init_iova_domain()
41 iovad->rbroot = RB_ROOT; in init_iova_domain()
42 iovad->cached_node = &iovad->anchor.node; in init_iova_domain()
43 iovad->cached32_node = &iovad->anchor.node; in init_iova_domain()
[all …]
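Note: the iova.c hits above are the allocator's setup path. init_iova_domain() takes the domain, the allocation granule, and the first usable PFN, and seeds the red-black tree so that both cached_node and cached32_node initially point at the anchor entry. A minimal kernel-style sketch of a caller follows; my_iovad, my_setup, the SZ_4K granule and the start PFN of 1 are illustrative placeholders, not taken from these results:

#include <linux/iova.h>
#include <linux/sizes.h>

static struct iova_domain my_iovad;

static void my_setup(void)
{
	/* 4 KiB granule; PFNs below 1 are never handed out. */
	init_iova_domain(&my_iovad, SZ_4K, 1);
}

static void my_teardown(void)
{
	/* Tears down the rbtree and the rcaches the statics above manage. */
	put_iova_domain(&my_iovad);
}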
dma-iommu.c
42 struct iova_domain iovad; member
55 return cookie->iovad.granule; in cookie_msi_granule()
138 if (cookie->type == IOMMU_DMA_IOVA_COOKIE && cookie->iovad.granule) in iommu_put_dma_cookie()
139 put_iova_domain(&cookie->iovad); in iommu_put_dma_cookie()
172 struct iova_domain *iovad = &cookie->iovad; in cookie_init_hw_msi_region() local
176 start -= iova_offset(iovad, start); in cookie_init_hw_msi_region()
177 num_pages = iova_align(iovad, end - start) >> iova_shift(iovad); in cookie_init_hw_msi_region()
188 start += iovad->granule; in cookie_init_hw_msi_region()
195 struct iova_domain *iovad) in iova_reserve_pci_windows() argument
206 lo = iova_pfn(iovad, window->res->start - window->offset); in iova_reserve_pci_windows()
[all …]
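Note: dma-iommu.c embeds the iova_domain inside its per-domain cookie, and iommu_put_dma_cookie() only calls put_iova_domain() once granule is non-zero, i.e. after initialization. The iova_reserve_pci_windows() hit converts each host-bridge window into PFNs and carves it out of the allocator. A hedged sketch of that reservation pattern; reserve_window and its window_start/window_end/offset parameters stand in for the values the real code reads from the bridge's resource list:

#include <linux/iova.h>

static void reserve_window(struct iova_domain *iovad,
			   dma_addr_t window_start, dma_addr_t window_end,
			   dma_addr_t offset)
{
	/* Translate bus addresses into IOVA page frame numbers... */
	unsigned long lo = iova_pfn(iovad, window_start - offset);
	unsigned long hi = iova_pfn(iovad, window_end - offset);

	/* ...and mark the range so DMA allocations can never overlap it. */
	reserve_iova(iovad, lo, hi);
}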
/kernel/linux/linux-5.10/include/linux/
iova.h
105 static inline unsigned long iova_shift(struct iova_domain *iovad) in iova_shift() argument
107 return __ffs(iovad->granule); in iova_shift()
110 static inline unsigned long iova_mask(struct iova_domain *iovad) in iova_mask() argument
112 return iovad->granule - 1; in iova_mask()
115 static inline size_t iova_offset(struct iova_domain *iovad, dma_addr_t iova) in iova_offset() argument
117 return iova & iova_mask(iovad); in iova_offset()
120 static inline size_t iova_align(struct iova_domain *iovad, size_t size) in iova_align() argument
122 return ALIGN(size, iovad->granule); in iova_align()
125 static inline dma_addr_t iova_dma_addr(struct iova_domain *iovad, struct iova *iova) in iova_dma_addr() argument
127 return (dma_addr_t)iova->pfn_lo << iova_shift(iovad); in iova_dma_addr()
[all …]
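Note: the iova.h helpers above are all bit arithmetic on a power-of-two granule: the shift is __ffs(granule), the mask is granule - 1, and a PFN becomes a DMA address by shifting left. A standalone userspace model of the same identities, assuming a 4 KiB granule (the sample IOVA value is arbitrary):

#include <assert.h>
#include <stdint.h>
#include <stdio.h>

int main(void)
{
	uint64_t granule = 4096;                   /* iovad->granule */
	unsigned shift = __builtin_ctzll(granule); /* iova_shift(): __ffs() */
	uint64_t mask = granule - 1;               /* iova_mask() */
	uint64_t iova = 0xfee00104;

	uint64_t offset  = iova & mask;            /* iova_offset() */
	uint64_t aligned = (iova + mask) & ~mask;  /* iova_align() on a size */
	uint64_t pfn_lo  = iova >> shift;
	uint64_t dma     = pfn_lo << shift;        /* iova_dma_addr() */

	assert(dma + offset == iova);              /* the helpers round-trip */
	printf("shift=%u offset=%#llx aligned=%#llx\n",
	       shift, (unsigned long long)offset, (unsigned long long)aligned);
	return 0;
}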
intel-iommu.h
544 struct iova_domain iovad; /* iova's that belong to this domain */ member
/kernel/linux/linux-5.10/drivers/iommu/intel/
iommu.c
1721 static void iommu_flush_iova(struct iova_domain *iovad) in iommu_flush_iova() argument
1726 domain = container_of(iovad, struct dmar_domain, iovad); in iommu_flush_iova()
2079 put_iova_domain(&domain->iovad); in domain_exit()
3551 iova_pfn = alloc_iova_fast(&domain->iovad, nrpages, in intel_alloc_iova()
3556 iova_pfn = alloc_iova_fast(&domain->iovad, nrpages, in intel_alloc_iova()
3623 free_iova_fast(&domain->iovad, iova_pfn, dma_to_mm_pfn(size)); in __intel_map_single()
3671 !has_iova_flush_queue(&domain->iovad)) { in intel_unmap()
3675 free_iova_fast(&domain->iovad, iova_pfn, dma_to_mm_pfn(nrpages)); in intel_unmap()
3678 queue_iova(&domain->iovad, iova_pfn, nrpages, in intel_unmap()
3822 free_iova_fast(&domain->iovad, iova_pfn, dma_to_mm_pfn(size)); in intel_map_sg()
[all …]
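Note: intel-iommu keeps one iova_domain per dmar_domain (the member shown from intel-iommu.h above), which is why iommu_flush_iova() can recover the owning domain with container_of(). The intel_alloc_iova() hits show a two-step strategy: try below 4 GiB first (skipping the costly rcache flush), then retry against the device's full DMA mask. A hedged kernel-style sketch of that fallback; alloc_pfn and its nrpages/dma_mask parameters are placeholders for what the driver computes:

#include <linux/dma-mapping.h>
#include <linux/iova.h>

static unsigned long alloc_pfn(struct iova_domain *iovad,
			       unsigned long nrpages, u64 dma_mask)
{
	unsigned long pfn = 0;

	/* First attempt: keep the IOVA below 4 GiB without flushing the
	 * rcaches, mirroring the first alloc_iova_fast() call above. */
	if (dma_mask > DMA_BIT_MASK(32))
		pfn = alloc_iova_fast(iovad, nrpages,
				      DMA_BIT_MASK(32) >> iova_shift(iovad),
				      false);

	/* Fallback: the device's full mask, flushing rcaches if needed. */
	if (!pfn)
		pfn = alloc_iova_fast(iovad, nrpages,
				      dma_mask >> iova_shift(iovad), true);

	return pfn; /* 0 on exhaustion; pair with free_iova_fast() */
}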
/kernel/linux/patches/linux-5.10/yangfan_patch/
drivers.patch
39149 + struct iova_domain *iovad;
39157 + iovad = &cookie->iovad;
39160 + pfn_lo = iova_pfn(iovad, base);
39161 + pfn_hi = iova_pfn(iovad, base + size - 1);
39162 + if (!reserve_iova(iovad, pfn_lo, pfn_hi))
39175 + struct iova_domain *iovad;
39181 + iovad = &((struct iommu_dma_cookie *)domain->iova_cookie)->iovad;
39182 + iovad->best_fit = true;
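Note: the yangfan patch hunks reuse the same primitives out of tree. The first hunk reserves a PFN range through the DMA cookie's domain (the same iova_pfn()/reserve_iova() pattern sketched earlier), and the second adds and enables a best_fit flag on struct iova_domain, a field that does not exist in mainline 5.10. A commented restatement of that second hunk, under the assumption that the patch also makes the otherwise private struct iommu_dma_cookie layout visible to this caller:

/* 'best_fit' is introduced by this patch, not by mainline linux-5.10. */
struct iova_domain *iovad =
	&((struct iommu_dma_cookie *)domain->iova_cookie)->iovad;

iovad->best_fit = true;	/* prefer the smallest gap that fits, trading a
			 * longer rbtree search for less fragmentation */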