Home
last modified time | relevance | path

Searched refs:pte (Results 1 – 25 of 49) sorted by relevance

12

/drivers/iommu/amd/
io_pgtable_v2.c 44 static inline bool is_large_pte(u64 pte) in is_large_pte() argument
46 return (pte & IOMMU_PAGE_PSE); in is_large_pte()
64 static inline void *get_pgtable_pte(u64 pte) in get_pgtable_pte() argument
66 return iommu_phys_to_virt(pte & PM_ADDR_MASK); in get_pgtable_pte()
71 u64 pte; in set_pte_attr() local
73 pte = __sme_set(paddr & PM_ADDR_MASK); in set_pte_attr()
74 pte |= IOMMU_PAGE_PRESENT | IOMMU_PAGE_USER; in set_pte_attr()
75 pte |= IOMMU_PAGE_ACCESS | IOMMU_PAGE_DIRTY; in set_pte_attr()
78 pte |= IOMMU_PAGE_RW; in set_pte_attr()
82 pte |= IOMMU_PAGE_PSE; in set_pte_attr()
[all …]
io_pgtable.c 50 static u64 *first_pte_l7(u64 *pte, unsigned long *page_size, in first_pte_l7() argument
56 pg_size = PTE_PAGE_SIZE(*pte); in first_pte_l7()
59 fpte = (u64 *)(((unsigned long)pte) & pte_mask); in first_pte_l7()
157 u64 *pte; in increase_address_space() local
159 pte = (void *)get_zeroed_page(gfp); in increase_address_space()
160 if (!pte) in increase_address_space()
172 *pte = PM_LEVEL_PDE(domain->iop.mode, iommu_virt_to_phys(domain->iop.root)); in increase_address_space()
174 domain->iop.root = pte; in increase_address_space()
183 amd_iommu_domain_set_pgtable(domain, pte, domain->iop.mode); in increase_address_space()
185 pte = NULL; in increase_address_space()
[all …]
amd_iommu_types.h 317 #define PM_PTE_LEVEL(pte) (((pte) >> 9) & 0x7ULL) argument
356 #define PTE_PAGE_SIZE(pte) \ argument
357 (1ULL << (1 + ffz(((pte) | 0xfffULL))))
405 #define IOMMU_PTE_PRESENT(pte) ((pte) & IOMMU_PTE_PR) argument
406 #define IOMMU_PTE_PAGE(pte) (iommu_phys_to_virt((pte) & IOMMU_PAGE_MASK)) argument
407 #define IOMMU_PTE_MODE(pte) (((pte) >> 9) & 0x07) argument
/drivers/iommu/
io-pgtable-arm-v7s.c 83 #define ARM_V7S_PTE_IS_VALID(pte) (((pte) & 0x3) != 0) argument
84 #define ARM_V7S_PTE_IS_TABLE(pte, lvl) \ argument
85 ((lvl) == 1 && (((pte) & 0x3) == ARM_V7S_PTE_TYPE_TABLE))
172 static bool arm_v7s_pte_is_cont(arm_v7s_iopte pte, int lvl);
185 static arm_v7s_iopte to_mtk_iopte(phys_addr_t paddr, arm_v7s_iopte pte) in to_mtk_iopte() argument
188 pte |= ARM_V7S_ATTR_MTK_PA_BIT32; in to_mtk_iopte()
190 pte |= ARM_V7S_ATTR_MTK_PA_BIT33; in to_mtk_iopte()
192 pte |= ARM_V7S_ATTR_MTK_PA_BIT34; in to_mtk_iopte()
193 return pte; in to_mtk_iopte()
199 arm_v7s_iopte pte = paddr & ARM_V7S_LVL_MASK(lvl); in paddr_to_iopte() local
[all …]
io-pgtable-dart.c 65 #define iopte_deref(pte, d) __va(iopte_to_paddr(pte, d)) argument
82 dart_iopte pte; in paddr_to_iopte() local
88 pte = paddr >> APPLE_DART2_PADDR_SHIFT; in paddr_to_iopte()
89 pte &= APPLE_DART2_PADDR_MASK; in paddr_to_iopte()
91 return pte; in paddr_to_iopte()
94 static phys_addr_t iopte_to_paddr(dart_iopte pte, in iopte_to_paddr() argument
100 return pte & APPLE_DART1_PADDR_MASK; in iopte_to_paddr()
103 paddr = pte & APPLE_DART2_PADDR_MASK; in iopte_to_paddr()
129 dart_iopte pte = prot; in dart_init_pte() local
140 pte |= FIELD_PREP(APPLE_DART_PTE_SUBPAGE_START, 0); in dart_init_pte()
[all …]
io-pgtable-arm.c 138 #define iopte_deref(pte,d) __va(iopte_to_paddr(pte, d)) argument
140 #define iopte_type(pte) \ argument
141 (((pte) >> ARM_LPAE_PTE_TYPE_SHIFT) & ARM_LPAE_PTE_TYPE_MASK)
143 #define iopte_prot(pte) ((pte) & ARM_LPAE_PTE_ATTR_MASK) argument
157 static inline bool iopte_leaf(arm_lpae_iopte pte, int lvl, in iopte_leaf() argument
161 return iopte_type(pte) == ARM_LPAE_PTE_TYPE_PAGE; in iopte_leaf()
163 return iopte_type(pte) == ARM_LPAE_PTE_TYPE_BLOCK; in iopte_leaf()
169 arm_lpae_iopte pte = paddr; in paddr_to_iopte() local
172 return (pte | (pte >> (48 - 12))) & ARM_LPAE_PTE_ADDR_MASK; in paddr_to_iopte()
175 static phys_addr_t iopte_to_paddr(arm_lpae_iopte pte, in iopte_to_paddr() argument
[all …]
tegra-gart.c 66 unsigned long iova, unsigned long pte) in gart_set_pte() argument
69 writel_relaxed(pte, gart->regs + GART_ENTRY_DATA); in gart_set_pte()
75 unsigned long pte; in gart_read_pte() local
78 pte = readl_relaxed(gart->regs + GART_ENTRY_DATA); in gart_read_pte()
80 return pte; in gart_read_pte()
229 unsigned long pte; in gart_iommu_iova_to_phys() local
235 pte = gart_read_pte(gart, iova); in gart_iommu_iova_to_phys()
238 return pte & GART_PAGE_MASK; in gart_iommu_iova_to_phys()
rockchip-iommu.c 260 static inline bool rk_pte_is_page_valid(u32 pte) in rk_pte_is_page_valid() argument
262 return pte & RK_PTE_PAGE_VALID; in rk_pte_is_page_valid()
296 static u32 rk_mk_pte_invalid(u32 pte) in rk_mk_pte_invalid() argument
298 return pte & ~RK_PTE_PAGE_VALID; in rk_mk_pte_invalid()
541 u32 pte = 0; in log_iova() local
561 pte = *pte_addr; in log_iova()
563 if (!rk_pte_is_page_valid(pte)) in log_iova()
566 page_addr_phys = rk_ops->pt_address(pte) + page_offset; in log_iova()
567 page_flags = pte & RK_PTE_PAGE_FLAGS_MASK; in log_iova()
574 rk_dte_is_pt_valid(dte), &pte_addr_phys, pte, in log_iova()
[all …]
tegra-smmu.c 644 u32 *pte, dma_addr_t pte_dma, u32 val) in tegra_smmu_set_pte() argument
647 unsigned long offset = SMMU_OFFSET_IN_PAGE(pte); in tegra_smmu_set_pte()
649 *pte = val; in tegra_smmu_set_pte()
706 u32 *pte; in __tegra_smmu_map() local
712 pte = as_get_pte(as, iova, &pte_dma, page); in __tegra_smmu_map()
713 if (!pte) in __tegra_smmu_map()
717 if (*pte == 0) in __tegra_smmu_map()
728 tegra_smmu_set_pte(as, iova, pte, pte_dma, in __tegra_smmu_map()
740 u32 *pte; in __tegra_smmu_unmap() local
742 pte = tegra_smmu_pte_lookup(as, iova, &pte_dma); in __tegra_smmu_unmap()
[all …]
sun50i-iommu.c 256 static phys_addr_t sun50i_pte_get_page_address(u32 pte) in sun50i_pte_get_page_address() argument
258 return (phys_addr_t)pte & SUN50I_PTE_PAGE_ADDRESS_MASK; in sun50i_pte_get_page_address()
261 static enum sun50i_iommu_aci sun50i_get_pte_aci(u32 pte) in sun50i_get_pte_aci() argument
263 return FIELD_GET(SUN50I_PTE_ACI_MASK, pte); in sun50i_get_pte_aci()
266 static bool sun50i_pte_is_page_valid(u32 pte) in sun50i_pte_is_page_valid() argument
268 return pte & SUN50I_PTE_PAGE_VALID; in sun50i_pte_is_page_valid()
654 u32 dte, pte; in sun50i_iommu_iova_to_phys() local
662 pte = page_table[sun50i_iova_get_pte_index(iova)]; in sun50i_iommu_iova_to_phys()
663 if (!sun50i_pte_is_page_valid(pte)) in sun50i_iommu_iova_to_phys()
666 return sun50i_pte_get_page_address(pte) + in sun50i_iommu_iova_to_phys()
/drivers/iommu/intel/
pasid.c 457 struct pasid_entry *pte; in intel_pasid_tear_down_entry() local
461 pte = intel_pasid_get_entry(dev, pasid); in intel_pasid_tear_down_entry()
462 if (WARN_ON(!pte) || !pasid_pte_is_present(pte)) { in intel_pasid_tear_down_entry()
467 did = pasid_get_domain_id(pte); in intel_pasid_tear_down_entry()
468 pgtt = pasid_pte_get_pgtt(pte); in intel_pasid_tear_down_entry()
473 clflush_cache_range(pte, sizeof(*pte)); in intel_pasid_tear_down_entry()
492 struct pasid_entry *pte, in pasid_flush_caches() argument
496 clflush_cache_range(pte, sizeof(*pte)); in pasid_flush_caches()
514 struct pasid_entry *pte; in intel_pasid_setup_first_level() local
546 pte = intel_pasid_get_entry(dev, pasid); in intel_pasid_setup_first_level()
[all …]
iommu.c 824 struct dma_pte *pte; in pgtable_walk() local
829 pte = &parent[offset]; in pgtable_walk()
830 if (!pte || (dma_pte_superpage(pte) || !dma_pte_present(pte))) { in pgtable_walk()
835 pr_info("pte level: %d, pte value: 0x%016llx\n", level, pte->val); in pgtable_walk()
840 parent = phys_to_virt(dma_pte_addr(pte)); in pgtable_walk()
849 struct pasid_entry *entries, *pte; in dmar_fault_dump_ptes() local
910 pte = &entries[index]; in dmar_fault_dump_ptes()
911 for (i = 0; i < ARRAY_SIZE(pte->val); i++) in dmar_fault_dump_ptes()
912 pr_info("pasid table entry[%d]: 0x%016llx\n", i, pte->val[i]); in dmar_fault_dump_ptes()
914 if (pasid_pte_get_pgtt(pte) == PASID_ENTRY_PGTT_FL_ONLY) { in dmar_fault_dump_ptes()
[all …]
iommu.h 668 static inline void dma_clear_pte(struct dma_pte *pte) in dma_clear_pte() argument
670 pte->val = 0; in dma_clear_pte()
673 static inline u64 dma_pte_addr(struct dma_pte *pte) in dma_pte_addr() argument
676 return pte->val & VTD_PAGE_MASK & (~DMA_FL_PTE_XD); in dma_pte_addr()
679 return __cmpxchg64(&pte->val, 0ULL, 0ULL) & in dma_pte_addr()
684 static inline bool dma_pte_present(struct dma_pte *pte) in dma_pte_present() argument
686 return (pte->val & 3) != 0; in dma_pte_present()
689 static inline bool dma_pte_superpage(struct dma_pte *pte) in dma_pte_superpage() argument
691 return (pte->val & DMA_PTE_LARGE_PAGE); in dma_pte_superpage()
694 static inline bool first_pte_in_page(struct dma_pte *pte) in first_pte_in_page() argument
[all …]
pasid.h 97 static inline bool pasid_pte_is_present(struct pasid_entry *pte) in pasid_pte_is_present() argument
99 return READ_ONCE(pte->val[0]) & PASID_PTE_PRESENT; in pasid_pte_is_present()
103 static inline u16 pasid_pte_get_pgtt(struct pasid_entry *pte) in pasid_pte_get_pgtt() argument
105 return (u16)((READ_ONCE(pte->val[0]) >> 6) & 0x7); in pasid_pte_get_pgtt()
/drivers/staging/media/atomisp/pci/mmu/
isp_mmu.c 68 unsigned int idx, unsigned int pte) in atomisp_set_pte() argument
71 *(pt_virt + idx) = pte; in atomisp_set_pte()
80 unsigned int pte) in isp_pte_to_pgaddr() argument
82 return mmu->driver->pte_to_phys(mmu, pte); in isp_pte_to_pgaddr()
88 unsigned int pte = mmu->driver->phys_to_pte(mmu, phys); in isp_pgaddr_to_pte_valid() local
90 return (unsigned int)(pte | ISP_PTE_VALID_MASK(mmu)); in isp_pgaddr_to_pte_valid()
164 unsigned int isp_virt, unsigned int pte) in mmu_unmap_l2_pte_error() argument
174 pte); in mmu_unmap_l2_pte_error()
179 unsigned int isp_virt, unsigned int pte) in mmu_unmap_l1_pte_error() argument
186 pte); in mmu_unmap_l1_pte_error()
[all …]
sh_mmu_mrfld.c 38 unsigned int pte) in sh_pte_to_phys() argument
42 return (phys_addr_t)((pte & ~mask) << ISP_PAGE_OFFSET); in sh_pte_to_phys()
48 unsigned int pte = sh_phys_to_pte(mmu, phys); in sh_get_pd_base() local
50 return HOST_ADDRESS(pte); in sh_get_pd_base()
/drivers/gpu/drm/i915/gt/
intel_ggtt.c 244 gen8_pte_t pte = addr | GEN8_PAGE_PRESENT; in gen8_ggtt_pte_encode() local
247 pte |= GEN12_GGTT_PTE_LM; in gen8_ggtt_pte_encode()
249 return pte; in gen8_ggtt_pte_encode()
252 static void gen8_set_pte(void __iomem *addr, gen8_pte_t pte) in gen8_set_pte() argument
254 writeq(pte, addr); in gen8_set_pte()
264 gen8_pte_t __iomem *pte = in gen8_ggtt_insert_page() local
267 gen8_set_pte(pte, gen8_ggtt_pte_encode(addr, level, flags)); in gen8_ggtt_insert_page()
315 gen6_pte_t __iomem *pte = in gen6_ggtt_insert_page() local
318 iowrite32(vm->pte_encode(addr, level, flags), pte); in gen6_ggtt_insert_page()
1009 gen6_pte_t pte = GEN6_PTE_ADDR_ENCODE(addr) | GEN6_PTE_VALID; in snb_pte_encode() local
[all …]
gen8_ppgtt.c 35 gen8_pte_t pte = addr | GEN8_PAGE_PRESENT | GEN8_PAGE_RW; in gen8_pte_encode() local
38 pte &= ~GEN8_PAGE_RW; in gen8_pte_encode()
41 pte |= GEN12_PPGTT_PTE_LM; in gen8_pte_encode()
45 pte |= PPAT_UNCACHED; in gen8_pte_encode()
48 pte |= PPAT_DISPLAY_ELLC; in gen8_pte_encode()
51 pte |= PPAT_CACHED; in gen8_pte_encode()
55 return pte; in gen8_pte_encode()
239 unsigned int pte = gen8_pd_index(start, 0); in __gen8_ppgtt_clear() local
253 GEM_BUG_ON(pte % 16); in __gen8_ppgtt_clear()
255 pte /= 16; in __gen8_ppgtt_clear()
[all …]
/drivers/gpu/drm/gma500/
gtt.c 83 u32 pte; in psb_gtt_insert_pages() local
93 pte = psb_gtt_mask_pte(page_to_pfn(pages[i]), PSB_MMU_CACHED_MEMORY); in psb_gtt_insert_pages()
94 iowrite32(pte, gtt_slot); in psb_gtt_insert_pages()
108 u32 pte; in psb_gtt_remove_pages() local
114 pte = psb_gtt_mask_pte(page_to_pfn(pdev->scratch_page), PSB_MMU_CACHED_MEMORY); in psb_gtt_remove_pages()
120 iowrite32(pte, gtt_slot); in psb_gtt_remove_pages()
174 uint32_t pte; in psb_gtt_clear() local
177 pte = psb_gtt_mask_pte(pfn_base, PSB_MMU_CACHED_MEMORY); in psb_gtt_clear()
180 iowrite32(pte, pdev->gtt_map + i); in psb_gtt_clear()
/drivers/gpu/drm/i915/
i915_mm.c 51 static int remap_sg(pte_t *pte, unsigned long addr, void *data) in remap_sg() argument
59 set_pte_at(r->mm, addr, pte, in remap_sg()
73 static int remap_pfn(pte_t *pte, unsigned long addr, void *data) in remap_pfn() argument
78 set_pte_at(r->mm, addr, pte, pte_mkspecial(pfn_pte(r->pfn, r->prot))); in remap_pfn()
/drivers/gpu/drm/nouveau/nvkm/subdev/mmu/
vmm.c 78 u32 pte[NVKM_VMM_LEVELS_MAX]; member
103 buf += sprintf(buf, "%05x:", it->pte[lvl]); in nvkm_vmm_trace()
146 u32 pdei = it->pte[it->lvl + 1]; in nvkm_vmm_unref_pdes()
211 pgt->pte[lpti] -= pten; in nvkm_vmm_unref_sptes()
221 if (pgt->pte[pteb] & NVKM_VMM_PTE_SPTES) { in nvkm_vmm_unref_sptes()
223 if (!(pgt->pte[ptei] & NVKM_VMM_PTE_SPTES)) in nvkm_vmm_unref_sptes()
235 pgt->pte[ptei] &= ~NVKM_VMM_PTE_VALID; in nvkm_vmm_unref_sptes()
237 if (pgt->pte[ptei] & NVKM_VMM_PTE_SPTES) in nvkm_vmm_unref_sptes()
239 pgt->pte[ptei] &= ~NVKM_VMM_PTE_VALID; in nvkm_vmm_unref_sptes()
242 if (pgt->pte[pteb] & NVKM_VMM_PTE_SPARSE) { in nvkm_vmm_unref_sptes()
[all …]
/drivers/staging/media/ipu3/
ipu3-mmu.c 29 #define IPU3_PTE2ADDR(pte) ((phys_addr_t)(pte) << IPU3_PAGE_SHIFT) argument
124 int pte; in imgu_mmu_alloc_page_table() local
130 for (pte = 0; pte < IPU3_PT_PTES; pte++) in imgu_mmu_alloc_page_table()
131 pt[pte] = pteval; in imgu_mmu_alloc_page_table()
/drivers/staging/media/atomisp/include/mmu/
isp_mmu.h 106 unsigned int pte);
123 #define ISP_PTE_VALID(mmu, pte) \ argument
124 ((pte) & ISP_PTE_VALID_MASK(mmu))
/drivers/gpu/drm/amd/amdgpu/
amdgpu_vm_sdma.c 218 uint64_t *pte; in amdgpu_vm_sdma_update() local
277 pte = (uint64_t *)&(p->job->ibs->ptr[p->num_dw_left]); in amdgpu_vm_sdma_update()
279 pte[i] = amdgpu_vm_map_gart(p->pages_addr, addr); in amdgpu_vm_sdma_update()
280 pte[i] |= flags; in amdgpu_vm_sdma_update()
/drivers/xen/
xlate_mmu.c 100 pte_t pte = pte_mkspecial(pfn_pte(page_to_pfn(page), info->prot)); in remap_pte_fn() local
138 set_pte_at(info->vma->vm_mm, addr, ptep, pte); in remap_pte_fn()
276 pte_t pte = pte_mkspecial(pfn_pte(page_to_pfn(page), r->prot)); in remap_pfn_fn() local
278 set_pte_at(r->mm, addr, ptep, pte); in remap_pfn_fn()

12