/drivers/iommu/ |
D | io-pgtable-arm-v7s.c |
     83  #define ARM_V7S_PTE_IS_VALID(pte) (((pte) & 0x3) != 0)  argument
     84  #define ARM_V7S_PTE_IS_TABLE(pte, lvl) \  argument
     85      ((lvl) == 1 && (((pte) & 0x3) == ARM_V7S_PTE_TYPE_TABLE))
    172  static bool arm_v7s_pte_is_cont(arm_v7s_iopte pte, int lvl);
    185  static arm_v7s_iopte to_mtk_iopte(phys_addr_t paddr, arm_v7s_iopte pte)  in to_mtk_iopte() argument
    188      pte |= ARM_V7S_ATTR_MTK_PA_BIT32;  in to_mtk_iopte()
    190      pte |= ARM_V7S_ATTR_MTK_PA_BIT33;  in to_mtk_iopte()
    192      pte |= ARM_V7S_ATTR_MTK_PA_BIT34;  in to_mtk_iopte()
    193      return pte;  in to_mtk_iopte()
    199      arm_v7s_iopte pte = paddr & ARM_V7S_LVL_MASK(lvl);  in paddr_to_iopte() local
    [all …]
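The MediaTek lines above pack physical-address bits 32-34 into spare attribute bits of a 32-bit v7s PTE, since the short-descriptor format has no room for them. A minimal round-trip sketch in plain C, assuming placeholder flag positions (the real ARM_V7S_ATTR_MTK_PA_BIT* values are not reproduced here):

    /*
     * Sketch of the to_mtk_iopte() idea: PA[34:32] ride in otherwise
     * unused attribute bits of a 32-bit PTE.  Bit positions 9, 4, 5
     * are placeholders for illustration only.
     */
    #include <assert.h>
    #include <stdint.h>

    #define MTK_PA_BIT32 (1u << 9)
    #define MTK_PA_BIT33 (1u << 4)
    #define MTK_PA_BIT34 (1u << 5)

    static uint32_t to_mtk_iopte(uint64_t paddr, uint32_t pte)
    {
        if (paddr & (1ULL << 32))
            pte |= MTK_PA_BIT32;
        if (paddr & (1ULL << 33))
            pte |= MTK_PA_BIT33;
        if (paddr & (1ULL << 34))
            pte |= MTK_PA_BIT34;
        return pte;
    }

    static uint64_t mtk_iopte_to_paddr(uint32_t pte)
    {
        uint64_t paddr = pte & 0xfffff000u;   /* low 32-bit page address */

        if (pte & MTK_PA_BIT32) paddr |= 1ULL << 32;
        if (pte & MTK_PA_BIT33) paddr |= 1ULL << 33;
        if (pte & MTK_PA_BIT34) paddr |= 1ULL << 34;
        return paddr;
    }

    int main(void)
    {
        uint64_t pa = 0x3abcde000ULL;         /* a 34-bit physical address */
        assert(mtk_iopte_to_paddr(to_mtk_iopte(pa, pa & 0xfffff000u)) == pa);
        return 0;
    }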
|
D | io-pgtable-arm.c |
    141  #define iopte_deref(pte,d) __va(iopte_to_paddr(pte, d))  argument
    143  #define iopte_type(pte) \  argument
    144      (((pte) >> ARM_LPAE_PTE_TYPE_SHIFT) & ARM_LPAE_PTE_TYPE_MASK)
    146  #define iopte_prot(pte) ((pte) & ARM_LPAE_PTE_ATTR_MASK)  argument
    160  static inline bool iopte_leaf(arm_lpae_iopte pte, int lvl,  in iopte_leaf() argument
    164      return iopte_type(pte) == ARM_LPAE_PTE_TYPE_PAGE;  in iopte_leaf()
    166      return iopte_type(pte) == ARM_LPAE_PTE_TYPE_BLOCK;  in iopte_leaf()
    172      arm_lpae_iopte pte = paddr;  in paddr_to_iopte() local
    175      return (pte | (pte >> (48 - 12))) & ARM_LPAE_PTE_ADDR_MASK;  in paddr_to_iopte()
    178  static phys_addr_t iopte_to_paddr(arm_lpae_iopte pte,  in iopte_to_paddr() argument
    [all …]
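Line 175 is the interesting one: with 52-bit output addresses, PA[51:48] cannot sit at their natural position in the PTE, so the 36-bit shift (48 - 12) folds them down into PTE bits [15:12]. A self-contained round-trip sketch with a simplified address mask (the kernel code additionally varies by granule size):

    #include <assert.h>
    #include <stdint.h>

    /* PTE address field for a 64KiB granule: bits [47:16] plus [15:12]. */
    #define PTE_ADDR_MASK 0x0000fffffffff000ULL

    static uint64_t paddr_to_iopte(uint64_t paddr)
    {
        /* Fold PA[51:48] into PTE[15:12]: a 48 - 12 == 36 bit shift. */
        return (paddr | (paddr >> (48 - 12))) & PTE_ADDR_MASK;
    }

    static uint64_t iopte_to_paddr(uint64_t pte)
    {
        uint64_t paddr = pte & PTE_ADDR_MASK;

        /* Move the folded bits back up and drop them from [15:12]. */
        return (paddr & ~0xf000ULL) | ((paddr & 0xf000ULL) << (48 - 12));
    }

    int main(void)
    {
        uint64_t pa = 0x000afedcba010000ULL;  /* 52-bit, 64KiB aligned */
        assert(iopte_to_paddr(paddr_to_iopte(pa)) == pa);
        return 0;
    }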
|
D | tegra-gart.c |
     66             unsigned long iova, unsigned long pte)  in gart_set_pte() argument
     69      writel_relaxed(pte, gart->regs + GART_ENTRY_DATA);  in gart_set_pte()
     75      unsigned long pte;  in gart_read_pte() local
     78      pte = readl_relaxed(gart->regs + GART_ENTRY_DATA);  in gart_read_pte()
     80      return pte;  in gart_read_pte()
    229      unsigned long pte;  in gart_iommu_iova_to_phys() local
    235      pte = gart_read_pte(gart, iova);  in gart_iommu_iova_to_phys()
    238      return pte & GART_PAGE_MASK;  in gart_iommu_iova_to_phys()
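tegra-gart reaches its PTEs through an indirect register window: the IOVA is latched into GART_ENTRY_ADDR, then the entry is read or written through GART_ENTRY_DATA. A user-space sketch of that access pattern against a simulated register block (offsets, sizes and the 16-entry table are illustrative, not the Tegra values):

    #include <assert.h>
    #include <stdint.h>

    #define GART_ENTRY_ADDR 0x28   /* illustrative offsets */
    #define GART_ENTRY_DATA 0x2c
    #define GART_PAGE_SHIFT 12

    /* Simulated device: one address latch and a tiny PTE array. */
    static uint32_t latch, pte_ram[16];

    static void writel(uint32_t val, uint32_t off)
    {
        if (off == GART_ENTRY_ADDR)
            latch = val;
        else if (off == GART_ENTRY_DATA)
            pte_ram[latch >> GART_PAGE_SHIFT] = val;
    }

    static uint32_t readl(uint32_t off)
    {
        return off == GART_ENTRY_DATA ? pte_ram[latch >> GART_PAGE_SHIFT] : latch;
    }

    static void gart_set_pte(uint32_t iova, uint32_t pte)
    {
        writel(iova, GART_ENTRY_ADDR);   /* select the entry ... */
        writel(pte, GART_ENTRY_DATA);    /* ... then store through the window */
    }

    static uint32_t gart_read_pte(uint32_t iova)
    {
        writel(iova, GART_ENTRY_ADDR);
        return readl(GART_ENTRY_DATA);
    }

    int main(void)
    {
        gart_set_pte(0x3000, 0x80001234);
        assert(gart_read_pte(0x3000) == 0x80001234);
        return 0;
    }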
|
D | rockchip-iommu.c |
    260  static inline bool rk_pte_is_page_valid(u32 pte)  in rk_pte_is_page_valid() argument
    262      return pte & RK_PTE_PAGE_VALID;  in rk_pte_is_page_valid()
    296  static u32 rk_mk_pte_invalid(u32 pte)  in rk_mk_pte_invalid() argument
    298      return pte & ~RK_PTE_PAGE_VALID;  in rk_mk_pte_invalid()
    541      u32 pte = 0;  in log_iova() local
    561      pte = *pte_addr;  in log_iova()
    563      if (!rk_pte_is_page_valid(pte))  in log_iova()
    566      page_addr_phys = rk_ops->pt_address(pte) + page_offset;  in log_iova()
    567      page_flags = pte & RK_PTE_PAGE_FLAGS_MASK;  in log_iova()
    574          rk_dte_is_pt_valid(dte), &pte_addr_phys, pte,  in log_iova()
    [all …]
|
D | tegra-smmu.c |
    649                 u32 *pte, dma_addr_t pte_dma, u32 val)  in tegra_smmu_set_pte() argument
    652      unsigned long offset = SMMU_OFFSET_IN_PAGE(pte);  in tegra_smmu_set_pte()
    654      *pte = val;  in tegra_smmu_set_pte()
    711      u32 *pte;  in __tegra_smmu_map() local
    717      pte = as_get_pte(as, iova, &pte_dma, page);  in __tegra_smmu_map()
    718      if (!pte)  in __tegra_smmu_map()
    722      if (*pte == 0)  in __tegra_smmu_map()
    733      tegra_smmu_set_pte(as, iova, pte, pte_dma,  in __tegra_smmu_map()
    745      u32 *pte;  in __tegra_smmu_unmap() local
    747      pte = tegra_smmu_pte_lookup(as, iova, &pte_dma);  in __tegra_smmu_unmap()
    [all …]
|
D | sun50i-iommu.c |
    254  static phys_addr_t sun50i_pte_get_page_address(u32 pte)  in sun50i_pte_get_page_address() argument
    256      return (phys_addr_t)pte & SUN50I_PTE_PAGE_ADDRESS_MASK;  in sun50i_pte_get_page_address()
    259  static enum sun50i_iommu_aci sun50i_get_pte_aci(u32 pte)  in sun50i_get_pte_aci() argument
    261      return FIELD_GET(SUN50I_PTE_ACI_MASK, pte);  in sun50i_get_pte_aci()
    264  static bool sun50i_pte_is_page_valid(u32 pte)  in sun50i_pte_is_page_valid() argument
    266      return pte & SUN50I_PTE_PAGE_VALID;  in sun50i_pte_is_page_valid()
    584      u32 dte, pte;  in sun50i_iommu_iova_to_phys() local
    592      pte = page_table[sun50i_iova_get_pte_index(iova)];  in sun50i_iommu_iova_to_phys()
    593      if (!sun50i_pte_is_page_valid(pte))  in sun50i_iommu_iova_to_phys()
    596      return sun50i_pte_get_page_address(pte) +  in sun50i_iommu_iova_to_phys()
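sun50i_iommu_iova_to_phys() is a textbook two-level software walk: the IOVA picks a directory entry (DTE), the DTE points at a page table, the PTE supplies the physical page, and the page offset is added back. A self-contained sketch over a fake physical memory array, with an invented layout (valid bit in bit 0, 4 KiB pages, 1 MiB per directory entry):

    #include <assert.h>
    #include <stdint.h>

    #define VALID     0x1u
    #define ADDR_MASK 0xfffff000u

    static uint32_t mem[1 << 14];   /* fake physical memory, word array */

    /* Read a 32-bit word at fake physical address 'pa'. */
    static uint32_t rd(uint32_t pa) { return mem[pa >> 2]; }

    static uint32_t iova_to_phys(uint32_t dt_pa, uint32_t iova)
    {
        uint32_t dte = rd(dt_pa + ((iova >> 20) << 2));   /* level 1 */
        if (!(dte & VALID))
            return 0;
        uint32_t pte = rd((dte & ADDR_MASK) + (((iova >> 12) & 0xff) << 2));
        if (!(pte & VALID))
            return 0;
        return (pte & ADDR_MASK) | (iova & 0xfff);        /* level 2 + offset */
    }

    int main(void)
    {
        /* Directory at PA 0x0, page table at PA 0x1000, page at 0x5000. */
        mem[0x0 >> 2]                 = 0x1000 | VALID;   /* DTE 0 */
        mem[(0x1000 + (3 << 2)) >> 2] = 0x5000 | VALID;   /* PTE 3 */
        assert(iova_to_phys(0x0, 0x3abc) == 0x5abc);
        return 0;
    }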
|
/drivers/iommu/intel/ |
D | pasid.c |
    530      struct pasid_entry *pte;  in intel_pasid_tear_down_entry() local
    533      pte = intel_pasid_get_entry(dev, pasid);  in intel_pasid_tear_down_entry()
    534      if (WARN_ON(!pte))  in intel_pasid_tear_down_entry()
    537      if (!pasid_pte_is_present(pte))  in intel_pasid_tear_down_entry()
    540      did = pasid_get_domain_id(pte);  in intel_pasid_tear_down_entry()
    541      pgtt = pasid_pte_get_pgtt(pte);  in intel_pasid_tear_down_entry()
    546      clflush_cache_range(pte, sizeof(*pte));  in intel_pasid_tear_down_entry()
    565      struct pasid_entry *pte,  in pasid_flush_caches() argument
    569      clflush_cache_range(pte, sizeof(*pte));  in pasid_flush_caches()
    579  static inline int pasid_enable_wpe(struct pasid_entry *pte)  in pasid_enable_wpe() argument
    [all …]
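The clflush_cache_range() calls make the torn-down entry visible to an IOMMU that may read the PASID table without cache coherency. A user-space x86 sketch of the same idea using the SSE2 CLFLUSH intrinsic, assuming a 64-byte cache line (the kernel helper prefers CLFLUSHOPT where available):

    #include <stddef.h>
    #include <stdint.h>
    #include <immintrin.h>

    #define CACHELINE 64

    static void clflush_cache_range(const void *addr, size_t size)
    {
        uintptr_t p = (uintptr_t)addr & ~(uintptr_t)(CACHELINE - 1);
        uintptr_t end = (uintptr_t)addr + size;

        _mm_mfence();                      /* order prior stores first */
        for (; p < end; p += CACHELINE)
            _mm_clflush((const void *)p); /* push each line to memory */
        _mm_mfence();                      /* flushes complete before reuse */
    }

    int main(void)
    {
        static uint64_t entry[8];   /* stand-in for a 512-bit PASID entry */

        entry[0] = 0;               /* clear, then flush for the device */
        clflush_cache_range(entry, sizeof(entry));
        return 0;
    }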
|
D | iommu.c |
   1019      struct dma_pte *parent, *pte;  in pfn_to_dma_pte() local
   1035      pte = &parent[offset];  in pfn_to_dma_pte()
   1036      if (!*target_level && (dma_pte_superpage(pte) || !dma_pte_present(pte)))  in pfn_to_dma_pte()
   1041      if (!dma_pte_present(pte)) {  in pfn_to_dma_pte()
   1054      if (cmpxchg64(&pte->val, 0ULL, pteval))  in pfn_to_dma_pte()
   1058      domain_flush_cache(domain, pte, sizeof(*pte));  in pfn_to_dma_pte()
   1063      parent = phys_to_virt(dma_pte_addr(pte));  in pfn_to_dma_pte()
   1070      return pte;  in pfn_to_dma_pte()
   1078      struct dma_pte *parent, *pte;  in dma_pfn_level_pte() local
   1085      pte = &parent[offset];  in dma_pfn_level_pte()
   [all …]
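The cmpxchg64() at line 1054 installs a freshly allocated table only if the slot is still empty, so two CPUs racing to populate the same range cannot leak or clobber a mapping: the loser frees its page and walks through the winner's. A sketch of that install step with C11 atomics, reducing dma_pte to a bare 64-bit value:

    #include <stdatomic.h>
    #include <stdint.h>
    #include <stdlib.h>
    #include <string.h>

    static uint64_t *walk_one_level(_Atomic uint64_t *slot)
    {
        uint64_t old = atomic_load(slot);

        if (!old) {
            uint64_t *tbl = aligned_alloc(4096, 4096); /* 512 x 8 bytes */
            uint64_t expected = 0;

            if (!tbl)
                return NULL;
            memset(tbl, 0, 4096);
            /* Install address plus R/W bits 0x3, only if still empty. */
            if (!atomic_compare_exchange_strong(slot, &expected,
                                                (uint64_t)((uintptr_t)tbl | 0x3)))
                free(tbl);   /* lost the race; use the winner's table */
            old = atomic_load(slot);
        }
        return (uint64_t *)(uintptr_t)(old & ~0xfffULL);
    }

    int main(void)
    {
        static _Atomic uint64_t slot;
        uint64_t *table = walk_one_level(&slot);

        /* A second walk must find and reuse the same table. */
        return table && table == walk_one_level(&slot) ? 0 : 1;
    }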
|
D | pasid.h |
     97  static inline bool pasid_pte_is_present(struct pasid_entry *pte)  in pasid_pte_is_present() argument
     99      return READ_ONCE(pte->val[0]) & PASID_PTE_PRESENT;  in pasid_pte_is_present()
    103  static inline u16 pasid_pte_get_pgtt(struct pasid_entry *pte)  in pasid_pte_get_pgtt() argument
    105      return (u16)((READ_ONCE(pte->val[0]) >> 6) & 0x7);  in pasid_pte_get_pgtt()
|
/drivers/iommu/amd/ |
D | io_pgtable.c |
     50  static u64 *first_pte_l7(u64 *pte, unsigned long *page_size,  in first_pte_l7() argument
     56      pg_size = PTE_PAGE_SIZE(*pte);  in first_pte_l7()
     59      fpte = (u64 *)(((unsigned long)pte) & pte_mask);  in first_pte_l7()
    183      u64 *pte;  in increase_address_space() local
    185      pte = (void *)get_zeroed_page(gfp);  in increase_address_space()
    186      if (!pte)  in increase_address_space()
    198      *pte = PM_LEVEL_PDE(domain->iop.mode, iommu_virt_to_phys(domain->iop.root));  in increase_address_space()
    200      domain->iop.root = pte;  in increase_address_space()
    209      amd_iommu_domain_set_pgtable(domain, pte, domain->iop.mode);  in increase_address_space()
    211      pte = NULL;  in increase_address_space()
    [all …]
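increase_address_space() grows the tree upward: allocate a zeroed top level, point its entry 0 at the old root, then publish the new root and bump the level count. Every existing mapping keeps working because it is now simply one level deeper. A pointer-based sketch (the encoded PDE format, locking and flushes are omitted):

    #include <stdlib.h>

    struct pgtable {
        void *root;   /* top-level table: 512 slots */
        int   mode;   /* number of levels */
    };

    static int increase_address_space(struct pgtable *iop)
    {
        void **pte = calloc(512, sizeof(void *));

        if (!pte)
            return -1;
        pte[0] = iop->root;   /* old root becomes child 0 */
        iop->root = pte;
        iop->mode += 1;       /* one more level to walk */
        return 0;
    }

    int main(void)
    {
        struct pgtable iop = { .root = calloc(512, sizeof(void *)), .mode = 3 };

        return increase_address_space(&iop) == 0 && iop.mode == 4 ? 0 : 1;
    }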
|
D | amd_iommu_types.h |
    303  #define PM_PTE_LEVEL(pte)    (((pte) >> 9) & 0x7ULL)  argument
    342  #define PTE_PAGE_SIZE(pte) \  argument
    343      (1ULL << (1 + ffz(((pte) | 0xfffULL))))
    390  #define IOMMU_PTE_PRESENT(pte)  ((pte) & IOMMU_PTE_PR)  argument
    391  #define IOMMU_PTE_PAGE(pte)  (iommu_phys_to_virt((pte) & IOMMU_PAGE_MASK))  argument
    392  #define IOMMU_PTE_MODE(pte)  (((pte) >> 9) & 0x07)  argument
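PTE_PAGE_SIZE() decodes the AMD large-page convention: a naturally aligned page of 2^N bytes sets PTE address bits [N-2:12] to one, so the position of the first zero bit above the 4 KiB offset reveals the size. A round-trip sketch; ffz() is emulated with a GCC/Clang builtin:

    #include <assert.h>
    #include <stdint.h>

    static unsigned int ffz(uint64_t x)   /* first zero bit, like the kernel's */
    {
        return (unsigned int)__builtin_ctzll(~x);
    }

    #define PTE_PAGE_SIZE(pte) (1ULL << (1 + ffz((pte) | 0xfffULL)))

    /* Encode the address field for a naturally aligned page of 'size' bytes. */
    static uint64_t encode_large_page(uint64_t paddr, uint64_t size)
    {
        return paddr | (((size >> 1) - 1) & ~0xfffULL);
    }

    int main(void)
    {
        uint64_t pte = encode_large_page(0x40000000ULL, 1ULL << 21);

        assert(PTE_PAGE_SIZE(pte) == (1ULL << 21));            /* 2MiB back */
        assert(PTE_PAGE_SIZE(0x40000000ULL) == (1ULL << 13));  /* no extra bits: the 8KiB minimum */
        return 0;
    }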
|
/drivers/staging/media/atomisp/pci/mmu/ |
D | isp_mmu.c |
     68              unsigned int idx, unsigned int pte)  in atomisp_set_pte() argument
     71      *(pt_virt + idx) = pte;  in atomisp_set_pte()
     80            unsigned int pte)  in isp_pte_to_pgaddr() argument
     82      return mmu->driver->pte_to_phys(mmu, pte);  in isp_pte_to_pgaddr()
     88      unsigned int pte = mmu->driver->phys_to_pte(mmu, phys);  in isp_pgaddr_to_pte_valid() local
     90      return (unsigned int)(pte | ISP_PTE_VALID_MASK(mmu));  in isp_pgaddr_to_pte_valid()
    164               unsigned int isp_virt, unsigned int pte)  in mmu_unmap_l2_pte_error() argument
    174               pte);  in mmu_unmap_l2_pte_error()
    179               unsigned int isp_virt, unsigned int pte)  in mmu_unmap_l1_pte_error() argument
    186               pte);  in mmu_unmap_l1_pte_error()
    [all …]
|
D | sh_mmu_mrfld.c |
     38            unsigned int pte)  in sh_pte_to_phys() argument
     42      return (phys_addr_t)((pte & ~mask) << ISP_PAGE_OFFSET);  in sh_pte_to_phys()
     48      unsigned int pte = sh_phys_to_pte(mmu, phys);  in sh_get_pd_base() local
     50      return HOST_ADDRESS(pte);  in sh_get_pd_base()
|
/drivers/gpu/drm/gma500/ |
D | gtt.c |
     78      u32 pte;  in psb_gtt_insert() local
     99      pte = psb_gtt_mask_pte(page_to_pfn(r->pages[i]),  in psb_gtt_insert()
    101      iowrite32(pte, gtt_slot++);  in psb_gtt_insert()
    123      u32 pte;  in psb_gtt_remove() local
    129      pte = psb_gtt_mask_pte(page_to_pfn(dev_priv->scratch_page),  in psb_gtt_remove()
    133      iowrite32(pte, gtt_slot++);  in psb_gtt_remove()
    359      uint32_t pte;  in psb_gtt_init() local
    476      pte = psb_gtt_mask_pte(pfn_base + i, PSB_MMU_CACHED_MEMORY);  in psb_gtt_init()
    477      iowrite32(pte, dev_priv->gtt_map + i);  in psb_gtt_init()
    485      pte = psb_gtt_mask_pte(pfn_base, PSB_MMU_CACHED_MEMORY);  in psb_gtt_init()
    [all …]
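psb_gtt_remove() never leaves a slot dangling: freed ranges are re-pointed at a scratch page, so stray device accesses hit harmless memory instead of whatever was mapped before. A sketch of the insert/remove pair over a plain array standing in for the iomapped GTT (the PFN-plus-valid-bit encoding is invented):

    #include <assert.h>
    #include <stdint.h>

    #define GTT_SLOTS 128
    #define PTE_VALID 0x1u

    static uint32_t gtt[GTT_SLOTS];
    static const uint32_t scratch_pfn = 0x1234;

    static uint32_t mask_pte(uint32_t pfn) { return (pfn << 12) | PTE_VALID; }

    static void gtt_insert(unsigned int first, const uint32_t *pfns, unsigned int n)
    {
        for (unsigned int i = 0; i < n; i++)
            gtt[first + i] = mask_pte(pfns[i]);
    }

    static void gtt_remove(unsigned int first, unsigned int n)
    {
        for (unsigned int i = 0; i < n; i++)
            gtt[first + i] = mask_pte(scratch_pfn);   /* park on scratch */
    }

    int main(void)
    {
        const uint32_t pfns[2] = { 0x100, 0x2a7 };

        gtt_insert(4, pfns, 2);
        gtt_remove(4, 2);
        assert(gtt[5] == mask_pte(scratch_pfn));
        return 0;
    }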
|
D | mmu.c |
    387                  uint32_t pte)  in psb_mmu_set_pte() argument
    389      pt->v[psb_mmu_pt_index(addr)] = pte;  in psb_mmu_set_pte()
    625      uint32_t pte;  in psb_mmu_insert_pfn_sequence() local
    645      pte = psb_mmu_mask_pte(start_pfn++, type);  in psb_mmu_insert_pfn_sequence()
    646      psb_mmu_set_pte(pt, addr, pte);  in psb_mmu_insert_pfn_sequence()
    674      uint32_t pte;  in psb_mmu_insert_pages() local
    707      pte = psb_mmu_mask_pte(page_to_pfn(*pages++),  in psb_mmu_insert_pages()
    709      psb_mmu_set_pte(pt, addr, pte);  in psb_mmu_insert_pages()
|
/drivers/gpu/drm/i915/gt/ |
D | intel_ggtt.c |
    195      gen8_pte_t pte = addr | _PAGE_PRESENT;  in gen8_ggtt_pte_encode() local
    198          pte |= GEN12_GGTT_PTE_LM;  in gen8_ggtt_pte_encode()
    200      return pte;  in gen8_ggtt_pte_encode()
    203  static void gen8_set_pte(void __iomem *addr, gen8_pte_t pte)  in gen8_set_pte() argument
    205      writeq(pte, addr);  in gen8_set_pte()
    215      gen8_pte_t __iomem *pte =  in gen8_ggtt_insert_page() local
    218      gen8_set_pte(pte, gen8_ggtt_pte_encode(addr, level, flags));  in gen8_ggtt_insert_page()
    266      gen6_pte_t __iomem *pte =  in gen6_ggtt_insert_page() local
    269      iowrite32(vm->pte_encode(addr, level, flags), pte);  in gen6_ggtt_insert_page()
    952      gen6_pte_t pte = GEN6_PTE_ADDR_ENCODE(addr) | GEN6_PTE_VALID;  in snb_pte_encode() local
    [all …]
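gen8_ggtt_pte_encode() builds a 64-bit entry from the DMA address, a present bit, and a local-memory bit for pages in device-attached memory. A hedged sketch of the encoding's shape; the flag values below are placeholders, not the hardware bit positions:

    #include <stdbool.h>
    #include <stdint.h>

    #define PAGE_PRESENT (1ULL << 0)   /* placeholder bit positions */
    #define PTE_LM       (1ULL << 1)

    static uint64_t ggtt_pte_encode(uint64_t addr, bool lmem)
    {
        uint64_t pte = addr | PAGE_PRESENT;

        if (lmem)
            pte |= PTE_LM;   /* page lives in device-local memory */
        return pte;
    }

    int main(void)
    {
        return ggtt_pte_encode(0xabc000, true) == (0xabc000 | 3) ? 0 : 1;
    }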
|
/drivers/gpu/drm/i915/ |
D | i915_mm.c |
     40  static int remap_pfn(pte_t *pte, unsigned long addr, void *data)  in remap_pfn() argument
     45      set_pte_at(r->mm, addr, pte, pte_mkspecial(pfn_pte(r->pfn, r->prot)));  in remap_pfn()
     61  static int remap_sg(pte_t *pte, unsigned long addr, void *data)  in remap_sg() argument
     69      set_pte_at(r->mm, addr, pte,  in remap_sg()
|
/drivers/gpu/drm/nouveau/nvkm/subdev/mmu/ |
D | vmm.c |
     78      u32 pte[NVKM_VMM_LEVELS_MAX];  member
    103          buf += sprintf(buf, "%05x:", it->pte[lvl]);  in nvkm_vmm_trace()
    146      u32 pdei = it->pte[it->lvl + 1];  in nvkm_vmm_unref_pdes()
    211      pgt->pte[lpti] -= pten;  in nvkm_vmm_unref_sptes()
    221      if (pgt->pte[pteb] & NVKM_VMM_PTE_SPTES) {  in nvkm_vmm_unref_sptes()
    223          if (!(pgt->pte[ptei] & NVKM_VMM_PTE_SPTES))  in nvkm_vmm_unref_sptes()
    235          pgt->pte[ptei] &= ~NVKM_VMM_PTE_VALID;  in nvkm_vmm_unref_sptes()
    237          if (pgt->pte[ptei] & NVKM_VMM_PTE_SPTES)  in nvkm_vmm_unref_sptes()
    239          pgt->pte[ptei] &= ~NVKM_VMM_PTE_VALID;  in nvkm_vmm_unref_sptes()
    242      if (pgt->pte[pteb] & NVKM_VMM_PTE_SPARSE) {  in nvkm_vmm_unref_sptes()
    [all …]
|
/drivers/staging/media/ipu3/ |
D | ipu3-mmu.c |
     29  #define IPU3_PTE2ADDR(pte)  ((phys_addr_t)(pte) << IPU3_PAGE_SHIFT)  argument
    124      int pte;  in imgu_mmu_alloc_page_table() local
    130      for (pte = 0; pte < IPU3_PT_PTES; pte++)  in imgu_mmu_alloc_page_table()
    131          pt[pte] = pteval;  in imgu_mmu_alloc_page_table()
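The IPU3 scheme is the simplest in this list: a PTE is just a physical address shifted down by the page size, and a fresh table is filled with a default entry (one pointing at a dummy page) rather than zeros. A sketch, with the table size and dummy address invented for illustration:

    #include <assert.h>
    #include <stdint.h>
    #include <stdlib.h>

    #define PAGE_SHIFT 12
    #define PT_PTES    1024

    #define PTE2ADDR(pte)  ((uint64_t)(pte) << PAGE_SHIFT)
    #define ADDR2PTE(addr) ((uint32_t)((addr) >> PAGE_SHIFT))

    static uint32_t *alloc_page_table(uint32_t pteval)
    {
        uint32_t *pt = malloc(PT_PTES * sizeof(*pt));

        if (pt)
            for (int pte = 0; pte < PT_PTES; pte++)
                pt[pte] = pteval;   /* every entry maps the dummy page */
        return pt;
    }

    int main(void)
    {
        uint32_t *pt = alloc_page_table(ADDR2PTE(0xdead000ULL));

        assert(pt && PTE2ADDR(pt[42]) == 0xdead000ULL);
        free(pt);
        return 0;
    }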
|
/drivers/staging/media/atomisp/include/mmu/ |
D | isp_mmu.h |
    106          unsigned int pte);
    123  #define ISP_PTE_VALID(mmu, pte) \  argument
    124      ((pte) & ISP_PTE_VALID_MASK(mmu))
|
/drivers/gpu/drm/amd/amdgpu/ |
D | amdgpu_vm_sdma.c |
    208      uint64_t *pte;  in amdgpu_vm_sdma_update() local
    261      pte = (uint64_t *)&(p->job->ibs->ptr[p->num_dw_left]);  in amdgpu_vm_sdma_update()
    263          pte[i] = amdgpu_vm_map_gart(p->pages_addr, addr);  in amdgpu_vm_sdma_update()
    264          pte[i] |= flags;  in amdgpu_vm_sdma_update()
|
/drivers/xen/ |
D | xlate_mmu.c |
    100      pte_t pte = pte_mkspecial(pfn_pte(page_to_pfn(page), info->prot));  in remap_pte_fn() local
    138      set_pte_at(info->vma->vm_mm, addr, ptep, pte);  in remap_pte_fn()
    276      pte_t pte = pte_mkspecial(pfn_pte(page_to_pfn(page), r->prot));  in remap_pfn_fn() local
    278      set_pte_at(r->mm, addr, ptep, pte);  in remap_pfn_fn()
|
/drivers/md/ |
D | dm-switch.c |
    179      region_table_slot_t pte;  in switch_region_table_write() local
    183      pte = sctx->region_table[region_index];  in switch_region_table_write()
    184      pte &= ~((((region_table_slot_t)1 << sctx->region_table_entry_bits) - 1) << bit);  in switch_region_table_write()
    185      pte |= (region_table_slot_t)value << bit;  in switch_region_table_write()
    186      sctx->region_table[region_index] = pte;  in switch_region_table_write()
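Not a page table at all, but the same bit gymnastics: dm-switch packs several small region-table entries into each 64-bit slot, and lines 184-185 are the classic read-modify-write, clear the entry's field with a mask, then OR in the new value. A self-contained sketch with an arbitrary 6-bit entry width (in dm-switch the width is a runtime parameter):

    #include <assert.h>
    #include <stdint.h>

    #define ENTRY_BITS       6
    #define ENTRIES_PER_SLOT (64 / ENTRY_BITS)

    static uint64_t table[16];

    static void table_write(unsigned int idx, uint64_t value)
    {
        unsigned int slot = idx / ENTRIES_PER_SLOT;
        unsigned int bit = (idx % ENTRIES_PER_SLOT) * ENTRY_BITS;
        uint64_t pte = table[slot];

        pte &= ~((((uint64_t)1 << ENTRY_BITS) - 1) << bit);   /* clear field */
        pte |= value << bit;                                  /* set new value */
        table[slot] = pte;
    }

    static uint64_t table_read(unsigned int idx)
    {
        unsigned int slot = idx / ENTRIES_PER_SLOT;
        unsigned int bit = (idx % ENTRIES_PER_SLOT) * ENTRY_BITS;

        return (table[slot] >> bit) & (((uint64_t)1 << ENTRY_BITS) - 1);
    }

    int main(void)
    {
        table_write(17, 0x2a);
        assert(table_read(17) == 0x2a);
        assert(table_read(16) == 0);   /* neighbours untouched */
        return 0;
    }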
|
/drivers/misc/sgi-gru/ |
D | grufault.c |
    212      pte_t pte;  in atomic_pte_lookup() local
    231      pte = *(pte_t *) pmdp;  in atomic_pte_lookup()
    234      pte = *pte_offset_kernel(pmdp, vaddr);  in atomic_pte_lookup()
    236      if (unlikely(!pte_present(pte) ||  in atomic_pte_lookup()
    237               (write && (!pte_write(pte) || !pte_dirty(pte)))))  in atomic_pte_lookup()
    240      *paddr = pte_pfn(pte) << PAGE_SHIFT;  in atomic_pte_lookup()
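atomic_pte_lookup() runs without taking mm locks, so it must refuse any case the real fault path would have to handle: a write is only served from a PTE that is present, writable and already dirty. The decision logic restated over a toy x86-style flag layout (bit positions used illustratively):

    #include <assert.h>
    #include <stdbool.h>
    #include <stdint.h>

    #define PTE_PRESENT (1ULL << 0)
    #define PTE_WRITE   (1ULL << 1)
    #define PTE_DIRTY   (1ULL << 6)
    #define PAGE_SHIFT  12

    /* Returns false if the access would need the full fault path. */
    static bool atomic_pte_lookup(uint64_t pte, bool write, uint64_t *paddr)
    {
        if (!(pte & PTE_PRESENT))
            return false;
        if (write && (!(pte & PTE_WRITE) || !(pte & PTE_DIRTY)))
            return false;
        *paddr = (pte >> PAGE_SHIFT) << PAGE_SHIFT;   /* strip flag bits */
        return true;
    }

    int main(void)
    {
        uint64_t paddr;

        /* Writable but not yet dirty: must fall back to the fault path. */
        assert(!atomic_pte_lookup(PTE_PRESENT | PTE_WRITE, true, &paddr));
        assert(atomic_pte_lookup(0x5000 | PTE_PRESENT | PTE_WRITE | PTE_DIRTY,
                                 true, &paddr));
        assert(paddr == 0x5000);
        return 0;
    }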
|
/drivers/gpu/drm/v3d/ |
D | v3d_mmu.c |
     98      u32 pte = page_prot | page_address;  in v3d_mmu_insert_ptes() local
    104          v3d->pt[page++] = pte + i;  in v3d_mmu_insert_ptes()
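v3d exploits the fact that its PTE address field counts whole pages: for physically contiguous memory, the entry for page i is literally the first page's PTE plus i. A sketch with an invented valid-bit position:

    #include <assert.h>
    #include <stdint.h>

    #define V3D_PTE_VALID (1u << 30)   /* illustrative flag position */

    static uint32_t pt[4096];

    /* Map 'npages' contiguous pages starting at page frame 'pfn'. */
    static void insert_ptes(unsigned int first_slot, uint32_t pfn,
                            unsigned int npages)
    {
        uint32_t pte = V3D_PTE_VALID | pfn;   /* entry for page 0 */

        for (unsigned int i = 0; i < npages; i++)
            pt[first_slot + i] = pte + i;     /* next page = +1 frame */
    }

    int main(void)
    {
        insert_ptes(8, 0x1000, 4);
        assert(pt[11] == (V3D_PTE_VALID | 0x1003));
        return 0;
    }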
|