
Searched refs:pte (Results 1 – 25 of 38) sorted by relevance

/drivers/iommu/
io-pgtable-arm-v7s.c
96 #define ARM_V7S_PTE_IS_VALID(pte) (((pte) & 0x3) != 0) argument
97 #define ARM_V7S_PTE_IS_TABLE(pte, lvl) \ argument
98 ((lvl) == 1 && (((pte) & 0x3) == ARM_V7S_PTE_TYPE_TABLE))
181 static arm_v7s_iopte *iopte_deref(arm_v7s_iopte pte, int lvl) in iopte_deref() argument
183 if (ARM_V7S_PTE_IS_TABLE(pte, lvl)) in iopte_deref()
184 pte &= ARM_V7S_TABLE_MASK; in iopte_deref()
186 pte &= ARM_V7S_LVL_MASK(lvl); in iopte_deref()
187 return phys_to_virt(pte); in iopte_deref()
260 static void __arm_v7s_set_pte(arm_v7s_iopte *ptep, arm_v7s_iopte pte, in __arm_v7s_set_pte() argument
266 ptep[i] = pte; in __arm_v7s_set_pte()
[all …]
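
The macros above classify a short-descriptor IOPTE purely by its low type bits: 0b00 is an invalid/fault entry, and only a level-1 entry whose type field reads "table" points at a second-level table. A minimal, self-contained sketch of that decode pattern (mask and type values are illustrative stand-ins for the kernel's constants):

#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

typedef uint32_t iopte_t;

#define PTE_TYPE_MASK  0x3u
#define PTE_TYPE_TABLE 0x1u  /* assumed: 0b01 marks a table descriptor */

static bool pte_is_valid(iopte_t pte)
{
        return (pte & PTE_TYPE_MASK) != 0;  /* 0b00 is an invalid entry */
}

static bool pte_is_table(iopte_t pte, int lvl)
{
        /* only a level-1 descriptor can point at a level-2 table */
        return lvl == 1 && (pte & PTE_TYPE_MASK) == PTE_TYPE_TABLE;
}

int main(void)
{
        iopte_t pte = 0x80000401u;  /* fabricated level-1 table entry */
        printf("valid=%d table=%d\n", pte_is_valid(pte), pte_is_table(pte, 1));
        return 0;
}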
io-pgtable-arm.c
173 #define iopte_deref(pte,d) \ argument
174 (__va((pte) & ((1ULL << ARM_LPAE_MAX_ADDR_BITS) - 1) \
177 #define iopte_type(pte,l) \ argument
178 (((pte) >> ARM_LPAE_PTE_TYPE_SHIFT) & ARM_LPAE_PTE_TYPE_MASK)
180 #define iopte_prot(pte) ((pte) & ARM_LPAE_PTE_ATTR_MASK) argument
182 #define iopte_leaf(pte,l) \ argument
184 (iopte_type(pte,l) == ARM_LPAE_PTE_TYPE_PAGE) : \
185 (iopte_type(pte,l) == ARM_LPAE_PTE_TYPE_BLOCK))
187 #define iopte_to_pfn(pte,d) \ argument
188 (((pte) & ((1ULL << ARM_LPAE_MAX_ADDR_BITS) - 1)) >> (d)->pg_shift)
[all …]
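
The LPAE macros above split a 64-bit IOPTE into an output-address field occupying bits [pg_shift, ARM_LPAE_MAX_ADDR_BITS) and attribute bits everywhere else. A sketch of the pfn round-trip under assumed constants (48-bit addresses, 4 KiB granule):

#include <stdint.h>
#include <stdio.h>

#define MAX_ADDR_BITS 48  /* assumed output-address width */
#define PG_SHIFT      12  /* assumed 4 KiB granule */

static uint64_t iopte_to_pfn(uint64_t pte)
{
        /* mask off high attribute bits, shift off the low ones */
        return (pte & ((1ULL << MAX_ADDR_BITS) - 1)) >> PG_SHIFT;
}

static uint64_t pfn_to_iopte(uint64_t pfn, uint64_t prot)
{
        return (pfn << PG_SHIFT) | prot;
}

int main(void)
{
        uint64_t pte = pfn_to_iopte(0x12345, 0x3 /* fabricated prot bits */);
        printf("pfn=%#llx\n", (unsigned long long)iopte_to_pfn(pte));  /* 0x12345 */
        return 0;
}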
tegra-gart.c
96 unsigned long offs, u32 pte) in gart_set_pte() argument
99 writel(pte, gart->regs + GART_ENTRY_DATA); in gart_set_pte()
102 pte ? "map" : "unmap", offs, pte & GART_PAGE_MASK); in gart_set_pte()
108 unsigned long pte; in gart_read_pte() local
111 pte = readl(gart->regs + GART_ENTRY_DATA); in gart_read_pte()
113 return pte; in gart_read_pte()
135 unsigned long pte; in gart_dump_table() local
137 pte = gart_read_pte(gart, iova); in gart_dump_table()
140 (GART_ENTRY_PHYS_ADDR_VALID & pte) ? "v" : " ", in gart_dump_table()
141 iova, pte & GART_PAGE_MASK); in gart_dump_table()
[all …]
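
tegra-gart never maps its page table into the CPU's address space: entries are reached through an index/data register pair, writing the entry's address to one register and moving the PTE through the other. A sketch of that indirect-access pattern, with fabricated register offsets and a plain volatile window standing in for the kernel's writel()/readl():

#include <stdint.h>

#define ENTRY_ADDR 0x28  /* hypothetical index-register offset */
#define ENTRY_DATA 0x2c  /* hypothetical data-register offset */

static inline void mmio_write(volatile uint32_t *regs, uint32_t off, uint32_t v)
{
        regs[off / 4] = v;
}

static inline uint32_t mmio_read(volatile uint32_t *regs, uint32_t off)
{
        return regs[off / 4];
}

static void gart_set_pte(volatile uint32_t *regs, uint32_t iova, uint32_t pte)
{
        mmio_write(regs, ENTRY_ADDR, iova);  /* select the entry... */
        mmio_write(regs, ENTRY_DATA, pte);   /* ...then store through it */
}

static uint32_t gart_read_pte(volatile uint32_t *regs, uint32_t iova)
{
        mmio_write(regs, ENTRY_ADDR, iova);
        return mmio_read(regs, ENTRY_DATA);
}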
intel-iommu.c
331 static inline void dma_clear_pte(struct dma_pte *pte) in dma_clear_pte() argument
333 pte->val = 0; in dma_clear_pte()
336 static inline u64 dma_pte_addr(struct dma_pte *pte) in dma_pte_addr() argument
339 return pte->val & VTD_PAGE_MASK; in dma_pte_addr()
342 return __cmpxchg64(&pte->val, 0ULL, 0ULL) & VTD_PAGE_MASK; in dma_pte_addr()
346 static inline bool dma_pte_present(struct dma_pte *pte) in dma_pte_present() argument
348 return (pte->val & 3) != 0; in dma_pte_present()
351 static inline bool dma_pte_superpage(struct dma_pte *pte) in dma_pte_superpage() argument
353 return (pte->val & DMA_PTE_LARGE_PAGE); in dma_pte_superpage()
356 static inline int first_pte_in_page(struct dma_pte *pte) in first_pte_in_page() argument
[all …]
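
The VT-d helpers treat a PTE as a single u64: bits 0-1 carry the read/write permissions (non-zero means present), one flag bit marks a superpage, and masking the flags off yields the page-aligned address. A sketch with illustrative constants (the kernel's are VTD_PAGE_MASK and DMA_PTE_LARGE_PAGE):

#include <stdbool.h>
#include <stdint.h>

struct dma_pte { uint64_t val; };

#define PTE_LARGE_PAGE (1ULL << 7)   /* assumed superpage flag bit */
#define PAGE_MASK_4K   (~0ULL << 12)

static bool pte_present(const struct dma_pte *pte)
{
        return (pte->val & 3) != 0;  /* read or write permission set */
}

static bool pte_superpage(const struct dma_pte *pte)
{
        return pte->val & PTE_LARGE_PAGE;
}

static uint64_t pte_addr(const struct dma_pte *pte)
{
        return pte->val & PAGE_MASK_4K;  /* strip the low flag bits */
}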
rockchip-iommu.c
220 static inline phys_addr_t rk_pte_page_address(u32 pte) in rk_pte_page_address() argument
222 return (phys_addr_t)pte & RK_PTE_PAGE_ADDRESS_MASK; in rk_pte_page_address()
225 static inline bool rk_pte_is_page_valid(u32 pte) in rk_pte_is_page_valid() argument
227 return pte & RK_PTE_PAGE_VALID; in rk_pte_is_page_valid()
240 static u32 rk_mk_pte_invalid(u32 pte) in rk_mk_pte_invalid() argument
242 return pte & ~RK_PTE_PAGE_VALID; in rk_mk_pte_invalid()
460 u32 pte = 0; in log_iova() local
480 pte = *pte_addr; in log_iova()
482 if (!rk_pte_is_page_valid(pte)) in log_iova()
485 page_addr_phys = rk_pte_page_address(pte) + page_offset; in log_iova()
[all …]
amd_iommu_types.h
277 #define PM_PTE_LEVEL(pte) (((pte) >> 9) & 0x7ULL) argument
316 #define PTE_PAGE_SIZE(pte) \ argument
317 (1ULL << (1 + ffz(((pte) | 0xfffULL))))
364 #define IOMMU_PTE_PRESENT(pte) ((pte) & IOMMU_PTE_PR) argument
365 #define IOMMU_PTE_PAGE(pte) (iommu_phys_to_virt((pte) & IOMMU_PAGE_MASK)) argument
366 #define IOMMU_PTE_MODE(pte) (((pte) >> 9) & 0x07) argument
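
PTE_PAGE_SIZE decodes AMD's large-page encoding: the address field's low bits hold a run of 1s, and the first 0 bit above bit 11 marks the page-size boundary, so a 2 MiB page carries bits 12-19 set and bit 20 clear. A worked sketch with a portable ffz() (the kernel's ffz() is an arch intrinsic):

#include <stdint.h>
#include <stdio.h>

static unsigned ffz64(uint64_t v)  /* index of the lowest zero bit */
{
        unsigned i = 0;
        while (v & 1) { v >>= 1; i++; }
        return i;
}

static uint64_t pte_page_size(uint64_t pte)
{
        return 1ULL << (1 + ffz64(pte | 0xfffULL));
}

int main(void)
{
        /* fabricated 2 MiB mapping: bits 12..19 set, bit 20 clear,
         * so ffz() lands on bit 20 and the size is 1 << 21 */
        uint64_t pte = 0x000ff000ULL;
        printf("page size = %llu\n", (unsigned long long)pte_page_size(pte));
        return 0;
}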
tegra-smmu.c
633 u32 *pte, dma_addr_t pte_dma, u32 val) in tegra_smmu_set_pte() argument
636 unsigned long offset = offset_in_page(pte); in tegra_smmu_set_pte()
638 *pte = val; in tegra_smmu_set_pte()
652 u32 *pte; in tegra_smmu_map() local
654 pte = as_get_pte(as, iova, &pte_dma); in tegra_smmu_map()
655 if (!pte) in tegra_smmu_map()
659 if (*pte == 0) in tegra_smmu_map()
662 tegra_smmu_set_pte(as, iova, pte, pte_dma, in tegra_smmu_map()
673 u32 *pte; in tegra_smmu_unmap() local
675 pte = tegra_smmu_pte_lookup(as, iova, &pte_dma); in tegra_smmu_unmap()
[all …]
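
tegra_smmu_map() checks whether the slot was empty (*pte == 0) before writing, which lets the driver keep a per-table count of live entries and release a table once it drains. A hypothetical sketch of that bookkeeping; the real driver tracks the count in its address-space structure:

#include <stdint.h>

struct pt {
        uint32_t entries[1024];
        uint32_t use_count;  /* number of non-zero slots */
};

static void pt_set(struct pt *pt, unsigned idx, uint32_t val)
{
        if (pt->entries[idx] == 0 && val != 0)
                pt->use_count++;  /* slot becomes live */
        else if (pt->entries[idx] != 0 && val == 0)
                pt->use_count--;  /* slot drained; table freeable at 0 */
        pt->entries[idx] = val;
}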
amd_iommu.c
1348 u64 *pte; in increase_address_space() local
1356 pte = (void *)get_zeroed_page(gfp); in increase_address_space()
1357 if (!pte) in increase_address_space()
1360 *pte = PM_LEVEL_PDE(domain->mode, in increase_address_space()
1362 domain->pt_root = pte; in increase_address_space()
1379 u64 *pte, *page; in alloc_pte() local
1387 pte = &domain->pt_root[PM_LEVEL_INDEX(level, address)]; in alloc_pte()
1394 __pte = *pte; in alloc_pte()
1404 if (cmpxchg64(pte, __pte, __npte) != __pte) { in alloc_pte()
1411 if (PM_PTE_LEVEL(*pte) != level) in alloc_pte()
[all …]
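
alloc_pte() installs a freshly zeroed lower-level table with cmpxchg64() so two concurrent mappers never both link a new level: the loser sees the mismatch and frees its page. A user-space sketch of the same install-or-free pattern using C11 atomics (the kernel stores an encoded PDE value rather than a raw pointer):

#include <stdatomic.h>
#include <stdint.h>
#include <stdlib.h>

#define ENTRIES 512

static uint64_t *install_table(_Atomic(uint64_t *) *slot)
{
        uint64_t *cur = atomic_load(slot);
        if (cur)
                return cur;  /* another thread already linked a table */

        uint64_t *fresh = calloc(ENTRIES, sizeof(*fresh));
        if (!fresh)
                return NULL;

        uint64_t *expected = NULL;
        if (!atomic_compare_exchange_strong(slot, &expected, fresh)) {
                free(fresh);      /* lost the race: discard our page */
                return expected;  /* ...and use the winner's table */
        }
        return fresh;
}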
/drivers/gpu/drm/nouveau/nvkm/subdev/mmu/
nv44.c
39 dma_addr_t *list, u32 pte, u32 cnt) in nv44_vm_fill() argument
41 u32 base = (pte << 2) & ~0x0000000f; in nv44_vm_fill()
51 switch (pte++ & 0x3) { in nv44_vm_fill()
85 struct nvkm_mem *mem, u32 pte, u32 cnt, dma_addr_t *list) in nv44_vm_map_sg() argument
92 if (pte & 3) { in nv44_vm_map_sg()
93 u32 max = 4 - (pte & 3); in nv44_vm_map_sg()
95 nv44_vm_fill(pgt, mmu->null, list, pte, part); in nv44_vm_map_sg()
96 pte += part; in nv44_vm_map_sg()
104 nvkm_wo32(pgt, pte++ * 4, tmp[0] >> 0 | tmp[1] << 27); in nv44_vm_map_sg()
105 nvkm_wo32(pgt, pte++ * 4, tmp[1] >> 5 | tmp[2] << 22); in nv44_vm_map_sg()
[all …]
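
nv44 PTEs are 27 bits wide and packed back-to-back, which is why nv44_vm_map_sg() emits those paired shift expressions: one entry can straddle two 32-bit words. A generic sketch of writing the i-th 27-bit field into a word array (LSB-first packing assumed, matching the shifts above; the caller must size the array so words[word + 1] stays in range, which nv44's 4-entry/4-word groups guarantee):

#include <stdint.h>

#define FIELD_BITS 27

static void pack27(uint32_t *words, unsigned i, uint32_t val)
{
        unsigned bit  = i * FIELD_BITS;
        unsigned word = bit / 32;
        unsigned off  = bit % 32;
        uint64_t mask = ((1ULL << FIELD_BITS) - 1) << off;
        /* view the two words the field may straddle as one 64-bit lane */
        uint64_t wide = ((uint64_t)words[word + 1] << 32) | words[word];

        wide = (wide & ~mask) |
               ((uint64_t)(val & ((1u << FIELD_BITS) - 1)) << off);
        words[word]     = (uint32_t)wide;
        words[word + 1] = (uint32_t)(wide >> 32);
}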
gf100.c
104 struct nvkm_mem *mem, u32 pte, u32 cnt, u64 phys, u64 delta) in gf100_vm_map() argument
109 pte <<= 3; in gf100_vm_map()
121 nvkm_wo32(pgt, pte + 0, lower_32_bits(phys)); in gf100_vm_map()
122 nvkm_wo32(pgt, pte + 4, upper_32_bits(phys)); in gf100_vm_map()
124 pte += 8; in gf100_vm_map()
131 struct nvkm_mem *mem, u32 pte, u32 cnt, dma_addr_t *list) in gf100_vm_map_sg() argument
138 pte <<= 3; in gf100_vm_map_sg()
141 nvkm_wo32(pgt, pte + 0, lower_32_bits(phys)); in gf100_vm_map_sg()
142 nvkm_wo32(pgt, pte + 4, upper_32_bits(phys)); in gf100_vm_map_sg()
143 pte += 8; in gf100_vm_map_sg()
[all …]
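
gf100 PTEs are 8 bytes, but the table is written through a 32-bit accessor, so the entry index is first scaled to a byte offset (pte <<= 3) and each entry is stored as a lo/hi pair. A sketch against a plain memory buffer standing in for nvkm_wo32():

#include <stdint.h>
#include <string.h>

static void wo32(uint8_t *pgt, uint32_t off, uint32_t val)
{
        memcpy(pgt + off, &val, sizeof(val));  /* 32-bit table write */
}

static void map_one(uint8_t *pgt, uint32_t pte_index, uint64_t pte_val)
{
        uint32_t off = pte_index << 3;  /* 8 bytes per entry */
        wo32(pgt, off + 0, (uint32_t)pte_val);          /* lower_32_bits */
        wo32(pgt, off + 4, (uint32_t)(pte_val >> 32));  /* upper_32_bits */
}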
nv50.c
78 struct nvkm_mem *mem, u32 pte, u32 cnt, u64 phys, u64 delta) in nv50_vm_map() argument
93 pte <<= 3; in nv50_vm_map()
103 if (cnt >= block && !(pte & (block - 1))) in nv50_vm_map()
117 nvkm_wo32(pgt, pte + 0, offset_l); in nv50_vm_map()
118 nvkm_wo32(pgt, pte + 4, offset_h); in nv50_vm_map()
119 pte += 8; in nv50_vm_map()
128 struct nvkm_mem *mem, u32 pte, u32 cnt, dma_addr_t *list) in nv50_vm_map_sg() argument
131 pte <<= 3; in nv50_vm_map_sg()
135 nvkm_wo32(pgt, pte + 0, lower_32_bits(phys)); in nv50_vm_map_sg()
136 nvkm_wo32(pgt, pte + 4, upper_32_bits(phys)); in nv50_vm_map_sg()
[all …]
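
nv50_vm_map() only emits its larger block size when the starting PTE index is block-aligned and enough entries remain (cnt >= block && !(pte & (block - 1))); otherwise it falls back to smaller pages. That power-of-two alignment test in isolation:

#include <stdbool.h>
#include <stdint.h>

static bool can_use_block(uint32_t pte, uint32_t cnt, uint32_t block)
{
        /* block is a power of two, so (pte & (block - 1)) == 0 tests
         * whether the starting entry sits on a block boundary */
        return cnt >= block && !(pte & (block - 1));
}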
nv41.c
39 struct nvkm_mem *mem, u32 pte, u32 cnt, dma_addr_t *list) in nv41_vm_map_sg() argument
41 pte = pte * 4; in nv41_vm_map_sg()
47 nvkm_wo32(pgt, pte, (phys >> 7) | 1); in nv41_vm_map_sg()
49 pte += 4; in nv41_vm_map_sg()
57 nv41_vm_unmap(struct nvkm_vma *vma, struct nvkm_memory *pgt, u32 pte, u32 cnt) in nv41_vm_unmap() argument
59 pte = pte * 4; in nv41_vm_unmap()
62 nvkm_wo32(pgt, pte, 0x00000000); in nv41_vm_unmap()
63 pte += 4; in nv41_vm_unmap()
nv04.c
37 struct nvkm_mem *mem, u32 pte, u32 cnt, dma_addr_t *list) in nv04_vm_map_sg() argument
39 pte = 0x00008 + (pte * 4); in nv04_vm_map_sg()
45 nvkm_wo32(pgt, pte, phys | 3); in nv04_vm_map_sg()
47 pte += 4; in nv04_vm_map_sg()
55 nv04_vm_unmap(struct nvkm_vma *vma, struct nvkm_memory *pgt, u32 pte, u32 cnt) in nv04_vm_unmap() argument
57 pte = 0x00008 + (pte * 4); in nv04_vm_unmap()
60 nvkm_wo32(pgt, pte, 0x00000000); in nv04_vm_unmap()
61 pte += 4; in nv04_vm_unmap()
base.c
39 u32 pte = (offset & ((1 << mmu->func->pgt_bits) - 1)) >> bits; in nvkm_vm_map_at() local
51 end = (pte + num); in nvkm_vm_map_at()
54 len = end - pte; in nvkm_vm_map_at()
56 mmu->func->map(vma, pgt, node, pte, len, phys, delta); in nvkm_vm_map_at()
59 pte += len; in nvkm_vm_map_at()
63 pte = 0; in nvkm_vm_map_at()
85 u32 pte = (offset & ((1 << mmu->func->pgt_bits) - 1)) >> bits; in nvkm_vm_map_sg_table() local
96 end = pte + sglen; in nvkm_vm_map_sg_table()
99 len = end - pte; in nvkm_vm_map_sg_table()
104 mmu->func->map_sg(vma, pgt, mem, pte, 1, &addr); in nvkm_vm_map_sg_table()
[all …]
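
nvkm_vm_map_at() clips each map call at a page-table boundary and restarts at PTE 0 in the next table. A sketch of that clipping loop; map_range() is a hypothetical stand-in for mmu->func->map and max_pte for the per-table slot count:

#include <stdint.h>

static void map_range(unsigned table, uint32_t pte, uint32_t len)
{
        (void)table; (void)pte; (void)len;  /* stand-in for mmu->func->map */
}

static void map_at(unsigned table, uint32_t pte, uint32_t num, uint32_t max_pte)
{
        while (num) {
                uint32_t end = pte + num;
                if (end > max_pte)
                        end = max_pte;  /* clip at the table boundary */
                uint32_t len = end - pte;

                map_range(table, pte, len);

                num -= len;
                pte += len;
                if (pte >= max_pte) {  /* spill over into the next table */
                        table++;
                        pte = 0;
                }
        }
}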
priv.h
29 struct nvkm_mem *, u32 pte, u32 cnt,
32 struct nvkm_mem *, u32 pte, u32 cnt, dma_addr_t *);
34 u32 pte, u32 cnt);
/drivers/staging/media/atomisp/pci/atomisp2/mmu/
isp_mmu.c
70 unsigned int idx, unsigned int pte) in atomisp_set_pte() argument
73 *(pt_virt + idx) = pte; in atomisp_set_pte()
82 unsigned int pte) in isp_pte_to_pgaddr() argument
84 return mmu->driver->pte_to_phys(mmu, pte); in isp_pte_to_pgaddr()
90 unsigned int pte = mmu->driver->phys_to_pte(mmu, phys); in isp_pgaddr_to_pte_valid() local
91 return (unsigned int) (pte | ISP_PTE_VALID_MASK(mmu)); in isp_pgaddr_to_pte_valid()
173 unsigned int isp_virt, unsigned int pte) in mmu_unmap_l2_pte_error() argument
185 pte); in mmu_unmap_l2_pte_error()
190 unsigned int isp_virt, unsigned int pte) in mmu_unmap_l1_pte_error() argument
198 pte); in mmu_unmap_l1_pte_error()
[all …]
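
The atomisp MMU core never interprets PTE bits itself: phys-to-PTE and PTE-to-phys conversions go through driver callbacks, so one map/unmap engine can serve different ISP generations. The shape of that indirection, sketched (names are illustrative, not the driver's exact layout):

#include <stdint.h>

struct isp_mmu;  /* opaque to the core */

struct isp_mmu_driver {
        uint32_t (*phys_to_pte)(struct isp_mmu *mmu, uint64_t phys);
        uint64_t (*pte_to_phys)(struct isp_mmu *mmu, uint32_t pte);
};

static uint64_t pte_to_pgaddr(const struct isp_mmu_driver *drv,
                              struct isp_mmu *mmu, uint32_t pte)
{
        return drv->pte_to_phys(mmu, pte);  /* core defers to the driver */
}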
sh_mmu_mrfld.c
41 unsigned int pte) in sh_pte_to_phys() argument
44 return (phys_addr_t)((pte & ~mask) << ISP_PAGE_OFFSET); in sh_pte_to_phys()
55 unsigned int pte = sh_phys_to_pte(mmu, phys); in sh_set_pd_base() local
57 atomisp_css_mmu_set_page_table_base_index(HOST_ADDRESS(pte)); in sh_set_pd_base()
64 unsigned int pte = sh_phys_to_pte(mmu, phys); in sh_get_pd_base() local
65 return HOST_ADDRESS(pte); in sh_get_pd_base()
/drivers/gpu/drm/gma500/
gtt.c
90 u32 pte; in psb_gtt_insert() local
111 pte = psb_gtt_mask_pte(page_to_pfn(r->pages[i]), in psb_gtt_insert()
113 iowrite32(pte, gtt_slot++); in psb_gtt_insert()
116 pte = psb_gtt_mask_pte(page_to_pfn(r->pages[i]), in psb_gtt_insert()
118 iowrite32(pte, gtt_slot++); in psb_gtt_insert()
139 u32 pte; in psb_gtt_remove() local
145 pte = psb_gtt_mask_pte(page_to_pfn(dev_priv->scratch_page), in psb_gtt_remove()
149 iowrite32(pte, gtt_slot++); in psb_gtt_remove()
167 u32 pte; in psb_gtt_roll() local
185 pte = psb_gtt_mask_pte(page_to_pfn(r->pages[i]), in psb_gtt_roll()
[all …]
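
psb_gtt_remove() avoids leaving stale translations behind: every vacated slot is rewritten to point at a single scratch page rather than cleared to zero. A sketch of that idea; pfn_to_pte() and its flag bit are fabricated:

#include <stdint.h>

static uint32_t pfn_to_pte(uint32_t pfn)
{
        return (pfn << 12) | 1;  /* fabricated address/valid encoding */
}

static void gtt_remove(volatile uint32_t *gtt_slot, unsigned npages,
                       uint32_t scratch_pfn)
{
        uint32_t pte = pfn_to_pte(scratch_pfn);
        for (unsigned i = 0; i < npages; i++)
                gtt_slot[i] = pte;  /* stands in for iowrite32() */
}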
mmu.c
407 uint32_t pte) in psb_mmu_set_pte() argument
409 pt->v[psb_mmu_pt_index(addr)] = pte; in psb_mmu_set_pte()
665 uint32_t pte; in psb_mmu_insert_pfn_sequence() local
685 pte = psb_mmu_mask_pte(start_pfn++, type); in psb_mmu_insert_pfn_sequence()
686 psb_mmu_set_pte(pt, addr, pte); in psb_mmu_insert_pfn_sequence()
714 uint32_t pte; in psb_mmu_insert_pages() local
747 pte = psb_mmu_mask_pte(page_to_pfn(*pages++), in psb_mmu_insert_pages()
749 psb_mmu_set_pte(pt, addr, pte); in psb_mmu_insert_pages()
/drivers/gpu/drm/i915/
i915_gem_gtt.c
230 gen8_pte_t pte = addr | _PAGE_PRESENT | _PAGE_RW; in gen8_pte_encode() local
233 pte &= ~_PAGE_RW; in gen8_pte_encode()
237 pte |= PPAT_UNCACHED_INDEX; in gen8_pte_encode()
240 pte |= PPAT_DISPLAY_ELLC_INDEX; in gen8_pte_encode()
243 pte |= PPAT_CACHED_INDEX; in gen8_pte_encode()
247 return pte; in gen8_pte_encode()
269 gen6_pte_t pte = GEN6_PTE_VALID; in snb_pte_encode() local
270 pte |= GEN6_PTE_ADDR_ENCODE(addr); in snb_pte_encode()
275 pte |= GEN6_PTE_CACHE_LLC; in snb_pte_encode()
278 pte |= GEN6_PTE_UNCACHED; in snb_pte_encode()
[all …]
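
gen8_pte_encode() ORs the physical address with present/RW bits, strips RW again for read-only mappings, and then selects a PPAT index from the requested cache level. A sketch with fabricated flag values (the kernel's are PPAT_UNCACHED_INDEX and friends):

#include <stdint.h>

enum cache_level { CACHE_NONE, CACHE_LLC, CACHE_WT };

#define PTE_PRESENT   (1ULL << 0)
#define PTE_RW        (1ULL << 1)
#define PPAT_UNCACHED (1ULL << 2)  /* illustrative PPAT index bits */
#define PPAT_CACHED   (1ULL << 7)

static uint64_t pte_encode(uint64_t addr, enum cache_level level, int writable)
{
        uint64_t pte = addr | PTE_PRESENT | PTE_RW;

        if (!writable)
                pte &= ~PTE_RW;

        switch (level) {
        case CACHE_NONE:
                pte |= PPAT_UNCACHED;
                break;
        default:
                pte |= PPAT_CACHED;
                break;
        }
        return pte;
}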
i915_mm.c
38 static int remap_pfn(pte_t *pte, pgtable_t token, in remap_pfn() argument
44 set_pte_at(r->mm, addr, pte, pte_mkspecial(pfn_pte(r->pfn, r->prot))); in remap_pfn()
/drivers/usb/host/
ohci-tilegx.c
99 pte_t pte = { 0 }; in ohci_hcd_tilegx_drv_probe() local
150 pte = pte_set_home(pte, PAGE_HOME_HASH); in ohci_hcd_tilegx_drv_probe()
151 ret = gxio_usb_host_register_client_memory(&pdata->usb_ctx, pte, 0); in ohci_hcd_tilegx_drv_probe()
ehci-tilegx.c
105 pte_t pte = { 0 }; in ehci_hcd_tilegx_drv_probe() local
163 pte = pte_set_home(pte, PAGE_HOME_HASH); in ehci_hcd_tilegx_drv_probe()
164 ret = gxio_usb_host_register_client_memory(&pdata->usb_ctx, pte, 0); in ehci_hcd_tilegx_drv_probe()
/drivers/staging/media/atomisp/pci/atomisp2/include/mmu/
isp_mmu.h
111 unsigned int pte);
129 #define ISP_PTE_VALID(mmu, pte) \ argument
130 ((pte) & ISP_PTE_VALID_MASK(mmu))
/drivers/net/ethernet/tile/
tilepro.c
464 HV_PTE pte = *virt_to_pte(current->mm, (unsigned long)va); in tile_net_provide_needed_buffer() local
465 if (hv_pte_get_mode(pte) != HV_PTE_MODE_CACHE_HASH_L3) in tile_net_provide_needed_buffer()
467 va, hv_pte_get_mode(pte), hv_pte_val(pte)); in tile_net_provide_needed_buffer()
938 .pte = hv_pte(0), in tile_net_open_aux()
941 ea.pte = hv_pte_set_lotar(ea.pte, epp_lotar); in tile_net_open_aux()
942 ea.pte = hv_pte_set_mode(ea.pte, HV_PTE_MODE_CACHE_TILE_L3); in tile_net_open_aux()
1892 HV_PTE pte = *virt_to_pte(current->mm, (unsigned long)data); in tile_net_tx() local
1893 if (hv_pte_get_mode(pte) != HV_PTE_MODE_CACHE_HASH_L3) in tile_net_tx()
1895 data, hv_pte_get_mode(pte), hv_pte_val(pte)); in tile_net_tx()
