Searched refs:pte (Results 1 – 25 of 33) sorted by relevance

/drivers/gpu/drm/nouveau/nvkm/subdev/mmu/
nv44.c
39 dma_addr_t *list, u32 pte, u32 cnt) in nv44_vm_fill() argument
41 u32 base = (pte << 2) & ~0x0000000f; in nv44_vm_fill()
51 switch (pte++ & 0x3) { in nv44_vm_fill()
85 struct nvkm_mem *mem, u32 pte, u32 cnt, dma_addr_t *list) in nv44_vm_map_sg() argument
92 if (pte & 3) { in nv44_vm_map_sg()
93 u32 max = 4 - (pte & 3); in nv44_vm_map_sg()
95 nv44_vm_fill(pgt, mmu->null, list, pte, part); in nv44_vm_map_sg()
96 pte += part; in nv44_vm_map_sg()
104 nvkm_wo32(pgt, pte++ * 4, tmp[0] >> 0 | tmp[1] << 27); in nv44_vm_map_sg()
105 nvkm_wo32(pgt, pte++ * 4, tmp[1] >> 5 | tmp[2] << 22); in nv44_vm_map_sg()
[all …]
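
The nv44 hits above show why nv44_vm_fill() exists: four 27-bit PTEs are bit-packed across 32-bit words, so a partially filled group cannot be written one entry at a time (hence the `pte & 3` alignment handling). A minimal userspace sketch of the group offset and the two word layouts visible in the snippet; the variable names and values are illustrative, not the driver's:

    #include <stdint.h>
    #include <stdio.h>

    int main(void)
    {
        uint32_t pte_idx = 5;
        uint32_t base = (pte_idx << 2) & ~0xfu;    /* byte offset of the 4-entry group */
        uint32_t tmp[3] = { 0x1234567, 0x2345678, 0x3456789 }; /* 27-bit entries */

        uint32_t w0 = tmp[0] >> 0 | tmp[1] << 27;  /* entry 0, plus bits 4:0 of entry 1 */
        uint32_t w1 = tmp[1] >> 5 | tmp[2] << 22;  /* bits 26:5 of entry 1, bits 9:0 of entry 2 */

        printf("base=%u w0=%08x w1=%08x\n", base, w0, w1);
        return 0;
    }
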
gf100.c
104 struct nvkm_mem *mem, u32 pte, u32 cnt, u64 phys, u64 delta) in gf100_vm_map() argument
109 pte <<= 3; in gf100_vm_map()
121 nvkm_wo32(pgt, pte + 0, lower_32_bits(phys)); in gf100_vm_map()
122 nvkm_wo32(pgt, pte + 4, upper_32_bits(phys)); in gf100_vm_map()
124 pte += 8; in gf100_vm_map()
131 struct nvkm_mem *mem, u32 pte, u32 cnt, dma_addr_t *list) in gf100_vm_map_sg() argument
138 pte <<= 3; in gf100_vm_map_sg()
141 nvkm_wo32(pgt, pte + 0, lower_32_bits(phys)); in gf100_vm_map_sg()
142 nvkm_wo32(pgt, pte + 4, upper_32_bits(phys)); in gf100_vm_map_sg()
143 pte += 8; in gf100_vm_map_sg()
[all …]
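
gf100 uses flat 8-byte PTEs: the entry index becomes a byte offset (pte <<= 3) and the 64-bit physical address is written as two 32-bit halves. A small sketch under those assumptions, with wo32() standing in for nvkm_wo32() and the lower/upper_32_bits() helpers open-coded:

    #include <stdint.h>
    #include <stdio.h>

    static uint32_t pgt[64];                     /* toy page-table backing store */

    static void wo32(uint32_t byte_off, uint32_t val)
    {
        pgt[byte_off / 4] = val;
    }

    int main(void)
    {
        uint32_t pte = 3;                        /* entry index */
        uint64_t phys = 0x0000000123456000ULL;

        pte <<= 3;                               /* 8 bytes per PTE */
        wo32(pte + 0, (uint32_t)phys);           /* lower_32_bits(phys) */
        wo32(pte + 4, (uint32_t)(phys >> 32));   /* upper_32_bits(phys) */

        printf("entry 3: hi=%08x lo=%08x\n", pgt[7], pgt[6]);
        return 0;
    }
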
nv50.c
78 struct nvkm_mem *mem, u32 pte, u32 cnt, u64 phys, u64 delta) in nv50_vm_map() argument
93 pte <<= 3; in nv50_vm_map()
103 if (cnt >= block && !(pte & (block - 1))) in nv50_vm_map()
117 nvkm_wo32(pgt, pte + 0, offset_l); in nv50_vm_map()
118 nvkm_wo32(pgt, pte + 4, offset_h); in nv50_vm_map()
119 pte += 8; in nv50_vm_map()
128 struct nvkm_mem *mem, u32 pte, u32 cnt, dma_addr_t *list) in nv50_vm_map_sg() argument
131 pte <<= 3; in nv50_vm_map_sg()
135 nvkm_wo32(pgt, pte + 0, lower_32_bits(phys)); in nv50_vm_map_sg()
136 nvkm_wo32(pgt, pte + 4, upper_32_bits(phys)); in nv50_vm_map_sg()
[all …]
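
The nv50 map path above only takes a large mapping when the remaining count covers a whole block and the PTE index is block-aligned. A sketch isolating that power-of-two test; the function name is hypothetical and `block` must be a power of two:

    #include <stdint.h>
    #include <stdio.h>

    static int can_use_block(uint32_t pte, uint32_t cnt, uint32_t block)
    {
        return cnt >= block && !(pte & (block - 1));
    }

    int main(void)
    {
        printf("%d\n", can_use_block(16, 64, 16)); /* 1: aligned, enough entries */
        printf("%d\n", can_use_block(17, 64, 16)); /* 0: index not block-aligned */
        printf("%d\n", can_use_block(16, 8, 16));  /* 0: fewer than `block` left */
        return 0;
    }
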
nv41.c
39 struct nvkm_mem *mem, u32 pte, u32 cnt, dma_addr_t *list) in nv41_vm_map_sg() argument
41 pte = pte * 4; in nv41_vm_map_sg()
47 nvkm_wo32(pgt, pte, (phys >> 7) | 1); in nv41_vm_map_sg()
49 pte += 4; in nv41_vm_map_sg()
57 nv41_vm_unmap(struct nvkm_vma *vma, struct nvkm_memory *pgt, u32 pte, u32 cnt) in nv41_vm_unmap() argument
59 pte = pte * 4; in nv41_vm_unmap()
62 nvkm_wo32(pgt, pte, 0x00000000); in nv41_vm_unmap()
63 pte += 4; in nv41_vm_unmap()
nv04.c
37 struct nvkm_mem *mem, u32 pte, u32 cnt, dma_addr_t *list) in nv04_vm_map_sg() argument
39 pte = 0x00008 + (pte * 4); in nv04_vm_map_sg()
45 nvkm_wo32(pgt, pte, phys | 3); in nv04_vm_map_sg()
47 pte += 4; in nv04_vm_map_sg()
55 nv04_vm_unmap(struct nvkm_vma *vma, struct nvkm_memory *pgt, u32 pte, u32 cnt) in nv04_vm_unmap() argument
57 pte = 0x00008 + (pte * 4); in nv04_vm_unmap()
60 nvkm_wo32(pgt, pte, 0x00000000); in nv04_vm_unmap()
61 pte += 4; in nv04_vm_unmap()
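
nv41 and nv04 both use flat tables of 4-byte PTEs: scale the index to a byte offset (nv04 additionally starts 8 bytes into the object), fold the physical address into the entry, and set low valid bits. A compact sketch of the two encodings shown above:

    #include <stdint.h>
    #include <stdio.h>

    int main(void)
    {
        uint64_t phys = 0x12345000ULL;

        uint32_t nv41_off = 7 * 4;                     /* pte = pte * 4 */
        uint32_t nv41_ent = (uint32_t)(phys >> 7) | 1; /* address >> 7, plus valid bit */

        uint32_t nv04_off = 0x00008 + 7 * 4;           /* table starts 8 bytes in */
        uint32_t nv04_ent = (uint32_t)phys | 3;        /* address, plus present bits */

        printf("nv41: off=%u ent=%08x\n", nv41_off, nv41_ent);
        printf("nv04: off=%u ent=%08x\n", nv04_off, nv04_ent);
        return 0;
    }
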
base.c
39 u32 pte = (offset & ((1 << mmu->func->pgt_bits) - 1)) >> bits; in nvkm_vm_map_at() local
51 end = (pte + num); in nvkm_vm_map_at()
54 len = end - pte; in nvkm_vm_map_at()
56 mmu->func->map(vma, pgt, node, pte, len, phys, delta); in nvkm_vm_map_at()
59 pte += len; in nvkm_vm_map_at()
63 pte = 0; in nvkm_vm_map_at()
84 u32 pte = (offset & ((1 << mmu->func->pgt_bits) - 1)) >> bits; in nvkm_vm_map_sg_table() local
95 end = pte + sglen; in nvkm_vm_map_sg_table()
98 len = end - pte; in nvkm_vm_map_sg_table()
103 mmu->func->map_sg(vma, pgt, mem, pte, 1, &addr); in nvkm_vm_map_sg_table()
[all …]
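
base.c derives the starting PTE index by masking the VM offset down to a single page table's span and shifting out the page-offset bits, then walks the mapping table by table, resetting pte to 0 at each boundary. A sketch of that index computation; both bit widths are per-MMU parameters in the driver, so the values below are just examples:

    #include <stdint.h>
    #include <stdio.h>

    static uint32_t first_pte(uint64_t offset, int pgt_bits, int bits)
    {
        /* mask to one page table's span, then drop the page-offset bits */
        return (uint32_t)((offset & ((1ULL << pgt_bits) - 1)) >> bits);
    }

    int main(void)
    {
        /* example: 27 bits of VM space per table, 12-bit (4 KiB) pages */
        printf("%u\n", first_pte(0x5000, 27, 12));         /* 5 */
        printf("%u\n", first_pte(0x812345000ULL, 27, 12)); /* 0x2345 = 9029 */
        return 0;
    }
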
priv.h
28 struct nvkm_mem *, u32 pte, u32 cnt,
31 struct nvkm_mem *, u32 pte, u32 cnt, dma_addr_t *);
33 u32 pte, u32 cnt);
/drivers/iommu/
io-pgtable-arm.c
170 #define iopte_deref(pte,d) \ argument
171 (__va((pte) & ((1ULL << ARM_LPAE_MAX_ADDR_BITS) - 1) \
174 #define iopte_type(pte,l) \ argument
175 (((pte) >> ARM_LPAE_PTE_TYPE_SHIFT) & ARM_LPAE_PTE_TYPE_MASK)
177 #define iopte_prot(pte) ((pte) & ARM_LPAE_PTE_ATTR_MASK) argument
179 #define iopte_leaf(pte,l) \ argument
181 (iopte_type(pte,l) == ARM_LPAE_PTE_TYPE_PAGE) : \
182 (iopte_type(pte,l) == ARM_LPAE_PTE_TYPE_BLOCK))
184 #define iopte_to_pfn(pte,d) \ argument
185 (((pte) & ((1ULL << ARM_LPAE_MAX_ADDR_BITS) - 1)) >> (d)->pg_shift)
[all …]
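
The LPAE accessors above read the descriptor type out of the low bits and decide leaf-ness by level: the last level uses page descriptors, upper levels use block descriptors. A userspace mirror of that logic; the shift/mask/type constants are not shown in the hits and are assumed from the LPAE descriptor format, where bits 1:0 hold the type:

    #include <stdint.h>
    #include <stdio.h>

    typedef uint64_t arm_lpae_iopte;

    #define PTE_TYPE_SHIFT 0       /* assumed: type lives in bits 1:0 */
    #define PTE_TYPE_MASK  0x3ULL
    #define PTE_TYPE_BLOCK 1       /* assumed LPAE block descriptor */
    #define PTE_TYPE_PAGE  3       /* assumed LPAE page descriptor */

    static unsigned int iopte_type(arm_lpae_iopte pte)
    {
        return (pte >> PTE_TYPE_SHIFT) & PTE_TYPE_MASK;
    }

    /* a leaf is a page descriptor at the final level, a block descriptor above it */
    static int iopte_leaf(arm_lpae_iopte pte, int lvl, int final_lvl)
    {
        return lvl == final_lvl ? iopte_type(pte) == PTE_TYPE_PAGE
                                : iopte_type(pte) == PTE_TYPE_BLOCK;
    }

    int main(void)
    {
        arm_lpae_iopte block = 0x0000000080000000ULL | PTE_TYPE_BLOCK;
        printf("leaf at lvl 2? %d\n", iopte_leaf(block, 2, 3)); /* 1 */
        printf("leaf at lvl 3? %d\n", iopte_leaf(block, 3, 3)); /* 0 */
        return 0;
    }
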
amd_iommu.c
1198 u64 *pte; in increase_address_space() local
1204 pte = (void *)get_zeroed_page(gfp); in increase_address_space()
1205 if (!pte) in increase_address_space()
1208 *pte = PM_LEVEL_PDE(domain->mode, in increase_address_space()
1210 domain->pt_root = pte; in increase_address_space()
1224 u64 *pte, *page; in alloc_pte() local
1232 pte = &domain->pt_root[PM_LEVEL_INDEX(level, address)]; in alloc_pte()
1237 if (!IOMMU_PTE_PRESENT(*pte)) { in alloc_pte()
1241 *pte = PM_LEVEL_PDE(level, virt_to_phys(page)); in alloc_pte()
1245 if (PM_PTE_LEVEL(*pte) != level) in alloc_pte()
[all …]
rockchip-iommu.c
217 static inline phys_addr_t rk_pte_page_address(u32 pte) in rk_pte_page_address() argument
219 return (phys_addr_t)pte & RK_PTE_PAGE_ADDRESS_MASK; in rk_pte_page_address()
222 static inline bool rk_pte_is_page_valid(u32 pte) in rk_pte_is_page_valid() argument
224 return pte & RK_PTE_PAGE_VALID; in rk_pte_is_page_valid()
237 static u32 rk_mk_pte_invalid(u32 pte) in rk_mk_pte_invalid() argument
239 return pte & ~RK_PTE_PAGE_VALID; in rk_mk_pte_invalid()
420 u32 pte = 0; in log_iova() local
440 pte = *pte_addr; in log_iova()
442 if (!rk_pte_is_page_valid(pte)) in log_iova()
445 page_addr_phys = rk_pte_page_address(pte) + page_offset; in log_iova()
[all …]
tegra-gart.c
94 unsigned long offs, u32 pte) in gart_set_pte() argument
97 writel(pte, gart->regs + GART_ENTRY_DATA); in gart_set_pte()
100 pte ? "map" : "unmap", offs, pte & GART_PAGE_MASK); in gart_set_pte()
106 unsigned long pte; in gart_read_pte() local
109 pte = readl(gart->regs + GART_ENTRY_DATA); in gart_read_pte()
111 return pte; in gart_read_pte()
133 unsigned long pte; in gart_dump_table() local
135 pte = gart_read_pte(gart, iova); in gart_dump_table()
138 (GART_ENTRY_PHYS_ADDR_VALID & pte) ? "v" : " ", in gart_dump_table()
139 iova, pte & GART_PAGE_MASK); in gart_dump_table()
[all …]
amd_iommu_types.h
243 #define PM_PTE_LEVEL(pte) (((pte) >> 9) & 0x7ULL) argument
282 #define PTE_PAGE_SIZE(pte) \ argument
283 (1ULL << (1 + ffz(((pte) | 0xfffULL))))
319 #define IOMMU_PTE_PRESENT(pte) ((pte) & IOMMU_PTE_P) argument
320 #define IOMMU_PTE_PAGE(pte) (phys_to_virt((pte) & IOMMU_PAGE_MASK)) argument
321 #define IOMMU_PTE_MODE(pte) (((pte) >> 9) & 0x07) argument
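
PM_PTE_LEVEL() extracts the next-level field from bits 11:9, and PTE_PAGE_SIZE() decodes AMD's variable page-size encoding, where a run of low 1-bits in the address field stretches the mapping: the first zero bit at position n yields a size of 1 << (n + 1). A worked example with ffz() (find first zero) open-coded for userspace; it assumes the word has at least one zero bit:

    #include <stdint.h>
    #include <stdio.h>

    static unsigned long long ffz(unsigned long long x)
    {
        return __builtin_ctzll(~x);   /* index of the lowest 0 bit */
    }

    #define PM_PTE_LEVEL(pte)  (((pte) >> 9) & 0x7ULL)
    #define PTE_PAGE_SIZE(pte) (1ULL << (1 + ffz((pte) | 0xfffULL)))

    int main(void)
    {
        printf("%llu\n", PM_PTE_LEVEL(0x7ULL << 9));      /* 7 */
        /* bit 12 clear: first zero at 12, size 1 << 13 */
        printf("%llu\n", PTE_PAGE_SIZE(0x123456000ULL));  /* 8192 */
        /* bits 19:12 all set: first zero at 20, size 1 << 21 */
        printf("%llu\n", PTE_PAGE_SIZE(0x1000ff000ULL));  /* 2097152 */
        return 0;
    }
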
intel-iommu.c
329 static inline void dma_clear_pte(struct dma_pte *pte) in dma_clear_pte() argument
331 pte->val = 0; in dma_clear_pte()
334 static inline u64 dma_pte_addr(struct dma_pte *pte) in dma_pte_addr() argument
337 return pte->val & VTD_PAGE_MASK; in dma_pte_addr()
340 return __cmpxchg64(&pte->val, 0ULL, 0ULL) & VTD_PAGE_MASK; in dma_pte_addr()
344 static inline bool dma_pte_present(struct dma_pte *pte) in dma_pte_present() argument
346 return (pte->val & 3) != 0; in dma_pte_present()
349 static inline bool dma_pte_superpage(struct dma_pte *pte) in dma_pte_superpage() argument
351 return (pte->val & DMA_PTE_LARGE_PAGE); in dma_pte_superpage()
354 static inline int first_pte_in_page(struct dma_pte *pte) in first_pte_in_page() argument
[all …]
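
intel-iommu.c treats a PTE as a raw u64: bits 1:0 are the read/write permissions, so "present" means either is set, and a separate bit flags superpages. A minimal mirror of those helpers; DMA_PTE_LARGE_PAGE is assumed to be bit 7 here, since its definition is not in the hits:

    #include <stdint.h>
    #include <stdio.h>

    struct dma_pte { uint64_t val; };

    #define DMA_PTE_READ       (1ULL << 0)
    #define DMA_PTE_WRITE      (1ULL << 1)
    #define DMA_PTE_LARGE_PAGE (1ULL << 7)   /* assumed superpage bit */
    #define VTD_PAGE_MASK      (~0xfffULL)

    static int dma_pte_present(struct dma_pte *pte)
    {
        return (pte->val & 3) != 0;          /* readable or writable */
    }

    static uint64_t dma_pte_addr(struct dma_pte *pte)
    {
        return pte->val & VTD_PAGE_MASK;
    }

    int main(void)
    {
        struct dma_pte pte = { 0x123456000ULL | DMA_PTE_READ | DMA_PTE_LARGE_PAGE };
        printf("present=%d addr=%llx large=%d\n",
               dma_pte_present(&pte),
               (unsigned long long)dma_pte_addr(&pte),
               !!(pte.val & DMA_PTE_LARGE_PAGE));
        return 0;
    }
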
tegra-smmu.c
630 u32 *pte, dma_addr_t pte_dma, u32 val) in tegra_smmu_set_pte() argument
633 unsigned long offset = offset_in_page(pte); in tegra_smmu_set_pte()
635 *pte = val; in tegra_smmu_set_pte()
649 u32 *pte; in tegra_smmu_map() local
651 pte = as_get_pte(as, iova, &pte_dma); in tegra_smmu_map()
652 if (!pte) in tegra_smmu_map()
656 if (*pte == 0) in tegra_smmu_map()
659 tegra_smmu_set_pte(as, iova, pte, pte_dma, in tegra_smmu_map()
670 u32 *pte; in tegra_smmu_unmap() local
672 pte = tegra_smmu_pte_lookup(as, iova, &pte_dma); in tegra_smmu_unmap()
[all …]
/drivers/gpu/drm/gma500/
gtt.c
88 u32 pte; in psb_gtt_insert() local
109 pte = psb_gtt_mask_pte(page_to_pfn(r->pages[i]), in psb_gtt_insert()
111 iowrite32(pte, gtt_slot++); in psb_gtt_insert()
114 pte = psb_gtt_mask_pte(page_to_pfn(r->pages[i]), in psb_gtt_insert()
116 iowrite32(pte, gtt_slot++); in psb_gtt_insert()
137 u32 pte; in psb_gtt_remove() local
143 pte = psb_gtt_mask_pte(page_to_pfn(dev_priv->scratch_page), in psb_gtt_remove()
147 iowrite32(pte, gtt_slot++); in psb_gtt_remove()
165 u32 pte; in psb_gtt_roll() local
183 pte = psb_gtt_mask_pte(page_to_pfn(r->pages[i]), in psb_gtt_roll()
[all …]
mmu.c
407 uint32_t pte) in psb_mmu_set_pte() argument
409 pt->v[psb_mmu_pt_index(addr)] = pte; in psb_mmu_set_pte()
665 uint32_t pte; in psb_mmu_insert_pfn_sequence() local
685 pte = psb_mmu_mask_pte(start_pfn++, type); in psb_mmu_insert_pfn_sequence()
686 psb_mmu_set_pte(pt, addr, pte); in psb_mmu_insert_pfn_sequence()
714 uint32_t pte; in psb_mmu_insert_pages() local
747 pte = psb_mmu_mask_pte(page_to_pfn(*pages++), in psb_mmu_insert_pages()
749 psb_mmu_set_pte(pt, addr, pte); in psb_mmu_insert_pages()
/drivers/lguest/
page_tables.c
242 static void release_pte(pte_t pte) in release_pte() argument
248 if (pte_flags(pte) & _PAGE_PRESENT) in release_pte()
249 put_page(pte_page(pte)); in release_pte()
799 pte_t *pte = find_spte(cpu, switcher_addr + i * PAGE_SIZE, true, in allocate_switcher_mapping() local
801 if (!pte) in allocate_switcher_mapping()
810 if (i == 0 && !(pte_flags(*pte) & _PAGE_PRESENT)) { in allocate_switcher_mapping()
814 set_pte(pte, in allocate_switcher_mapping()
1145 pte_t *pte; in remove_switcher_percpu_map() local
1148 pte = find_spte(cpu, base, false, 0, 0); in remove_switcher_percpu_map()
1149 release_pte(*pte); in remove_switcher_percpu_map()
[all …]
/drivers/gpu/drm/i915/
i915_gem_gtt.c
179 gen8_pte_t pte = valid ? _PAGE_PRESENT | _PAGE_RW : 0; in gen8_pte_encode() local
180 pte |= addr; in gen8_pte_encode()
183 pte &= ~_PAGE_RW; in gen8_pte_encode()
187 pte |= PPAT_UNCACHED_INDEX; in gen8_pte_encode()
190 pte |= PPAT_DISPLAY_ELLC_INDEX; in gen8_pte_encode()
193 pte |= PPAT_CACHED_INDEX; in gen8_pte_encode()
197 return pte; in gen8_pte_encode()
219 gen6_pte_t pte = valid ? GEN6_PTE_VALID : 0; in snb_pte_encode() local
220 pte |= GEN6_PTE_ADDR_ENCODE(addr); in snb_pte_encode()
225 pte |= GEN6_PTE_CACHE_LLC; in snb_pte_encode()
[all …]
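
gen8_pte_encode() above builds a PTE from present/RW flags, ORs in the address, and selects a PPAT index by cache level. The sketch below reproduces only that structure; the PPAT_* bit patterns and the cache-level enum are placeholders, not the driver's real values:

    #include <stdint.h>
    #include <stdio.h>

    typedef uint64_t gen8_pte_t;

    #define _PAGE_PRESENT (1ULL << 0)
    #define _PAGE_RW      (1ULL << 1)

    /* placeholder PPAT index bit patterns, for illustration only */
    #define PPAT_UNCACHED_INDEX     (1ULL << 2)
    #define PPAT_DISPLAY_ELLC_INDEX (1ULL << 3)
    #define PPAT_CACHED_INDEX       (1ULL << 4)

    enum cache_level { LEVEL_NONE, LEVEL_WT, LEVEL_LLC }; /* hypothetical names */

    static gen8_pte_t pte_encode(uint64_t addr, enum cache_level level, int valid)
    {
        gen8_pte_t pte = valid ? _PAGE_PRESENT | _PAGE_RW : 0;

        pte |= addr;
        switch (level) {
        case LEVEL_NONE: pte |= PPAT_UNCACHED_INDEX;     break;
        case LEVEL_WT:   pte |= PPAT_DISPLAY_ELLC_INDEX; break;
        default:         pte |= PPAT_CACHED_INDEX;       break;
        }
        return pte;
    }

    int main(void)
    {
        printf("%llx\n", (unsigned long long)pte_encode(0x100000, LEVEL_LLC, 1));
        return 0;
    }
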
/drivers/usb/host/
ohci-tilegx.c
99 pte_t pte = { 0 }; in ohci_hcd_tilegx_drv_probe() local
150 pte = pte_set_home(pte, PAGE_HOME_HASH); in ohci_hcd_tilegx_drv_probe()
151 ret = gxio_usb_host_register_client_memory(&pdata->usb_ctx, pte, 0); in ohci_hcd_tilegx_drv_probe()
ehci-tilegx.c
105 pte_t pte = { 0 }; in ehci_hcd_tilegx_drv_probe() local
163 pte = pte_set_home(pte, PAGE_HOME_HASH); in ehci_hcd_tilegx_drv_probe()
164 ret = gxio_usb_host_register_client_memory(&pdata->usb_ctx, pte, 0); in ehci_hcd_tilegx_drv_probe()
/drivers/net/ethernet/tile/
tilepro.c
464 HV_PTE pte = *virt_to_pte(current->mm, (unsigned long)va); in tile_net_provide_needed_buffer() local
465 if (hv_pte_get_mode(pte) != HV_PTE_MODE_CACHE_HASH_L3) in tile_net_provide_needed_buffer()
467 va, hv_pte_get_mode(pte), hv_pte_val(pte)); in tile_net_provide_needed_buffer()
938 .pte = hv_pte(0), in tile_net_open_aux()
941 ea.pte = hv_pte_set_lotar(ea.pte, epp_lotar); in tile_net_open_aux()
942 ea.pte = hv_pte_set_mode(ea.pte, HV_PTE_MODE_CACHE_TILE_L3); in tile_net_open_aux()
1893 HV_PTE pte = *virt_to_pte(current->mm, (unsigned long)data); in tile_net_tx() local
1894 if (hv_pte_get_mode(pte) != HV_PTE_MODE_CACHE_HASH_L3) in tile_net_tx()
1896 data, hv_pte_get_mode(pte), hv_pte_val(pte)); in tile_net_tx()
/drivers/md/
dm-switch.c
179 region_table_slot_t pte; in switch_region_table_write() local
183 pte = sctx->region_table[region_index]; in switch_region_table_write()
184 pte &= ~((((region_table_slot_t)1 << sctx->region_table_entry_bits) - 1) << bit); in switch_region_table_write()
185 pte |= (region_table_slot_t)value << bit; in switch_region_table_write()
186 sctx->region_table[region_index] = pte; in switch_region_table_write()
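
dm-switch packs many narrow region entries into each 64-bit table word, so an update is a clear-then-insert on a bit field, exactly as in the snippet above. A generic sketch of that read-modify-write:

    #include <stdint.h>
    #include <stdio.h>

    typedef uint64_t region_table_slot_t;

    static region_table_slot_t set_field(region_table_slot_t word,
                                         unsigned int bit, unsigned int width,
                                         uint64_t value)
    {
        word &= ~((((region_table_slot_t)1 << width) - 1) << bit); /* clear the old entry */
        word |= (region_table_slot_t)value << bit;                 /* insert the new one */
        return word;
    }

    int main(void)
    {
        region_table_slot_t w = ~0ULL;
        printf("%016llx\n", (unsigned long long)set_field(w, 8, 4, 0x5));
        /* fffffffffffff5ff: bits 11:8 replaced by 0101 */
        return 0;
    }
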
/drivers/misc/sgi-gru/
grufault.c
225 pte_t pte; in atomic_pte_lookup() local
240 pte = *(pte_t *) pmdp; in atomic_pte_lookup()
243 pte = *pte_offset_kernel(pmdp, vaddr); in atomic_pte_lookup()
245 if (unlikely(!pte_present(pte) || in atomic_pte_lookup()
246 (write && (!pte_write(pte) || !pte_dirty(pte))))) in atomic_pte_lookup()
249 *paddr = pte_pfn(pte) << PAGE_SHIFT; in atomic_pte_lookup()
/drivers/char/agp/
amd64-agp.c
50 u32 pte; in amd64_insert_memory() local
86 pte = (tmp & 0x000000ff00000000ULL) >> 28; in amd64_insert_memory()
87 pte |=(tmp & 0x00000000fffff000ULL); in amd64_insert_memory()
88 pte |= GPTE_VALID | GPTE_COHERENT; in amd64_insert_memory()
90 writel(pte, agp_bridge->gatt_table+j); in amd64_insert_memory()
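
amd64-agp.c folds a 40-bit physical address into a 32-bit GART PTE: bits 39:32 move down to bits 11:4, bits 31:12 stay in place, and flag bits mark the entry valid and coherent. A worked example; the GPTE_VALID/GPTE_COHERENT values are assumed, since their definitions are not in the hits:

    #include <stdint.h>
    #include <stdio.h>

    #define GPTE_VALID    1u   /* assumed */
    #define GPTE_COHERENT 2u   /* assumed */

    static uint32_t gart_encode(uint64_t tmp)
    {
        uint32_t pte;

        pte  = (uint32_t)((tmp & 0x000000ff00000000ULL) >> 28); /* bits 39:32 -> 11:4 */
        pte |= (uint32_t)(tmp & 0x00000000fffff000ULL);         /* bits 31:12 in place */
        pte |= GPTE_VALID | GPTE_COHERENT;
        return pte;
    }

    int main(void)
    {
        printf("%08x\n", gart_encode(0x0000003412345000ULL));   /* 12345343 */
        return 0;
    }
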
/drivers/xen/
xlate_mmu.c
98 pte_t pte = pte_mkspecial(pfn_pte(page_to_pfn(page), info->prot)); in remap_pte_fn() local
136 set_pte_at(info->vma->vm_mm, addr, ptep, pte); in remap_pte_fn()
