Searched refs:pte (Results 1 – 25 of 30) sorted by relevance

/drivers/gpu/drm/nouveau/core/subdev/vm/
nv44.c
42 dma_addr_t *list, u32 pte, u32 cnt) in nv44_vm_fill() argument
44 u32 base = (pte << 2) & ~0x0000000f; in nv44_vm_fill()
54 switch (pte++ & 0x3) { in nv44_vm_fill()
88 struct nouveau_mem *mem, u32 pte, u32 cnt, dma_addr_t *list) in nv44_vm_map_sg() argument
94 if (pte & 3) { in nv44_vm_map_sg()
95 u32 max = 4 - (pte & 3); in nv44_vm_map_sg()
97 nv44_vm_fill(pgt, priv->null, list, pte, part); in nv44_vm_map_sg()
98 pte += part; in nv44_vm_map_sg()
106 nv_wo32(pgt, pte++ * 4, tmp[0] >> 0 | tmp[1] << 27); in nv44_vm_map_sg()
107 nv_wo32(pgt, pte++ * 4, tmp[1] >> 5 | tmp[2] << 22); in nv44_vm_map_sg()
[all …]
nvc0.c
111 struct nouveau_mem *mem, u32 pte, u32 cnt, u64 phys, u64 delta) in nvc0_vm_map() argument
116 pte <<= 3; in nvc0_vm_map()
128 nv_wo32(pgt, pte + 0, lower_32_bits(phys)); in nvc0_vm_map()
129 nv_wo32(pgt, pte + 4, upper_32_bits(phys)); in nvc0_vm_map()
131 pte += 8; in nvc0_vm_map()
137 struct nouveau_mem *mem, u32 pte, u32 cnt, dma_addr_t *list) in nvc0_vm_map_sg() argument
143 pte <<= 3; in nvc0_vm_map_sg()
146 nv_wo32(pgt, pte + 0, lower_32_bits(phys)); in nvc0_vm_map_sg()
147 nv_wo32(pgt, pte + 4, upper_32_bits(phys)); in nvc0_vm_map_sg()
148 pte += 8; in nvc0_vm_map_sg()
[all …]
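
The nvc0.c matches show the NVC0 layout: PTEs are 8 bytes, so the index is turned into a byte offset with pte <<= 3 and each entry is written as two 32-bit words (low half, then high half). A minimal userspace sketch of that offset arithmetic, with a plain array standing in for the nouveau_gpuobj behind nv_wo32() and the driver's valid/memtype bits omitted:

/* Sketch only: models the nvc0-style 8-byte PTE write pattern seen above.
 * A uint32_t array stands in for the GPU object that nv_wo32() writes to. */
#include <stdint.h>
#include <stdio.h>

static uint32_t pgt[64];                    /* fake page table backing store */

static void wo32(uint32_t byte_off, uint32_t val)   /* stand-in for nv_wo32() */
{
        pgt[byte_off / 4] = val;
}

static void map_sg(uint32_t pte, uint32_t cnt, const uint64_t *list)
{
        pte <<= 3;                          /* PTE index -> byte offset (8-byte PTEs) */
        while (cnt--) {
                uint64_t phys = *list++;    /* real code also ORs in valid/memtype bits */
                wo32(pte + 0, (uint32_t)phys);          /* lower_32_bits(phys) */
                wo32(pte + 4, (uint32_t)(phys >> 32));  /* upper_32_bits(phys) */
                pte += 8;
        }
}

int main(void)
{
        uint64_t pages[] = { 0x123456789000ULL, 0x12345678a000ULL };

        map_sg(2, 2, pages);                /* start at PTE index 2 */
        printf("pte[2] = %08x %08x\n", pgt[5], pgt[4]);
        return 0;
}
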
nv50.c
81 struct nouveau_mem *mem, u32 pte, u32 cnt, u64 phys, u64 delta) in nv50_vm_map() argument
95 pte <<= 3; in nv50_vm_map()
104 if (cnt >= block && !(pte & (block - 1))) in nv50_vm_map()
118 nv_wo32(pgt, pte + 0, offset_l); in nv50_vm_map()
119 nv_wo32(pgt, pte + 4, offset_h); in nv50_vm_map()
120 pte += 8; in nv50_vm_map()
128 struct nouveau_mem *mem, u32 pte, u32 cnt, dma_addr_t *list) in nv50_vm_map_sg() argument
131 pte <<= 3; in nv50_vm_map_sg()
134 nv_wo32(pgt, pte + 0, lower_32_bits(phys)); in nv50_vm_map_sg()
135 nv_wo32(pgt, pte + 4, upper_32_bits(phys)); in nv50_vm_map_sg()
[all …]
nv04.c
38 struct nouveau_mem *mem, u32 pte, u32 cnt, dma_addr_t *list) in nv04_vm_map_sg() argument
40 pte = 0x00008 + (pte * 4); in nv04_vm_map_sg()
45 nv_wo32(pgt, pte, phys | 3); in nv04_vm_map_sg()
47 pte += 4; in nv04_vm_map_sg()
54 nv04_vm_unmap(struct nouveau_gpuobj *pgt, u32 pte, u32 cnt) in nv04_vm_unmap() argument
56 pte = 0x00008 + (pte * 4); in nv04_vm_unmap()
58 nv_wo32(pgt, pte, 0x00000000); in nv04_vm_unmap()
59 pte += 4; in nv04_vm_unmap()
nv41.c
42 struct nouveau_mem *mem, u32 pte, u32 cnt, dma_addr_t *list) in nv41_vm_map_sg() argument
44 pte = pte * 4; in nv41_vm_map_sg()
49 nv_wo32(pgt, pte, (phys >> 7) | 1); in nv41_vm_map_sg()
51 pte += 4; in nv41_vm_map_sg()
58 nv41_vm_unmap(struct nouveau_gpuobj *pgt, u32 pte, u32 cnt) in nv41_vm_unmap() argument
60 pte = pte * 4; in nv41_vm_unmap()
62 nv_wo32(pgt, pte, 0x00000000); in nv41_vm_unmap()
63 pte += 4; in nv41_vm_unmap()
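
Between them, the nv04.c and nv41.c matches show the older 32-bit PTE formats: NV04 keeps entries after an 8-byte header (0x00008 + pte * 4) and stores the page address with the low two bits set, while NV41 stores the address shifted right by 7 with bit 0 as the valid bit. A hedged sketch of just that offset and value arithmetic, outside the driver:

/* Sketch only: reproduces the PTE offset/value arithmetic visible in the
 * nv04.c and nv41.c matches; not the actual driver code paths. */
#include <stdint.h>
#include <stdio.h>

/* NV04: entries live after an 8-byte header, value is phys | 3 (valid bits) */
static uint32_t nv04_pte_offset(uint32_t pte) { return 0x00008 + pte * 4; }
static uint32_t nv04_pte_value(uint64_t phys) { return (uint32_t)phys | 3; }

/* NV41: plain 4-byte entries, address stored >> 7 with bit 0 as present */
static uint32_t nv41_pte_offset(uint32_t pte) { return pte * 4; }
static uint32_t nv41_pte_value(uint64_t phys) { return (uint32_t)(phys >> 7) | 1; }

int main(void)
{
        uint64_t phys = 0x1234000;          /* page-aligned DMA address */

        printf("nv04: off=0x%05x val=0x%08x\n",
               nv04_pte_offset(10), nv04_pte_value(phys));
        printf("nv41: off=0x%05x val=0x%08x\n",
               nv41_pte_offset(10), nv41_pte_value(phys));
        return 0;
}
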
base.c
41 u32 pte = (offset & ((1 << vmm->pgt_bits) - 1)) >> bits; in nouveau_vm_map_at() local
53 end = (pte + num); in nouveau_vm_map_at()
56 len = end - pte; in nouveau_vm_map_at()
58 vmm->map(vma, pgt, node, pte, len, phys, delta); in nouveau_vm_map_at()
61 pte += len; in nouveau_vm_map_at()
65 pte = 0; in nouveau_vm_map_at()
86 u32 pte = (offset & ((1 << vmm->pgt_bits) - 1)) >> bits; in nouveau_vm_map_sg_table() local
97 end = pte + sglen; in nouveau_vm_map_sg_table()
100 len = end - pte; in nouveau_vm_map_sg_table()
105 vmm->map_sg(vma, pgt, mem, pte, 1, &addr); in nouveau_vm_map_sg_table()
[all …]
/drivers/iommu/
ipmmu-vmsa.c
523 pte_t *pte; in ipmmu_alloc_pte() local
528 pte = (pte_t *)get_zeroed_page(GFP_ATOMIC); in ipmmu_alloc_pte()
529 if (!pte) in ipmmu_alloc_pte()
532 ipmmu_flush_pgtable(mmu, pte, PAGE_SIZE); in ipmmu_alloc_pte()
533 *pmd = __pmd(__pa(pte) | PMD_NSTABLE | PMD_TYPE_TABLE); in ipmmu_alloc_pte()
536 return pte + pte_index(iova); in ipmmu_alloc_pte()
586 pte_t *pte, *start; in ipmmu_alloc_init_pte() local
589 pte = ipmmu_alloc_pte(mmu, pmd, iova); in ipmmu_alloc_init_pte()
590 if (!pte) in ipmmu_alloc_init_pte()
593 start = pte; in ipmmu_alloc_init_pte()
[all …]
tegra-gart.c
84 unsigned long offs, u32 pte) in gart_set_pte() argument
87 writel(pte, gart->regs + GART_ENTRY_DATA); in gart_set_pte()
90 pte ? "map" : "unmap", offs, pte & GART_PAGE_MASK); in gart_set_pte()
96 unsigned long pte; in gart_read_pte() local
99 pte = readl(gart->regs + GART_ENTRY_DATA); in gart_read_pte()
101 return pte; in gart_read_pte()
123 unsigned long pte; in gart_dump_table() local
125 pte = gart_read_pte(gart, iova); in gart_dump_table()
128 (GART_ENTRY_PHYS_ADDR_VALID & pte) ? "v" : " ", in gart_dump_table()
129 iova, pte & GART_PAGE_MASK); in gart_dump_table()
[all …]
amd_iommu.c
1257 u64 *pte; in increase_address_space() local
1263 pte = (void *)get_zeroed_page(gfp); in increase_address_space()
1264 if (!pte) in increase_address_space()
1267 *pte = PM_LEVEL_PDE(domain->mode, in increase_address_space()
1269 domain->pt_root = pte; in increase_address_space()
1283 u64 *pte, *page; in alloc_pte() local
1291 pte = &domain->pt_root[PM_LEVEL_INDEX(level, address)]; in alloc_pte()
1296 if (!IOMMU_PTE_PRESENT(*pte)) { in alloc_pte()
1300 *pte = PM_LEVEL_PDE(level, virt_to_phys(page)); in alloc_pte()
1304 if (PM_PTE_LEVEL(*pte) != level) in alloc_pte()
[all …]
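
The amd_iommu.c matches outline two standard radix page-table operations: increase_address_space() grows the tree by allocating a new root whose first entry points at the old root, and alloc_pte() walks down from pt_root, allocating any missing intermediate table on the way. The following is a simplified, self-contained model of that walk; it uses 9-bit indexes per level like the real format but plain host pointers instead of the PM_LEVEL_PDE() physical encoding, so it is a structural sketch only:

/* Sketch only: a generic radix page-table walk with on-demand allocation,
 * modelled on the alloc_pte()/increase_address_space() matches above.
 * Host pointers stand in for the AMD physical-address PDE encoding. */
#include <stdint.h>
#include <stdlib.h>
#include <stdio.h>

#define ENTRIES      512                    /* 9 index bits per level */
#define LVL_SHIFT(l) (12 + 9 * (l))         /* level 0 indexes bits 12..20, etc. */

struct table { struct table *next[ENTRIES]; };

static struct table *alloc_table(void) { return calloc(1, sizeof(struct table)); }

/* Walk from the root down to level 0, allocating missing tables on the way,
 * and return the level-0 slot (which would hold an encoded leaf PTE in the
 * real driver, not a pointer). */
static struct table **walk_alloc(struct table *root, int top_level, uint64_t iova)
{
        struct table *t = root;

        for (int level = top_level; level > 0; level--) {
                unsigned idx = (iova >> LVL_SHIFT(level)) & (ENTRIES - 1);

                if (!t->next[idx])
                        t->next[idx] = alloc_table();   /* missing level: allocate */
                t = t->next[idx];
        }
        return &t->next[(iova >> 12) & (ENTRIES - 1)];
}

int main(void)
{
        struct table *root = alloc_table();
        struct table **slot = walk_alloc(root, 2, 0x40201000ULL);

        printf("slot for iova 0x40201000: %p\n", (void *)slot);
        return 0;
}
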
intel-iommu.c
285 static inline void dma_clear_pte(struct dma_pte *pte) in dma_clear_pte() argument
287 pte->val = 0; in dma_clear_pte()
290 static inline u64 dma_pte_addr(struct dma_pte *pte) in dma_pte_addr() argument
293 return pte->val & VTD_PAGE_MASK; in dma_pte_addr()
296 return __cmpxchg64(&pte->val, 0ULL, 0ULL) & VTD_PAGE_MASK; in dma_pte_addr()
300 static inline bool dma_pte_present(struct dma_pte *pte) in dma_pte_present() argument
302 return (pte->val & 3) != 0; in dma_pte_present()
305 static inline bool dma_pte_superpage(struct dma_pte *pte) in dma_pte_superpage() argument
307 return (pte->val & DMA_PTE_LARGE_PAGE); in dma_pte_superpage()
310 static inline int first_pte_in_page(struct dma_pte *pte) in first_pte_in_page() argument
[all …]
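
The intel-iommu.c matches are small accessors over the 64-bit dma_pte: bits 0-1 are the read/write permission bits (so "present" means either is set), a single bit marks a superpage, and the address is the value with the page-offset bits masked off. A standalone sketch of the same accessors; the concrete mask and large-page bit positions below are assumptions (4 KiB pages, PS in bit 7), not taken from the matches:

/* Sketch only: userspace model of the dma_pte accessors matched above.
 * The bit positions (R/W in bits 0-1, large-page bit 7, 4 KiB mask) are
 * assumptions stated in the lead-in, not copied from the driver. */
#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

#define VTD_PAGE_MASK       (~0xfffULL)     /* assumed: drop 4 KiB page offset */
#define DMA_PTE_READ        (1ULL << 0)
#define DMA_PTE_WRITE       (1ULL << 1)
#define DMA_PTE_LARGE_PAGE  (1ULL << 7)     /* assumed superpage (PS) bit */

struct dma_pte { uint64_t val; };

static void dma_clear_pte(struct dma_pte *pte)      { pte->val = 0; }
static uint64_t dma_pte_addr(struct dma_pte *pte)   { return pte->val & VTD_PAGE_MASK; }
static bool dma_pte_present(struct dma_pte *pte)    { return (pte->val & 3) != 0; }
static bool dma_pte_superpage(struct dma_pte *pte)  { return pte->val & DMA_PTE_LARGE_PAGE; }

int main(void)
{
        struct dma_pte pte = { .val = 0x12345000ULL | DMA_PTE_READ | DMA_PTE_WRITE };

        printf("addr=0x%llx present=%d super=%d\n",
               (unsigned long long)dma_pte_addr(&pte),
               dma_pte_present(&pte), dma_pte_superpage(&pte));
        dma_clear_pte(&pte);
        printf("after clear: present=%d\n", dma_pte_present(&pte));
        return 0;
}
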
amd_iommu_types.h
243 #define PM_PTE_LEVEL(pte) (((pte) >> 9) & 0x7ULL) argument
282 #define PTE_PAGE_SIZE(pte) \ argument
283 (1ULL << (1 + ffz(((pte) | 0xfffULL))))
313 #define IOMMU_PTE_PRESENT(pte) ((pte) & IOMMU_PTE_P) argument
314 #define IOMMU_PTE_PAGE(pte) (phys_to_virt((pte) & IOMMU_PAGE_MASK)) argument
315 #define IOMMU_PTE_MODE(pte) (((pte) >> 9) & 0x07) argument
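
The amd_iommu_types.h macros deserve a worked example: the next-level field lives in bits 9-11, and for huge pages the mapped size is encoded as a run of 1s in the low address bits, so ffz(pte | 0xfff) finds the first zero bit at or above bit 12 and 1 << (1 + that) is the page size. A sketch that evaluates the macros on a hand-built 2 MiB PTE; ffz() is re-implemented with a compiler builtin and IOMMU_PTE_P is assumed to be bit 0:

/* Sketch only: evaluates the PM_PTE_LEVEL / PTE_PAGE_SIZE / IOMMU_PTE_PRESENT
 * macros from the amd_iommu_types.h matches on an example PTE.
 * ffz() uses a GCC/Clang builtin; IOMMU_PTE_P as bit 0 is an assumption. */
#include <stdint.h>
#include <stdio.h>

#define IOMMU_PTE_P              (1ULL << 0)          /* assumed present bit */
#define PM_PTE_LEVEL(pte)        (((pte) >> 9) & 0x7ULL)
#define IOMMU_PTE_PRESENT(pte)   ((pte) & IOMMU_PTE_P)
#define PTE_PAGE_SIZE(pte)       (1ULL << (1 + ffz((pte) | 0xfffULL)))

static unsigned long ffz(uint64_t word)     /* index of the first zero bit */
{
        return __builtin_ctzll(~word);
}

int main(void)
{
        /* Huge-page PTE: next-level field 7, address bits 12-19 set to 1 to
         * encode a 2 MiB mapping, physical address 0x40000000, present. */
        uint64_t pte = 0x40000000ULL | 0xff000ULL | (7ULL << 9) | IOMMU_PTE_P;

        printf("level=%llu present=%llu size=%llu KiB\n",
               (unsigned long long)PM_PTE_LEVEL(pte),
               (unsigned long long)IOMMU_PTE_PRESENT(pte),
               (unsigned long long)(PTE_PAGE_SIZE(pte) >> 10));
        return 0;
}
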
tegra-smmu.c
491 unsigned long *pte, struct page *page, int is_pde) in flush_ptc_and_tlb() argument
498 val = SMMU_PTC_FLUSH_TYPE_ADR | VA_PAGE_TO_PA(pte, page); in flush_ptc_and_tlb()
688 unsigned long *pte; in __smmu_iommu_unmap() local
692 pte = locate_pte(as, iova, false, &page, &count); in __smmu_iommu_unmap()
693 if (WARN_ON(!pte)) in __smmu_iommu_unmap()
696 if (WARN_ON(*pte == _PTE_VACANT(iova))) in __smmu_iommu_unmap()
699 *pte = _PTE_VACANT(iova); in __smmu_iommu_unmap()
700 FLUSH_CPU_DCACHE(pte, page, sizeof(*pte)); in __smmu_iommu_unmap()
701 flush_ptc_and_tlb(as->smmu, as, iova, pte, page, 0); in __smmu_iommu_unmap()
710 unsigned long *pte; in __smmu_iommu_map_pfn() local
[all …]
omap-iommu.c
1208 u32 *pgd, *pte; in omap_iommu_iova_to_phys() local
1211 iopgtable_lookup_entry(oiommu, da, &pgd, &pte); in omap_iommu_iova_to_phys()
1213 if (pte) { in omap_iommu_iova_to_phys()
1214 if (iopte_is_small(*pte)) in omap_iommu_iova_to_phys()
1215 ret = omap_iommu_translate(*pte, da, IOPTE_MASK); in omap_iommu_iova_to_phys()
1216 else if (iopte_is_large(*pte)) in omap_iommu_iova_to_phys()
1217 ret = omap_iommu_translate(*pte, da, IOLARGE_MASK); in omap_iommu_iova_to_phys()
1219 dev_err(dev, "bogus pte 0x%x, da 0x%llx", *pte, in omap_iommu_iova_to_phys()
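
The omap-iommu.c matches only show the call sites: the PTE is classified as a small or large page and handed to omap_iommu_translate() with the matching frame mask. The sketch below models the usual composition of frame bits from the PTE with offset bits from the device address; both the mask values (4 KiB and 64 KiB pages) and the translate body are assumptions:

/* Sketch only: models the iova_to_phys composition suggested by the
 * omap-iommu.c matches.  Mask values and translate() body are assumptions
 * (typical 4 KiB / 64 KiB OMAP page sizes), not copied code. */
#include <stdint.h>
#include <stdio.h>

#define IOPTE_MASK    0xfffff000u           /* assumed: 4 KiB small-page frame */
#define IOLARGE_MASK  0xffff0000u           /* assumed: 64 KiB large-page frame */

/* Combine the PTE's frame bits with the in-page offset of the device address */
static uint32_t translate(uint32_t pte, uint32_t da, uint32_t mask)
{
        return (pte & mask) | (da & ~mask);
}

int main(void)
{
        uint32_t small_pte = 0x87654002;    /* low bits carry PTE type/attributes */
        uint32_t large_pte = 0x87650001;
        uint32_t da = 0x10003abc;           /* device (virtual) address */

        printf("small: pa=0x%08x\n", translate(small_pte, da, IOPTE_MASK));
        printf("large: pa=0x%08x\n", translate(large_pte, da, IOLARGE_MASK));
        return 0;
}
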
arm-smmu.c
1283 pte_t *pte, *start; in arm_smmu_alloc_init_pte() local
1326 pte = start; in arm_smmu_alloc_init_pte()
1357 } else if (pte_val(*pte) & in arm_smmu_alloc_init_pte()
1370 sizeof(*pte) * in arm_smmu_alloc_init_pte()
1375 *pte = pfn_pte(pfn, __pgprot(pteval)); in arm_smmu_alloc_init_pte()
1376 } while (pte++, pfn++, addr += PAGE_SIZE, --i); in arm_smmu_alloc_init_pte()
1379 arm_smmu_flush_pgtable(smmu, start, sizeof(*pte) * (pte - start)); in arm_smmu_alloc_init_pte()
1533 pte_t pte; in arm_smmu_iova_to_phys() local
1553 pte = *(pmd_page_vaddr(pmd) + pte_index(iova)); in arm_smmu_iova_to_phys()
1554 if (pte_none(pte)) in arm_smmu_iova_to_phys()
[all …]
/drivers/gpu/drm/gma500/
gtt.c
88 u32 pte; in psb_gtt_insert() local
109 pte = psb_gtt_mask_pte(page_to_pfn(r->pages[i]), in psb_gtt_insert()
111 iowrite32(pte, gtt_slot++); in psb_gtt_insert()
114 pte = psb_gtt_mask_pte(page_to_pfn(r->pages[i]), in psb_gtt_insert()
116 iowrite32(pte, gtt_slot++); in psb_gtt_insert()
137 u32 pte; in psb_gtt_remove() local
143 pte = psb_gtt_mask_pte(page_to_pfn(dev_priv->scratch_page), in psb_gtt_remove()
147 iowrite32(pte, gtt_slot++); in psb_gtt_remove()
165 u32 pte; in psb_gtt_roll() local
183 pte = psb_gtt_mask_pte(page_to_pfn(r->pages[i]), in psb_gtt_roll()
[all …]
mmu.c
407 uint32_t pte) in psb_mmu_set_pte() argument
409 pt->v[psb_mmu_pt_index(addr)] = pte; in psb_mmu_set_pte()
665 uint32_t pte; in psb_mmu_insert_pfn_sequence() local
685 pte = psb_mmu_mask_pte(start_pfn++, type); in psb_mmu_insert_pfn_sequence()
686 psb_mmu_set_pte(pt, addr, pte); in psb_mmu_insert_pfn_sequence()
714 uint32_t pte; in psb_mmu_insert_pages() local
747 pte = psb_mmu_mask_pte(page_to_pfn(*pages++), in psb_mmu_insert_pages()
749 psb_mmu_set_pte(pt, addr, pte); in psb_mmu_insert_pages()
/drivers/gpu/drm/i915/
i915_gem_gtt.c
75 gen8_gtt_pte_t pte = valid ? _PAGE_PRESENT | _PAGE_RW : 0; in gen8_pte_encode() local
76 pte |= addr; in gen8_pte_encode()
80 pte |= PPAT_UNCACHED_INDEX; in gen8_pte_encode()
83 pte |= PPAT_DISPLAY_ELLC_INDEX; in gen8_pte_encode()
86 pte |= PPAT_CACHED_INDEX; in gen8_pte_encode()
90 return pte; in gen8_pte_encode()
110 gen6_gtt_pte_t pte = valid ? GEN6_PTE_VALID : 0; in snb_pte_encode() local
111 pte |= GEN6_PTE_ADDR_ENCODE(addr); in snb_pte_encode()
116 pte |= GEN6_PTE_CACHE_LLC; in snb_pte_encode()
119 pte |= GEN6_PTE_UNCACHED; in snb_pte_encode()
[all …]
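
The i915_gem_gtt.c encoders all follow the same recipe: start from the valid/permission bits, OR in the page-aligned address, then OR in a cache-attribute field chosen from the requested cache level. A sketch of that gen8-style shape; every bit value below is a placeholder for illustration, not the real i915 flag or PPAT index encoding:

/* Sketch only: the "flags | address | cache attribute" PTE-encode pattern
 * from the i915_gem_gtt.c matches.  All bit values are placeholders; the
 * real i915 flag and PPAT index values are not reproduced here. */
#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

enum cache_level { CACHE_NONE, CACHE_LLC, CACHE_WT };

#define PTE_PRESENT   (1ULL << 0)           /* placeholder permission bits */
#define PTE_RW        (1ULL << 1)
#define PTE_UNCACHED  (1ULL << 3)           /* placeholder cache-attr fields */
#define PTE_DISPLAY   (1ULL << 4)
#define PTE_CACHED    (1ULL << 7)

static uint64_t pte_encode(uint64_t addr, enum cache_level level, bool valid)
{
        uint64_t pte = valid ? PTE_PRESENT | PTE_RW : 0;

        pte |= addr;                        /* addr assumed 4 KiB aligned */
        switch (level) {
        case CACHE_NONE: pte |= PTE_UNCACHED; break;
        case CACHE_WT:   pte |= PTE_DISPLAY;  break;
        default:         pte |= PTE_CACHED;   break;
        }
        return pte;
}

int main(void)
{
        printf("pte=0x%llx\n",
               (unsigned long long)pte_encode(0x12345000ULL, CACHE_LLC, true));
        return 0;
}
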
/drivers/lguest/
page_tables.c
242 static void release_pte(pte_t pte) in release_pte() argument
248 if (pte_flags(pte) & _PAGE_PRESENT) in release_pte()
249 put_page(pte_page(pte)); in release_pte()
757 pte_t *pte = find_spte(cpu, switcher_addr + i * PAGE_SIZE, true, in allocate_switcher_mapping() local
759 if (!pte) in allocate_switcher_mapping()
768 if (i == 0 && !(pte_flags(*pte) & _PAGE_PRESENT)) { in allocate_switcher_mapping()
772 set_pte(pte, in allocate_switcher_mapping()
1102 pte_t *pte; in remove_switcher_percpu_map() local
1105 pte = find_spte(cpu, base, false, 0, 0); in remove_switcher_percpu_map()
1106 release_pte(*pte); in remove_switcher_percpu_map()
[all …]
/drivers/usb/host/
ohci-tilegx.c
99 pte_t pte = { 0 }; in ohci_hcd_tilegx_drv_probe() local
150 pte = pte_set_home(pte, PAGE_HOME_HASH); in ohci_hcd_tilegx_drv_probe()
151 ret = gxio_usb_host_register_client_memory(&pdata->usb_ctx, pte, 0); in ohci_hcd_tilegx_drv_probe()
ehci-tilegx.c
105 pte_t pte = { 0 }; in ehci_hcd_tilegx_drv_probe() local
163 pte = pte_set_home(pte, PAGE_HOME_HASH); in ehci_hcd_tilegx_drv_probe()
164 ret = gxio_usb_host_register_client_memory(&pdata->usb_ctx, pte, 0); in ehci_hcd_tilegx_drv_probe()
/drivers/gpu/drm/nouveau/core/include/subdev/
vm.h
83 struct nouveau_mem *, u32 pte, u32 cnt,
86 struct nouveau_mem *, u32 pte, u32 cnt, dma_addr_t *);
87 void (*unmap)(struct nouveau_gpuobj *pgt, u32 pte, u32 cnt);
/drivers/net/ethernet/tile/
tilepro.c
464 HV_PTE pte = *virt_to_pte(current->mm, (unsigned long)va); in tile_net_provide_needed_buffer() local
465 if (hv_pte_get_mode(pte) != HV_PTE_MODE_CACHE_HASH_L3) in tile_net_provide_needed_buffer()
467 va, hv_pte_get_mode(pte), hv_pte_val(pte)); in tile_net_provide_needed_buffer()
941 .pte = hv_pte(0), in tile_net_open_aux()
944 ea.pte = hv_pte_set_lotar(ea.pte, epp_lotar); in tile_net_open_aux()
945 ea.pte = hv_pte_set_mode(ea.pte, HV_PTE_MODE_CACHE_TILE_L3); in tile_net_open_aux()
1896 HV_PTE pte = *virt_to_pte(current->mm, (unsigned long)data); in tile_net_tx() local
1897 if (hv_pte_get_mode(pte) != HV_PTE_MODE_CACHE_HASH_L3) in tile_net_tx()
1899 data, hv_pte_get_mode(pte), hv_pte_val(pte)); in tile_net_tx()
/drivers/misc/sgi-gru/
grufault.c
225 pte_t pte; in atomic_pte_lookup() local
240 pte = *(pte_t *) pmdp; in atomic_pte_lookup()
243 pte = *pte_offset_kernel(pmdp, vaddr); in atomic_pte_lookup()
245 if (unlikely(!pte_present(pte) || in atomic_pte_lookup()
246 (write && (!pte_write(pte) || !pte_dirty(pte))))) in atomic_pte_lookup()
249 *paddr = pte_pfn(pte) << PAGE_SHIFT; in atomic_pte_lookup()
/drivers/md/
dm-switch.c
179 region_table_slot_t pte; in switch_region_table_write() local
183 pte = sctx->region_table[region_index]; in switch_region_table_write()
184 pte &= ~((((region_table_slot_t)1 << sctx->region_table_entry_bits) - 1) << bit); in switch_region_table_write()
185 pte |= (region_table_slot_t)value << bit; in switch_region_table_write()
186 sctx->region_table[region_index] = pte; in switch_region_table_write()
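
The dm-switch.c match is not a page table at all: pte here is one word of a packed region table, and the code is the classic read-modify-write of an n-bit field at an arbitrary bit offset, clearing the old field and ORing in the new value. A small sketch of that update, with an illustrative 64-bit slot and 6-bit field width:

/* Sketch only: the n-bit field read-modify-write from the dm-switch.c match,
 * with a 64-bit slot and a 6-bit field width chosen for illustration. */
#include <stdint.h>
#include <stdio.h>

typedef uint64_t region_table_slot_t;

static void slot_write(region_table_slot_t *slot, unsigned bit,
                       unsigned entry_bits, region_table_slot_t value)
{
        region_table_slot_t pte = *slot;

        pte &= ~((((region_table_slot_t)1 << entry_bits) - 1) << bit);  /* clear field */
        pte |= value << bit;                                            /* insert value */
        *slot = pte;
}

int main(void)
{
        region_table_slot_t slot = ~0ULL;   /* all fields initially 0x3f */

        slot_write(&slot, 12, 6, 0x15);     /* set the 6-bit field at bit 12 */
        printf("slot=0x%016llx\n", (unsigned long long)slot);
        return 0;
}
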
/drivers/char/agp/
amd64-agp.c
50 u32 pte; in amd64_insert_memory() local
86 pte = (tmp & 0x000000ff00000000ULL) >> 28; in amd64_insert_memory()
87 pte |=(tmp & 0x00000000fffff000ULL); in amd64_insert_memory()
88 pte |= GPTE_VALID | GPTE_COHERENT; in amd64_insert_memory()
90 writel(pte, agp_bridge->gatt_table+j); in amd64_insert_memory()
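
The amd64-agp.c match packs a 40-bit physical address into a 32-bit GART PTE: address bits 39:32 are relocated down into bits 11:4 (the >> 28), the 4 KiB-aligned low address bits stay where they are, and valid/coherent flags are ORed in. A sketch of that packing; the GPTE flag bit positions are assumptions:

/* Sketch only: the 40-bit-address -> 32-bit GART PTE packing from the
 * amd64-agp.c match.  GPTE_VALID/GPTE_COHERENT positions are assumptions. */
#include <stdint.h>
#include <stdio.h>

#define GPTE_VALID     1u                   /* assumed flag bits */
#define GPTE_COHERENT  2u

static uint32_t amd64_gart_pte(uint64_t phys)
{
        uint32_t pte;

        pte  = (uint32_t)((phys & 0x000000ff00000000ULL) >> 28); /* bits 39:32 -> 11:4 */
        pte |= (uint32_t)(phys & 0x00000000fffff000ULL);         /* bits 31:12 in place */
        pte |= GPTE_VALID | GPTE_COHERENT;
        return pte;
}

int main(void)
{
        /* a page just above 4 GiB: bits 39:32 = 0x01 */
        printf("pte=0x%08x\n", amd64_gart_pte(0x0000000123456000ULL));
        return 0;
}
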
