/drivers/gpu/drm/nouveau/core/subdev/vm/ |
D | nv44.c |
     42  dma_addr_t *list, u32 pte, u32 cnt)  in nv44_vm_fill() argument
     44  u32 base = (pte << 2) & ~0x0000000f;  in nv44_vm_fill()
     54  switch (pte++ & 0x3) {  in nv44_vm_fill()
     88  struct nouveau_mem *mem, u32 pte, u32 cnt, dma_addr_t *list)  in nv44_vm_map_sg() argument
     94  if (pte & 3) {  in nv44_vm_map_sg()
     95  u32 max = 4 - (pte & 3);  in nv44_vm_map_sg()
     97  nv44_vm_fill(pgt, priv->null, list, pte, part);  in nv44_vm_map_sg()
     98  pte += part;  in nv44_vm_map_sg()
    106  nv_wo32(pgt, pte++ * 4, tmp[0] >> 0 | tmp[1] << 27);  in nv44_vm_map_sg()
    107  nv_wo32(pgt, pte++ * 4, tmp[1] >> 5 | tmp[2] << 22);  in nv44_vm_map_sg()
    [all …]
|
D | nvc0.c |
    111  struct nouveau_mem *mem, u32 pte, u32 cnt, u64 phys, u64 delta)  in nvc0_vm_map() argument
    116  pte <<= 3;  in nvc0_vm_map()
    128  nv_wo32(pgt, pte + 0, lower_32_bits(phys));  in nvc0_vm_map()
    129  nv_wo32(pgt, pte + 4, upper_32_bits(phys));  in nvc0_vm_map()
    131  pte += 8;  in nvc0_vm_map()
    137  struct nouveau_mem *mem, u32 pte, u32 cnt, dma_addr_t *list)  in nvc0_vm_map_sg() argument
    143  pte <<= 3;  in nvc0_vm_map_sg()
    146  nv_wo32(pgt, pte + 0, lower_32_bits(phys));  in nvc0_vm_map_sg()
    147  nv_wo32(pgt, pte + 4, upper_32_bits(phys));  in nvc0_vm_map_sg()
    148  pte += 8;  in nvc0_vm_map_sg()
    [all …]
|
D | nv50.c |
     81  struct nouveau_mem *mem, u32 pte, u32 cnt, u64 phys, u64 delta)  in nv50_vm_map() argument
     95  pte <<= 3;  in nv50_vm_map()
    104  if (cnt >= block && !(pte & (block - 1)))  in nv50_vm_map()
    118  nv_wo32(pgt, pte + 0, offset_l);  in nv50_vm_map()
    119  nv_wo32(pgt, pte + 4, offset_h);  in nv50_vm_map()
    120  pte += 8;  in nv50_vm_map()
    128  struct nouveau_mem *mem, u32 pte, u32 cnt, dma_addr_t *list)  in nv50_vm_map_sg() argument
    131  pte <<= 3;  in nv50_vm_map_sg()
    134  nv_wo32(pgt, pte + 0, lower_32_bits(phys));  in nv50_vm_map_sg()
    135  nv_wo32(pgt, pte + 4, upper_32_bits(phys));  in nv50_vm_map_sg()
    [all …]
|
D | nv04.c |
     38  struct nouveau_mem *mem, u32 pte, u32 cnt, dma_addr_t *list)  in nv04_vm_map_sg() argument
     40  pte = 0x00008 + (pte * 4);  in nv04_vm_map_sg()
     45  nv_wo32(pgt, pte, phys | 3);  in nv04_vm_map_sg()
     47  pte += 4;  in nv04_vm_map_sg()
     54  nv04_vm_unmap(struct nouveau_gpuobj *pgt, u32 pte, u32 cnt)  in nv04_vm_unmap() argument
     56  pte = 0x00008 + (pte * 4);  in nv04_vm_unmap()
     58  nv_wo32(pgt, pte, 0x00000000);  in nv04_vm_unmap()
     59  pte += 4;  in nv04_vm_unmap()
|
D | nv41.c |
     42  struct nouveau_mem *mem, u32 pte, u32 cnt, dma_addr_t *list)  in nv41_vm_map_sg() argument
     44  pte = pte * 4;  in nv41_vm_map_sg()
     49  nv_wo32(pgt, pte, (phys >> 7) | 1);  in nv41_vm_map_sg()
     51  pte += 4;  in nv41_vm_map_sg()
     58  nv41_vm_unmap(struct nouveau_gpuobj *pgt, u32 pte, u32 cnt)  in nv41_vm_unmap() argument
     60  pte = pte * 4;  in nv41_vm_unmap()
     62  nv_wo32(pgt, pte, 0x00000000);  in nv41_vm_unmap()
     63  pte += 4;  in nv41_vm_unmap()
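The nv41.c hits above imply a very simple GART entry format: one 32-bit word per page, holding the physical address shifted right by 7 with the low bit used as a valid flag, and unmapping just writes zero. Below is a minimal standalone sketch assuming exactly that layout; the array-backed table, helper names, and the main() demo are illustrative stand-ins for the driver's nouveau_gpuobj/nv_wo32() accessors (which is also why the real code multiplies the entry index by 4 to get a byte offset).

#include <stdint.h>
#include <stdio.h>

#define NV41_PTE_VALID 0x00000001u  /* assumed meaning of the low bit */

/* Write cnt entries starting at index pte, one page address per entry. */
static void nv41_sketch_map(uint32_t *pgt, uint32_t pte, uint32_t cnt,
                            const uint64_t *list)
{
        while (cnt--) {
                uint64_t phys = *list++;
                pgt[pte++] = (uint32_t)(phys >> 7) | NV41_PTE_VALID;
        }
}

/* Clear cnt entries starting at index pte. */
static void nv41_sketch_unmap(uint32_t *pgt, uint32_t pte, uint32_t cnt)
{
        while (cnt--)
                pgt[pte++] = 0x00000000;
}

int main(void)
{
        uint32_t pgt[8] = { 0 };
        uint64_t pages[2] = { 0x10000000ull, 0x10001000ull };

        nv41_sketch_map(pgt, 2, 2, pages);
        printf("pte[2]=0x%08x pte[3]=0x%08x\n", pgt[2], pgt[3]);
        nv41_sketch_unmap(pgt, 2, 2);
        return 0;
}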
|
D | base.c |
     41  u32 pte = (offset & ((1 << vmm->pgt_bits) - 1)) >> bits;  in nouveau_vm_map_at() local
     53  end = (pte + num);  in nouveau_vm_map_at()
     56  len = end - pte;  in nouveau_vm_map_at()
     58  vmm->map(vma, pgt, node, pte, len, phys, delta);  in nouveau_vm_map_at()
     61  pte += len;  in nouveau_vm_map_at()
     65  pte = 0;  in nouveau_vm_map_at()
     92  u32 pte = (offset & ((1 << vmm->pgt_bits) - 1)) >> bits;  in nouveau_vm_map_sg_table() local
    103  end = pte + sglen;  in nouveau_vm_map_sg_table()
    106  len = end - pte;  in nouveau_vm_map_sg_table()
    111  vmm->map_sg(vma, pgt, mem, pte, 1, &addr);  in nouveau_vm_map_sg_table()
    [all …]
|
/drivers/gpu/drm/gma500/ |
D | gtt.c |
     87  u32 pte;  in psb_gtt_insert() local
    108  pte = psb_gtt_mask_pte(page_to_pfn(r->pages[i]), 0);  in psb_gtt_insert()
    109  iowrite32(pte, gtt_slot++);  in psb_gtt_insert()
    112  pte = psb_gtt_mask_pte(page_to_pfn(r->pages[i]), 0);  in psb_gtt_insert()
    113  iowrite32(pte, gtt_slot++);  in psb_gtt_insert()
    134  u32 pte;  in psb_gtt_remove() local
    140  pte = psb_gtt_mask_pte(page_to_pfn(dev_priv->scratch_page), 0);  in psb_gtt_remove()
    143  iowrite32(pte, gtt_slot++);  in psb_gtt_remove()
    161  u32 pte;  in psb_gtt_roll() local
    179  pte = psb_gtt_mask_pte(page_to_pfn(r->pages[i]), 0);  in psb_gtt_roll()
    [all …]
|
D | mmu.c |
    436  unsigned long addr, uint32_t pte)  in psb_mmu_set_pte() argument
    438  pt->v[psb_mmu_pt_index(addr)] = pte;  in psb_mmu_set_pte()
    702  uint32_t pte;  in psb_mmu_insert_pfn_sequence() local
    722  pte = psb_mmu_mask_pte(start_pfn++, type);  in psb_mmu_insert_pfn_sequence()
    723  psb_mmu_set_pte(pt, addr, pte);  in psb_mmu_insert_pfn_sequence()
    750  uint32_t pte;  in psb_mmu_insert_pages() local
    785  pte =  in psb_mmu_insert_pages()
    788  psb_mmu_set_pte(pt, addr, pte);  in psb_mmu_insert_pages()
|
/drivers/iommu/ |
D | amd_iommu.c |
   1297  u64 *pte;  in increase_address_space() local
   1303  pte = (void *)get_zeroed_page(gfp);  in increase_address_space()
   1304  if (!pte)  in increase_address_space()
   1307  *pte = PM_LEVEL_PDE(domain->mode,  in increase_address_space()
   1309  domain->pt_root = pte;  in increase_address_space()
   1323  u64 *pte, *page;  in alloc_pte() local
   1331  pte = &domain->pt_root[PM_LEVEL_INDEX(level, address)];  in alloc_pte()
   1336  if (!IOMMU_PTE_PRESENT(*pte)) {  in alloc_pte()
   1340  *pte = PM_LEVEL_PDE(level, virt_to_phys(page));  in alloc_pte()
   1344  if (PM_PTE_LEVEL(*pte) != level)  in alloc_pte()
    [all …]
|
D | tegra-gart.c |
     84  unsigned long offs, u32 pte)  in gart_set_pte() argument
     87  writel(pte, gart->regs + GART_ENTRY_DATA);  in gart_set_pte()
     90  pte ? "map" : "unmap", offs, pte & GART_PAGE_MASK);  in gart_set_pte()
     96  unsigned long pte;  in gart_read_pte() local
     99  pte = readl(gart->regs + GART_ENTRY_DATA);  in gart_read_pte()
    101  return pte;  in gart_read_pte()
    123  unsigned long pte;  in gart_dump_table() local
    125  pte = gart_read_pte(gart, iova);  in gart_dump_table()
    128  (GART_ENTRY_PHYS_ADDR_VALID & pte) ? "v" : " ",  in gart_dump_table()
    129  iova, pte & GART_PAGE_MASK);  in gart_dump_table()
    [all …]
|
D | intel-iommu.c |
    286  static inline void dma_clear_pte(struct dma_pte *pte)  in dma_clear_pte() argument
    288  pte->val = 0;  in dma_clear_pte()
    291  static inline void dma_set_pte_readable(struct dma_pte *pte)  in dma_set_pte_readable() argument
    293  pte->val |= DMA_PTE_READ;  in dma_set_pte_readable()
    296  static inline void dma_set_pte_writable(struct dma_pte *pte)  in dma_set_pte_writable() argument
    298  pte->val |= DMA_PTE_WRITE;  in dma_set_pte_writable()
    301  static inline void dma_set_pte_snp(struct dma_pte *pte)  in dma_set_pte_snp() argument
    303  pte->val |= DMA_PTE_SNP;  in dma_set_pte_snp()
    306  static inline void dma_set_pte_prot(struct dma_pte *pte, unsigned long prot)  in dma_set_pte_prot() argument
    308  pte->val = (pte->val & ~3) | (prot & 3);  in dma_set_pte_prot()
    [all …]
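The intel-iommu helpers above treat a dma_pte as a bare 64-bit value: dma_set_pte_prot() keeps the read/write permissions in the low two bits, and the other helpers OR in individual flags. The sketch below restates that as a self-contained, compilable unit; only the read/write bit positions follow directly from the fragments, and the DMA_PTE_SNP position here is an assumption for illustration.

#include <stdint.h>
#include <stdio.h>

#define DMA_PTE_READ  (1ULL << 0)
#define DMA_PTE_WRITE (1ULL << 1)
#define DMA_PTE_SNP   (1ULL << 11)  /* assumed snoop-control bit position */

struct dma_pte {
        uint64_t val;
};

static inline void dma_clear_pte(struct dma_pte *pte)        { pte->val = 0; }
static inline void dma_set_pte_readable(struct dma_pte *pte) { pte->val |= DMA_PTE_READ; }
static inline void dma_set_pte_writable(struct dma_pte *pte) { pte->val |= DMA_PTE_WRITE; }
static inline void dma_set_pte_snp(struct dma_pte *pte)      { pte->val |= DMA_PTE_SNP; }

static inline void dma_set_pte_prot(struct dma_pte *pte, unsigned long prot)
{
        /* Replace only the low two (read/write) bits, keep everything else. */
        pte->val = (pte->val & ~3ULL) | (prot & 3);
}

int main(void)
{
        struct dma_pte pte = { 0 };

        dma_set_pte_prot(&pte, DMA_PTE_READ | DMA_PTE_WRITE);
        dma_set_pte_snp(&pte);
        printf("pte.val = 0x%llx\n", (unsigned long long)pte.val);
        dma_clear_pte(&pte);
        return 0;
}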
|
D | amd_iommu_types.h |
    236  #define PM_PTE_LEVEL(pte) (((pte) >> 9) & 0x7ULL)  argument
    275  #define PTE_PAGE_SIZE(pte) \  argument
    276  (1ULL << (1 + ffz(((pte) | 0xfffULL))))
    305  #define IOMMU_PTE_PRESENT(pte) ((pte) & IOMMU_PTE_P)  argument
    306  #define IOMMU_PTE_PAGE(pte) (phys_to_virt((pte) & IOMMU_PAGE_MASK))  argument
    307  #define IOMMU_PTE_MODE(pte) (((pte) >> 9) & 0x07)  argument
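PTE_PAGE_SIZE() above is the interesting one: it recovers a large-page size from the PTE itself, assuming the convention that large pages encode their size as a run of 1s in the low address bits, so the size is 1 shifted left by one more than the index of the first zero bit at or above bit 12. A standalone worked example of that arithmetic, using a local ffz() stand-in for the kernel helper:

#include <stdint.h>
#include <stdio.h>

/* Index of the first zero bit (stand-in for the kernel's ffz();
 * assumes word is not all 1s). */
static unsigned int ffz(uint64_t word)
{
        return (unsigned int)__builtin_ctzll(~word);
}

#define PTE_PAGE_SIZE(pte) \
        (1ULL << (1 + ffz(((pte) | 0xfffULL))))

int main(void)
{
        /* Address field with bits 12..19 set and bit 20 clear: the first
         * zero bit is 20, so the encoded page size is 1 << 21 = 2 MiB. */
        uint64_t pte = 0x000ff000ULL;

        printf("page size = %llu bytes\n",
               (unsigned long long)PTE_PAGE_SIZE(pte));
        return 0;
}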
|
D | tegra-smmu.c |
    490  unsigned long *pte, struct page *page, int is_pde)  in flush_ptc_and_tlb() argument
    497  val = SMMU_PTC_FLUSH_TYPE_ADR | VA_PAGE_TO_PA(pte, page);  in flush_ptc_and_tlb()
    687  unsigned long *pte;  in __smmu_iommu_unmap() local
    691  pte = locate_pte(as, iova, false, &page, &count);  in __smmu_iommu_unmap()
    692  if (WARN_ON(!pte))  in __smmu_iommu_unmap()
    695  if (WARN_ON(*pte == _PTE_VACANT(iova)))  in __smmu_iommu_unmap()
    698  *pte = _PTE_VACANT(iova);  in __smmu_iommu_unmap()
    699  FLUSH_CPU_DCACHE(pte, page, sizeof(*pte));  in __smmu_iommu_unmap()
    700  flush_ptc_and_tlb(as->smmu, as, iova, pte, page, 0);  in __smmu_iommu_unmap()
    709  unsigned long *pte;  in __smmu_iommu_map_pfn() local
    [all …]
|
D | omap-iommu.c |
   1227  u32 *pgd, *pte;  in omap_iommu_iova_to_phys() local
   1230  iopgtable_lookup_entry(oiommu, da, &pgd, &pte);  in omap_iommu_iova_to_phys()
   1232  if (pte) {  in omap_iommu_iova_to_phys()
   1233  if (iopte_is_small(*pte))  in omap_iommu_iova_to_phys()
   1234  ret = omap_iommu_translate(*pte, da, IOPTE_MASK);  in omap_iommu_iova_to_phys()
   1235  else if (iopte_is_large(*pte))  in omap_iommu_iova_to_phys()
   1236  ret = omap_iommu_translate(*pte, da, IOLARGE_MASK);  in omap_iommu_iova_to_phys()
   1238  dev_err(dev, "bogus pte 0x%x, da 0x%lx", *pte, da);  in omap_iommu_iova_to_phys()
|
/drivers/lguest/ |
D | page_tables.c |
    242  static void release_pte(pte_t pte)  in release_pte() argument
    248  if (pte_flags(pte) & _PAGE_PRESENT)  in release_pte()
    249  put_page(pte_page(pte));  in release_pte()
    755  pte_t *pte = find_spte(cpu, switcher_addr + i * PAGE_SIZE, true,  in allocate_switcher_mapping() local
    757  if (!pte)  in allocate_switcher_mapping()
    766  if (i == 0 && !(pte_flags(*pte) & _PAGE_PRESENT)) {  in allocate_switcher_mapping()
    770  set_pte(pte,  in allocate_switcher_mapping()
   1100  pte_t *pte;  in remove_switcher_percpu_map() local
   1103  pte = find_spte(cpu, base, false, 0, 0);  in remove_switcher_percpu_map()
   1104  release_pte(*pte);  in remove_switcher_percpu_map()
    [all …]
|
/drivers/usb/host/ |
D | ohci-tilegx.c |
     99  pte_t pte = { 0 };  in ohci_hcd_tilegx_drv_probe() local
    150  pte = pte_set_home(pte, PAGE_HOME_HASH);  in ohci_hcd_tilegx_drv_probe()
    151  ret = gxio_usb_host_register_client_memory(&pdata->usb_ctx, pte, 0);  in ohci_hcd_tilegx_drv_probe()
|
D | ehci-tilegx.c |
    105  pte_t pte = { 0 };  in ehci_hcd_tilegx_drv_probe() local
    163  pte = pte_set_home(pte, PAGE_HOME_HASH);  in ehci_hcd_tilegx_drv_probe()
    164  ret = gxio_usb_host_register_client_memory(&pdata->usb_ctx, pte, 0);  in ehci_hcd_tilegx_drv_probe()
|
/drivers/gpu/drm/nouveau/core/include/subdev/ |
D | vm.h |
     83  struct nouveau_mem *, u32 pte, u32 cnt,
     86  struct nouveau_mem *, u32 pte, u32 cnt, dma_addr_t *);
     87  void (*unmap)(struct nouveau_gpuobj *pgt, u32 pte, u32 cnt);
|
/drivers/gpu/drm/i915/ |
D | i915_gem_gtt.c |
     51  gen6_gtt_pte_t pte = GEN6_PTE_VALID;  in gen6_pte_encode() local
     52  pte |= GEN6_PTE_ADDR_ENCODE(addr);  in gen6_pte_encode()
     58  pte |= GEN6_PTE_CACHE_LLC;  in gen6_pte_encode()
     60  pte |= GEN6_PTE_CACHE_LLC_MLC;  in gen6_pte_encode()
     63  pte |= GEN6_PTE_CACHE_LLC;  in gen6_pte_encode()
     67  pte |= HSW_PTE_UNCACHED;  in gen6_pte_encode()
     69  pte |= GEN6_PTE_UNCACHED;  in gen6_pte_encode()
     75  return pte;  in gen6_pte_encode()
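gen6_pte_encode() above builds a GTT entry by starting from a valid bit, folding in the encoded physical address, and then OR-ing in cache-control bits chosen per cache level (LLC, LLC+MLC, or uncached, with a Haswell-specific uncached value). The sketch below mirrors only that shape; every bit value, the address encoding, and the cache-level enum are illustrative assumptions, not i915's actual definitions.

#include <stdint.h>
#include <stdio.h>

typedef uint32_t gtt_pte_t;

enum sketch_cache_level { CACHE_NONE, CACHE_LLC, CACHE_LLC_MLC };

#define SKETCH_PTE_VALID          (1u << 0)                        /* assumed */
#define SKETCH_PTE_UNCACHED       (1u << 1)                        /* assumed */
#define SKETCH_PTE_CACHE_LLC      (2u << 1)                        /* assumed */
#define SKETCH_PTE_CACHE_LLC_MLC  (3u << 1)                        /* assumed */
#define SKETCH_PTE_ADDR_ENCODE(a) ((uint32_t)((a) & 0xfffff000u))  /* assumed */

static gtt_pte_t sketch_pte_encode(uint64_t addr, enum sketch_cache_level level)
{
        gtt_pte_t pte = SKETCH_PTE_VALID;

        pte |= SKETCH_PTE_ADDR_ENCODE(addr);

        /* Pick cache-control bits per requested cache level. */
        switch (level) {
        case CACHE_LLC:
                pte |= SKETCH_PTE_CACHE_LLC;
                break;
        case CACHE_LLC_MLC:
                pte |= SKETCH_PTE_CACHE_LLC_MLC;
                break;
        case CACHE_NONE:
        default:
                pte |= SKETCH_PTE_UNCACHED;
                break;
        }

        return pte;
}

int main(void)
{
        printf("pte = 0x%08x\n", sketch_pte_encode(0x12345000ull, CACHE_LLC));
        return 0;
}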
|
/drivers/net/ethernet/tile/ |
D | tilepro.c |
    469  HV_PTE pte = *virt_to_pte(current->mm, (unsigned long)va);  in tile_net_provide_needed_buffer() local
    470  if (hv_pte_get_mode(pte) != HV_PTE_MODE_CACHE_HASH_L3)  in tile_net_provide_needed_buffer()
    472  va, hv_pte_get_mode(pte), hv_pte_val(pte));  in tile_net_provide_needed_buffer()
   1023  .pte = hv_pte(0),  in tile_net_open_aux()
   1026  ea.pte = hv_pte_set_lotar(ea.pte, epp_lotar);  in tile_net_open_aux()
   1027  ea.pte = hv_pte_set_mode(ea.pte, HV_PTE_MODE_CACHE_TILE_L3);  in tile_net_open_aux()
   1974  HV_PTE pte = *virt_to_pte(current->mm, (unsigned long)data);  in tile_net_tx() local
   1975  if (hv_pte_get_mode(pte) != HV_PTE_MODE_CACHE_HASH_L3)  in tile_net_tx()
   1977  data, hv_pte_get_mode(pte), hv_pte_val(pte));  in tile_net_tx()
|
/drivers/misc/sgi-gru/ |
D | grufault.c |
    225  pte_t pte;  in atomic_pte_lookup() local
    240  pte = *(pte_t *) pmdp;  in atomic_pte_lookup()
    243  pte = *pte_offset_kernel(pmdp, vaddr);  in atomic_pte_lookup()
    245  if (unlikely(!pte_present(pte) ||  in atomic_pte_lookup()
    246  (write && (!pte_write(pte) || !pte_dirty(pte)))))  in atomic_pte_lookup()
    249  *paddr = pte_pfn(pte) << PAGE_SHIFT;  in atomic_pte_lookup()
|
/drivers/char/agp/ |
D | amd64-agp.c |
     50  u32 pte;  in amd64_insert_memory() local
     86  pte = (tmp & 0x000000ff00000000ULL) >> 28;  in amd64_insert_memory()
     87  pte |=(tmp & 0x00000000fffff000ULL);  in amd64_insert_memory()
     88  pte |= GPTE_VALID | GPTE_COHERENT;  in amd64_insert_memory()
     90  writel(pte, agp_bridge->gatt_table+j);  in amd64_insert_memory()
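The amd64_insert_memory() lines above fold a 40-bit physical page address into a 32-bit GART entry: address bits 12-31 stay in place, bits 32-39 are moved down to bits 4-11 (the >> 28 shift), and valid/coherent flags are OR-ed into the low bits. A standalone sketch of that packing; the GPTE_VALID/GPTE_COHERENT values are assumptions for illustration:

#include <stdint.h>
#include <stdio.h>

#define GPTE_VALID    1u  /* assumed bit 0 */
#define GPTE_COHERENT 2u  /* assumed bit 1 */

static uint32_t amd64_sketch_pack_pte(uint64_t phys)
{
        uint32_t pte;

        pte  = (uint32_t)((phys & 0x000000ff00000000ULL) >> 28); /* bits 32-39 -> 4-11 */
        pte |= (uint32_t)(phys & 0x00000000fffff000ULL);         /* bits 12-31 in place */
        pte |= GPTE_VALID | GPTE_COHERENT;
        return pte;
}

int main(void)
{
        /* A page at the 36-bit physical address 0x912345000 packs to
         * 0x12345093 with the assumed flag values. */
        printf("pte = 0x%08x\n", amd64_sketch_pack_pte(0x912345000ULL));
        return 0;
}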
|
/drivers/xen/xenbus/ |
D | xenbus_client.c |
    478  pte_t *pte;  in xenbus_map_ring_valloc_pv() local
    486  area = alloc_vm_area(PAGE_SIZE, &pte);  in xenbus_map_ring_valloc_pv()
    492  op.host_addr = arbitrary_virt_to_machine(pte).maddr;  in xenbus_map_ring_valloc_pv()
|
/drivers/gpu/drm/radeon/ |
D | radeon_gart.c |
   1030  uint64_t pte;  in radeon_vm_update_ptes() local
   1037  pte = radeon_sa_bo_gpu_addr(vm->page_tables[pt_idx]);  in radeon_vm_update_ptes()
   1038  pte += (addr & mask) * 8;  in radeon_vm_update_ptes()
   1040  if ((last_pte + 8 * count) != pte) {  in radeon_vm_update_ptes()
   1050  last_pte = pte;  in radeon_vm_update_ptes()
|
/drivers/target/sbp/ |
D | sbp_target.c |
   1265  struct sbp_page_table_entry *pte;  in sbp_rw_data() local
   1294  pte = req->pg_tbl;  in sbp_rw_data()
   1300  pte = NULL;  in sbp_rw_data()
   1312  offset = (u64)be16_to_cpu(pte->segment_base_hi) << 32 |  in sbp_rw_data()
   1313  be32_to_cpu(pte->segment_base_lo);  in sbp_rw_data()
   1314  length = be16_to_cpu(pte->segment_length);  in sbp_rw_data()
   1316  pte++;  in sbp_rw_data()
|