/arch/x86/xen/
  grant-table.c
    51  pte_t **ptes;  member
    68  set_pte_at(&init_mm, addr, gnttab_shared_vm_area.ptes[i],  in arch_gnttab_map_shared()
    84  set_pte_at(&init_mm, addr, gnttab_shared_vm_area.ptes[i],  in arch_gnttab_unmap()
    92  area->ptes = kmalloc(sizeof(pte_t *) * nr_frames, GFP_KERNEL);  in arch_gnttab_valloc()
    93  if (area->ptes == NULL)  in arch_gnttab_valloc()
    96  area->area = alloc_vm_area(PAGE_SIZE * nr_frames, area->ptes);  in arch_gnttab_valloc()
    98  kfree(area->ptes);  in arch_gnttab_valloc()
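Taken together, the arch_gnttab_valloc() hits above show a small allocation pattern: a kmalloc'ed array of pte_t pointers, the (older) two-argument alloc_vm_area() filling in one PTE slot per reserved page, and set_pte_at() later wiring the shared grant frames into those slots. A minimal sketch of that pattern follows; the struct and function names (gnttab_vm_area_sketch, gnttab_valloc_sketch) are illustrative, not the exact Xen code.

#include <linux/errno.h>
#include <linux/mm.h>
#include <linux/slab.h>
#include <linux/vmalloc.h>

/* Hypothetical stand-in for the grant-table VM area bookkeeping. */
struct gnttab_vm_area_sketch {
    struct vm_struct *area;
    pte_t **ptes;               /* one PTE slot per reserved page */
};

static int gnttab_valloc_sketch(struct gnttab_vm_area_sketch *area,
                                unsigned int nr_frames)
{
    area->ptes = kmalloc(sizeof(pte_t *) * nr_frames, GFP_KERNEL);
    if (area->ptes == NULL)
        return -ENOMEM;

    /* Reserve VA space; alloc_vm_area() records each page's PTE slot. */
    area->area = alloc_vm_area(PAGE_SIZE * nr_frames, area->ptes);
    if (area->area == NULL) {
        kfree(area->ptes);
        area->ptes = NULL;
        return -ENOMEM;
    }

    /* The map path can now set_pte_at() each area->ptes[i] to point a
     * shared grant frame at the reserved address range. */
    return 0;
}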
/arch/alpha/kernel/
  pci_iommu.c
    84  arena->ptes = __alloc_bootmem_node(NODE_DATA(nid), mem_size, align, 0);  in iommu_arena_new_node()
    85  if (!NODE_DATA(nid) || !arena->ptes) {  in iommu_arena_new_node()
    89  arena->ptes = __alloc_bootmem(mem_size, align, 0);  in iommu_arena_new_node()
    95  arena->ptes = __alloc_bootmem(mem_size, align, 0);  in iommu_arena_new_node()
    124  unsigned long *ptes;  in iommu_arena_find_pages() local
    139  ptes = arena->ptes;  in iommu_arena_find_pages()
    151  if (ptes[p+i])  in iommu_arena_find_pages()
    183  unsigned long *ptes;  in iommu_arena_alloc() local
    189  ptes = arena->ptes;  in iommu_arena_alloc()
    202  ptes[p+i] = IOMMU_INVALID_PTE;  in iommu_arena_alloc()
    [all …]
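The iommu_arena_find_pages()/iommu_arena_alloc() hits above treat the arena's ptes[] array as both the DMA page table and the allocation map: finding room for n pages means finding n consecutive zero entries, which are then stamped with IOMMU_INVALID_PTE to reserve them until real PTEs are filled in by the mapping path. A simplified sketch of that scan (not the exact alpha code; the helper name is made up):

/*
 * Find n consecutive free (zero) entries in ptes[0..nent). The caller
 * then reserves them, e.g. by writing IOMMU_INVALID_PTE as
 * iommu_arena_alloc() does above.
 */
static long arena_find_free_run_sketch(unsigned long *ptes, long nent, long n)
{
    long i, p = 0;

    while (p + n <= nent) {
        for (i = 0; i < n; i++)
            if (ptes[p + i])        /* slot in use */
                break;
        if (i == n)
            return p;               /* n free slots starting at p */
        p += i + 1;                 /* restart just past the busy slot */
    }
    return -1;                      /* arena exhausted */
}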
  core_titan.c
    326  port->tba[0].csr = virt_to_phys(hose->sg_isa->ptes);  in titan_init_one_pachip_port()
    334  port->tba[2].csr = virt_to_phys(hose->sg_pci->ptes);  in titan_init_one_pachip_port()
    461  unsigned long *ptes;  in titan_ioremap() local
    514  ptes = hose->sg_pci->ptes;  in titan_ioremap()
    518  pfn = ptes[baddr >> PAGE_SHIFT];  in titan_ioremap()
    707  pte = aper->arena->ptes[baddr >> PAGE_SHIFT];  in titan_agp_translate()
  pci_impl.h
    138  unsigned long *ptes;  member
  core_marvel.c
    291  csrs->POx_TBASE[0].csr = virt_to_phys(hose->sg_isa->ptes);  in io7_init_hose()
    309  csrs->POx_TBASE[2].csr = virt_to_phys(hose->sg_pci->ptes);  in io7_init_hose()
    685  unsigned long *ptes;  in marvel_ioremap() local
    740  ptes = hose->sg_pci->ptes;  in marvel_ioremap()
    744  pfn = ptes[baddr >> PAGE_SHIFT];  in marvel_ioremap()
    1043  pte = aper->arena->ptes[baddr >> PAGE_SHIFT];  in marvel_agp_translate()
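titan_ioremap(), marvel_ioremap() and the *_agp_translate() hits above all perform the same translation step: index the window's software page table with the bus page number (ptes[baddr >> PAGE_SHIFT]). A hedged sketch of that lookup follows; the PTE decode below (valid bit in bit 0, pfn stored shifted up by one) is an assumption about the chipset encoding, and the helper name is made up.

#include <linux/errno.h>
#include <linux/mm.h>

/*
 * Hypothetical helper: translate a bus address inside the scatter-gather
 * window by indexing the window's software page table.
 */
static int sg_pte_lookup_sketch(unsigned long *sg_ptes, unsigned long baddr,
                                unsigned long *pfn_out)
{
    unsigned long pte = sg_ptes[baddr >> PAGE_SHIFT];

    if (!(pte & 1))             /* assumed valid bit */
        return -EINVAL;

    *pfn_out = pte >> 1;        /* assumed pfn encoding */
    return 0;
}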
  core_cia.c
    460  arena->ptes[4] = pte0;  in verify_tb_operation()
    484  arena->ptes[5] = pte0;  in verify_tb_operation()
    520  arena->ptes[4] = 0;  in verify_tb_operation()
    521  arena->ptes[5] = 0;  in verify_tb_operation()
    733  *(vip)CIA_IOC_PCI_T0_BASE = virt_to_phys(hose->sg_isa->ptes) >> 2;  in do_init_arch()
  core_tsunami.c
    334  pchip->tba[0].csr = virt_to_phys(hose->sg_isa->ptes);  in tsunami_init_one_pchip()
    338  pchip->tba[1].csr = virt_to_phys(hose->sg_pci->ptes);  in tsunami_init_one_pchip()
  core_mcpcia.c
    375  *(vuip)MCPCIA_T0_BASE(mid) = virt_to_phys(hose->sg_isa->ptes) >> 8;  in mcpcia_startup_hose()
    379  *(vuip)MCPCIA_T1_BASE(mid) = virt_to_phys(hose->sg_pci->ptes) >> 8;  in mcpcia_startup_hose()
  core_apecs.c
    359  *(vuip)APECS_IOC_TB2R = virt_to_phys(hose->sg_isa->ptes) >> 1;  in apecs_init_arch()
  core_wildfire.c
    120  pci->pci_window[0].tbase.csr = virt_to_phys(hose->sg_isa->ptes);  in wildfire_init_hose()
    132  pci->pci_window[3].tbase.csr = virt_to_phys(hose->sg_pci->ptes);  in wildfire_init_hose()
  core_lca.c
    284  *(vulp)LCA_IOC_T_BASE0 = virt_to_phys(hose->sg_isa->ptes);  in lca_init_arch()
  core_t2.c
    360  *(vulp)T2_TBASE2 = virt_to_phys(hose->sg_isa->ptes) >> 1;  in t2_sg_map_window2()
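The remaining core_*.c hits (cia, tsunami, mcpcia, apecs, wildfire, lca, t2, plus the titan/marvel init paths above) all follow one pattern: the physical address of the window's ptes[] table is written into the chipset's translated-base register, sometimes shifted to match that chipset's register encoding. A sketch of the pattern with a hypothetical helper; only the virt_to_phys()-and-shift step is taken from the listing.

#include <asm/barrier.h>
#include <linux/io.h>

/*
 * Hypothetical helper: point a chipset DMA window at the arena's ptes[]
 * table. encode_shift models the ">> 1", ">> 2", ">> 8" seen above.
 */
static void program_sg_window_sketch(volatile unsigned long *tbase_reg,
                                     unsigned long *ptes,
                                     unsigned int encode_shift)
{
    *tbase_reg = virt_to_phys(ptes) >> encode_shift;
    mb();   /* make sure the chipset sees the new table base */
}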
/arch/powerpc/platforms/pseries/
  lpar.c
    228  } ptes[4];  in pSeries_lpar_hptab_clear() local
    237  lpar_rc = plpar_pte_read_4_raw(0, i, (void *)ptes);  in pSeries_lpar_hptab_clear()
    241  if ((ptes[j].pteh & HPTE_V_VRMA_MASK) ==  in pSeries_lpar_hptab_clear()
    244  if (ptes[j].pteh & HPTE_V_VALID)  in pSeries_lpar_hptab_clear()
    246  &(ptes[j].pteh), &(ptes[j].ptel));  in pSeries_lpar_hptab_clear()
/arch/powerpc/include/asm/
  plpar_wrappers.h
    209  unsigned long *ptes)  in plpar_pte_read_4_raw() argument
    217  memcpy(ptes, retbuf, 8*sizeof(unsigned long));  in plpar_pte_read_4_raw()
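The lpar.c and plpar_wrappers.h hits together describe how pSeries_lpar_hptab_clear() walks the hashed page table: plpar_pte_read_4_raw() returns four HPTEs per hypercall (four pteh/ptel pairs, hence the 8 longs copied out of the return buffer), VRMA entries are skipped, and every other valid entry is invalidated. A sketch of that loop; the HPTE_* constants come from the powerpc MMU headers, and the invalidate step is stubbed out because the exact hcall wrapper is not shown in the listing.

#include <asm/mmu.h>
#include <asm/plpar_wrappers.h>

/* Placeholder for the hypercall that actually removes an HPTE. */
static void invalidate_hpte_sketch(unsigned long slot,
                                   unsigned long *pteh, unsigned long *ptel)
{
    (void)slot; (void)pteh; (void)ptel;
}

static void hptab_clear_sketch(unsigned long hpte_count)
{
    /* Same shape as the local array in pSeries_lpar_hptab_clear(). */
    struct {
        unsigned long pteh;
        unsigned long ptel;
    } ptes[4];
    unsigned long i, j;

    for (i = 0; i < hpte_count; i += 4) {
        if (plpar_pte_read_4_raw(0, i, (void *)ptes) != H_SUCCESS)
            continue;
        for (j = 0; j < 4; j++) {
            /* Leave the VRMA mapping alone. */
            if ((ptes[j].pteh & HPTE_V_VRMA_MASK) == HPTE_V_VRMA_MASK)
                continue;
            if (ptes[j].pteh & HPTE_V_VALID)
                invalidate_hpte_sketch(i + j, &ptes[j].pteh,
                                       &ptes[j].ptel);
        }
    }
}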
/arch/x86/kvm/
  paging_tmpl.h
    98  pt_element_t ptes[PT_MAX_FULL_LEVELS];  member
    223  pte = orig_pte = walker->ptes[level - 1];  in FNAME()
    260  walker->ptes[level - 1] = pte;  in FNAME()
    365  walker->ptes[walker->level - 1] = pte;  in FNAME()
    521  return r || curr_pte != gw->ptes[level - 1];  in FNAME()
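In paging_tmpl.h, walker->ptes[] caches the guest PTE seen at each level of the gva-to-gpa walk, and the shadow-fetch path later re-reads the same slot and compares it against gw->ptes[level - 1] to detect that the guest changed its page tables in the meantime. A simplified stand-in for that idea; the types and helpers below are made up, not the kvm_mmu API.

#include <linux/types.h>

#define SKETCH_MAX_LEVELS 4

/* Simplified stand-in for the relevant guest_walker fields. */
struct walk_cache_sketch {
    u64 ptes[SKETCH_MAX_LEVELS];    /* guest PTE seen at each level */
    u64 pte_gpa[SKETCH_MAX_LEVELS]; /* guest address that PTE was read from */
};

/* During the walk: remember the value the decision was based on. */
static void record_level_sketch(struct walk_cache_sketch *w, int level,
                                u64 gpa, u64 pte)
{
    w->pte_gpa[level - 1] = gpa;
    w->ptes[level - 1] = pte;
}

/* Before committing shadow entries: did the guest change this PTE? */
static bool walk_is_stale_sketch(struct walk_cache_sketch *w, int level,
                                 u64 curr_pte)
{
    return curr_pte != w->ptes[level - 1];
}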
/arch/powerpc/mm/
  hugetlbpage.c
    444  void *ptes[0];  member
    456  kmem_cache_free(hugepte_cache, batch->ptes[i]);  in hugepd_free_rcu_callback()
    480  (*batchp)->ptes[(*batchp)->index++] = hugepte;  in hugepd_free()
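The hugetlbpage.c hits show a batched RCU free: hugepd_free() queues hugepage PTE-table pointers into a batch (the zero-length ptes[] array), and hugepd_free_rcu_callback() returns the whole batch to its kmem_cache once an RCU grace period guarantees no page-table walker can still see them. A self-contained sketch of that scheme with simplified names and a fixed batch size instead of the real flexible array and per-cpu batch.

#include <linux/kernel.h>
#include <linux/rcupdate.h>
#include <linux/slab.h>

#define HUGEPTE_BATCH_MAX 64            /* arbitrary batch size for the sketch */

struct hugepte_batch_sketch {
    struct rcu_head rcu;
    unsigned int index;
    void *ptes[HUGEPTE_BATCH_MAX];      /* hugepage PTE tables queued for freeing */
};

/* Assumed to be created elsewhere with kmem_cache_create(). */
static struct kmem_cache *hugepte_cache_sketch;

static void hugepte_batch_free_rcu(struct rcu_head *head)
{
    struct hugepte_batch_sketch *batch =
        container_of(head, struct hugepte_batch_sketch, rcu);
    unsigned int i;

    for (i = 0; i < batch->index; i++)
        kmem_cache_free(hugepte_cache_sketch, batch->ptes[i]);
    kfree(batch);
}

static void hugepte_defer_free_sketch(struct hugepte_batch_sketch *batch,
                                      void *hugepte)
{
    batch->ptes[batch->index++] = hugepte;
    if (batch->index == HUGEPTE_BATCH_MAX)
        /* Caller starts a new batch; this one is freed after a grace
         * period, as hugepd_free_rcu_callback() does above. */
        call_rcu(&batch->rcu, hugepte_batch_free_rcu);
}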