Searched refs:vpage (Results 1 – 12 of 12) sorted by relevance
/arch/nios2/mm/

  init.c
      94  unsigned long vpage;    in alloc_kuser_page() local
      96  vpage = get_zeroed_page(GFP_ATOMIC);    in alloc_kuser_page()
      97  if (!vpage)    in alloc_kuser_page()
     101  memcpy((void *)vpage, __kuser_helper_start, kuser_sz);    in alloc_kuser_page()
     103  flush_icache_range(vpage, vpage + KUSER_SIZE);    in alloc_kuser_page()
     104  kuser_page[0] = virt_to_page(vpage);    in alloc_kuser_page()
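The nios2 hits are the kuser helper page setup: a zeroed kernel page is allocated, the helper code is copied into it, the icache is flushed, and the page is remembered for later mapping into user space. A minimal sketch of how those hit lines fit together, assuming the usual bounds symbols and error path (the kuser_sz computation and the return values are assumptions, not verbatim source):

/*
 * Sketch reconstructed from the init.c hits above; the kuser_sz
 * computation and the -ENOMEM error path are assumptions.
 */
static int alloc_kuser_page(void)
{
        extern char __kuser_helper_start[], __kuser_helper_end[];
        int kuser_sz = __kuser_helper_end - __kuser_helper_start;
        unsigned long vpage;

        vpage = get_zeroed_page(GFP_ATOMIC);    /* line 96: one zeroed page */
        if (!vpage)
                return -ENOMEM;

        /* Line 101: copy the kuser helpers into the fresh page ... */
        memcpy((void *)vpage, __kuser_helper_start, kuser_sz);

        /* ... then lines 103-104: sync the icache and remember the page. */
        flush_icache_range(vpage, vpage + KUSER_SIZE);
        kuser_page[0] = virt_to_page(vpage);

        return 0;
}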
/arch/powerpc/kvm/
  trace_pr.h
      42  __field( unsigned long long, vpage )
      52  __entry->vpage = orig_pte->vpage;
      58  __entry->hpteg, __entry->va, __entry->vpage, __entry->hpaddr)
      71  __field( u64, vpage )
      80  __entry->vpage = pte->pte.vpage;
      89  __entry->vpage, __entry->raddr, __entry->flags)
     100  __field( u64, vpage )
     109  __entry->vpage = pte->pte.vpage;
     118  __entry->vpage, __entry->raddr, __entry->flags)
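These trace_pr.h hits are tracepoint plumbing: three events declare a vpage field, fill it from the cached guest PTE, and print it. A minimal sketch of that pattern, with a hypothetical event name and a reduced field list (the real events also record hpteg, va, hpaddr, raddr and flags, as the hits show):

#include <linux/tracepoint.h>

/*
 * Hedged sketch of the TRACE_EVENT pattern behind the hits above.
 * The event name and the reduced field list are illustrative assumptions.
 */
TRACE_EVENT(kvm_book3s_vpage_sketch,
        TP_PROTO(struct hpte_cache *pte),
        TP_ARGS(pte),

        TP_STRUCT__entry(
                __field(u64, vpage)                     /* cf. lines 71/100 */
                __field(ulong, raddr)
        ),

        TP_fast_assign(
                __entry->vpage = pte->pte.vpage;        /* cf. lines 80/109 */
                __entry->raddr = pte->pte.raddr;
        ),

        TP_printk("vpage=0x%llx raddr=0x%lx",
                  __entry->vpage, __entry->raddr)
);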
  book3s_mmu_hpte.c
      37  static inline u64 kvmppc_mmu_hash_vpte(u64 vpage)    in kvmppc_mmu_hash_vpte() argument
      39  return hash_64(vpage & 0xfffffffffULL, HPTEG_HASH_BITS_VPTE);    in kvmppc_mmu_hash_vpte()
      42  static inline u64 kvmppc_mmu_hash_vpte_long(u64 vpage)    in kvmppc_mmu_hash_vpte_long() argument
      44  return hash_64((vpage & 0xffffff000ULL) >> 12,    in kvmppc_mmu_hash_vpte_long()
      49  static inline u64 kvmppc_mmu_hash_vpte_64k(u64 vpage)    in kvmppc_mmu_hash_vpte_64k() argument
      51  return hash_64((vpage & 0xffffffff0ULL) >> 4,    in kvmppc_mmu_hash_vpte_64k()
      75  index = kvmppc_mmu_hash_vpte(pte->pte.vpage);    in kvmppc_mmu_hpte_cache_map()
      79  index = kvmppc_mmu_hash_vpte_long(pte->pte.vpage);    in kvmppc_mmu_hpte_cache_map()
      85  index = kvmppc_mmu_hash_vpte_64k(pte->pte.vpage);    in kvmppc_mmu_hpte_cache_map()
     225  if ((pte->pte.vpage & vp_mask) == guest_vp)    in kvmppc_mmu_pte_vflush_short()
  [all …]
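book3s_mmu_hpte.c hashes vpage at three granularities to index the shadow-PTE cache lists: the full 36-bit virtual page number, a coarser value with the low 12 bits dropped, and a 64K-page value with the low 4 bits dropped. A sketch of the three helpers follows; the second argument of the last two hash_64() calls is truncated in the hits, so the *_LONG and *_64K bit-count names below are assumptions:

#include <linux/hash.h>

/* 4K granularity: hash the low 36 bits of the virtual page number. */
static inline u64 kvmppc_mmu_hash_vpte(u64 vpage)
{
        return hash_64(vpage & 0xfffffffffULL, HPTEG_HASH_BITS_VPTE);
}

/* Coarse ("long") granularity: drop the low 12 bits first. */
static inline u64 kvmppc_mmu_hash_vpte_long(u64 vpage)
{
        return hash_64((vpage & 0xffffff000ULL) >> 12,
                       HPTEG_HASH_BITS_VPTE_LONG);      /* assumed name */
}

/* 64K granularity: drop the low 4 bits first. */
static inline u64 kvmppc_mmu_hash_vpte_64k(u64 vpage)
{
        return hash_64((vpage & 0xffffffff0ULL) >> 4,
                       HPTEG_HASH_BITS_VPTE_64K);       /* assumed name */
}

kvmppc_mmu_hpte_cache_map() (lines 75/79/85) uses these indices when inserting a cached PTE, and kvmppc_mmu_pte_vflush_short() (line 225) masks vpage with vp_mask to decide which cached entries match a guest page being flushed.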
  book3s_32_mmu.c
      87  return pte.vpage;    in kvmppc_mmu_book3s_32_ea_to_vp()
     165  pte->vpage = (((u64)eaddr >> 12) & 0xffff) | vsid;    in kvmppc_mmu_book3s_32_xlate_bat()
     204  pte->vpage = kvmppc_mmu_book3s_32_ea_to_vp(vcpu, eaddr, data);    in kvmppc_mmu_book3s_32_xlate_pte()
     309  pte->vpage = kvmppc_mmu_book3s_32_ea_to_vp(vcpu, eaddr, data);    in kvmppc_mmu_book3s_32_xlate()
  book3s_32_mmu_host.c
     244  orig_pte->vpage, hpaddr);    in kvmppc_mmu_map_page()
     260  kvmppc_mmu_pte_vflush(vcpu, pte->vpage, 0xfffffffffULL);    in kvmppc_mmu_unmap_page()
  book3s_64_mmu.c
     228  gpte->vpage = kvmppc_mmu_book3s_64_ea_to_vp(vcpu, eaddr, data);    in kvmppc_mmu_book3s_64_xlate()
     309  gpte->vpage = kvmppc_mmu_book3s_64_ea_to_vp(vcpu, eaddr, data);    in kvmppc_mmu_book3s_64_xlate()
     339  eaddr, avpn, gpte->vpage, gpte->raddr);    in kvmppc_mmu_book3s_64_xlate()
  book3s_pr.c
     693  pte.vpage = eaddr >> 12;    in kvmppc_handle_pagefault()
     700  pte.vpage |= ((u64)VSID_REAL << (SID_SHIFT - 12));    in kvmppc_handle_pagefault()
     712  pte.vpage |= ((u64)VSID_REAL_DR << (SID_SHIFT - 12));    in kvmppc_handle_pagefault()
     714  pte.vpage |= ((u64)VSID_REAL_IR << (SID_SHIFT - 12));    in kvmppc_handle_pagefault()
     715  pte.vpage |= vsid;    in kvmppc_handle_pagefault()
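In the book3s_pr.c hits, kvmppc_handle_pagefault() builds pte.vpage from the faulting effective address and then tags it with a VSID that depends on the guest's MSR relocation bits. A hedged sketch of how those hit lines appear to combine; the function wrapper, the switch structure and the esid_to_vsid() call are assumptions for illustration, not verbatim source:

/*
 * Hedged sketch: composing a vpage as in the book3s_pr.c hits above.
 * Only the pte.vpage assignments are taken from the hits; the rest of
 * the control flow is assumed.
 */
static u64 sketch_compose_vpage(struct kvm_vcpu *vcpu, ulong eaddr)
{
        struct kvmppc_pte pte = { 0 };
        u64 vsid = 0;

        pte.vpage = eaddr >> 12;                /* line 693: page index */

        switch (kvmppc_get_msr(vcpu) & (MSR_DR | MSR_IR)) {
        case 0:
                /* No address relocation: tag as a "real mode" vpage. */
                pte.vpage |= ((u64)VSID_REAL << (SID_SHIFT - 12));
                break;
        case MSR_DR:
        case MSR_IR:
                /* Exactly one of data/instruction relocation is on. */
                vcpu->arch.mmu.esid_to_vsid(vcpu, eaddr >> SID_SHIFT, &vsid);
                if ((kvmppc_get_msr(vcpu) & (MSR_DR | MSR_IR)) == MSR_DR)
                        pte.vpage |= ((u64)VSID_REAL_DR << (SID_SHIFT - 12));
                else
                        pte.vpage |= ((u64)VSID_REAL_IR << (SID_SHIFT - 12));
                pte.vpage |= vsid;              /* line 715 */
                break;
        default:
                /* Both bits set: the full MMU xlate fills vpage (assumed). */
                break;
        }

        return pte.vpage;
}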
  book3s_64_mmu_host.c
     220  kvmppc_mmu_pte_vflush(vcpu, pte->vpage, mask);    in kvmppc_mmu_unmap_page()
  book3s.c
     465  pte->vpage = VSID_REAL | eaddr >> 12;    in kvmppc_xlate()
  booke.c
    1975  pte->vpage = eaddr >> PAGE_SHIFT;    in kvmppc_xlate()
    2004  pte->vpage = eaddr >> PAGE_SHIFT;    in kvmppc_xlate()
  book3s_64_mmu_hv.c
     384  gpte->vpage = ((v & HPTE_V_AVPN) << 4) | ((eaddr >> 12) & 0xfff);    in kvmppc_mmu_book3s_64_hv_xlate()
/arch/powerpc/include/asm/
  kvm_host.h
     380  u64 vpage;    member
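The last hit is the declaration the other files share: kvm_host.h line 380 declares vpage as a u64 member of the guest translation descriptor. A hedged sketch of that structure as it can be inferred from the call sites above (the struct name and every field other than vpage and raddr are assumptions):

/*
 * Inferred shape of the descriptor that carries vpage. Only vpage
 * (line 380) and raddr are visible in the hits; the struct name and
 * the remaining fields are assumptions based on the call sites.
 */
struct kvmppc_pte {
        ulong   eaddr;          /* guest effective address (assumed) */
        u64     vpage;          /* virtual page number, hashed and flushed above */
        ulong   raddr;          /* guest real address (cf. gpte->raddr, line 339) */
        bool    may_read;       /* permission bits (assumed) */
        bool    may_write;
        bool    may_execute;
};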