Lines matching refs:gpa
Each match gives the source line number, the matching line of code, and the enclosing function; the trailing "local" and "argument" tags mark where gpa is declared in that function.
472 u64 gpa = gfn << PAGE_SHIFT; in mark_mmio_spte() local
476 mask |= gpa | shadow_nonpresent_or_rsvd_mask; in mark_mmio_spte()
477 mask |= (gpa & shadow_nonpresent_or_rsvd_mask) in mark_mmio_spte()
486 u64 gpa = spte & shadow_nonpresent_or_rsvd_lower_gfn_mask; in get_mmio_spte_gfn() local
488 gpa |= (spte >> shadow_nonpresent_or_rsvd_mask_len) in get_mmio_spte_gfn()
491 return gpa >> PAGE_SHIFT; in get_mmio_spte_gfn()
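Taken together, lines 472-491 show mark_mmio_spte() and get_mmio_spte_gfn() acting as inverses: the encoder forces the bits covered by shadow_nonpresent_or_rsvd_mask to 1 (the L1TF mitigation wants not-present SPTEs to point outside cacheable memory), so the GPA bits that would be clobbered are stashed shadow_nonpresent_or_rsvd_mask_len bits higher, and the decoder reassembles them. Below is a minimal userspace sketch of the round trip, assuming PAGE_SHIFT 12, a 5-bit stash shift, and example mask values; the kernel computes the real masks from the CPU's cache and physical bit widths at init.

```c
#include <assert.h>
#include <stdint.h>
#include <stdio.h>

#define PAGE_SHIFT 12
/* Example values; the kernel derives these from x86_cache_bits at init. */
#define RSVD_MASK_LEN 5                        /* shadow_nonpresent_or_rsvd_mask_len */
static const uint64_t rsvd_mask = 0x3e0000000000ULL;      /* bits 41..45 */
static const uint64_t lower_gfn_mask = 0x1fffffff000ULL;  /* bits 12..40 */

/* Encode: set the reserved bits to 1, stash the clobbered GPA bits higher. */
static uint64_t encode_mmio_spte(uint64_t gfn)
{
	uint64_t gpa = gfn << PAGE_SHIFT;
	uint64_t mask = 0;

	mask |= gpa | rsvd_mask;
	mask |= (gpa & rsvd_mask) << RSVD_MASK_LEN;
	return mask;
}

/* Decode: take the untouched low GPA bits, pull the stashed bits back down. */
static uint64_t decode_mmio_gfn(uint64_t spte)
{
	uint64_t gpa = spte & lower_gfn_mask;

	gpa |= (spte >> RSVD_MASK_LEN) & rsvd_mask;
	return gpa >> PAGE_SHIFT;
}

int main(void)
{
	uint64_t gfn = 0x123456789ULL;  /* arbitrary GFN overlapping the mask */

	assert(decode_mmio_gfn(encode_mmio_spte(gfn)) == gfn);
	printf("round trip ok: gfn %#llx\n", (unsigned long long)gfn);
	return 0;
}
```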
3330 static int __direct_map(struct kvm_vcpu *vcpu, gpa_t gpa, int write, in __direct_map() argument
3337 gfn_t gfn = gpa >> PAGE_SHIFT; in __direct_map()
3343 trace_kvm_mmu_spte_requested(gpa, level, pfn); in __direct_map()
3344 for_each_shadow_entry(vcpu, gpa, it) { in __direct_map()
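__direct_map() (line 3330) installs the mapping by walking from the root toward the target level, and for_each_shadow_entry() derives the slot to follow at each level from a 9-bit slice of the GPA. A self-contained sketch of that index math, assuming 4-level paging; this mirrors the kernel's PT64_INDEX() rather than copying it verbatim.

```c
#include <stdint.h>
#include <stdio.h>

#define PAGE_SHIFT      12
#define PT64_LEVEL_BITS 9   /* 512 entries per shadow page table page */

/* Index of the entry that maps 'gpa' at a given paging level (4 = root). */
static unsigned int pt64_index(uint64_t gpa, int level)
{
	return (gpa >> (PAGE_SHIFT + (level - 1) * PT64_LEVEL_BITS)) &
	       ((1u << PT64_LEVEL_BITS) - 1);
}

int main(void)
{
	uint64_t gpa = 0x12345678000ULL; /* arbitrary guest-physical address */

	/* For a 4KiB mapping the walk visits exactly these slots, root first. */
	for (int level = 4; level >= 1; level--)
		printf("level %d -> index %u\n", level, pt64_index(gpa, level));
	return 0;
}
```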
3651 static int nonpaging_map(struct kvm_vcpu *vcpu, gpa_t gpa, u32 error_code, in nonpaging_map() argument
3677 if (fast_page_fault(vcpu, gpa, level, error_code)) in nonpaging_map()
3683 if (try_async_pf(vcpu, prefault, gfn, gpa, &pfn, write, &map_writable)) in nonpaging_map()
3686 if (handle_abnormal_pfn(vcpu, gpa, gfn, pfn, ACC_ALL, &r)) in nonpaging_map()
3697 r = __direct_map(vcpu, gpa, write, map_writable, level, pfn, in nonpaging_map()
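The matches in nonpaging_map() (lines 3651-3697) fix the order of the fault path: try a lockless fast fix first, then kick off a (possibly asynchronous) page-in, screen out error and MMIO pfns, and only then build the mapping. A stubbed, compilable outline follows; the stub bodies are hypothetical placeholders, and only the sequence of checks comes from the matches above.

```c
#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

/* Return codes following the kernel's RET_PF_* convention. */
enum { RET_PF_RETRY = 0, RET_PF_EMULATE = 1, RET_PF_FIXED = 2 };

/* Hypothetical stubs standing in for the real KVM helpers. */
static bool fast_page_fault_stub(uint64_t gpa) { (void)gpa; return false; }
static bool try_async_pf_stub(uint64_t gfn)    { (void)gfn; return false; }
static bool abnormal_pfn_stub(uint64_t pfn)    { (void)pfn; return false; }
static int  direct_map_stub(uint64_t gpa)      { (void)gpa; return RET_PF_FIXED; }

/* Condensed shape of the fault path visible in the matches above. */
static int nonpaging_map_outline(uint64_t gpa)
{
	uint64_t gfn = gpa >> 12;
	uint64_t pfn = 0;

	if (fast_page_fault_stub(gpa))  /* fixed locklessly, e.g. W-protect */
		return RET_PF_RETRY;
	if (try_async_pf_stub(gfn))     /* page queued for async fault-in */
		return RET_PF_RETRY;
	if (abnormal_pfn_stub(pfn))     /* error/noslot pfn: maybe emulate */
		return RET_PF_EMULATE;
	return direct_map_stub(gpa);    /* install sptes down the walk */
}

int main(void)
{
	printf("result: %d\n", nonpaging_map_outline(0x1000));
	return 0;
}
```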
4163 static int nonpaging_page_fault(struct kvm_vcpu *vcpu, gpa_t gpa, in nonpaging_page_fault() argument
4166 gfn_t gfn = gpa >> PAGE_SHIFT; in nonpaging_page_fault()
4170 pgprintk("%s: gva %lx error %x\n", __func__, gpa, error_code); in nonpaging_page_fault()
4182 return nonpaging_map(vcpu, gpa & PAGE_MASK, in nonpaging_page_fault()
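nonpaging_page_fault() (line 4163) splits the faulting address once: the shift yields the gfn and the mask yields a page-aligned gpa for nonpaging_map(). Worth remembering that the kernel's PAGE_MASK clears the offset bits rather than keeping them; a two-line demo:

```c
#include <stdint.h>
#include <stdio.h>

#define PAGE_SHIFT 12
#define PAGE_SIZE  (1ULL << PAGE_SHIFT)
#define PAGE_MASK  (~(PAGE_SIZE - 1))  /* kernel convention: clears the offset */

int main(void)
{
	uint64_t gpa = 0x12345678ULL;

	printf("gfn     = %#llx\n", (unsigned long long)(gpa >> PAGE_SHIFT));
	printf("aligned = %#llx\n", (unsigned long long)(gpa & PAGE_MASK));
	return 0;
}
```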
4283 static int tdp_page_fault(struct kvm_vcpu *vcpu, gpa_t gpa, u32 error_code, in tdp_page_fault() argument
4290 gfn_t gfn = gpa >> PAGE_SHIFT; in tdp_page_fault()
4317 if (fast_page_fault(vcpu, gpa, level, error_code)) in tdp_page_fault()
4323 if (try_async_pf(vcpu, prefault, gfn, gpa, &pfn, write, &map_writable)) in tdp_page_fault()
4337 r = __direct_map(vcpu, gpa, write, map_writable, level, pfn, in tdp_page_fault()
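tdp_page_fault() (line 4283) runs the same sequence as nonpaging_map() but first settles the mapping level; once a large level is chosen, the base gfn is aligned down to that level's boundary, which the kernel expresses as gfn &= ~(KVM_PAGES_PER_HPAGE(level) - 1). A small sketch of that alignment, with the 9-bits-per-level constant assumed:

```c
#include <stdint.h>
#include <stdio.h>

#define PT64_LEVEL_BITS 9

/* 4KiB pages covered by one mapping at 'level' (1 = 4K, 2 = 2M, 3 = 1G). */
static uint64_t pages_per_hpage(int level)
{
	return 1ULL << ((level - 1) * PT64_LEVEL_BITS);
}

int main(void)
{
	uint64_t gfn = 0x12345;

	/* Align the base gfn down to the chosen level's boundary. */
	for (int level = 1; level <= 3; level++)
		printf("level %d: base gfn %#llx\n", level,
		       (unsigned long long)(gfn & ~(pages_per_hpage(level) - 1)));
	return 0;
}
```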
5324 static u64 mmu_pte_write_fetch_gpte(struct kvm_vcpu *vcpu, gpa_t *gpa, in mmu_pte_write_fetch_gpte() argument
5337 *gpa &= ~(gpa_t)7; in mmu_pte_write_fetch_gpte()
5342 r = kvm_vcpu_read_guest_atomic(vcpu, *gpa, &gentry, *bytes); in mmu_pte_write_fetch_gpte()
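In mmu_pte_write_fetch_gpte() (line 5324), the *gpa &= ~(gpa_t)7 on line 5337 rounds the write address down to an 8-byte boundary: a 32-bit PAE guest updates a 64-bit gpte in two 4-byte halves, so the fetch is widened to re-read the whole entry atomically (line 5342). The address math in isolation, with the surrounding is_pae() check assumed rather than shown:

```c
#include <stdint.h>
#include <stdio.h>

/* Widen a 4-byte write to a PAE gpte into a fetch of the whole 8-byte entry. */
static void widen_pae_gpte_write(uint64_t *gpa, int *bytes)
{
	if (*bytes == 4) {
		*gpa &= ~(uint64_t)7;  /* round down to the gpte boundary */
		*bytes = 8;            /* re-read the full 64-bit entry */
	}
}

int main(void)
{
	uint64_t gpa = 0x1004;  /* guest writes the high half of a gpte */
	int bytes = 4;

	widen_pae_gpte_write(&gpa, &bytes);
	printf("fetch %d bytes at %#llx\n", bytes, (unsigned long long)gpa);
	return 0;
}
```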
5371 static bool detect_write_misaligned(struct kvm_mmu_page *sp, gpa_t gpa, in detect_write_misaligned() argument
5377 gpa, bytes, sp->role.word); in detect_write_misaligned()
5379 offset = offset_in_page(gpa); in detect_write_misaligned()
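The core of detect_write_misaligned() (line 5371) is a bit trick: XOR the page offsets of the first and last byte written and mask off the in-pte bits; the result is nonzero exactly when the write straddles a pte boundary. A runnable check of that test; the bytes < 4 term and the pte-size parameter follow the kernel's logic, while its 1-byte-write whitelist case is omitted here:

```c
#include <stdbool.h>
#include <stdio.h>

/* True if a write of 'bytes' at page offset 'offset' straddles a
 * pte_size-aligned entry boundary, or is a sub-word write. */
static bool write_misaligned(unsigned offset, int bytes, unsigned pte_size)
{
	unsigned misaligned;

	misaligned = (offset ^ (offset + bytes - 1)) & ~(pte_size - 1);
	return misaligned || bytes < 4;
}

int main(void)
{
	/* 8-byte gptes: offset 8 stays in one pte; offset 12 spans two. */
	printf("offset 8,  8 bytes: %d\n", write_misaligned(8, 8, 8));   /* 0 */
	printf("offset 12, 8 bytes: %d\n", write_misaligned(12, 8, 8));  /* 1 */
	return 0;
}
```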
5395 static u64 *get_written_sptes(struct kvm_mmu_page *sp, gpa_t gpa, int *nspte) in get_written_sptes() argument
5401 page_offset = offset_in_page(gpa); in get_written_sptes()
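get_written_sptes() (line 5395) converts the page offset of the guest write into a slot in the shadow page's spte array; for 8-byte gptes that is a plain division, while the 32-bit-gpte path (not visible in the matches) doubles the offset and can target two pdes. The simple case:

```c
#include <stdint.h>
#include <stdio.h>

/* 8-byte-gpte case: the written spte slot is the offset scaled down. */
static unsigned written_spte_index(unsigned page_offset)
{
	return page_offset / sizeof(uint64_t);
}

int main(void)
{
	printf("offset 0x828 -> spte index %u\n", written_spte_index(0x828));
	return 0;
}
```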
5426 static void kvm_mmu_pte_write(struct kvm_vcpu *vcpu, gpa_t gpa, in kvm_mmu_pte_write() argument
5430 gfn_t gfn = gpa >> PAGE_SHIFT; in kvm_mmu_pte_write()
5446 pgprintk("%s: gpa %llx bytes %d\n", __func__, gpa, bytes); in kvm_mmu_pte_write()
5457 gentry = mmu_pte_write_fetch_gpte(vcpu, &gpa, &bytes); in kvm_mmu_pte_write()
5463 if (detect_write_misaligned(sp, gpa, bytes) || in kvm_mmu_pte_write()
5470 spte = get_written_sptes(sp, gpa, &npte); in kvm_mmu_pte_write()
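kvm_mmu_pte_write() (line 5426) glues the three helpers above together: fetch the written gpte (line 5457), then for every shadow page backed by that gfn either zap it when the write is misaligned or flooded (line 5463) or patch the touched sptes (line 5470). A condensed, stubbed outline; the stub names and bodies are hypothetical, and only the call order is taken from the matches:

```c
#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

/* Hypothetical stubs; only the call order below comes from the matches. */
static uint64_t fetch_gpte(uint64_t *gpa, int *bytes)
{
	(void)gpa; (void)bytes;
	return 0;  /* mmu_pte_write_fetch_gpte() would read the guest pte */
}
static bool misaligned(uint64_t gpa, int bytes) { (void)gpa; (void)bytes; return false; }
static void zap_shadow_page(uint64_t gfn) { printf("zap gfn %#llx\n", (unsigned long long)gfn); }
static void update_sptes(uint64_t gpa, uint64_t gentry) { (void)gpa; (void)gentry; }

static void pte_write_outline(uint64_t gpa, int bytes)
{
	uint64_t gfn = gpa >> 12;
	uint64_t gentry = fetch_gpte(&gpa, &bytes);

	/* for each shadow page whose guest page table lives at 'gfn': */
	if (misaligned(gpa, bytes))     /* detect_write_misaligned() */
		zap_shadow_page(gfn);
	else
		update_sptes(gpa, gentry);  /* via get_written_sptes() */
}

int main(void)
{
	pte_write_outline(0x1234008, 8);
	return 0;
}
```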
5492 gpa_t gpa; in kvm_mmu_unprotect_page_virt() local
5498 gpa = kvm_mmu_gva_to_gpa_read(vcpu, gva, NULL); in kvm_mmu_unprotect_page_virt()
5500 r = kvm_mmu_unprotect_page(vcpu->kvm, gpa >> PAGE_SHIFT); in kvm_mmu_unprotect_page_virt()
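The final matches (lines 5492-5500) belong to a short function: translate the faulting gva with kvm_mmu_gva_to_gpa_read(), then drop write protection on the shadow page covering that gfn so emulation can make progress. A reconstruction of its likely shape; the direct-map early return reflects my reading of kernels of this vintage and may differ in yours:

```c
int kvm_mmu_unprotect_page_virt(struct kvm_vcpu *vcpu, gva_t gva)
{
	gpa_t gpa;
	int r;

	/* Direct (TDP) MMUs never write-protect guest page tables here. */
	if (vcpu->arch.mmu->direct_map)
		return 0;

	gpa = kvm_mmu_gva_to_gpa_read(vcpu, gva, NULL);

	r = kvm_mmu_unprotect_page(vcpu->kvm, gpa >> PAGE_SHIFT);

	return r;
}
```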