Searched refs:gva_to_gpa (Results 1 – 7 of 7) sorted by relevance

/arch/mips/kvm/
emulate.c:993  run->mmio.phys_addr = kvm_mips_callbacks->gva_to_gpa( in kvm_mips_emulate_store()
1038 run->mmio.phys_addr = kvm_mips_callbacks->gva_to_gpa( in kvm_mips_emulate_store()
1068 run->mmio.phys_addr = kvm_mips_callbacks->gva_to_gpa( in kvm_mips_emulate_store()
1099 run->mmio.phys_addr = kvm_mips_callbacks->gva_to_gpa( in kvm_mips_emulate_store()
1146 run->mmio.phys_addr = kvm_mips_callbacks->gva_to_gpa( in kvm_mips_emulate_store()
1297 run->mmio.phys_addr = kvm_mips_callbacks->gva_to_gpa( in kvm_mips_emulate_load()
1332 run->mmio.phys_addr = kvm_mips_callbacks->gva_to_gpa( in kvm_mips_emulate_load()
1356 run->mmio.phys_addr = kvm_mips_callbacks->gva_to_gpa( in kvm_mips_emulate_load()
1381 run->mmio.phys_addr = kvm_mips_callbacks->gva_to_gpa( in kvm_mips_emulate_load()
1417 run->mmio.phys_addr = kvm_mips_callbacks->gva_to_gpa( in kvm_mips_emulate_load()
vz.c:3284  .gva_to_gpa = kvm_vz_gva_to_gpa_cb,
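
The emulate.c hits above all follow one pattern: a faulting guest load or store is turned into an MMIO exit, the guest virtual address is translated through the platform's gva_to_gpa callback (registered by vz.c as kvm_vz_gva_to_gpa_cb), and the result is recorded as run->mmio.phys_addr for userspace to service. A minimal standalone sketch of that step follows; the toy_* types and the KSEG0-style mask translation are stand-ins invented for illustration, not kernel code.

    #include <stdio.h>

    typedef unsigned long      gva_t;   /* stand-in guest virtual address type */
    typedef unsigned long long gpa_t;   /* stand-in guest physical address type */

    /* Hypothetical translation standing in for the registered callback:
     * strip the top bits of a KSEG0-style virtual address. */
    static gpa_t toy_gva_to_gpa(gva_t gva)
    {
        return (gpa_t)(gva & 0x1fffffffUL);
    }

    /* Stand-in for the kvm_mips_callbacks->gva_to_gpa hook used above. */
    static gpa_t (*gva_to_gpa)(gva_t gva) = toy_gva_to_gpa;

    /* Stand-in for the MMIO exit description inside struct kvm_run. */
    struct toy_run {
        struct { gpa_t phys_addr; } mmio;
    };

    /* Same shape as the emulate_store/load step: translate the faulting VA
     * and record it as the physical address of the MMIO access. */
    static void emulate_mmio(struct toy_run *run, gva_t badvaddr)
    {
        run->mmio.phys_addr = gva_to_gpa(badvaddr);
    }

    int main(void)
    {
        struct toy_run run = { { 0 } };
        emulate_mmio(&run, 0x80001000UL);
        printf("mmio.phys_addr = 0x%llx\n", run.mmio.phys_addr);
        return 0;
    }
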
/arch/x86/kvm/mmu/
mmu.c:4097  context->gva_to_gpa = nonpaging_gva_to_gpa; in nonpaging_init_context()
4656 context->gva_to_gpa = paging64_gva_to_gpa; in paging64_init_context()
4665 context->gva_to_gpa = paging32_gva_to_gpa; in paging32_init_context()
4765 context->gva_to_gpa = nonpaging_gva_to_gpa; in init_kvm_tdp_mmu()
4767 context->gva_to_gpa = paging64_gva_to_gpa; in init_kvm_tdp_mmu()
4769 context->gva_to_gpa = paging32_gva_to_gpa; in init_kvm_tdp_mmu()
4916 context->gva_to_gpa = ept_gva_to_gpa; in kvm_init_shadow_ept_mmu()
4988 g_context->gva_to_gpa = nonpaging_gva_to_gpa_nested; in init_kvm_nested_mmu()
4990 g_context->gva_to_gpa = paging64_gva_to_gpa_nested; in init_kvm_nested_mmu()
4992 g_context->gva_to_gpa = paging64_gva_to_gpa_nested; in init_kvm_nested_mmu()
[all …]
paging_tmpl.h:1017  static gpa_t FNAME(gva_to_gpa)(struct kvm_vcpu *vcpu, gpa_t addr, u32 access, in FNAME() argument
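
The mmu.c hits show how x86 picks the translator: each MMU context is handed a gva_to_gpa implementation matching its current paging mode (nonpaging, 32-bit, 64-bit, EPT, or a nested variant), with the per-format walkers generated from paging_tmpl.h, so callers can stay mode-agnostic. Below is a compact standalone sketch of that selection pattern; the toy_* types, the mode enum, and the placeholder handlers are assumptions made for illustration only.

    #include <stdio.h>

    typedef unsigned long long gpa_t;

    struct toy_mmu {
        /* simplified stand-in for the context->gva_to_gpa hook set above */
        gpa_t (*gva_to_gpa)(gpa_t gva);
    };

    /* Placeholder handlers; the real ones walk the guest page tables for the
     * corresponding paging format. */
    static gpa_t nonpaging_walk(gpa_t gva) { return gva; }  /* no paging: GVA == GPA */
    static gpa_t paging32_walk(gpa_t gva)  { return gva + 0x1000; }
    static gpa_t paging64_walk(gpa_t gva)  { return gva + 0x2000; }

    enum toy_mode { TOY_NONPAGING, TOY_PAGING32, TOY_PAGING64 };

    /* Mirrors the shape of nonpaging_init_context()/paging*_init_context():
     * pick the translator that matches the guest's paging mode. */
    static void toy_init_context(struct toy_mmu *context, enum toy_mode mode)
    {
        switch (mode) {
        case TOY_NONPAGING: context->gva_to_gpa = nonpaging_walk; break;
        case TOY_PAGING32:  context->gva_to_gpa = paging32_walk;  break;
        case TOY_PAGING64:  context->gva_to_gpa = paging64_walk;  break;
        }
    }

    int main(void)
    {
        struct toy_mmu mmu;
        toy_init_context(&mmu, TOY_PAGING64);
        printf("gpa = 0x%llx\n", mmu.gva_to_gpa(0x7000));
        return 0;
    }
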
/arch/mips/include/asm/
kvm_host.h:739  gpa_t (*gva_to_gpa)(gva_t gva); member
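
This is the MIPS side of the hook: a single-argument, pure address computation with no vcpu context and no fault reporting. The sketch below shows how such a member is declared and then registered the way vz.c:3284 does with kvm_vz_gva_to_gpa_cb; the identity-style body is only an assumption about what a VZ-style callback might do, since the real implementation does not appear in these results.

    typedef unsigned long      gva_t;
    typedef unsigned long long gpa_t;

    /* Same shape as the member at kvm_host.h:739. */
    struct toy_mips_callbacks {
        gpa_t (*gva_to_gpa)(gva_t gva);
    };

    /* Assumed behaviour only: treat the faulting address as already physical,
     * which is plausible for a VZ-style callback but not confirmed by these hits. */
    static gpa_t vz_style_gva_to_gpa(gva_t gva)
    {
        return (gpa_t)gva;
    }

    /* Registration mirrors the designated initializer seen at vz.c:3284. */
    static const struct toy_mips_callbacks toy_callbacks = {
        .gva_to_gpa = vz_style_gva_to_gpa,
    };

    int main(void)
    {
        return toy_callbacks.gva_to_gpa(0x1000) == 0x1000 ? 0 : 1;
    }
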
/arch/x86/kvm/
x86.c:6580  t_gpa = vcpu->arch.mmu->gva_to_gpa(vcpu, gpa, access, exception); in translate_nested_gpa()
6589 return vcpu->arch.walk_mmu->gva_to_gpa(vcpu, gva, access, exception); in kvm_mmu_gva_to_gpa_read()
6598 return vcpu->arch.walk_mmu->gva_to_gpa(vcpu, gva, access, exception); in kvm_mmu_gva_to_gpa_fetch()
6606 return vcpu->arch.walk_mmu->gva_to_gpa(vcpu, gva, access, exception); in kvm_mmu_gva_to_gpa_write()
6614 return vcpu->arch.walk_mmu->gva_to_gpa(vcpu, gva, 0, exception); in kvm_mmu_gva_to_gpa_system()
6625 gpa_t gpa = vcpu->arch.walk_mmu->gva_to_gpa(vcpu, addr, access, in kvm_read_guest_virt_helper()
6659 gpa_t gpa = vcpu->arch.walk_mmu->gva_to_gpa(vcpu, addr, access|PFERR_FETCH_MASK, in kvm_fetch_guest_virt()
6723 gpa_t gpa = vcpu->arch.walk_mmu->gva_to_gpa(vcpu, addr, in kvm_write_guest_virt_helper()
6829 *gpa = vcpu->arch.walk_mmu->gva_to_gpa(vcpu, gva, access, exception); in vcpu_mmio_gva_to_gpa()
12513 vcpu->arch.walk_mmu->gva_to_gpa(vcpu, gva, access, &fault) != UNMAPPED_GVA) { in kvm_fixup_and_inject_pf_error()
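
The x86.c hits form a family of thin wrappers: kvm_mmu_gva_to_gpa_read/fetch/write/system all end up in walk_mmu->gva_to_gpa and differ only in the page-fault error-code bits passed as access (the fetch path visibly ORs in PFERR_FETCH_MASK at x86.c:6659, the system variant passes 0, and an unmapped result compares against UNMAPPED_GVA at x86.c:12513), while only translate_nested_gpa at x86.c:6580 goes through vcpu->arch.mmu instead of walk_mmu. Below is a simplified standalone sketch of that wrapper pattern; the TOY_* constants are stand-ins, the vcpu and exception parameters are dropped for brevity, and the idea that the write wrapper adds a corresponding PFERR_* bit is an inference, since only the fetch mask is visible in these hits.

    #include <stdio.h>

    typedef unsigned long long gpa_t;
    #define TOY_UNMAPPED_GVA (~(gpa_t)0)   /* stand-in for UNMAPPED_GVA */

    /* Stand-ins mirroring x86 page-fault error-code bits (values assumed here). */
    #define TOY_PFERR_WRITE (1u << 1)
    #define TOY_PFERR_USER  (1u << 2)
    #define TOY_PFERR_FETCH (1u << 4)

    struct toy_mmu {
        gpa_t (*gva_to_gpa)(gpa_t gva, unsigned int access);
    };

    /* Toy walker: pretend everything below 1 MiB is identity mapped. */
    static gpa_t toy_walk(gpa_t gva, unsigned int access)
    {
        (void)access;
        return gva < 0x100000 ? gva : TOY_UNMAPPED_GVA;
    }

    static struct toy_mmu walk_mmu = { .gva_to_gpa = toy_walk };

    /* Wrappers in the shape of kvm_mmu_gva_to_gpa_{read,write,fetch,system}(). */
    static gpa_t gva_to_gpa_read(gpa_t gva, unsigned int access)
    {
        return walk_mmu.gva_to_gpa(gva, access);
    }
    static gpa_t gva_to_gpa_write(gpa_t gva, unsigned int access)
    {
        return walk_mmu.gva_to_gpa(gva, access | TOY_PFERR_WRITE);
    }
    static gpa_t gva_to_gpa_fetch(gpa_t gva, unsigned int access)
    {
        return walk_mmu.gva_to_gpa(gva, access | TOY_PFERR_FETCH);
    }
    static gpa_t gva_to_gpa_system(gpa_t gva)
    {
        return walk_mmu.gva_to_gpa(gva, 0);   /* system accesses pass access = 0 */
    }

    int main(void)
    {
        printf("read : 0x%llx\n", gva_to_gpa_read(0x3000, TOY_PFERR_USER));
        printf("fetch: 0x%llx\n", gva_to_gpa_fetch(0x3000, TOY_PFERR_USER));
        printf("sys  : 0x%llx\n", gva_to_gpa_system(0x3000));
        printf("unmapped: %s\n",
               gva_to_gpa_write(0x200000, 0) == TOY_UNMAPPED_GVA ? "yes" : "no");
        return 0;
    }
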
/arch/x86/include/asm/
kvm_host.h:424  gpa_t (*gva_to_gpa)(struct kvm_vcpu *vcpu, gpa_t gva_or_gpa, member
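
The declaration here is truncated after gva_or_gpa, but the four-argument call sites in x86.c and the definition at paging_tmpl.h:1017 suggest its overall shape: vcpu, address, access bits, and an exception out-parameter, in contrast to the one-argument MIPS hook above. The sketch below reconstructs that shape with local stand-in types; the trailing parameters are inferred from those hits rather than quoted from the header, so treat them as an assumption.

    #include <stddef.h>

    typedef unsigned long long gpa_t;
    typedef unsigned int u32;

    struct toy_vcpu;                           /* stand-in for struct kvm_vcpu */
    struct toy_exception { u32 error_code; };  /* stand-in for struct x86_exception */

    struct toy_mmu {
        /* Reconstructed shape: per-vCPU hook that takes access bits and reports
         * a fault through *exception. */
        gpa_t (*gva_to_gpa)(struct toy_vcpu *vcpu, gpa_t gva_or_gpa,
                            u32 access, struct toy_exception *exception);
    };

    /* Placeholder walker: returns the address unchanged, no real page-table walk. */
    static gpa_t dummy_walk(struct toy_vcpu *vcpu, gpa_t gva_or_gpa,
                            u32 access, struct toy_exception *exception)
    {
        (void)vcpu; (void)access; (void)exception;
        return gva_or_gpa;
    }

    int main(void)
    {
        struct toy_mmu mmu = { .gva_to_gpa = dummy_walk };
        struct toy_exception ex = { 0 };
        return mmu.gva_to_gpa(NULL, 0x1000, 0, &ex) == 0x1000 ? 0 : 1;
    }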