
Searched refs:walk_mmu (Results 1 – 8 of 8) sorted by relevance

/arch/x86/kvm/
kvm_cache_regs.h:45 return vcpu->arch.walk_mmu->pdptrs[index]; in kvm_pdptr_read()
x86.h:62 return vcpu->arch.walk_mmu == &vcpu->arch.nested_mmu; in mmu_is_nested()
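
The x86.h hit above is the canonical check for whether a nested walk is active. A minimal sketch of the surrounding helper, with the body taken from the hit and the signature assumed from common KVM usage:

    /* Sketch around the x86.h:62 hit (signature assumed): the vCPU is walking
     * guest page tables through nested_mmu only when the nested EPT/NPT setup
     * paths shown further down have redirected walk_mmu to it. */
    static inline bool mmu_is_nested(struct kvm_vcpu *vcpu)
    {
            return vcpu->arch.walk_mmu == &vcpu->arch.nested_mmu;
    }
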
x86.c:485 return kvm_read_guest_page_mmu(vcpu, vcpu->arch.walk_mmu, gfn, in kvm_read_nested_guest_page()
529 u64 pdpte[ARRAY_SIZE(vcpu->arch.walk_mmu->pdptrs)]; in pdptrs_changed()
548 changed = memcmp(pdpte, vcpu->arch.walk_mmu->pdptrs, sizeof(pdpte)) != 0; in pdptrs_changed()
587 if (is_pae(vcpu) && !load_pdptrs(vcpu, vcpu->arch.walk_mmu, in kvm_set_cr0()
703 && !load_pdptrs(vcpu, vcpu->arch.walk_mmu, in kvm_set_cr4()
742 !load_pdptrs(vcpu, vcpu->arch.walk_mmu, cr3)) in kvm_set_cr3()
4190 return vcpu->arch.walk_mmu->gva_to_gpa(vcpu, gva, access, exception); in kvm_mmu_gva_to_gpa_read()
4198 return vcpu->arch.walk_mmu->gva_to_gpa(vcpu, gva, access, exception); in kvm_mmu_gva_to_gpa_fetch()
4206 return vcpu->arch.walk_mmu->gva_to_gpa(vcpu, gva, access, exception); in kvm_mmu_gva_to_gpa_write()
4213 return vcpu->arch.walk_mmu->gva_to_gpa(vcpu, gva, 0, exception); in kvm_mmu_gva_to_gpa_system()
[all …]
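
The x86.c hits fall into two patterns: reloading the cached PDPTEs through walk_mmu whenever CR0, CR3 or CR4 changes under PAE paging, and routing every guest-virtual to guest-physical translation through walk_mmu->gva_to_gpa. A minimal sketch of the second pattern, using a hypothetical helper name and signature (only the return statement comes from the listing):

    /* translate_guest_va() is a hypothetical wrapper, not a function from x86.c;
     * it shows the delegation seen in the x86.c:4190-4213 hits. Whichever
     * context walk_mmu points at (mmu, or nested_mmu while nested) performs the
     * actual page-table walk and reports faults through *exception. */
    static gpa_t translate_guest_va(struct kvm_vcpu *vcpu, gva_t gva, u32 access,
                                    struct x86_exception *exception)
    {
            return vcpu->arch.walk_mmu->gva_to_gpa(vcpu, gva, access, exception);
    }
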
mmu.c:3722 struct kvm_mmu *context = vcpu->arch.walk_mmu; in init_kvm_tdp_mmu()
3813 kvm_init_shadow_mmu(vcpu, vcpu->arch.walk_mmu); in init_kvm_softmmu()
3814 vcpu->arch.walk_mmu->set_cr3 = kvm_x86_ops->set_cr3; in init_kvm_softmmu()
3815 vcpu->arch.walk_mmu->get_cr3 = get_cr3; in init_kvm_softmmu()
3816 vcpu->arch.walk_mmu->get_pdptr = kvm_pdptr_read; in init_kvm_softmmu()
3817 vcpu->arch.walk_mmu->inject_page_fault = kvm_inject_page_fault; in init_kvm_softmmu()
4255 vcpu->arch.walk_mmu = &vcpu->arch.mmu; in kvm_mmu_create()
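
The mmu.c hits show where walk_mmu gets its default target and its callbacks. A sketch of init_kvm_softmmu() assembled from the mmu.c:3813-3817 lines (the signature and anything not listed are assumptions):

    /* Sketch from the mmu.c:3813-3817 hits: the shadow MMU is initialised on
     * the context walk_mmu points at, and the CR3/PDPTR/page-fault hooks are
     * wired onto that same context. */
    static void init_kvm_softmmu(struct kvm_vcpu *vcpu)
    {
            kvm_init_shadow_mmu(vcpu, vcpu->arch.walk_mmu);
            vcpu->arch.walk_mmu->set_cr3           = kvm_x86_ops->set_cr3;
            vcpu->arch.walk_mmu->get_cr3           = get_cr3;
            vcpu->arch.walk_mmu->get_pdptr         = kvm_pdptr_read;
            vcpu->arch.walk_mmu->inject_page_fault = kvm_inject_page_fault;
    }

Per the mmu.c:4255 hit, kvm_mmu_create() starts every vCPU with walk_mmu pointing at the ordinary &vcpu->arch.mmu; only the nested EPT/NPT paths in vmx.c and svm.c below redirect it.
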
paging_tmpl.h:439 walker->fault.nested_page_fault = mmu != vcpu->arch.walk_mmu; in FNAME()
vmx.c:3425 struct kvm_mmu *mmu = vcpu->arch.walk_mmu; in ept_load_pdptrs()
3441 struct kvm_mmu *mmu = vcpu->arch.walk_mmu; in ept_save_pdptrs()
8025 vcpu->arch.walk_mmu = &vcpu->arch.nested_mmu; in nested_ept_init_mmu_context()
8030 vcpu->arch.walk_mmu = &vcpu->arch.mmu; in nested_ept_uninit_mmu_context()
8392 vcpu->arch.walk_mmu->inject_page_fault = vmx_inject_page_fault_nested; in prepare_vmcs02()
8929 vcpu->arch.walk_mmu->inject_page_fault = kvm_inject_page_fault; in load_vmcs12_host_state()
svm.c:1365 load_pdptrs(vcpu, vcpu->arch.walk_mmu, kvm_read_cr3(vcpu)); in svm_cache_reg()
2022 vcpu->arch.walk_mmu = &vcpu->arch.nested_mmu; in nested_svm_init_mmu_context()
2027 vcpu->arch.walk_mmu = &vcpu->arch.mmu; in nested_svm_uninit_mmu_context()
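
The vmx.c:8025/8030 and svm.c:2022/2027 hits flip the same pointer when entering and leaving nested paging mode (the vmx.c:8392/8929 hits additionally swap the inject_page_fault callback around VM-entry and exit). A reduced sketch of that swap, using hypothetical function names and omitting everything else the real init/uninit helpers do:

    /* Hypothetical names; the two assignments are the ones listed above. While
     * nested, guest page-table walks and gva->gpa translation go through
     * nested_mmu; on uninit they fall back to the regular mmu. */
    static void nested_walk_mmu_enter(struct kvm_vcpu *vcpu)
    {
            vcpu->arch.walk_mmu = &vcpu->arch.nested_mmu;
    }

    static void nested_walk_mmu_leave(struct kvm_vcpu *vcpu)
    {
            vcpu->arch.walk_mmu = &vcpu->arch.mmu;
    }
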
/arch/x86/include/asm/
kvm_host.h:389 struct kvm_mmu *walk_mmu; member
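
For context, the declaration sits in struct kvm_vcpu_arch next to the two contexts it can point at. This is a heavily trimmed fragment, not the real layout: only walk_mmu comes from the hit, mmu and nested_mmu are inferred from the &vcpu->arch.mmu and &vcpu->arch.nested_mmu expressions in the results above, and every other field is omitted:

    struct kvm_vcpu_arch {
            /* ... many unrelated fields omitted ... */
            struct kvm_mmu mmu;          /* regular guest paging context          */
            struct kvm_mmu nested_mmu;   /* L2 walker used under nested EPT/NPT   */
            struct kvm_mmu *walk_mmu;    /* context currently used for guest page */
                                         /* table walks and gva->gpa translation  */
    };
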