
Searched refs: efer (Results 1 – 19 of 19) sorted by relevance

/arch/x86/include/asm/
virtext.h
119 uint64_t efer; in cpu_svm_disable() local
122 rdmsrl(MSR_EFER, efer); in cpu_svm_disable()
123 if (efer & EFER_SVME) { in cpu_svm_disable()
136 wrmsrl(MSR_EFER, efer & ~EFER_SVME); in cpu_svm_disable()
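The virtext.h hits above are the read-modify-write that turns SVM off on the current CPU: read EFER, and only if SVME is set, write the value back with that bit cleared. Below is a compilable user-space model of the same pattern; the MSR is replaced by a plain variable purely for illustration (rdmsrl()/wrmsrl() are the real kernel accessors for MSR_EFER, 0xC0000080).

    #include <stdint.h>
    #include <stdio.h>

    #define EFER_SVME (1ULL << 12)          /* architectural SVM-enable bit */

    static uint64_t fake_efer = EFER_SVME;  /* stands in for the real MSR */

    static void cpu_svm_disable_sketch(void)
    {
            uint64_t efer = fake_efer;          /* rdmsrl(MSR_EFER, efer)  */

            if (efer & EFER_SVME)               /* skip a pointless WRMSR  */
                    fake_efer = efer & ~EFER_SVME;  /* wrmsrl(MSR_EFER, ...) */
    }

    int main(void)
    {
            cpu_svm_disable_sketch();
            printf("SVME after disable: %d\n", !!(fake_efer & EFER_SVME));
            return 0;
    }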
suspend_64.h
44 unsigned long efer; member
realmode.h
51 u64 efer;
svm.h
244 u64 efer; member
kvm_host.h
634 u64 efer; member
1341 int (*set_efer)(struct kvm_vcpu *vcpu, u64 efer);
1679 bool kvm_valid_efer(struct kvm_vcpu *vcpu, u64 efer);
/arch/x86/realmode/
init.c
100 u64 efer; in setup_real_mode() local
148 rdmsrl(MSR_EFER, efer); in setup_real_mode()
149 trampoline_header->efer = efer & ~EFER_LMA; in setup_real_mode()
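realmode/init.c copies the kernel's EFER into the trampoline header but masks off LMA: a freshly started application processor comes up outside long mode, and the CPU raises LMA itself once paging is enabled with LME already programmed. A sketch of just that masking step; the helper name here is invented, only trampoline_header->efer is real.

    #include <stdint.h>

    #define EFER_LMA (1ULL << 10)   /* "long mode active", set by hardware */

    /* Hypothetical helper: the value stashed in trampoline_header->efer. */
    static uint64_t trampoline_efer_sketch(uint64_t host_efer)
    {
            /* Keep LME, NX, etc. from the host, but never pre-set LMA;
             * the trampoline enables paging itself and hardware raises
             * LMA at that point. */
            return host_efer & ~EFER_LMA;
    }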
/arch/x86/kvm/svm/
nested.c
108 svm->vmcb01.ptr->save.efer, in nested_svm_init_mmu_context()
271 if ((save->efer & EFER_LME) && (save->cr0 & X86_CR0_PG)) { in nested_vmcb_check_cr3_cr4()
296 if (CC(!(save->efer & EFER_SVME))) in nested_vmcb_valid_sregs()
309 if (CC(!kvm_valid_efer(vcpu, save->efer))) in nested_vmcb_valid_sregs()
484 svm_set_efer(&svm->vcpu, vmcb12->save.efer | EFER_SVME); in nested_vmcb02_prepare_save()
676 svm->vmcb01.ptr->save.efer = vcpu->arch.efer; in nested_svm_vmrun()
720 to_save->efer = from_save->efer; in svm_copy_vmrun_state()
781 vmcb12->save.efer = svm->vcpu.arch.efer; in nested_svm_vmexit()
839 svm_set_efer(vcpu, svm->vmcb->save.efer); in nested_svm_vmexit()
1070 if (!(vcpu->arch.efer & EFER_SVME) || !is_paging(vcpu)) { in nested_svm_check_permissions()
[all …]
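The svm/nested.c hits show the consistency checks run on a vmcb12 before a nested VMRUN: the L2 save area must have EFER.SVME set, the EFER value must pass kvm_valid_efer(), and the LME-plus-CR0.PG combination triggers extra CR3/CR4 checks. A reduced model of those gates; the struct and function names are illustrative, not the kernel's.

    #include <stdbool.h>
    #include <stdint.h>

    #define EFER_LME   (1ULL << 8)
    #define EFER_SVME  (1ULL << 12)
    #define X86_CR0_PG (1UL << 31)

    struct vmcb_save_sketch {       /* reduced view of vmcb12->save */
            uint64_t efer;
            uint64_t cr0;
    };

    static bool nested_save_plausible(const struct vmcb_save_sketch *save)
    {
            /* VMRUN with SVME clear in the nested state is invalid. */
            if (!(save->efer & EFER_SVME))
                    return false;

            /* Long mode plus paging requires the checks the real
             * nested_vmcb_check_cr3_cr4() performs. */
            if ((save->efer & EFER_LME) && (save->cr0 & X86_CR0_PG)) {
                    /* ... canonical-CR3 / reserved-CR4-bit checks ... */
            }
            return true;
    }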
svm.c
268 int svm_set_efer(struct kvm_vcpu *vcpu, u64 efer) in svm_set_efer() argument
271 u64 old_efer = vcpu->arch.efer; in svm_set_efer()
272 vcpu->arch.efer = efer; in svm_set_efer()
276 efer |= EFER_NX; in svm_set_efer()
278 if (!(efer & EFER_LMA)) in svm_set_efer()
279 efer &= ~EFER_LME; in svm_set_efer()
282 if ((old_efer & EFER_SVME) != (efer & EFER_SVME)) { in svm_set_efer()
283 if (!(efer & EFER_SVME)) { in svm_set_efer()
302 vcpu->arch.efer = old_efer; in svm_set_efer()
315 svm->vmcb->save.efer = efer | EFER_SVME; in svm_set_efer()
[all …]
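svm_set_efer() keeps two values apart: the guest-visible EFER in vcpu->arch.efer, and the possibly adjusted value programmed into the VMCB. Without NPT (i.e. under shadow paging) NX is forced on and LME is kept consistent with LMA, and the hardware value always carries SVME, since the host needs SVM enabled to run the guest at all. A sketch of that final computation; the function name is made up, the bit logic mirrors the hits above.

    #include <stdbool.h>
    #include <stdint.h>

    #define EFER_LME  (1ULL << 8)
    #define EFER_LMA  (1ULL << 10)
    #define EFER_NX   (1ULL << 11)
    #define EFER_SVME (1ULL << 12)

    static uint64_t vmcb_efer_sketch(uint64_t guest_efer, bool npt_enabled)
    {
            uint64_t efer = guest_efer;

            if (!npt_enabled) {
                    efer |= EFER_NX;            /* shadow paging assumes NX */
                    if (!(efer & EFER_LMA))
                            efer &= ~EFER_LME;  /* no stale LME outside LMA */
            }
            return efer | EFER_SVME;            /* hardware keeps SVM on */
    }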
svm.h
423 int svm_set_efer(struct kvm_vcpu *vcpu, u64 efer);
/arch/x86/power/
cpu.c
116 rdmsrl(MSR_EFER, ctxt->efer); in __save_processor_state()
209 wrmsrl(MSR_EFER, ctxt->efer); in __restore_processor_state()
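The power-management pair is the simplest consumer: on suspend the whole 64-bit EFER (MSR 0xC0000080) is stashed in the saved-context struct, and on resume it is written back verbatim. Modeled with a plain value standing in for the MSR and a reduced struct:

    #include <stdint.h>

    struct saved_context_sketch {    /* reduced struct saved_context */
            uint64_t efer;
    };

    static void save_processor_efer(struct saved_context_sketch *ctxt,
                                    uint64_t hw_efer)
    {
            ctxt->efer = hw_efer;    /* rdmsrl(MSR_EFER, ctxt->efer) */
    }

    static uint64_t restore_processor_efer(const struct saved_context_sketch *c)
    {
            return c->efer;          /* wrmsrl(MSR_EFER, ctxt->efer) */
    }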
/arch/x86/include/uapi/asm/
kvm.h
157 __u64 efer; member
168 __u64 efer; member
/arch/x86/kvm/
emulate.c
816 u64 efer; in emulator_recalc_and_set_mode() local
821 ctxt->ops->get_msr(ctxt, MSR_EFER, &efer); in emulator_recalc_and_set_mode()
825 if (efer & EFER_LMA) in emulator_recalc_and_set_mode()
833 if (efer & EFER_LMA) in emulator_recalc_and_set_mode()
842 if (efer & EFER_LMA) { in emulator_recalc_and_set_mode()
1542 u64 efer = 0; in get_descriptor_ptr() local
1544 ctxt->ops->get_msr(ctxt, MSR_EFER, &efer); in get_descriptor_ptr()
1545 if (!(efer & EFER_LMA)) in get_descriptor_ptr()
1688 u64 efer = 0; in __load_segment_descriptor() local
1690 ctxt->ops->get_msr(ctxt, MSR_EFER, &efer); in __load_segment_descriptor()
[all …]
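emulate.c derives the emulator's execution mode from EFER rather than trusting cached state: with EFER.LMA set, CS.L selects 64-bit mode and CS.D picks 16- vs 32-bit for compatibility segments; get_descriptor_ptr() likewise uses LMA to choose between legacy and long-mode descriptor-table layouts. The mode selection, condensed (the real function also handles real mode and paging; the enum and parameter names are illustrative):

    #include <stdbool.h>
    #include <stdint.h>

    #define EFER_LMA (1ULL << 10)

    enum emul_mode { EMUL_MODE16, EMUL_MODE32, EMUL_MODE64 };

    static enum emul_mode calc_emul_mode(uint64_t efer, bool cs_l, bool cs_d)
    {
            if (efer & EFER_LMA)
                    /* Long mode: CS.L picks 64-bit, else a compatibility
                     * segment whose default size comes from CS.D. */
                    return cs_l ? EMUL_MODE64
                                : (cs_d ? EMUL_MODE32 : EMUL_MODE16);

            /* Legacy protected mode: only CS.D matters. */
            return cs_d ? EMUL_MODE32 : EMUL_MODE16;
    }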
mmu.h
74 unsigned long cr4, u64 efer, gpa_t nested_cr3);
x86.c
926 if ((vcpu->arch.efer & EFER_LME) && !is_paging(vcpu) && in kvm_set_cr0()
937 if (!(vcpu->arch.efer & EFER_LME) && (cr0 & X86_CR0_PG) && in kvm_set_cr0()
1602 static bool __kvm_valid_efer(struct kvm_vcpu *vcpu, u64 efer) in __kvm_valid_efer() argument
1604 if (efer & EFER_FFXSR && !guest_cpuid_has(vcpu, X86_FEATURE_FXSR_OPT)) in __kvm_valid_efer()
1607 if (efer & EFER_SVME && !guest_cpuid_has(vcpu, X86_FEATURE_SVM)) in __kvm_valid_efer()
1610 if (efer & (EFER_LME | EFER_LMA) && in __kvm_valid_efer()
1614 if (efer & EFER_NX && !guest_cpuid_has(vcpu, X86_FEATURE_NX)) in __kvm_valid_efer()
1620 bool kvm_valid_efer(struct kvm_vcpu *vcpu, u64 efer) in kvm_valid_efer() argument
1622 if (efer & efer_reserved_bits) in kvm_valid_efer()
1625 return __kvm_valid_efer(vcpu, efer); in kvm_valid_efer()
[all …]
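x86.c is where a guest-requested EFER is vetted: kvm_valid_efer() first rejects reserved bits, then __kvm_valid_efer() demands a matching guest CPUID feature for each capability bit (FFXSR, SVME, LME/LMA, NX). The same shape, with the guest_cpuid_has() lookups replaced by booleans passed in:

    #include <stdbool.h>
    #include <stdint.h>

    #define EFER_LME   (1ULL << 8)
    #define EFER_LMA   (1ULL << 10)
    #define EFER_NX    (1ULL << 11)
    #define EFER_SVME  (1ULL << 12)
    #define EFER_FFXSR (1ULL << 14)

    struct guest_feats {            /* stand-in for guest_cpuid_has() */
            bool fxsr_opt, svm, long_mode, nx;
    };

    static bool efer_valid_sketch(uint64_t efer, uint64_t reserved_bits,
                                  const struct guest_feats *f)
    {
            if (efer & reserved_bits)
                    return false;
            if ((efer & EFER_FFXSR) && !f->fxsr_opt)
                    return false;
            if ((efer & EFER_SVME) && !f->svm)
                    return false;
            if ((efer & (EFER_LME | EFER_LMA)) && !f->long_mode)
                    return false;
            if ((efer & EFER_NX) && !f->nx)
                    return false;
            return true;
    }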
x86.h
147 return vcpu->arch.efer & EFER_LMA; in is_long_mode()
/arch/x86/kvm/vmx/
vmx.c
993 u64 guest_efer = vmx->vcpu.arch.efer; in update_transition_efer()
1018 (enable_ept && ((vmx->vcpu.arch.efer ^ host_efer) & EFER_NX))) { in update_transition_efer()
1750 (vmx->vcpu.arch.efer & EFER_SCE); in vmx_setup_uret_msrs()
2947 int vmx_set_efer(struct kvm_vcpu *vcpu, u64 efer) in vmx_set_efer() argument
2956 vcpu->arch.efer = efer; in vmx_set_efer()
2957 if (efer & EFER_LMA) { in vmx_set_efer()
2959 msr->data = efer; in vmx_set_efer()
2963 msr->data = efer & ~EFER_LME; in vmx_set_efer()
2985 vmx_set_efer(vcpu, vcpu->arch.efer | EFER_LMA); in enter_lmode()
2991 vmx_set_efer(vcpu, vcpu->arch.efer & ~EFER_LMA); in exit_lmode()
[all …]
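On the VMX side, vmx_set_efer() both toggles the "IA-32e mode guest" VM-entry control and picks the EFER value handed to hardware through the user-return MSR slot; enter_lmode()/exit_lmode() just flip LMA through the same path. One detail visible in the hits: while the guest is outside long mode, LME is masked out of the loaded value. Condensed, with an invented struct holding the two outputs:

    #include <stdbool.h>
    #include <stdint.h>

    #define EFER_LME (1ULL << 8)
    #define EFER_LMA (1ULL << 10)

    struct vmx_efer_sketch {
            bool vm_entry_ia32e;     /* "IA-32e mode guest" entry control */
            uint64_t uret_msr_data;  /* EFER value loaded for the guest   */
    };

    static void vmx_set_efer_sketch(struct vmx_efer_sketch *v, uint64_t efer)
    {
            if (efer & EFER_LMA) {
                    v->vm_entry_ia32e = true;   /* enter guest in long mode */
                    v->uret_msr_data = efer;
            } else {
                    v->vm_entry_ia32e = false;
                    v->uret_msr_data = efer & ~EFER_LME;  /* mask stale LME */
            }
    }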
vmx.h
379 int vmx_set_efer(struct kvm_vcpu *vcpu, u64 efer);
nested.c
2124 return vmx->vcpu.arch.efer | (EFER_LMA | EFER_LME); in nested_vmx_calc_efer()
2126 return vmx->vcpu.arch.efer & ~(EFER_LMA | EFER_LME); in nested_vmx_calc_efer()
2572 vcpu->arch.efer = nested_vmx_calc_efer(vmx, vmcs12); in prepare_vmcs02()
2574 vmx_set_efer(vcpu, vcpu->arch.efer); in prepare_vmcs02()
2870 !!(vcpu->arch.efer & EFER_LMA))) in nested_vmx_check_address_space_size()
4215 vmcs12->guest_ia32_efer = vcpu->arch.efer; in sync_vmcs02_to_vmcs12()
4292 vcpu->arch.efer = vmcs12->host_ia32_efer; in load_vmcs12_host_state()
4294 vcpu->arch.efer |= (EFER_LMA | EFER_LME); in load_vmcs12_host_state()
4296 vcpu->arch.efer &= ~(EFER_LMA | EFER_LME); in load_vmcs12_host_state()
4297 vmx_set_efer(vcpu, vcpu->arch.efer); in load_vmcs12_host_state()
/arch/x86/kvm/mmu/
mmu.c
195 const u64 efer; member
219 BUILD_MMU_ROLE_REGS_ACCESSOR(efer, nx, EFER_NX);
220 BUILD_MMU_ROLE_REGS_ACCESSOR(efer, lma, EFER_LMA);
241 BUILD_MMU_ROLE_ACCESSOR(base, efer, nx);
248 .efer = vcpu->arch.efer, in vcpu_to_role_regs()
4866 unsigned long cr4, u64 efer, gpa_t nested_cr3) in kvm_init_shadow_npt_mmu() argument
4872 .efer = efer, in kvm_init_shadow_npt_mmu()
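mmu.c snapshots EFER (alongside CR0/CR4) into a role-regs struct and then generates per-bit predicates such as is_efer_nx()/is_efer_lma() via the BUILD_MMU_ROLE_REGS_ACCESSOR macro, so the page-table code queries the snapshot rather than the live vCPU. A standalone imitation of the macro pattern, with simplified names:

    #include <stdbool.h>
    #include <stdint.h>

    struct role_regs_sketch {       /* reduced kvm_mmu_role_regs */
            uint64_t cr0, cr4, efer;
    };

    /* Expands to one predicate per (register, name, bit) triple,
     * e.g. is_efer_nx(&regs). */
    #define BUILD_ROLE_REGS_ACCESSOR(reg, name, flag)                       \
    static inline bool is_##reg##_##name(const struct role_regs_sketch *r)  \
    {                                                                       \
            return !!(r->reg & (flag));                                     \
    }

    BUILD_ROLE_REGS_ACCESSOR(efer, nx,  1ULL << 11)   /* EFER_NX  */
    BUILD_ROLE_REGS_ACCESSOR(efer, lma, 1ULL << 10)   /* EFER_LMA */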