Searched refs:EFER_LME (Results 1 – 5 of 5) sorted by relevance
32 #define EFER_LME (1<<_EFER_LME) macro
2063 ignore_bits |= EFER_LMA | EFER_LME; in update_transition_efer()
2079 guest_efer &= ~EFER_LME; in update_transition_efer()
3914 msr->data = efer & ~EFER_LME; in vmx_set_efer()
4072 if (vcpu->arch.efer & EFER_LME) { in vmx_set_cr0()
10208 vcpu->arch.efer |= (EFER_LMA | EFER_LME); in prepare_vmcs02()
10210 vcpu->arch.efer &= ~(EFER_LMA | EFER_LME); in prepare_vmcs02()
10367 ia32e != !!(vmcs12->guest_ia32_efer & EFER_LME))) { in nested_vmx_run()
10385 ia32e != !!(vmcs12->host_ia32_efer & EFER_LME)) { in nested_vmx_run()
10747 vcpu->arch.efer |= (EFER_LMA | EFER_LME); in load_vmcs12_host_state()
10749 vcpu->arch.efer &= ~(EFER_LMA | EFER_LME); in load_vmcs12_host_state()
585 efer &= ~EFER_LME; in svm_set_efer()
1968 if (vcpu->arch.efer & EFER_LME) { in svm_set_cr0()
1971 svm->vmcb->save.efer |= EFER_LMA | EFER_LME; in svm_set_cr0()
1976 svm->vmcb->save.efer &= ~(EFER_LMA | EFER_LME); in svm_set_cr0()
86 u64 __read_mostly efer_reserved_bits = ~((u64)(EFER_SCE | EFER_LME | EFER_LMA));
625 if ((vcpu->arch.efer & EFER_LME)) { in kvm_set_cr0()
1042 && (vcpu->arch.efer & EFER_LME) != (efer & EFER_LME)) in set_efer()
4106 if ((new_val & X86_CR0_PG) && (efer & EFER_LME) && in check_cr_write()