Searched refs: EFER_LMA (Results 1 – 6 of 6) sorted by relevance
44 return vcpu->arch.efer & EFER_LMA; in is_long_mode()
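The single hit above is KVM's long-mode predicate. A minimal sketch of that helper, assuming the usual layout (only the return line is confirmed by the hit; the CONFIG_X86_64 fallback is an assumption):

/* Long mode is active exactly when the cached guest EFER has LMA set. */
static inline int is_long_mode(struct kvm_vcpu *vcpu)
{
#ifdef CONFIG_X86_64
	return vcpu->arch.efer & EFER_LMA;	/* confirmed by the hit at line 44 */
#else
	return 0;				/* assumed 32-bit fallback */
#endif
}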
1323 ignore_bits |= EFER_LMA | EFER_LME; in update_transition_efer()
1325 if (guest_efer & EFER_LMA) in update_transition_efer()
1337 if (!(guest_efer & EFER_LMA)) in update_transition_efer()
2821 if (efer & EFER_LMA) { in vmx_set_efer()
2852 vmx_set_efer(vcpu, vcpu->arch.efer | EFER_LMA); in enter_lmode()
2860 vmx_set_efer(vcpu, vcpu->arch.efer & ~EFER_LMA); in exit_lmode()
6659 vcpu->arch.efer |= (EFER_LMA | EFER_LME);
6661 vcpu->arch.efer &= ~(EFER_LMA | EFER_LME);
6956 vcpu->arch.efer |= (EFER_LMA | EFER_LME);
6958 vcpu->arch.efer &= ~(EFER_LMA | EFER_LME);
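These hits appear to come from KVM's VMX code: update_transition_efer() decides how EFER is switched at VM-entry/exit, vmx_set_efer() applies a new value, and enter_lmode()/exit_lmode() toggle LMA when the guest turns paging on or off with LME set. A hedged sketch reconstructed only from the lines shown above (anything else the real functions do, such as syncing the VM-entry IA32e-mode control, is omitted):

/* Toggle Long Mode Active around a paging switch; reconstructed from the
 * enter_lmode()/exit_lmode() hits above, everything else elided. */
static void enter_lmode(struct kvm_vcpu *vcpu)
{
	vmx_set_efer(vcpu, vcpu->arch.efer | EFER_LMA);
}

static void exit_lmode(struct kvm_vcpu *vcpu)
{
	vmx_set_efer(vcpu, vcpu->arch.efer & ~EFER_LMA);
}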
1996 if (efer & EFER_LMA) { in em_syscall()
2004 if (efer & EFER_LMA) { in em_syscall()
2044 if ((ctxt->mode == X86EMUL_MODE_PROT32) && (efer & EFER_LMA) in em_sysenter()
2073 if (ctxt->mode == X86EMUL_MODE_PROT64 || (efer & EFER_LMA)) { in em_sysenter()
3156 if (efer & EFER_LMA) in check_cr_write()
3171 if ((efer & EFER_LMA) && !(new_val & X86_CR4_PAE)) in check_cr_write()
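These hits are from the instruction emulator: em_syscall() and em_sysenter() branch on EFER.LMA to pick 64-bit versus legacy behaviour, and check_cr_write() uses it to validate control-register writes. A hedged sketch of the CR4 rule implied by the last two hits (helper name and exact return handling are illustrative, not the kernel's code):

/* In long mode (EFER.LMA set), CR4.PAE may not be cleared; fail the write. */
static int check_cr4_pae_sketch(u64 efer, unsigned long new_val)
{
	if ((efer & EFER_LMA) && !(new_val & X86_CR4_PAE))
		return X86EMUL_PROPAGATE_FAULT;	/* emulator raises #GP */
	return X86EMUL_CONTINUE;
}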
452 if (!npt_enabled && !(efer & EFER_LMA)) in svm_set_efer()
1564 vcpu->arch.efer |= EFER_LMA; in svm_set_cr0()
1565 svm->vmcb->save.efer |= EFER_LMA | EFER_LME; in svm_set_cr0()
1569 vcpu->arch.efer &= ~EFER_LMA; in svm_set_cr0()
1570 svm->vmcb->save.efer &= ~(EFER_LMA | EFER_LME); in svm_set_cr0()
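These hits look like the SVM side: svm_set_efer() and the paging-transition handling in svm_set_cr0(), which keeps the cached guest EFER and the VMCB save area in sync. A hedged reconstruction of that fragment (the is_paging() test and the CONFIG_X86_64 guard are assumptions about context not shown in the output; the helper name is hypothetical):

/* When the guest enables paging with EFER.LME set, mark long mode active in
 * both the cached EFER and the VMCB; clear it again when paging is disabled. */
static void svm_cr0_long_mode_sketch(struct vcpu_svm *svm, struct kvm_vcpu *vcpu,
				     unsigned long cr0)
{
#ifdef CONFIG_X86_64
	if (vcpu->arch.efer & EFER_LME) {
		if (!is_paging(vcpu) && (cr0 & X86_CR0_PG)) {
			vcpu->arch.efer |= EFER_LMA;
			svm->vmcb->save.efer |= EFER_LMA | EFER_LME;
		}
		if (is_paging(vcpu) && !(cr0 & X86_CR0_PG)) {
			vcpu->arch.efer &= ~EFER_LMA;
			svm->vmcb->save.efer &= ~(EFER_LMA | EFER_LME);
		}
	}
#endif
}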
78 u64 __read_mostly efer_reserved_bits = ~((u64)(EFER_SCE | EFER_LME | EFER_LMA));
841 efer &= ~EFER_LMA; in set_efer()
842 efer |= vcpu->arch.efer & EFER_LMA; in set_efer()
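These hits appear to be the generic MSR path: efer_reserved_bits treats everything except SCE, LME and LMA as reserved, and set_efer() prevents a guest WRMSR from flipping LMA directly. A hedged sketch of that masking step (the helper name is hypothetical; only the two masking lines are confirmed by the hits):

/* LMA is a status bit the guest cannot change via WRMSR: strip it from the
 * incoming value and carry over whatever is currently active. */
static void preserve_efer_lma(struct kvm_vcpu *vcpu, u64 *efer)
{
	*efer &= ~EFER_LMA;			/* hit at line 841 */
	*efer |= vcpu->arch.efer & EFER_LMA;	/* hit at line 842 */
}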
28 #define EFER_LMA (1<<_EFER_LMA) macro
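The definition hit builds the mask from a bit-number constant. For reference, the architectural EFER bit positions (SCE = bit 0, LME = bit 8, LMA = bit 10, NXE = bit 11) give the neighbouring definitions roughly this shape; only the EFER_LMA line is confirmed by the hit, the rest is a sketch:

#define _EFER_SCE	0	/* SYSCALL/SYSRET enable */
#define _EFER_LME	8	/* Long Mode Enable */
#define _EFER_LMA	10	/* Long Mode Active (status bit set by the CPU) */
#define _EFER_NX	11	/* No-Execute enable */

#define EFER_SCE	(1<<_EFER_SCE)
#define EFER_LME	(1<<_EFER_LME)
#define EFER_LMA	(1<<_EFER_LMA)
#define EFER_NX		(1<<_EFER_NX)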