Home
last modified time | relevance | path

Searched refs:EFER_LMA (Results 1 – 9 of 9) sorted by relevance

/arch/x86/realmode/
init.c:149  trampoline_header->efer = efer & ~EFER_LMA;  in setup_real_mode()
/arch/x86/kvm/
x86.h:147  return vcpu->arch.efer & EFER_LMA;  in is_long_mode()
emulate.c:825  if (efer & EFER_LMA)  in emulator_recalc_and_set_mode()
833 if (efer & EFER_LMA) in emulator_recalc_and_set_mode()
842 if (efer & EFER_LMA) { in emulator_recalc_and_set_mode()
1545 if (!(efer & EFER_LMA)) in get_descriptor_ptr()
1691 if (efer & EFER_LMA) in __load_segment_descriptor()
2527 if (ctxt->ops->set_msr(ctxt, MSR_EFER, val & ~EFER_LMA)) in rsm_load_state_64()
2753 if (efer & EFER_LMA) { in em_syscall()
2761 if (efer & EFER_LMA) { in em_syscall()
2803 if ((ctxt->mode != X86EMUL_MODE_PROT64) && (efer & EFER_LMA) in em_sysenter()
2819 if (efer & EFER_LMA) { in em_sysenter()
[all …]
x86.c:100  u64 __read_mostly efer_reserved_bits = ~((u64)(EFER_SCE | EFER_LME | EFER_LMA));
1610 if (efer & (EFER_LME | EFER_LMA) && in __kvm_valid_efer()
1647 efer &= ~EFER_LMA; in set_efer()
1648 efer |= vcpu->arch.efer & EFER_LMA; in set_efer()
10602 if (!(sregs->cr4 & X86_CR4_PAE) || !(sregs->efer & EFER_LMA)) in kvm_is_valid_sregs()
10611 if (sregs->efer & EFER_LMA || sregs->cs.l) in kvm_is_valid_sregs()
10720 !(sregs2->efer & EFER_LMA); in __set_sregs2()
/arch/x86/include/asm/
msr-index.h:36  #define EFER_LMA (1<<_EFER_LMA)  macro
/arch/x86/kvm/vmx/
nested.c:2124  return vmx->vcpu.arch.efer | (EFER_LMA | EFER_LME);  in nested_vmx_calc_efer()
2126 return vmx->vcpu.arch.efer & ~(EFER_LMA | EFER_LME); in nested_vmx_calc_efer()
2326 if (guest_efer & EFER_LMA) in prepare_vmcs02_early()
2870 !!(vcpu->arch.efer & EFER_LMA))) in nested_vmx_check_address_space_size()
2943 CC(ia32e != !!(vmcs12->host_ia32_efer & EFER_LMA)) || in nested_vmx_check_host_state()
3039 CC(ia32e != !!(vmcs12->guest_ia32_efer & EFER_LMA)) || in nested_vmx_check_guest_state()
4294 vcpu->arch.efer |= (EFER_LMA | EFER_LME); in load_vmcs12_host_state()
4296 vcpu->arch.efer &= ~(EFER_LMA | EFER_LME); in load_vmcs12_host_state()
vmx.c:1006  ignore_bits |= EFER_LMA | EFER_LME;  in update_transition_efer()
1008 if (guest_efer & EFER_LMA) in update_transition_efer()
1019 if (!(guest_efer & EFER_LMA)) in update_transition_efer()
2957 if (efer & EFER_LMA) { in vmx_set_efer()
2985 vmx_set_efer(vcpu, vcpu->arch.efer | EFER_LMA); in enter_lmode()
2991 vmx_set_efer(vcpu, vcpu->arch.efer & ~EFER_LMA); in exit_lmode()
5883 vcpu->arch.efer | (EFER_LMA | EFER_LME)); in dump_vmcs()
5886 vcpu->arch.efer & ~(EFER_LMA | EFER_LME)); in dump_vmcs()
/arch/x86/kvm/svm/
svm.c:278  if (!(efer & EFER_LMA))  in svm_set_efer()
1755 vcpu->arch.efer |= EFER_LMA; in svm_set_cr0()
1757 svm->vmcb->save.efer |= EFER_LMA | EFER_LME; in svm_set_cr0()
1761 vcpu->arch.efer &= ~EFER_LMA; in svm_set_cr0()
1763 svm->vmcb->save.efer &= ~(EFER_LMA | EFER_LME); in svm_set_cr0()
/arch/x86/kvm/mmu/
mmu.c:220  BUILD_MMU_ROLE_REGS_ACCESSOR(efer, lma, EFER_LMA);