Searched for refs:efer (results 1 – 13 of 13, sorted by relevance)

/arch/x86/include/asm/
virtext.h:116     uint64_t efer;  in cpu_svm_disable() local
virtext.h:119     rdmsrl(MSR_EFER, efer);  in cpu_svm_disable()
virtext.h:120     wrmsrl(MSR_EFER, efer & ~EFER_SVME);  in cpu_svm_disable()
suspend_64.h:27   unsigned long efer;  member
kvm.h:129         __u64 efer;  member
svm.h:149         u64 efer;  member
kvm_host.h:337    u64 efer;  member
kvm_host.h:624    void (*set_efer)(struct kvm_vcpu *vcpu, u64 efer);
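
The virtext.h hits above show the canonical read-modify-write sequence for the EFER MSR. A minimal sketch of that pattern, assuming the rdmsrl()/wrmsrl() helpers and MSR_EFER/EFER_SVME constants from the asm headers (the function name here is hypothetical):

#include <asm/msr.h>    /* rdmsrl(), wrmsrl(), MSR_EFER, EFER_SVME */

/* Read EFER, clear the SVM-enable bit, write it back,
 * mirroring cpu_svm_disable() in virtext.h above. */
static inline void efer_clear_svme(void)       /* hypothetical name */
{
        uint64_t efer;

        rdmsrl(MSR_EFER, efer);
        wrmsrl(MSR_EFER, efer & ~EFER_SVME);
}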
/arch/x86/power/
cpu.c:95    rdmsrl(MSR_EFER, ctxt->efer);  in __save_processor_state()
cpu.c:172   wrmsrl(MSR_EFER, ctxt->efer);  in __restore_processor_state()
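
The cpu.c hits form a suspend/resume pair: EFER is stashed in the saved processor context before sleep and written back on resume, matching the saved-context member shown in suspend_64.h above. A minimal sketch of the pairing (the struct and function names here are hypothetical stand-ins for saved_context and the __save/__restore_processor_state paths):

struct efer_ctx {                       /* hypothetical stand-in for saved_context */
        unsigned long efer;
};

static void efer_save(struct efer_ctx *ctxt)
{
        rdmsrl(MSR_EFER, ctxt->efer);   /* as in __save_processor_state() */
}

static void efer_restore(struct efer_ctx *ctxt)
{
        wrmsrl(MSR_EFER, ctxt->efer);   /* as in __restore_processor_state() */
}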
/arch/x86/kvm/
x86.h:44    return vcpu->arch.efer & EFER_LMA;  in is_long_mode()
svm.c:449   static void svm_set_efer(struct kvm_vcpu *vcpu, u64 efer)  in svm_set_efer() argument
svm.c:451   vcpu->arch.efer = efer;  in svm_set_efer()
svm.c:452   if (!npt_enabled && !(efer & EFER_LMA))  in svm_set_efer()
svm.c:453   efer &= ~EFER_LME;  in svm_set_efer()
svm.c:455   to_svm(vcpu)->vmcb->save.efer = efer | EFER_SVME;  in svm_set_efer()
svm.c:616   uint64_t efer;  in svm_hardware_enable() local
svm.c:621   rdmsrl(MSR_EFER, efer);  in svm_hardware_enable()
svm.c:622   if (efer & EFER_SVME)  in svm_hardware_enable()
svm.c:646   wrmsrl(MSR_EFER, efer | EFER_SVME);  in svm_hardware_enable()
svm.c:1562  if (vcpu->arch.efer & EFER_LME) {  in svm_set_cr0()
[all …]
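
Assembled from the svm.c hits above, svm_set_efer() caches the guest's EFER, drops EFER.LME under shadow paging while long mode is not yet active, and always forces EFER.SVME into the VMCB, since the CPU requires SVME set while running guests. A sketch reconstructed from the lines shown (intervening lines are elided):

static void svm_set_efer(struct kvm_vcpu *vcpu, u64 efer)
{
        vcpu->arch.efer = efer;                 /* cache guest-visible EFER */
        if (!npt_enabled && !(efer & EFER_LMA))
                efer &= ~EFER_LME;              /* shadow paging: hide LME until LMA */
        to_svm(vcpu)->vmcb->save.efer = efer | EFER_SVME;
}

svm_hardware_enable() runs the inverse check on the host side: if EFER.SVME is already set, SVM is in use by someone else; otherwise it is enabled with wrmsrl(MSR_EFER, efer | EFER_SVME).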
cpuid.c:54  unsigned long long efer = 0;  in is_efer_nx() local
cpuid.c:56  rdmsrl_safe(MSR_EFER, &efer);  in is_efer_nx()
cpuid.c:57  return efer & EFER_NX;  in is_efer_nx()
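
is_efer_nx() in cpuid.c uses rdmsrl_safe() rather than rdmsrl(), so a CPU without an EFER MSR leaves efer at its zero initializer instead of faulting. Reconstructed from the hits above:

static int is_efer_nx(void)
{
        unsigned long long efer = 0;

        rdmsrl_safe(MSR_EFER, &efer);   /* tolerates #GP; efer stays 0 on failure */
        return efer & EFER_NX;
}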
emulate.c:1975  u64 efer = 0;  in em_syscall() local
emulate.c:1985  ops->get_msr(ctxt, MSR_EFER, &efer);  in em_syscall()
emulate.c:1988  if (!(efer & EFER_SCE))  in em_syscall()
emulate.c:1996  if (efer & EFER_LMA) {  in em_syscall()
emulate.c:2004  if (efer & EFER_LMA) {  in em_syscall()
emulate.c:2033  u64 efer = 0;  in em_sysenter() local
emulate.c:2035  ops->get_msr(ctxt, MSR_EFER, &efer);  in em_sysenter()
emulate.c:2044  if ((ctxt->mode == X86EMUL_MODE_PROT32) && (efer & EFER_LMA)  in em_sysenter()
emulate.c:2073  if (ctxt->mode == X86EMUL_MODE_PROT64 || (efer & EFER_LMA)) {  in em_sysenter()
emulate.c:3120  u64 efer = 0;  in check_cr_write() local
[all …]
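
In the emulator, EFER is fetched through the ops->get_msr() callback rather than read from hardware, since the emulated guest's EFER lives in vcpu state. The em_syscall() hits show the gating: SYSCALL is undefined unless EFER.SCE is set, and EFER.LMA selects 64-bit versus legacy entry handling. A sketch of that control flow, with the MSR loads and register setup elided (emulate_ud() is assumed here as the emulator's #UD injection helper; it is not among the hits above):

u64 efer = 0;

ops->get_msr(ctxt, MSR_EFER, &efer);
if (!(efer & EFER_SCE))
        return emulate_ud(ctxt);        /* SYSCALL disabled: inject #UD */
if (efer & EFER_LMA) {
        /* long mode: 64-bit entry point and RFLAGS masking */
} else {
        /* legacy mode: 32-bit entry point */
}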
vmx.c:1315  guest_efer = vmx->vcpu.arch.efer;  in update_transition_efer()
vmx.c:1335  if (enable_ept && ((vmx->vcpu.arch.efer ^ host_efer) & EFER_NX)) {  in update_transition_efer()
vmx.c:1336  guest_efer = vmx->vcpu.arch.efer;  in update_transition_efer()
vmx.c:1756  if ((index >= 0) && (vmx->vcpu.arch.efer & EFER_SCE))  in setup_msrs()
vmx.c:2807  static void vmx_set_efer(struct kvm_vcpu *vcpu, u64 efer)  in vmx_set_efer() argument
vmx.c:2820  vcpu->arch.efer = efer;  in vmx_set_efer()
vmx.c:2821  if (efer & EFER_LMA) {  in vmx_set_efer()
vmx.c:2825  msr->data = efer;  in vmx_set_efer()
vmx.c:2831  msr->data = efer & ~EFER_LME;  in vmx_set_efer()
vmx.c:2852  vmx_set_efer(vcpu, vcpu->arch.efer | EFER_LMA);  in enter_lmode()
[all …]
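
The vmx.c counterpart differs from SVM in two ways visible above: there is no SVME-style bit to force, and the guest EFER is kept in a shared MSR save slot rather than written straight to hardware. A simplified sketch of vmx_set_efer() reconstructed from the lines shown (find_msr_entry() is an assumption here, as it does not appear in the hits; the NULL check and VM-entry-controls update from the full function are elided):

static void vmx_set_efer(struct kvm_vcpu *vcpu, u64 efer)
{
        struct shared_msr_entry *msr = find_msr_entry(to_vmx(vcpu), MSR_EFER);

        vcpu->arch.efer = efer;
        if (efer & EFER_LMA)
                msr->data = efer;               /* long mode active */
        else
                msr->data = efer & ~EFER_LME;   /* hide LME until LMA is set */

        setup_msrs(to_vmx(vcpu));
}

enter_lmode() then simply re-calls this with vcpu->arch.efer | EFER_LMA once the guest completes the long-mode transition, as the vmx.c:2852 hit shows.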
x86.c:516   if ((vcpu->arch.efer & EFER_LME)) {  in kvm_set_cr0()
x86.c:814   static int set_efer(struct kvm_vcpu *vcpu, u64 efer)  in set_efer() argument
x86.c:816   u64 old_efer = vcpu->arch.efer;  in set_efer()
x86.c:818   if (efer & efer_reserved_bits)  in set_efer()
x86.c:822   && (vcpu->arch.efer & EFER_LME) != (efer & EFER_LME))  in set_efer()
x86.c:825   if (efer & EFER_FFXSR) {  in set_efer()
x86.c:833   if (efer & EFER_SVME) {  in set_efer()
x86.c:841   efer &= ~EFER_LMA;  in set_efer()
x86.c:842   efer |= vcpu->arch.efer & EFER_LMA;  in set_efer()
x86.c:844   kvm_x86_ops->set_efer(vcpu, efer);  in set_efer()
[all …]
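
set_efer() in x86.c is the common validation layer in front of the per-vendor set_efer hook from kvm_host.h: reserved bits reject the write, EFER.LME cannot be toggled while paging is enabled, FFXSR and SVME require the matching CPUID features, and EFER.LMA is read-only to the guest. A sketch assembled from the hits above, with the CPUID feature checks elided (is_paging() is assumed as the usual CR0.PG test helper):

static int set_efer(struct kvm_vcpu *vcpu, u64 efer)
{
        u64 old_efer = vcpu->arch.efer;

        if (efer & efer_reserved_bits)
                return 1;                       /* caller injects #GP */

        if (is_paging(vcpu) &&
            (vcpu->arch.efer & EFER_LME) != (efer & EFER_LME))
                return 1;                       /* LME is locked while paging is on */

        /* EFER.LMA is managed by KVM and the hardware, not the guest */
        efer &= ~EFER_LMA;
        efer |= old_efer & EFER_LMA;

        kvm_x86_ops->set_efer(vcpu, efer);
        return 0;
}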
mmu.c:253   return vcpu->arch.efer & EFER_NX;  in is_nx()
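
The remaining one-line hits, is_long_mode() at x86.h:44 and is_nx() at mmu.c:253, follow the same accessor pattern: test a single feature bit in the cached vcpu->arch.efer rather than re-reading the MSR. Reconstructed from the hits:

static inline int is_long_mode(struct kvm_vcpu *vcpu)
{
        return vcpu->arch.efer & EFER_LMA;      /* long mode active? */
}

static int is_nx(struct kvm_vcpu *vcpu)
{
        return vcpu->arch.efer & EFER_NX;       /* NX paging enabled? */
}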