
Searched refs:efer (Results 1 – 15 of 15) sorted by relevance

/arch/x86/include/asm/
virtext.h 113 uint64_t efer; in cpu_svm_disable() local
116 rdmsrl(MSR_EFER, efer); in cpu_svm_disable()
117 wrmsrl(MSR_EFER, efer & ~EFER_SVME); in cpu_svm_disable()
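Taken together, the virtext.h hits show the local-CPU SVM teardown pattern: read EFER, clear the SVME bit, write it back. A minimal sketch of just that pattern (kernel context assumed; the helper name is illustrative, not the real cpu_svm_disable()):

#include <linux/types.h>
#include <asm/msr.h>	/* rdmsrl()/wrmsrl(), MSR_EFER, EFER_SVME */

/* Sketch of the cpu_svm_disable() pattern seen above: clear EFER.SVME
 * on the current CPU so SVM can no longer be used from this core. */
static inline void svm_disable_sketch(void)
{
	uint64_t efer;

	rdmsrl(MSR_EFER, efer);
	wrmsrl(MSR_EFER, efer & ~EFER_SVME);
}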
suspend_64.h 41 unsigned long efer; member
realmode.h 49 u64 efer;
svm.h 170 u64 efer; member
kvm_host.h 501 u64 efer; member
969 void (*set_efer)(struct kvm_vcpu *vcpu, u64 efer);
1194 bool kvm_valid_efer(struct kvm_vcpu *vcpu, u64 efer);
/arch/x86/realmode/
init.c 59 u64 efer; in setup_real_mode() local
105 rdmsrl(MSR_EFER, efer); in setup_real_mode()
106 trampoline_header->efer = efer & ~EFER_LMA; in setup_real_mode()
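The realmode/init.c hits capture the boot CPU's EFER into the trampoline header with EFER_LMA masked off, so a CPU coming through the trampoline does not start with long mode marked active. A hedged sketch of that single step (the helper name is invented; trampoline_header and its efer field match the realmode.h hit above):

#include <linux/types.h>
#include <asm/msr.h>
#include <asm/realmode.h>	/* struct trampoline_header, per the realmode.h hit */

/* Sketch of the setup_real_mode() EFER handling: stash the current EFER
 * with the long-mode-active bit cleared for the real-mode trampoline. */
static void trampoline_efer_sketch(struct trampoline_header *th)
{
	u64 efer;

	rdmsrl(MSR_EFER, efer);
	th->efer = efer & ~EFER_LMA;
}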
/arch/x86/kvm/
x86.h 51 return vcpu->arch.efer & EFER_LMA; in is_long_mode()
70 return (vcpu->arch.efer & EFER_LMA) && in is_la57_mode()
emulate.c 792 u64 efer = 0; in assign_eip_far() local
794 ctxt->ops->get_msr(ctxt, MSR_EFER, &efer); in assign_eip_far()
795 if (efer & EFER_LMA) in assign_eip_far()
1565 u64 efer = 0; in get_descriptor_ptr() local
1567 ctxt->ops->get_msr(ctxt, MSR_EFER, &efer); in get_descriptor_ptr()
1568 if (!(efer & EFER_LMA)) in get_descriptor_ptr()
1716 u64 efer = 0; in __load_segment_descriptor() local
1718 ctxt->ops->get_msr(ctxt, MSR_EFER, &efer); in __load_segment_descriptor()
1719 if (efer & EFER_LMA) in __load_segment_descriptor()
2579 unsigned long cr0, cr4, efer; in em_rsm() local
[all …]
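Every emulate.c hit follows the same shape: fetch EFER through the emulator's MSR callback and branch on EFER_LMA to pick 64-bit versus legacy behavior. A small sketch of that check, using the x86_emulate_ctxt ops interface visible in the snippets (the helper name is made up; KVM's emulator definitions are assumed to be in scope):

/* Sketch: the instruction emulator reads EFER via its ops table and
 * tests EFER.LMA to decide whether long-mode rules apply. */
static bool emul_long_mode_sketch(struct x86_emulate_ctxt *ctxt)
{
	u64 efer = 0;

	ctxt->ops->get_msr(ctxt, MSR_EFER, &efer);
	return !!(efer & EFER_LMA);
}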
svm.c 608 static void svm_set_efer(struct kvm_vcpu *vcpu, u64 efer) in svm_set_efer() argument
610 vcpu->arch.efer = efer; in svm_set_efer()
614 efer |= EFER_NX; in svm_set_efer()
616 if (!(efer & EFER_LMA)) in svm_set_efer()
617 efer &= ~EFER_LME; in svm_set_efer()
620 to_svm(vcpu)->vmcb->save.efer = efer | EFER_SVME; in svm_set_efer()
785 uint64_t efer; in svm_hardware_enable() local
789 rdmsrl(MSR_EFER, efer); in svm_hardware_enable()
790 if (efer & EFER_SVME) in svm_hardware_enable()
810 wrmsrl(MSR_EFER, efer | EFER_SVME); in svm_hardware_enable()
[all …]
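The svm.c hits show both directions of the SVME bit: svm_hardware_enable() reads EFER and bails out if SVME is already set before setting it, while svm_set_efer() always ORs EFER_SVME back into the value written to the guest's VMCB. A rough sketch of the enable-side check only, with the failure path reduced to an error code (an assumption; the real function's error handling differs):

#include <linux/types.h>
#include <linux/errno.h>
#include <asm/msr.h>

/* Sketch of the svm_hardware_enable() EFER handling: refuse if SVME is
 * already on, otherwise enable SVM on this CPU by setting EFER.SVME. */
static int svm_enable_sketch(void)
{
	uint64_t efer;

	rdmsrl(MSR_EFER, efer);
	if (efer & EFER_SVME)
		return -EBUSY;	/* assumption: stand-in for the real error path */

	wrmsrl(MSR_EFER, efer | EFER_SVME);
	return 0;
}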
x86.c 666 if ((vcpu->arch.efer & EFER_LME)) { in kvm_set_cr0()
1165 static bool __kvm_valid_efer(struct kvm_vcpu *vcpu, u64 efer) in __kvm_valid_efer() argument
1167 if (efer & EFER_FFXSR && !guest_cpuid_has(vcpu, X86_FEATURE_FXSR_OPT)) in __kvm_valid_efer()
1170 if (efer & EFER_SVME && !guest_cpuid_has(vcpu, X86_FEATURE_SVM)) in __kvm_valid_efer()
1176 bool kvm_valid_efer(struct kvm_vcpu *vcpu, u64 efer) in kvm_valid_efer() argument
1178 if (efer & efer_reserved_bits) in kvm_valid_efer()
1181 return __kvm_valid_efer(vcpu, efer); in kvm_valid_efer()
1187 u64 old_efer = vcpu->arch.efer; in set_efer()
1188 u64 efer = msr_info->data; in set_efer() local
1190 if (efer & efer_reserved_bits) in set_efer()
[all …]
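The x86.c hits outline how KVM validates a guest-supplied EFER: any reserved bit is rejected outright, and feature-gated bits such as EFER_FFXSR and EFER_SVME are only accepted if the matching guest CPUID feature is exposed. A condensed sketch combining the kvm_valid_efer()/__kvm_valid_efer() split shown above (efer_reserved_bits and guest_cpuid_has() are the symbols from the snippets; KVM-internal headers assumed):

/* Sketch of KVM's EFER validation: reserved bits fail immediately,
 * feature-dependent bits must be backed by guest CPUID. */
static bool valid_efer_sketch(struct kvm_vcpu *vcpu, u64 efer)
{
	if (efer & efer_reserved_bits)
		return false;

	if ((efer & EFER_FFXSR) && !guest_cpuid_has(vcpu, X86_FEATURE_FXSR_OPT))
		return false;

	if ((efer & EFER_SVME) && !guest_cpuid_has(vcpu, X86_FEATURE_SVM))
		return false;

	return true;
}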
cpuid.c 148 unsigned long long efer = 0; in is_efer_nx() local
150 rdmsrl_safe(MSR_EFER, &efer); in is_efer_nx()
151 return efer & EFER_NX; in is_efer_nx()
vmx.c 2259 u64 guest_efer = vmx->vcpu.arch.efer; in update_transition_efer()
2285 (enable_ept && ((vmx->vcpu.arch.efer ^ host_efer) & EFER_NX))) { in update_transition_efer()
2826 if ((index >= 0) && (vmx->vcpu.arch.efer & EFER_SCE)) in setup_msrs()
4371 static void vmx_set_efer(struct kvm_vcpu *vcpu, u64 efer) in vmx_set_efer() argument
4384 vcpu->arch.efer = efer; in vmx_set_efer()
4385 if (efer & EFER_LMA) { in vmx_set_efer()
4387 msr->data = efer; in vmx_set_efer()
4391 msr->data = efer & ~EFER_LME; in vmx_set_efer()
4412 vmx_set_efer(vcpu, vcpu->arch.efer | EFER_LMA); in enter_lmode()
4418 vmx_set_efer(vcpu, vcpu->arch.efer & ~EFER_LMA); in exit_lmode()
[all …]
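In vmx.c the same register drives long-mode transitions: enter_lmode() and exit_lmode() toggle EFER_LMA through vmx_set_efer(), which keeps the full EFER when LMA is set but drops EFER_LME from the value saved for the guest otherwise. A tiny sketch of just that mask decision, grounded in the vmx_set_efer() lines above (helper name invented):

/* Sketch of the value vmx_set_efer() ends up storing for the guest:
 * the full EFER while long mode is active, otherwise EFER without LME. */
static u64 vmx_guest_efer_sketch(u64 efer)
{
	return (efer & EFER_LMA) ? efer : (efer & ~EFER_LME);
}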
mmu.c 484 return vcpu->arch.efer & EFER_NX; in is_nx()
/arch/x86/power/
cpu.c 117 rdmsrl(MSR_EFER, ctxt->efer); in __save_processor_state()
211 wrmsrl(MSR_EFER, ctxt->efer); in __restore_processor_state()
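The power-management hits pair with the suspend_64.h member above: EFER is read into the saved context on suspend and written back on resume. A minimal sketch of that round trip (struct saved_context per the suspend_64.h hit; helper names invented):

#include <asm/msr.h>
#include <asm/suspend.h>	/* struct saved_context, per the suspend_64.h hit */

/* Sketch of the __save_processor_state()/__restore_processor_state()
 * pattern: EFER is part of the state preserved across suspend/resume. */
static void save_efer_sketch(struct saved_context *ctxt)
{
	rdmsrl(MSR_EFER, ctxt->efer);
}

static void restore_efer_sketch(struct saved_context *ctxt)
{
	wrmsrl(MSR_EFER, ctxt->efer);
}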
/arch/x86/include/uapi/asm/
kvm.h 155 __u64 efer; member