
Searched refs: MSR_EFER (results 1 – 19 of 19), sorted by relevance

/arch/x86/include/asm/
virtext.h:115 rdmsrl(MSR_EFER, efer); in cpu_svm_disable()
virtext.h:116 wrmsrl(MSR_EFER, efer & ~EFER_SVME); in cpu_svm_disable()
msr-index.h:15 #define MSR_EFER 0xc0000080 /* extended feature register */
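The virtext.h hits above are the read-modify-write idiom applied to EFER's SVME bit; svm.c (further down this list) performs the mirror-image enable. A minimal sketch of that idiom, assuming the kernel's rdmsrl()/wrmsrl() helpers and the EFER_SVME definition from msr-index.h; sketch_svm_toggle() is an illustrative name, not a kernel function:

    #include <linux/types.h>
    #include <asm/msr.h>            /* rdmsrl/wrmsrl, pulls in msr-index.h */

    /* Read-modify-write on EFER.SVME, as in cpu_svm_disable() (clear)
     * and svm_hardware_enable() (set). */
    static void sketch_svm_toggle(bool enable)
    {
            u64 efer;

            rdmsrl(MSR_EFER, efer);          /* read current EFER */
            if (enable)
                    efer |= EFER_SVME;       /* allow VMRUN and friends */
            else
                    efer &= ~EFER_SVME;      /* shut SVM support off */
            wrmsrl(MSR_EFER, efer);          /* write it back */
    }
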
/arch/x86/kernel/acpi/
sleep.c:71 if (!rdmsr_safe(MSR_EFER, in x86_acpi_suspend_lowlevel()
sleep.c:74 !wrmsr_safe(MSR_EFER, in x86_acpi_suspend_lowlevel()
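sleep.c reaches EFER through the _safe accessors because a 32-bit CPU without the MSR would fault on a plain rdmsr. A sketch of that pattern under the same assumption; the variable names are stand-ins (the real code stores into the wakeup header):

    #include <asm/msr.h>

    static u32 efer_lo, efer_hi;     /* stand-ins for the wakeup header fields */

    static void sketch_save_restore_efer(void)
    {
            /* rdmsr_safe() returns nonzero instead of faulting when
             * the CPU has no EFER, so the save is simply skipped. */
            if (rdmsr_safe(MSR_EFER, &efer_lo, &efer_hi))
                    return;
            /* ... suspend and resume happen here ... */
            wrmsr_safe(MSR_EFER, efer_lo, efer_hi);
    }
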
/arch/x86/boot/compressed/
efi_thunk_64.S:131 movl $MSR_EFER, %ecx
efi_thunk_64.S:158 movl $MSR_EFER, %ecx
head_64.S:187 movl $MSR_EFER, %ecx
head_64.S:614 movl $MSR_EFER, %ecx
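Every assembly hit in this list loads the MSR number into %ecx because rdmsr/wrmsr address MSRs through ECX. The boot paths here (and the realmode and kernel head_*.S hits below) use the sequence to set EFER.LME before entering long mode, while reboot.S uses it to tear EFER back down. A sketch of the enable sequence, written as C inline asm for illustration; the files themselves contain hand-written assembly:

    static inline void sketch_enable_lme(void)
    {
            u32 lo, hi;

            /* rdmsr/wrmsr take the MSR index in %ecx and the value
             * in %edx:%eax. */
            asm volatile("rdmsr" : "=a" (lo), "=d" (hi) : "c" (MSR_EFER));
            lo |= EFER_LME;                  /* long mode enable bit */
            asm volatile("wrmsr" : : "c" (MSR_EFER), "a" (lo), "d" (hi));
    }
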
/arch/x86/realmode/rm/
reboot.S:39 movl $MSR_EFER, %ecx
wakeup_asm.S:123 movl $MSR_EFER, %ecx
trampoline_64.S:126 movl $MSR_EFER, %ecx
/arch/x86/realmode/
init.c:97 rdmsrl(MSR_EFER, efer); in setup_real_mode()
/arch/x86/power/
cpu.c:116 rdmsrl(MSR_EFER, ctxt->efer); in __save_processor_state()
cpu.c:207 wrmsrl(MSR_EFER, ctxt->efer); in __restore_processor_state()
/arch/x86/platform/pvh/
head.S:80 mov $MSR_EFER, %ecx
/arch/x86/kernel/
head_64.S:151 movl $MSR_EFER, %ecx
head_32.S:277 movl $MSR_EFER, %ecx
/arch/x86/kvm/vmx/
vmx.c:452 MSR_EFER, MSR_TSC_AUX, MSR_STAR,
vmx.c:855 case MSR_EFER: in clear_atomic_switch_msr()
vmx.c:908 case MSR_EFER: in add_atomic_switch_msr()
vmx.c:997 add_atomic_switch_msr(vmx, MSR_EFER, in update_transition_efer()
vmx.c:1000 clear_atomic_switch_msr(vmx, MSR_EFER); in update_transition_efer()
vmx.c:1003 clear_atomic_switch_msr(vmx, MSR_EFER); in update_transition_efer()
vmx.c:1680 index = __find_msr_index(vmx, MSR_EFER); in setup_msrs()
vmx.c:1783 case MSR_EFER: in vmx_get_msr()
vmx.c:1915 case MSR_EFER: in vmx_set_msr()
vmx.c:2796 struct shared_msr_entry *msr = find_msr_entry(vmx, MSR_EFER); in vmx_set_efer()
[all …]
nested.c:3949 if (vmx->msr_autoload.guest.val[i].index == MSR_EFER) in nested_vmx_get_vmcs01_guest_efer()
nested.c:3953 efer_msr = find_msr_entry(vmx, MSR_EFER); in nested_vmx_get_vmcs01_guest_efer()
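The vmx.c and nested.c hits revolve around the VMCS MSR autoload area: an array of {index, value} pairs the CPU loads atomically at VM-entry/exit, which update_transition_efer() adds MSR_EFER to (or removes it from) and which nested_vmx_get_vmcs01_guest_efer() scans. A conceptual sketch of that scan; the struct below is a simplified stand-in for KVM's vmx_msr_entry, not its actual definition:

    #include <linux/types.h>

    struct autoload_entry {          /* simplified stand-in */
            u32 index;
            u64 value;
    };

    static u64 sketch_autoload_efer(const struct autoload_entry *val,
                                    unsigned int nr)
    {
            unsigned int i;

            for (i = 0; i < nr; i++)
                    if (val[i].index == MSR_EFER)  /* cf. nested.c:3949 */
                            return val[i].value;
            return 0;                /* EFER not on the autoload list */
    }
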
/arch/x86/kvm/
emulate.c:811 ctxt->ops->get_msr(ctxt, MSR_EFER, &efer); in assign_eip_far()
emulate.c:1570 ctxt->ops->get_msr(ctxt, MSR_EFER, &efer); in get_descriptor_ptr()
emulate.c:1721 ctxt->ops->get_msr(ctxt, MSR_EFER, &efer); in __load_segment_descriptor()
emulate.c:2544 ctxt->ops->set_msr(ctxt, MSR_EFER, val & ~EFER_LMA); in rsm_load_state_64()
emulate.c:2637 ctxt->ops->set_msr(ctxt, MSR_EFER, efer); in em_rsm()
emulate.c:2772 ops->get_msr(ctxt, MSR_EFER, &efer); in em_syscall()
emulate.c:2824 ops->get_msr(ctxt, MSR_EFER, &efer); in em_sysenter()
emulate.c:4221 ctxt->ops->get_msr(ctxt, MSR_EFER, &efer); in check_cr_write()
emulate.c:4232 ctxt->ops->get_msr(ctxt, MSR_EFER, &efer); in check_cr_write()
emulate.c:4255 ctxt->ops->get_msr(ctxt, MSR_EFER, &efer); in check_cr_write()
[all …]
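emulate.c never touches the hardware MSR: every hit goes through the ctxt->ops->get_msr()/set_msr() callbacks, so the same emulator code works against whatever vCPU state backs it. A simplified fragment modeled on the em_syscall() check (EFER.SCE gates the SYSCALL instruction); it assumes the helpers available inside emulate.c:

    static int sketch_syscall_gate(struct x86_emulate_ctxt *ctxt)
    {
            u64 efer;

            /* fetch the guest's EFER via the emulator callback */
            ctxt->ops->get_msr(ctxt, MSR_EFER, &efer);
            if (!(efer & EFER_SCE))          /* SYSCALL not enabled */
                    return emulate_ud(ctxt); /* inject #UD, as em_syscall() does */
            return X86EMUL_CONTINUE;
    }
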
cpuid.c:159 rdmsrl_safe(MSR_EFER, &efer); in is_efer_nx()
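is_efer_nx() is a one-line probe: read EFER with the _safe variant and test the NX bit. A sketch under that assumption:

    static bool sketch_is_efer_nx(void)
    {
            u64 efer = 0;

            rdmsrl_safe(MSR_EFER, &efer);    /* tolerate a missing MSR */
            return efer & EFER_NX;
    }
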
svm.c:917 rdmsrl(MSR_EFER, efer); in svm_hardware_enable()
svm.c:939 wrmsrl(MSR_EFER, efer | EFER_SVME); in svm_hardware_enable()
x86.c:2655 case MSR_EFER: in kvm_set_msr_common()
x86.c:3026 case MSR_EFER: in kvm_get_msr_common()
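x86.c is the common MSR front end: both functions switch on the MSR index and give MSR_EFER its own arm. A sketch of the set-side dispatch shape, assuming KVM's msr_data layout and its set_efer() helper (which kvm_set_msr_common() routes this case to); the get side mirrors it by returning vcpu->arch.efer:

    static int sketch_set_msr_common(struct kvm_vcpu *vcpu,
                                     struct msr_data *msr_info)
    {
            switch (msr_info->index) {
            case MSR_EFER:
                    return set_efer(vcpu, msr_info);  /* EFER-specific handler */
            default:
                    return 1;        /* not handled in this sketch */
            }
    }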