Searched refs:MSR_EFER (Results 1 – 19 of 19) sorted by relevance
/arch/x86/include/asm/
D | virtext.h |
    115 rdmsrl(MSR_EFER, efer); in cpu_svm_disable()
    116 wrmsrl(MSR_EFER, efer & ~EFER_SVME); in cpu_svm_disable()
|
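Note: cpu_svm_disable() above is the canonical read-modify-write on EFER: read the MSR, clear EFER_SVME, and write it back so SVM is off before a reboot or kexec. A minimal sketch of that pattern, assuming kernel context:

    #include <linux/types.h>
    #include <asm/msr.h>        /* rdmsrl(), wrmsrl() */
    #include <asm/msr-index.h>  /* MSR_EFER, EFER_SVME */

    static inline void efer_clear_svme(void)
    {
            u64 efer;

            rdmsrl(MSR_EFER, efer);                 /* read current EFER */
            wrmsrl(MSR_EFER, efer & ~EFER_SVME);    /* write back without SVME */
    }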
D | msr-index.h | 15 #define MSR_EFER 0xc0000080 /* extended feature register */ macro
|
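Note: 0xc0000080 is the architectural MSR number, so EFER can also be inspected from userspace through the msr driver, not only via the kernel accessors seen above. A small runnable sketch (assumes root and msr.ko loaded; bits 0/8/10/11 are EFER_SCE/LME/LMA/NX per msr-index.h):

    #include <fcntl.h>
    #include <stdint.h>
    #include <stdio.h>
    #include <unistd.h>

    #define MSR_EFER 0xc0000080 /* extended feature register */

    int main(void)
    {
            uint64_t efer;
            int fd = open("/dev/cpu/0/msr", O_RDONLY);

            /* pread() at the MSR number is the msr-driver read protocol */
            if (fd < 0 || pread(fd, &efer, sizeof(efer), MSR_EFER) != sizeof(efer)) {
                    perror("msr");
                    return 1;
            }
            printf("EFER=%#llx SCE=%llu LME=%llu LMA=%llu NX=%llu\n",
                   (unsigned long long)efer,
                   (unsigned long long)(efer & 1),
                   (unsigned long long)(efer >> 8 & 1),
                   (unsigned long long)(efer >> 10 & 1),
                   (unsigned long long)(efer >> 11 & 1));
            return 0;
    }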
/arch/x86/kernel/acpi/ |
D | sleep.c |
    71 if (!rdmsr_safe(MSR_EFER, in x86_acpi_suspend_lowlevel()
    74 !wrmsr_safe(MSR_EFER, in x86_acpi_suspend_lowlevel()
|
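Note: the ACPI suspend path uses the fault-tolerant _safe accessors, which return nonzero instead of faulting on CPUs that predate EFER. A sketch of that guarded snapshot, assuming kernel context (rdmsr_safe() fills u32 low/high halves and returns 0 on success):

    #include <linux/types.h>
    #include <asm/msr.h>        /* rdmsr_safe() */
    #include <asm/msr-index.h>  /* MSR_EFER */

    /* Save EFER as a low/high pair, as the wakeup header does; true
     * only when the CPU actually implements the MSR. */
    static bool efer_snapshot(u32 *lo, u32 *hi)
    {
            return rdmsr_safe(MSR_EFER, lo, hi) == 0;
    }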
/arch/x86/boot/compressed/ |
D | efi_thunk_64.S |
    131 movl $MSR_EFER, %ecx
    158 movl $MSR_EFER, %ecx
|
D | head_64.S |
    187 movl $MSR_EFER, %ecx
    614 movl $MSR_EFER, %ecx
|
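Note: every .S hit here and below follows one shape: put the MSR number in %ecx, rdmsr into %edx:%eax, set EFER.LME before turning on paging (reboot.S clears it instead, to leave long mode), then wrmsr. The same sequence as a C inline-asm sketch of the set direction, boot context assumed (bit 8 is _EFER_LME):

    /* Sketch only; the real code runs in early boot/realmode glue
     * where the normal kernel MSR helpers are not available. */
    static inline void efer_set_lme(void)
    {
            unsigned int lo, hi;

            asm volatile("rdmsr" : "=a" (lo), "=d" (hi) : "c" (0xc0000080));
            lo |= 1u << 8;          /* _EFER_LME: enable long mode */
            asm volatile("wrmsr" : : "a" (lo), "d" (hi), "c" (0xc0000080));
    }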
/arch/x86/realmode/rm/ |
D | reboot.S | 39 movl $MSR_EFER, %ecx
|
D | wakeup_asm.S | 123 movl $MSR_EFER, %ecx
|
D | trampoline_64.S | 126 movl $MSR_EFER, %ecx
|
/arch/x86/realmode/ |
D | init.c | 97 rdmsrl(MSR_EFER, efer); in setup_real_mode()
|
/arch/x86/power/ |
D | cpu.c |
    116 rdmsrl(MSR_EFER, ctxt->efer); in __save_processor_state()
    207 wrmsrl(MSR_EFER, ctxt->efer); in __restore_processor_state()
|
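Note: __save_processor_state() and __restore_processor_state() bracket EFER around S3 suspend: snapshot it before sleeping and write it back on resume, since firmware may leave it in a different state. A sketch with a hypothetical stand-in for struct saved_context, kernel context assumed:

    #include <linux/types.h>
    #include <asm/msr.h>

    struct cpu_ctxt { u64 efer; };  /* stand-in for struct saved_context */

    static void ctxt_save_efer(struct cpu_ctxt *ctxt)
    {
            rdmsrl(MSR_EFER, ctxt->efer);   /* before entering S3 */
    }

    static void ctxt_restore_efer(struct cpu_ctxt *ctxt)
    {
            wrmsrl(MSR_EFER, ctxt->efer);   /* on resume */
    }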
/arch/x86/platform/pvh/ |
D | head.S | 80 mov $MSR_EFER, %ecx
|
/arch/x86/kernel/ |
D | head_64.S | 151 movl $MSR_EFER, %ecx
|
D | head_32.S | 277 movl $MSR_EFER, %ecx
|
/arch/x86/kvm/vmx/ |
D | vmx.c |
    452 MSR_EFER, MSR_TSC_AUX, MSR_STAR,
    855 case MSR_EFER: in clear_atomic_switch_msr()
    908 case MSR_EFER: in add_atomic_switch_msr()
    997 add_atomic_switch_msr(vmx, MSR_EFER, in update_transition_efer()
    1000 clear_atomic_switch_msr(vmx, MSR_EFER); in update_transition_efer()
    1003 clear_atomic_switch_msr(vmx, MSR_EFER); in update_transition_efer()
    1680 index = __find_msr_index(vmx, MSR_EFER); in setup_msrs()
    1783 case MSR_EFER: in vmx_get_msr()
    1915 case MSR_EFER: in vmx_set_msr()
    2796 struct shared_msr_entry *msr = find_msr_entry(vmx, MSR_EFER); in vmx_set_efer()
    [all …]
|
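Note: update_transition_efer() decides how the guest's EFER gets swapped in and out around VM-entry/VM-exit: when guest and host values must differ, MSR_EFER is placed on the VMCS MSR autoload/autostore lists via add_atomic_switch_msr(); otherwise the entry is removed. A heavily condensed sketch of that decision, with hypothetical stubs standing in for the vmx.c internals:

    #include <stdint.h>

    #define MSR_EFER 0xc0000080

    /* Hypothetical stand-ins for add/clear_atomic_switch_msr(). */
    static void add_atomic_switch(uint32_t msr, uint64_t g, uint64_t h) {}
    static void clear_atomic_switch(uint32_t msr) {}

    /* Only keep MSR_EFER on the atomic-switch lists when the guest
     * value cannot simply equal the host value. */
    static void sync_guest_efer(uint64_t guest_efer, uint64_t host_efer)
    {
            if (guest_efer != host_efer)
                    add_atomic_switch(MSR_EFER, guest_efer, host_efer);
            else
                    clear_atomic_switch(MSR_EFER);
    }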
D | nested.c | 3949 if (vmx->msr_autoload.guest.val[i].index == MSR_EFER) in nested_vmx_get_vmcs01_guest_efer() 3953 efer_msr = find_msr_entry(vmx, MSR_EFER); in nested_vmx_get_vmcs01_guest_efer()
|
/arch/x86/kvm/ |
D | emulate.c |
    811 ctxt->ops->get_msr(ctxt, MSR_EFER, &efer); in assign_eip_far()
    1570 ctxt->ops->get_msr(ctxt, MSR_EFER, &efer); in get_descriptor_ptr()
    1721 ctxt->ops->get_msr(ctxt, MSR_EFER, &efer); in __load_segment_descriptor()
    2544 ctxt->ops->set_msr(ctxt, MSR_EFER, val & ~EFER_LMA); in rsm_load_state_64()
    2637 ctxt->ops->set_msr(ctxt, MSR_EFER, efer); in em_rsm()
    2772 ops->get_msr(ctxt, MSR_EFER, &efer); in em_syscall()
    2824 ops->get_msr(ctxt, MSR_EFER, &efer); in em_sysenter()
    4221 ctxt->ops->get_msr(ctxt, MSR_EFER, &efer); in check_cr_write()
    4232 ctxt->ops->get_msr(ctxt, MSR_EFER, &efer); in check_cr_write()
    4255 ctxt->ops->get_msr(ctxt, MSR_EFER, &efer); in check_cr_write()
    [all …]
|
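Note: the emulator never reads hardware EFER; it queries the vCPU model through ctxt->ops->get_msr() and gates instruction legality on the bits, e.g. em_syscall() raises #UD when EFER.SCE is clear. A simplified sketch with a hypothetical ops slice:

    #include <stdbool.h>
    #include <stdint.h>

    #define MSR_EFER 0xc0000080
    #define EFER_SCE (1ULL << 0)    /* syscall enable */

    /* Hypothetical slice of the emulator backend ops table. */
    struct emul_ops {
            int (*get_msr)(uint32_t msr, uint64_t *val);
    };

    /* Shape of the em_syscall() gate: SYSCALL only if EFER.SCE is set. */
    static bool syscall_allowed(const struct emul_ops *ops)
    {
            uint64_t efer = 0;

            ops->get_msr(MSR_EFER, &efer);
            return (efer & EFER_SCE) != 0;
    }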
D | cpuid.c | 159 rdmsrl_safe(MSR_EFER, &efer); in is_efer_nx()
|
D | svm.c |
    917 rdmsrl(MSR_EFER, efer); in svm_hardware_enable()
    939 wrmsrl(MSR_EFER, efer | EFER_SVME); in svm_hardware_enable()
|
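Note: svm_hardware_enable() is the enable-side mirror of virtext.h's cpu_svm_disable(): read EFER, bail out if SVME is already set (some other hypervisor owns SVM), then write the bit on. A sketch of that pattern, assuming kernel context:

    #include <linux/errno.h>
    #include <linux/types.h>
    #include <asm/msr.h>
    #include <asm/msr-index.h>  /* MSR_EFER, EFER_SVME */

    static int efer_enable_svme(void)
    {
            u64 efer;

            rdmsrl(MSR_EFER, efer);
            if (efer & EFER_SVME)
                    return -EBUSY;  /* SVM already in use elsewhere */

            wrmsrl(MSR_EFER, efer | EFER_SVME);
            return 0;
    }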
D | x86.c |
    2655 case MSR_EFER: in kvm_set_msr_common()
    3026 case MSR_EFER: in kvm_get_msr_common()
|