Lines Matching refs:msr_index

962 u32 msr_index, in nested_vmx_get_vmexit_msr_value() argument
972 if (msr_index == MSR_IA32_TSC) { in nested_vmx_get_vmexit_msr_value()
984 if (kvm_get_msr(vcpu, msr_index, data)) { in nested_vmx_get_vmexit_msr_value()
986 msr_index); in nested_vmx_get_vmexit_msr_value()
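
These hits appear to come from the Linux kernel's nested VMX code (arch/x86/kvm/vmx/nested.c). nested_vmx_get_vmexit_msr_value() fetches the current value of an MSR so it can be written into vmcs12's VM-exit MSR-store area: MSR_IA32_TSC is special-cased (line 972), and an MSR that kvm_get_msr() cannot read is logged and skipped (lines 984-986). A minimal user-space sketch of that read-or-skip pattern; read_msr() is a hypothetical stand-in for KVM's kvm_get_msr():

    #include <stdint.h>
    #include <stdbool.h>
    #include <stdio.h>

    #define MSR_IA32_TSC 0x10    /* architectural TSC MSR index */

    /* Hypothetical stand-in for kvm_get_msr(): true on success. */
    static bool read_msr(uint32_t msr_index, uint64_t *data)
    {
        if (msr_index == MSR_IA32_TSC) {
            *data = 123456789ULL;    /* canned value for the sketch */
            return true;
        }
        return false;                /* everything else "unreadable" */
    }

    static bool get_vmexit_msr_value(uint32_t msr_index, uint64_t *data)
    {
        /* The real code special-cases MSR_IA32_TSC here so the stored
         * value need not include time spent emulating the exit itself. */
        if (!read_msr(msr_index, data)) {
            fprintf(stderr, "cannot read MSR (0x%x)\n", (unsigned)msr_index);
            return false;    /* caller drops this store-list entry */
        }
        return true;
    }
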
1042 static bool nested_msr_store_list_has_msr(struct kvm_vcpu *vcpu, u32 msr_index) in nested_msr_store_list_has_msr() argument
1054 if (e.index == msr_index) in nested_msr_store_list_has_msr()
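
nested_msr_store_list_has_msr() walks the vmcs12 VM-exit MSR-store list and compares each entry's index field against msr_index (line 1054). A simplified model, with struct msr_entry standing in for KVM's struct vmx_msr_entry:

    #include <stdint.h>
    #include <stdbool.h>

    /* simplified stand-in for struct vmx_msr_entry */
    struct msr_entry {
        uint32_t index;
        uint32_t reserved;
        uint64_t value;
    };

    static bool store_list_has_msr(const struct msr_entry *list,
                                   uint32_t count, uint32_t msr_index)
    {
        for (uint32_t i = 0; i < count; i++)
            if (list[i].index == msr_index)    /* cf. line 1054 */
                return true;
        return false;
    }

The in-kernel version differs in one important way: the list lives in guest memory, so each entry has to be copied in rather than indexed as a host array.
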
1061 u32 msr_index) in prepare_vmx_msr_autostore_list() argument
1070 msr_autostore_slot = vmx_find_loadstore_msr_slot(autostore, msr_index); in prepare_vmx_msr_autostore_list()
1072 in_vmcs12_store_list = nested_msr_store_list_has_msr(vcpu, msr_index); in prepare_vmx_msr_autostore_list()
1085 msr_index); in prepare_vmx_msr_autostore_list()
1089 autostore->val[last].index = msr_index; in prepare_vmx_msr_autostore_list()
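
prepare_vmx_msr_autostore_list() keeps KVM's internal autostore array in sync with the guest's wishes: it looks the MSR up with vmx_find_loadstore_msr_slot() (line 1070), checks whether vmcs12's store list wants it (line 1072), and appends the index at the tail when it is missing (line 1089). A sketch of that add/remove synchronization, assuming a fixed-size array (AUTOSTORE_MAX is an illustrative capacity, not KVM's) and a slot lookup that returns -1 on a miss:

    #include <stdint.h>
    #include <stdbool.h>

    #define AUTOSTORE_MAX 64    /* illustrative capacity, not KVM's */

    struct msr_entry { uint32_t index; uint32_t reserved; uint64_t value; };
    struct autostore { struct msr_entry val[AUTOSTORE_MAX]; uint32_t nr; };

    /* like vmx_find_loadstore_msr_slot(): slot index, or -1 if absent */
    static int find_slot(const struct autostore *a, uint32_t msr_index)
    {
        for (uint32_t i = 0; i < a->nr; i++)
            if (a->val[i].index == msr_index)
                return (int)i;
        return -1;
    }

    static void prepare_autostore(struct autostore *a, uint32_t msr_index,
                                  bool in_vmcs12_store_list)
    {
        int slot = find_slot(a, msr_index);

        if (in_vmcs12_store_list && slot < 0) {
            if (a->nr == AUTOSTORE_MAX)
                return;    /* list full: the real code warns and bails */
            a->val[a->nr++].index = msr_index;    /* cf. line 1089 */
        } else if (!in_vmcs12_store_list && slot >= 0) {
            /* drop the stale entry by swapping in the last one */
            a->val[slot] = a->val[--a->nr];
        }
    }
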
1243 static void vmx_get_control_msr(struct nested_vmx_msrs *msrs, u32 msr_index, in vmx_get_control_msr() argument
1246 switch (msr_index) { in vmx_get_control_msr()
1273 vmx_restore_control_msr(struct vcpu_vmx *vmx, u32 msr_index, u64 data) in vmx_restore_control_msr() argument
1278 vmx_get_control_msr(&vmcs_config.nested, msr_index, &lowp, &highp); in vmx_restore_control_msr()
1290 vmx_get_control_msr(&vmx->nested.msrs, msr_index, &lowp, &highp); in vmx_restore_control_msr()
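
The VMX control capability MSRs pack "must-be-1" settings in bits 31:0 and "may-be-1" settings in bits 63:32. vmx_get_control_msr() (switch at line 1246) returns pointers to the stored low/high halves for a given msr_index; vmx_restore_control_msr() validates a userspace-supplied value against the host limits (the vmcs_config.nested lookup at line 1278) before storing it into the vCPU's copy (the vmx->nested.msrs lookup at line 1290). A sketch of the validation rule, namely that userspace may only narrow, never extend, what KVM advertises:

    #include <stdint.h>
    #include <stdbool.h>

    /* "supported" is KVM's advertised capability value, "data" the
     * value userspace wants to restore. */
    static bool control_msr_value_ok(uint64_t supported, uint64_t data)
    {
        uint32_t sup_lo = (uint32_t)supported;          /* must-be-1 bits */
        uint32_t sup_hi = (uint32_t)(supported >> 32);  /* may-be-1 bits  */
        uint32_t new_lo = (uint32_t)data;
        uint32_t new_hi = (uint32_t)(data >> 32);

        /* a bit KVM requires to be 1 cannot become optional */
        if (sup_lo & ~new_lo)
            return false;
        /* a bit KVM does not allow cannot become allowed */
        if (new_hi & ~sup_hi)
            return false;
        return true;
    }

On success, the real code splits data back into the low/high halves that the second vmx_get_control_msr() call located.
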
1345 static u64 *vmx_get_fixed0_msr(struct nested_vmx_msrs *msrs, u32 msr_index) in vmx_get_fixed0_msr() argument
1347 switch (msr_index) { in vmx_get_fixed0_msr()
1357 static int vmx_restore_fixed0_msr(struct vcpu_vmx *vmx, u32 msr_index, u64 data) in vmx_restore_fixed0_msr() argument
1359 const u64 *msr = vmx_get_fixed0_msr(&vmcs_config.nested, msr_index); in vmx_restore_fixed0_msr()
1368 *vmx_get_fixed0_msr(&vmx->nested.msrs, msr_index) = data; in vmx_restore_fixed0_msr()
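
vmx_get_fixed0_msr() (switch at line 1347) maps MSR_IA32_VMX_CR0_FIXED0 / MSR_IA32_VMX_CR4_FIXED0 to the corresponding stored value, and vmx_restore_fixed0_msr() (lines 1357-1368) applies the same narrow-only principle: a fixed-0 MSR reports bits that must be 1 in CR0/CR4 during VMX operation, so a restored value may require extra bits but may not clear any the host requires. A one-line sketch under that assumption:

    #include <stdint.h>
    #include <stdbool.h>

    /* host_fixed0: bits KVM says must be 1; data: userspace value */
    static bool fixed0_msr_value_ok(uint64_t host_fixed0, uint64_t data)
    {
        /* every host-required bit must remain set in the new value */
        return (host_fixed0 & ~data) == 0;
    }
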
1377 int vmx_set_vmx_msr(struct kvm_vcpu *vcpu, u32 msr_index, u64 data) in vmx_set_vmx_msr() argument
1388 switch (msr_index) { in vmx_set_vmx_msr()
1410 return vmx_restore_control_msr(vmx, msr_index, data); in vmx_set_vmx_msr()
1415 return vmx_restore_fixed0_msr(vmx, msr_index, data); in vmx_set_vmx_msr()
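
vmx_set_vmx_msr() (lines 1377-1415) is the userspace-restore entry point: its switch (line 1388) routes control capability MSRs to vmx_restore_control_msr() (line 1410) and the CR0/CR4 fixed-0 MSRs to vmx_restore_fixed0_msr() (line 1415). A skeletal dispatch using the architectural MSR indices; the handler functions are stubs standing in for the vmx_restore_* helpers, and the case list is simplified (the real switch also covers other VMX MSRs such as MSR_IA32_VMX_BASIC, and its treatment of the non-TRUE control MSRs differs):

    #include <stdint.h>
    #include <errno.h>

    #define MSR_IA32_VMX_TRUE_PINBASED_CTLS   0x48d
    #define MSR_IA32_VMX_TRUE_PROCBASED_CTLS  0x48e
    #define MSR_IA32_VMX_TRUE_EXIT_CTLS       0x48f
    #define MSR_IA32_VMX_TRUE_ENTRY_CTLS     0x490
    #define MSR_IA32_VMX_CR0_FIXED0           0x486
    #define MSR_IA32_VMX_CR4_FIXED0           0x488

    /* stubs standing in for vmx_restore_control_msr()/_fixed0_msr() */
    static int restore_control_msr(uint32_t msr_index, uint64_t data)
    { (void)msr_index; (void)data; return 0; }
    static int restore_fixed0_msr(uint32_t msr_index, uint64_t data)
    { (void)msr_index; (void)data; return 0; }

    static int set_vmx_msr(uint32_t msr_index, uint64_t data)
    {
        switch (msr_index) {
        case MSR_IA32_VMX_TRUE_PINBASED_CTLS:
        case MSR_IA32_VMX_TRUE_PROCBASED_CTLS:
        case MSR_IA32_VMX_TRUE_EXIT_CTLS:
        case MSR_IA32_VMX_TRUE_ENTRY_CTLS:
            return restore_control_msr(msr_index, data);
        case MSR_IA32_VMX_CR0_FIXED0:
        case MSR_IA32_VMX_CR4_FIXED0:
            return restore_fixed0_msr(msr_index, data);
        default:
            return -EINVAL;    /* read-only or unhandled VMX MSR */
        }
    }
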
1442 int vmx_get_vmx_msr(struct nested_vmx_msrs *msrs, u32 msr_index, u64 *pdata) in vmx_get_vmx_msr() argument
1444 switch (msr_index) { in vmx_get_vmx_msr()
1453 if (msr_index == MSR_IA32_VMX_PINBASED_CTLS) in vmx_get_vmx_msr()
1461 if (msr_index == MSR_IA32_VMX_PROCBASED_CTLS) in vmx_get_vmx_msr()
1469 if (msr_index == MSR_IA32_VMX_EXIT_CTLS) in vmx_get_vmx_msr()
1477 if (msr_index == MSR_IA32_VMX_ENTRY_CTLS) in vmx_get_vmx_msr()
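
vmx_get_vmx_msr() (switch at line 1444) composes each 64-bit capability value from the stored low/high halves. The four msr_index checks at lines 1453-1477 handle the non-TRUE variants of the control MSRs, which must additionally report the architecturally default-1 control bits that the TRUE MSRs are allowed to omit. A sketch for the pin-based pair (bits 1, 2 and 4 are the default-1 pin-based controls):

    #include <stdint.h>
    #include <stdbool.h>

    /* pack the allowed-0 (low) and allowed-1 (high) halves */
    static uint64_t control_msr(uint32_t low, uint32_t high)
    {
        return ((uint64_t)high << 32) | low;
    }

    #define PIN_BASED_DEFAULT1_BITS 0x16u    /* bits 1, 2 and 4 */

    static uint64_t read_pinbased_ctls(uint32_t low, uint32_t high,
                                       bool true_variant)
    {
        uint64_t v = control_msr(low, high);

        if (!true_variant)    /* cf. the check at line 1453 */
            v |= PIN_BASED_DEFAULT1_BITS;    /* default-1 bits read as 1 */
        return v;
    }
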
5784 u32 msr_index = kvm_rcx_read(vcpu); in nested_vmx_exit_handled_msr() local
5798 if (msr_index >= 0xc0000000) { in nested_vmx_exit_handled_msr()
5799 msr_index -= 0xc0000000; in nested_vmx_exit_handled_msr()
5804 if (msr_index < 1024*8) { in nested_vmx_exit_handled_msr()
5806 if (kvm_vcpu_read_guest(vcpu, bitmap + msr_index/8, &b, 1)) in nested_vmx_exit_handled_msr()
5808 return 1 & (b >> (msr_index & 7)); in nested_vmx_exit_handled_msr()
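
Finally, nested_vmx_exit_handled_msr() decides whether an L2 RDMSR/WRMSR exit should be reflected to L1. The MSR index comes from RCX (line 5784); indices at or above 0xc0000000 are rebased into the "high MSR" half of the bitmap (lines 5798-5799); and the bit is fetched from L1 guest memory with kvm_vcpu_read_guest() (line 5806), with a failed read treated as intercepted. The 4 KiB bitmap page holds four 1024-byte bitmaps (read-low, read-high, write-low, write-high), each covering 8192 MSRs, hence the 1024*8 bound at line 5804. A self-contained model of the lookup over a host-resident copy of the page:

    #include <stdint.h>
    #include <stdbool.h>

    /* One 4 KiB VMX MSR-bitmap page, byte offsets:
     * read-low @0, read-high @1024, write-low @2048, write-high @3072. */
    static bool msr_intercepted(const uint8_t bitmap[4096],
                                uint32_t msr_index, bool is_write)
    {
        uint32_t base = 0;

        if (is_write)
            base += 2048;                 /* write bitmaps in 2nd half */
        if (msr_index >= 0xc0000000u) {
            msr_index -= 0xc0000000u;     /* cf. line 5799 */
            base += 1024;                 /* high-MSR bitmap */
        }
        if (msr_index >= 1024 * 8)
            return true;                  /* out of range: let L1 decide */

        /* cf. lines 5806-5808: test the msr_index'th bit */
        return (bitmap[base + msr_index / 8] >> (msr_index & 7)) & 1;
    }
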