Lines Matching defs:vmx
173 struct vcpu_vmx *vmx = to_vmx(vcpu); in nested_vmx_failValid() local
211 static void vmx_disable_shadow_vmcs(struct vcpu_vmx *vmx) in vmx_disable_shadow_vmcs()
220 struct vcpu_vmx *vmx = to_vmx(vcpu); in nested_release_evmcs() local
236 struct vcpu_vmx *vmx = to_vmx(vcpu); in free_nested() local
274 static void vmx_sync_vmcs_host_state(struct vcpu_vmx *vmx, in vmx_sync_vmcs_host_state()
295 struct vcpu_vmx *vmx = to_vmx(vcpu); in vmx_switch_vmcs() local
329 struct vcpu_vmx *vmx = to_vmx(vcpu); in nested_ept_inject_page_fault() local
680 struct vcpu_vmx *vmx = to_vmx(vcpu); in nested_flush_cached_shadow_vmcs12() local
881 struct vcpu_vmx *vmx = to_vmx(vcpu); in nested_vmx_max_atomic_switch_msrs() local
1043 struct vcpu_vmx *vmx = to_vmx(vcpu); in nested_get_vpid02() local
1056 static int vmx_restore_vmx_basic(struct vcpu_vmx *vmx, u64 data) in vmx_restore_vmx_basic()
1087 vmx_restore_control_msr(struct vcpu_vmx *vmx, u32 msr_index, u64 data) in vmx_restore_control_msr()
1132 static int vmx_restore_vmx_misc(struct vcpu_vmx *vmx, u64 data) in vmx_restore_vmx_misc()
1169 static int vmx_restore_vmx_ept_vpid_cap(struct vcpu_vmx *vmx, u64 data) in vmx_restore_vmx_ept_vpid_cap()
1185 static int vmx_restore_fixed0_msr(struct vcpu_vmx *vmx, u32 msr_index, u64 data) in vmx_restore_fixed0_msr()
1218 struct vcpu_vmx *vmx = to_vmx(vcpu); in vmx_set_vmx_msr() local
1366 static void copy_shadow_to_vmcs12(struct vcpu_vmx *vmx) in copy_shadow_to_vmcs12()
1393 static void copy_vmcs12_to_shadow(struct vcpu_vmx *vmx) in copy_vmcs12_to_shadow()
1427 static int copy_enlightened_to_vmcs12(struct vcpu_vmx *vmx) in copy_enlightened_to_vmcs12()
1647 static int copy_vmcs12_to_enlightened(struct vcpu_vmx *vmx) in copy_vmcs12_to_enlightened()
1821 struct vcpu_vmx *vmx = to_vmx(vcpu); in nested_vmx_handle_enlightened_vmptrld() local
1902 struct vcpu_vmx *vmx = to_vmx(vcpu); in nested_sync_vmcs12_to_shadow() local
1926 struct vcpu_vmx *vmx = in vmx_preemption_timer_fn() local
1939 struct vcpu_vmx *vmx = to_vmx(vcpu); in vmx_start_preemption_timer() local
1960 static u64 nested_vmx_calc_efer(struct vcpu_vmx *vmx, struct vmcs12 *vmcs12) in nested_vmx_calc_efer()
1971 static void prepare_vmcs02_constant_state(struct vcpu_vmx *vmx) in prepare_vmcs02_constant_state()
2028 static void prepare_vmcs02_early_rare(struct vcpu_vmx *vmx, in prepare_vmcs02_early_rare()
2043 static void prepare_vmcs02_early(struct vcpu_vmx *vmx, struct vmcs12 *vmcs12) in prepare_vmcs02_early()
2191 static void prepare_vmcs02_rare(struct vcpu_vmx *vmx, struct vmcs12 *vmcs12) in prepare_vmcs02_rare()
2308 struct vcpu_vmx *vmx = to_vmx(vcpu); in prepare_vmcs02() local
2463 struct vcpu_vmx *vmx = to_vmx(vcpu); in valid_ept_address() local
2503 struct vcpu_vmx *vmx = to_vmx(vcpu); in nested_check_vm_execution_controls() local
2562 struct vcpu_vmx *vmx = to_vmx(vcpu); in nested_check_vm_exit_controls() local
2579 struct vcpu_vmx *vmx = to_vmx(vcpu); in nested_check_vm_entry_controls() local
2824 struct vcpu_vmx *vmx = to_vmx(vcpu); in nested_vmx_check_vmentry_hw() local
2933 struct vcpu_vmx *vmx = to_vmx(vcpu); in nested_get_vmcs12_pages() local
3055 struct vcpu_vmx *vmx = to_vmx(vcpu); in nested_vmx_enter_non_root_mode() local
3199 struct vcpu_vmx *vmx = to_vmx(vcpu); in nested_vmx_run() local
3407 struct vcpu_vmx *vmx = to_vmx(vcpu); in vmx_complete_nested_posted_interrupt() local
3464 struct vcpu_vmx *vmx = to_vmx(vcpu); in vmx_check_nested_events() local
3585 struct vcpu_vmx *vmx = to_vmx(vcpu); in sync_vmcs02_to_vmcs12_rare() local
3632 struct vcpu_vmx *vmx = to_vmx(vcpu); in copy_vmcs02_to_vmcs12_rare() local
3660 struct vcpu_vmx *vmx = to_vmx(vcpu); in sync_vmcs02_to_vmcs12() local
3937 static inline u64 nested_vmx_get_vmcs01_guest_efer(struct vcpu_vmx *vmx) in nested_vmx_get_vmcs01_guest_efer()
3963 struct vcpu_vmx *vmx = to_vmx(vcpu); in nested_vmx_restore_host_state() local
4076 struct vcpu_vmx *vmx = to_vmx(vcpu); in nested_vmx_vmexit() local
4365 struct vcpu_vmx *vmx = to_vmx(vcpu); in alloc_shadow_vmcs() local
4386 struct vcpu_vmx *vmx = to_vmx(vcpu); in enter_vmx_operation() local
4446 struct vcpu_vmx *vmx = to_vmx(vcpu); in handle_vmon() local
4508 struct vcpu_vmx *vmx = to_vmx(vcpu); in nested_release_vmcs12() local
4550 struct vcpu_vmx *vmx = to_vmx(vcpu); in handle_vmclear() local
4702 struct vcpu_vmx *vmx = to_vmx(vcpu); in handle_vmwrite() local
4812 static void set_current_vmptr(struct vcpu_vmx *vmx, gpa_t vmptr) in set_current_vmptr()
4827 struct vcpu_vmx *vmx = to_vmx(vcpu); in handle_vmptrld() local
4918 struct vcpu_vmx *vmx = to_vmx(vcpu); in handle_invept() local
4975 struct vcpu_vmx *vmx = to_vmx(vcpu); in handle_invvpid() local
5098 struct vcpu_vmx *vmx = to_vmx(vcpu); in handle_vmfunc() local
5331 struct vcpu_vmx *vmx = to_vmx(vcpu); in nested_vmx_exit_reflected() local
5514 struct vcpu_vmx *vmx; in vmx_get_nested_state() local
5626 struct vcpu_vmx *vmx = to_vmx(vcpu); in vmx_set_nested_state() local
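Every hit above follows the same idiom: handlers receive the architecture-neutral struct kvm_vcpu and recover the VMX-specific per-vCPU state with to_vmx(vcpu), which in the kernel is a container_of() cast (the generic vcpu is embedded inside struct vcpu_vmx). Entries marked "local" are places where vmx is defined as a local variable via that cast; the remaining entries are functions that take struct vcpu_vmx *vmx directly as a parameter. The sketch below is a minimal, self-contained userspace illustration of the pattern; the struct layouts are simplified stand-ins, not the real KVM definitions.

/*
 * Minimal userspace sketch of the to_vmx() idiom seen in the listing.
 * The struct layouts here are simplified stand-ins, not the real KVM types.
 */
#include <stddef.h>
#include <stdio.h>

/* container_of(): recover the enclosing struct from a pointer to a member. */
#define container_of(ptr, type, member) \
	((type *)((char *)(ptr) - offsetof(type, member)))

struct kvm_vcpu {		/* architecture-neutral vCPU (stand-in) */
	int vcpu_id;
};

struct vcpu_vmx {		/* VMX-specific state wrapping the generic vCPU (stand-in) */
	struct kvm_vcpu vcpu;	/* embedded, so container_of() can recover the wrapper */
	unsigned long nested_state;
};

/* Same shape as the kernel helper: generic vcpu -> VMX-specific wrapper. */
static inline struct vcpu_vmx *to_vmx(struct kvm_vcpu *vcpu)
{
	return container_of(vcpu, struct vcpu_vmx, vcpu);
}

int main(void)
{
	struct vcpu_vmx vmx_state = { .vcpu = { .vcpu_id = 0 }, .nested_state = 42 };
	struct kvm_vcpu *vcpu = &vmx_state.vcpu;	/* what a handler would receive */

	/* The pattern repeated throughout the listing: derive vmx from vcpu. */
	struct vcpu_vmx *vmx = to_vmx(vcpu);

	printf("nested_state = %lu\n", vmx->nested_state);	/* prints 42 */
	return 0;
}

The cast is cheap (pointer arithmetic only), which is why the listing shows it repeated at the top of nearly every nested-VMX entry point rather than being passed around explicitly.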