Lines matching refs: loaded_vmcs (all hits below are in KVM's nested VMX code, arch/x86/kvm/vmx/nested.c)
238 struct loaded_vmcs *prev) in vmx_sync_vmcs_host_state()
246 dest = &vmx->loaded_vmcs->host_state; in vmx_sync_vmcs_host_state()
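
The two hits above are the tail of vmx_sync_vmcs_host_state(): after a VMCS switch, the host state cached in the outgoing loaded_vmcs is copied into the incoming one, so the new VMCS's software cache matches what the hardware will actually see (the real code also rewrites the affected VMCS host fields via helpers such as vmx_set_host_fs_gs()). A minimal sketch of the propagation, using simplified stand-in fields rather than the real struct vmcs_host_state layout:

	/* Sketch only: field names are illustrative stand-ins. */
	struct host_state_sketch {
		unsigned short fs_sel, gs_sel, ldt_sel;
		unsigned long fs_base, gs_base;
	};

	static void sync_host_state_sketch(struct host_state_sketch *dest,
					   const struct host_state_sketch *src)
	{
		/* Propagate the cached view from the previous loaded_vmcs
		 * (src == &prev->host_state) into the current one
		 * (dest == &vmx->loaded_vmcs->host_state, line 246). */
		*dest = *src;
	}
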
256 static void vmx_switch_vmcs(struct kvm_vcpu *vcpu, struct loaded_vmcs *vmcs) in vmx_switch_vmcs()
259 struct loaded_vmcs *prev; in vmx_switch_vmcs()
262 if (WARN_ON_ONCE(vmx->loaded_vmcs == vmcs)) in vmx_switch_vmcs()
266 prev = vmx->loaded_vmcs; in vmx_switch_vmcs()
267 vmx->loaded_vmcs = vmcs; in vmx_switch_vmcs()
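
vmx_switch_vmcs() is the one place the vmx->loaded_vmcs pointer is retargeted on the normal path, and lines 262-267 show the core of it. A condensed, hedged reconstruction (per-CPU loading and the host-state sync are elided to comments):

	static void vmx_switch_vmcs_sketch(struct kvm_vcpu *vcpu,
					   struct loaded_vmcs *vmcs)
	{
		struct vcpu_vmx *vmx = to_vmx(vcpu);
		struct loaded_vmcs *prev;

		/* Switching to the already-current VMCS is a caller bug. */
		if (WARN_ON_ONCE(vmx->loaded_vmcs == vmcs))
			return;

		prev = vmx->loaded_vmcs;	/* line 266 */
		vmx->loaded_vmcs = vmcs;	/* line 267 */
		/* ...make @vmcs current on this CPU, then hand @prev to
		 * vmx_sync_vmcs_host_state() (lines 238/246) so the cached
		 * host state follows the switch... */
	}
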
283 if (WARN_ON_ONCE(vmx->loaded_vmcs != &vmx->vmcs01)) in free_nested()
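
The lone free_nested() hit is a teardown invariant: nested state must be freed with vmcs01 loaded, so the guard switches back if a caller got the ordering wrong. Roughly:

	/* Hedged sketch of the guard at line 283. */
	if (WARN_ON_ONCE(vmx->loaded_vmcs != &vmx->vmcs01))
		vmx_switch_vmcs(vcpu, &vmx->vmcs01);
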
1577 vmcs_load(vmx->loaded_vmcs->vmcs); in copy_shadow_to_vmcs12()
1613 vmcs_load(vmx->loaded_vmcs->vmcs); in copy_vmcs12_to_shadow()
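
Both shadow-VMCS copy routines end with the same restore idiom: the shadow VMCS is made current just long enough to move the shadowed fields, then vmx->loaded_vmcs->vmcs is reloaded so the CPU again points at the VMCS the rest of KVM expects. A sketch of the read direction (copy_shadow_to_vmcs12), with the field loop reduced to a comment:

	preempt_disable();			/* keep the CPU stable */
	vmcs_load(shadow_vmcs);			/* make the shadow current */
	/* ...vmcs_read*() each shadowed field into vmcs12... */
	vmcs_clear(shadow_vmcs);		/* flush the shadow to memory */
	vmcs_load(vmx->loaded_vmcs->vmcs);	/* lines 1577/1613: restore */
	preempt_enable();
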
2235 static void prepare_vmcs02_early(struct vcpu_vmx *vmx, struct loaded_vmcs *vmcs01, in prepare_vmcs02_early()
2391 vmx->loaded_vmcs->nmi_known_unmasked = in prepare_vmcs02_early()
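
Line 2391 is the start of a software-cache refresh in prepare_vmcs02_early(): loaded_vmcs remembers whether NMIs are known to be unmasked so later exit handling can skip a VMREAD. The continuation of the statement is, as best I can tell (treat the right-hand side as an assumption):

	vmx->loaded_vmcs->nmi_known_unmasked =
		!(vmcs12->guest_interruptibility_info & GUEST_INTR_STATE_NMI);
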
3090 if (unlikely(cr3 != vmx->loaded_vmcs->host_state.cr3)) { in nested_vmx_check_vmentry_hw()
3092 vmx->loaded_vmcs->host_state.cr3 = cr3; in nested_vmx_check_vmentry_hw()
3096 if (unlikely(cr4 != vmx->loaded_vmcs->host_state.cr4)) { in nested_vmx_check_vmentry_hw()
3098 vmx->loaded_vmcs->host_state.cr4 = cr4; in nested_vmx_check_vmentry_hw()
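
The four nested_vmx_check_vmentry_hw() hits form a classic write-avoidance cache: HOST_CR3/HOST_CR4 are only rewritten when the live values diverge from what loaded_vmcs last recorded, because a VMWRITE costs far more than the compare. Reconstructed from the lines above:

	if (unlikely(cr3 != vmx->loaded_vmcs->host_state.cr3)) {
		vmcs_writel(HOST_CR3, cr3);		/* costly VMWRITE */
		vmx->loaded_vmcs->host_state.cr3 = cr3;	/* refresh cache */
	}

	if (unlikely(cr4 != vmx->loaded_vmcs->host_state.cr4)) {
		vmcs_writel(HOST_CR4, cr4);
		vmx->loaded_vmcs->host_state.cr4 = cr4;
	}
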
4073 WARN_ON_ONCE(vmx->loaded_vmcs != &vmx->vmcs01); in copy_vmcs02_to_vmcs12_rare()
4076 vmx->loaded_vmcs = &vmx->nested.vmcs02; in copy_vmcs02_to_vmcs12_rare()
4081 vmx->loaded_vmcs = &vmx->vmcs01; in copy_vmcs02_to_vmcs12_rare()
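
copy_vmcs02_to_vmcs12_rare() has to VMREAD fields that live in vmcs02 while vmcs01 is nominally in use, so it temporarily repoints loaded_vmcs, reloads, and then restores; the WARN documents the entry invariant. A hedged reconstruction of the dance:

	WARN_ON_ONCE(vmx->loaded_vmcs != &vmx->vmcs01);	/* line 4073 */

	vmx->loaded_vmcs = &vmx->nested.vmcs02;		/* line 4076 */
	vmcs_load(vmx->loaded_vmcs->vmcs);
	/* ...VMREAD the rarely-synced fields into vmcs12... */
	vmx->loaded_vmcs = &vmx->vmcs01;		/* line 4081 */
	vmcs_load(vmx->loaded_vmcs->vmcs);
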
4836 struct loaded_vmcs *loaded_vmcs = vmx->loaded_vmcs; in alloc_shadow_vmcs() local
4844 WARN_ON(loaded_vmcs == &vmx->vmcs01 && loaded_vmcs->shadow_vmcs); in alloc_shadow_vmcs()
4846 if (!loaded_vmcs->shadow_vmcs) { in alloc_shadow_vmcs()
4847 loaded_vmcs->shadow_vmcs = alloc_vmcs(true); in alloc_shadow_vmcs()
4848 if (loaded_vmcs->shadow_vmcs) in alloc_shadow_vmcs()
4849 vmcs_clear(loaded_vmcs->shadow_vmcs); in alloc_shadow_vmcs()
4851 return loaded_vmcs->shadow_vmcs; in alloc_shadow_vmcs()
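
alloc_shadow_vmcs() allocates lazily and caches the result in the current loaded_vmcs; the WARN flags a double allocation for vmcs01, which could only happen on an (invalid) second VMXON. Worth noting in the allocation path itself: the fresh region is VMCLEARed before first use so it starts in a well-defined state:

	if (!loaded_vmcs->shadow_vmcs) {
		loaded_vmcs->shadow_vmcs = alloc_vmcs(true);	/* shadow */
		if (loaded_vmcs->shadow_vmcs)
			/* VMCLEAR initializes the region before use. */
			vmcs_clear(loaded_vmcs->shadow_vmcs);
	}
	return loaded_vmcs->shadow_vmcs;
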
5283 vmcs_load(vmx->loaded_vmcs->vmcs); in handle_vmwrite()
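
The handle_vmwrite() hit at 5283 is the same restore idiom as lines 1577/1613: the emulation path briefly loads vmcs01's shadow VMCS to stash the written value, then reloads vmx->loaded_vmcs->vmcs before resuming the guest. A sketch with the surrounding shadow write included (the exact neighboring calls are an assumption):

	vmcs_load(vmx->vmcs01.shadow_vmcs);	/* write lands in the shadow */
	__vmcs_writel(field, value);
	vmcs_clear(vmx->vmcs01.shadow_vmcs);
	vmcs_load(vmx->loaded_vmcs->vmcs);	/* line 5283: restore */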