Searched refs: is_guest_mode (Results 1 – 17 of 17) sorted by relevance
182 bool is_guest_mode) in kvm_hv_get_tlb_flush_fifo() argument
185 int i = is_guest_mode ? HV_L2_TLB_FLUSH_FIFO : in kvm_hv_get_tlb_flush_fifo()
198 tlb_flush_fifo = kvm_hv_get_tlb_flush_fifo(vcpu, is_guest_mode(vcpu)); in kvm_hv_vcpu_purge_flush_tlb()
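The three hits above are the selector itself: kvm_hv_get_tlb_flush_fifo() returns the per-vCPU Hyper-V TLB flush FIFO, and the is_guest_mode argument picks the L2 FIFO over the L1 one so that flush requests queued while a nested guest runs never drain into the wrong context. A minimal sketch of the helper, reconstructed from these snippets (the to_hv_vcpu() accessor and the tlb_flush_fifo array field are assumptions about the surrounding code):

    static inline struct kvm_vcpu_hv_tlb_flush_fifo *
    kvm_hv_get_tlb_flush_fifo(struct kvm_vcpu *vcpu, bool is_guest_mode)
    {
            struct kvm_vcpu_hv *hv_vcpu = to_hv_vcpu(vcpu); /* assumed accessor */

            /* An L2 (nested) vCPU gets its own flush FIFO, separate from L1's. */
            int i = is_guest_mode ? HV_L2_TLB_FLUSH_FIFO :
                                    HV_L1_TLB_FLUSH_FIFO;

            return &hv_vcpu->tlb_flush_fifo[i];
    }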
87 if (!is_guest_mode(v) && kvm_vcpu_apicv_active(v)) in kvm_cpu_has_injectable_intr()
226 static inline bool is_guest_mode(struct kvm_vcpu *vcpu) in is_guest_mode() function
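This is the definition every other result resolves to: "guest mode" means the vCPU is currently running a nested (L2) guest. In mainline KVM it is a one-line hflags test; the bit is set on nested VM-entry and cleared on nested VM-exit. A sketch, assuming the HF_GUEST_MASK bit used by the mainline tree:

    static inline bool is_guest_mode(struct kvm_vcpu *vcpu)
    {
            /* Set on nested VM-entry, cleared on nested VM-exit. */
            return vcpu->arch.hflags & HF_GUEST_MASK;
    }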
1924 tlb_flush_fifo = kvm_hv_get_tlb_flush_fifo(vcpu, is_guest_mode(vcpu)); in kvm_hv_vcpu_flush_tlb()
1987 if (!hc->fast && is_guest_mode(vcpu)) { in kvm_hv_flush_tlb()
2009 is_guest_mode(vcpu)); in kvm_hv_flush_tlb()
2040 flush_ex.flags, is_guest_mode(vcpu)); in kvm_hv_flush_tlb()
2084 if (all_cpus && !is_guest_mode(vcpu)) { in kvm_hv_flush_tlb()
2092 } else if (!is_guest_mode(vcpu)) { in kvm_hv_flush_tlb()
2331 if (hv_result_success(result) && is_guest_mode(vcpu) && in kvm_hv_hypercall_complete()
223 bool (*is_guest_mode)(struct x86_emulate_ctxt *ctxt); member
662 if (!reinject && is_guest_mode(vcpu) && in kvm_multiple_exception()
698 if (!is_guest_mode(vcpu)) in kvm_multiple_exception()
787 if (is_guest_mode(vcpu) && fault->async_page_fault) in kvm_inject_page_fault()
2628 if (is_guest_mode(vcpu)) in kvm_vcpu_write_tsc_offset()
2644 if (is_guest_mode(vcpu)) in kvm_vcpu_write_tsc_multiplier()
8309 return is_guest_mode(emul_to_vcpu(ctxt)); in emulator_is_guest_mode()
8378 .is_guest_mode = emulator_is_guest_mode,
8573 if (!is_guest_mode(vcpu) && static_call(kvm_x86_get_cpl)(vcpu) == 0) { in handle_emulation_failure()
8590 if (WARN_ON_ONCE(is_guest_mode(vcpu)) || in reexecute_instruction()
8682 if (WARN_ON_ONCE(is_guest_mode(vcpu)) || in retry_instruction()
[all …]
5142 bool is_guest_mode = ctxt->ops->is_guest_mode(ctxt); in x86_emulate_insn() local
5190 if (unlikely(is_guest_mode) && ctxt->intercept) { in x86_emulate_insn()
5219 if (unlikely(is_guest_mode) && (ctxt->d & Intercept)) { in x86_emulate_insn()
5273 if (unlikely(is_guest_mode) && (ctxt->d & Intercept)) { in x86_emulate_insn()
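Hits 223, 8309, 8378 and 5142 are one mechanism seen from three sides: the instruction emulator is deliberately decoupled from struct kvm_vcpu, so x86.c exports guest-mode state through an x86_emulate_ops callback, and x86_emulate_insn() caches the answer in a local before running the nested-intercept checkpoints. A sketch of the wiring, assembled from the snippets (the emulate_ops initializer name is an assumption):

    /* x86.c side: translate the emulator context back to a vCPU. */
    static bool emulator_is_guest_mode(struct x86_emulate_ctxt *ctxt)
    {
            return is_guest_mode(emul_to_vcpu(ctxt));
    }

    static const struct x86_emulate_ops emulate_ops = {
            /* ... */
            .is_guest_mode = emulator_is_guest_mode,
            /* ... */
    };

    /* emulate.c side: query once per emulated instruction, then test the
     * cached value at each intercept checkpoint in x86_emulate_insn(). */
    bool is_guest_mode = ctxt->ops->is_guest_mode(ctxt);

    if (unlikely(is_guest_mode) && (ctxt->d & Intercept))
            /* ask L1 whether it intercepts this instruction ... */;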
3273 if (is_guest_mode(vcpu)) { in kvm_apic_accept_events()
911 if (is_guest_mode(vcpu)) in vmx_update_exception_bitmap()
1761 if (!is_guest_mode(vcpu)) in vmx_update_emulated_instruction()
2216 if (is_guest_mode(vcpu)) in vmx_set_msr()
2221 if (is_guest_mode(vcpu)) { in vmx_set_msr()
2228 if (is_guest_mode(vcpu)) { in vmx_set_msr()
2247 if (is_guest_mode(vcpu) && get_vmcs12(vcpu)->vm_exit_controls & in vmx_set_msr()
2266 if (is_guest_mode(vcpu) && in vmx_set_msr()
2323 if (is_guest_mode(vcpu) && in vmx_set_msr()
3087 WARN_ON_ONCE(is_guest_mode(vcpu)); in enter_rmode()
3196 if (is_guest_mode(vcpu)) in vmx_get_current_vpid()
[all …]
3300 if (is_guest_mode(vcpu) && !nested_get_vmcs12_pages(vcpu)) in vmx_get_nested_state_pages()
3312 if (WARN_ON_ONCE(!is_guest_mode(vcpu))) in nested_vmx_write_pml_buffer()
5378 struct vmcs12 *vmcs12 = is_guest_mode(vcpu) ? get_shadow_vmcs12(vcpu) in handle_vmread()
5402 (is_guest_mode(vcpu) && in handle_vmread()
5410 if (!is_guest_mode(vcpu) && is_vmcs12_ext_field(field)) in handle_vmread()
5426 if (WARN_ON_ONCE(is_guest_mode(vcpu))) in handle_vmread()
5484 struct vmcs12 *vmcs12 = is_guest_mode(vcpu) ? get_shadow_vmcs12(vcpu) in handle_vmwrite()
5512 (is_guest_mode(vcpu) && in handle_vmwrite()
5546 if (!is_guest_mode(vcpu) && !is_shadow_field_rw(field)) in handle_vmwrite()
5568 if (!is_guest_mode(vcpu) && !is_shadow_field_rw(field)) { in handle_vmwrite()
[all …]
503 if (!vmcs12 && is_guest_mode(vcpu)) in vmx_write_encls_bitmap()
726 return enable_unrestricted_guest && (!is_guest_mode(vcpu) || in is_unrestricted_guest()
131 if (is_guest_mode(&svm->vcpu) && in avic_deactivate_vmcb()
544 if (is_guest_mode(vcpu)) in avic_vcpu_get_apicv_inhibit_reasons()
941 pi.is_guest_mode = true; in avic_pi_update_irte()
952 if (!ret && pi.is_guest_mode) in avic_pi_update_irte()
964 pi.is_guest_mode = false; in avic_pi_update_irte()
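The avic_pi_update_irte() hits (941, 952, 964) are the one place where is_guest_mode is a struct member rather than the vCPU helper: amd_iommu_pi_data.is_guest_mode tells the AMD IOMMU driver whether to post a device interrupt directly into the running guest. A sketch of the flow implied by the snippets (everything beyond the field accesses and irq_set_vcpu_affinity() is an assumption):

    struct amd_iommu_pi_data pi = { /* ga_tag, backing page, ... */ };

    /* Try direct (posted) injection into the guest first. */
    pi.is_guest_mode = true;
    ret = irq_set_vcpu_affinity(host_irq, &pi);

    if (!ret && pi.is_guest_mode) {
            /* IOMMU accepted posted delivery; track the IRTE for this vCPU. */
    } else {
            /* Fall back to legacy, remapped delivery. */
            pi.is_guest_mode = false;
            ret = irq_set_vcpu_affinity(host_irq, &pi);
    }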
829 msrpm = is_guest_mode(vcpu) ? to_svm(vcpu)->nested.msrpm: in msr_write_intercepted()
1026 if (is_guest_mode(vcpu)) in svm_enable_lbrv()
1046 if (is_guest_mode(vcpu)) in svm_disable_lbrv()
1066 (is_guest_mode(vcpu) && guest_can_use(vcpu, X86_FEATURE_LBRV) && in svm_update_lbrv()
1693 if (is_guest_mode(&svm->vcpu)) { in svm_clear_vintr()
2373 if (is_guest_mode(vcpu)) { in emulate_svm_instr()
2415 if (!is_guest_mode(vcpu)) in gp_interception()
2633 if (!is_guest_mode(vcpu) || in check_selective_cr0_intercepted()
3031 is_guest_mode(vcpu)) in svm_set_msr()
3041 if (is_guest_mode(vcpu)) in svm_set_msr()
[all …]
456 if (is_guest_mode(&svm->vcpu) && !nested_vgif_enabled(svm)) in get_vgif_vmcb()
517 if (is_guest_mode(&svm->vcpu)) in get_vnmi_vmcb_l1()
573 return is_guest_mode(vcpu) && (svm->nested.ctl.int_ctl & V_INTR_MASKING_MASK); in nested_svm_virtualize_tpr()
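These helpers show the recurring SVM pattern: while L2 runs, L1's state lives in vmcb01 and svm->vmcb points at the currently active control block, so is_guest_mode() decides which VMCB a given flag should be read from or written to. A sketch of the vGIF case, assuming mainline's vmcb01/vmcb naming:

    static inline struct vmcb *get_vgif_vmcb(struct vcpu_svm *svm)
    {
            if (!vgif)
                    return NULL;

            /* L2 is running but L1 keeps vGIF for itself: GIF lives in vmcb01. */
            if (is_guest_mode(&svm->vcpu) && !nested_vgif_enabled(svm))
                    return svm->vmcb01.ptr;
            else
                    return svm->vmcb;
    }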
130 if (!is_guest_mode(&svm->vcpu)) in recalc_intercepts()
1231 if (is_guest_mode(vcpu)) { in svm_leave_nested()
1600 if (is_guest_mode(vcpu)) { in svm_get_nested_state()
1615 if (!is_guest_mode(vcpu)) in svm_get_nested_state()
1737 if (is_guest_mode(vcpu)) in svm_set_nested_state()
1782 if (WARN_ON(!is_guest_mode(vcpu))) in svm_get_nested_state_pages()
4269 if (is_guest_mode(vcpu)) { in __kvm_faultin_pfn()
5117 role.base.guest_mode = is_guest_mode(vcpu); in kvm_calc_cpu_role()
5760 if (!mmio_info_in_cache(vcpu, cr2_or_gpa, direct) && !is_guest_mode(vcpu)) in kvm_mmu_page_fault()
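The MMU hits are about cache separation rather than control flow: kvm_calc_cpu_role() folds is_guest_mode() into the MMU role, so shadow/TDP pages built for L1 are never reused while L2 is running (and vice versa), and kvm_mmu_page_fault() avoids certain emulation shortcuts for faults taken in guest mode. A fragment sketched around the line-5117 hit (the surrounding role computation is an assumption):

    union kvm_cpu_role role = {0};

    /* ... other role bits derived from CR0/CR4/EFER ... */
    /* A distinct role means distinct shadow-page hash buckets for L1 vs L2. */
    role.base.guest_mode = is_guest_mode(vcpu);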