Lines matching refs: control
466 vmcb->control.clean = 0; in mark_all_dirty()
471 vmcb->control.clean = ((1 << VMCB_DIRTY_MAX) - 1) in mark_all_clean()
477 vmcb->control.clean &= ~(1 << bit); in mark_dirty()
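
The three lines above are the whole of the VMCB clean-bits bookkeeping: mark_all_dirty() zeroes control.clean so hardware reloads every field group, mark_all_clean() marks every group as cached, and mark_dirty() clears the bit for a group that software just touched. A minimal standalone sketch of that pattern follows; the bit count and the always-dirty mask are illustrative placeholders, not the kernel's real values.

#include <stdint.h>
#include <stdio.h>

#define VMCB_DIRTY_MAX          12      /* illustrative number of tracked field groups */
#define VMCB_ALWAYS_DIRTY_MASK  0x0     /* placeholder: groups that must never be cached */

struct vmcb_control_area { uint32_t clean; };
struct vmcb { struct vmcb_control_area control; };

void mark_all_dirty(struct vmcb *vmcb)
{
        vmcb->control.clean = 0;                        /* reload everything on next VMRUN */
}

void mark_all_clean(struct vmcb *vmcb)
{
        vmcb->control.clean = ((1 << VMCB_DIRTY_MAX) - 1)
                & ~VMCB_ALWAYS_DIRTY_MASK;              /* everything may be cached */
}

void mark_dirty(struct vmcb *vmcb, int bit)
{
        vmcb->control.clean &= ~(1 << bit);             /* this group was modified */
}

int main(void)
{
        struct vmcb v;

        mark_all_clean(&v);
        mark_dirty(&v, 3);
        printf("clean bits: %#x\n", v.control.clean);   /* 0xff7 with the values above */
        return 0;
}
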
487 svm->vmcb->control.avic_vapic_bar = data & VMCB_AVIC_APIC_BAR_MASK; in avic_update_vapic_bar()
512 c = &svm->vmcb->control; in recalc_intercepts()
513 h = &svm->nested.hsave->control; in recalc_intercepts()
537 vmcb->control.intercept_cr |= (1U << bit); in set_cr_intercept()
546 vmcb->control.intercept_cr &= ~(1U << bit); in clr_cr_intercept()
555 return vmcb->control.intercept_cr & (1U << bit); in is_cr_intercept()
562 vmcb->control.intercept_dr = (1 << INTERCEPT_DR0_READ) in set_dr_intercepts()
586 vmcb->control.intercept_dr = 0; in clr_dr_intercepts()
595 vmcb->control.intercept_exceptions |= (1U << bit); in set_exception_intercept()
604 vmcb->control.intercept_exceptions &= ~(1U << bit); in clr_exception_intercept()
613 vmcb->control.intercept |= (1ULL << bit); in set_intercept()
622 vmcb->control.intercept &= ~(1ULL << bit); in clr_intercept()
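
Lines 537-622 show the uniform pattern used for every intercept class: each class is a bitmask in the control area, set with |=, cleared with &= ~, and tested with &, while recalc_intercepts() (lines 512-513) merges the nested guest's masks into the host's. Below is a simplified, self-contained sketch of that pattern; the struct is trimmed to the fields visible in the listing, and the kernel helpers additionally operate on the host VMCB and re-run recalc_intercepts(), which this sketch drops.

#include <stdint.h>
#include <stdbool.h>

/* Trimmed control area: only the intercept bitmaps seen in the listing. */
struct vmcb_control_area {
        uint32_t intercept_cr;          /* low 16 bits: CR reads, high 16 bits: CR writes */
        uint32_t intercept_dr;
        uint32_t intercept_exceptions;  /* one bit per exception vector */
        uint64_t intercept;             /* instruction/event intercept bits */
};
struct vmcb { struct vmcb_control_area control; };

void set_cr_intercept(struct vmcb *vmcb, int bit)
{
        vmcb->control.intercept_cr |= (1U << bit);
}

void clr_cr_intercept(struct vmcb *vmcb, int bit)
{
        vmcb->control.intercept_cr &= ~(1U << bit);
}

bool is_cr_intercept(struct vmcb *vmcb, int bit)
{
        return vmcb->control.intercept_cr & (1U << bit);
}

void set_exception_intercept(struct vmcb *vmcb, int vector)
{
        vmcb->control.intercept_exceptions |= (1U << vector);
}

void set_intercept(struct vmcb *vmcb, int bit)
{
        vmcb->control.intercept |= (1ULL << bit);       /* 64-bit map, hence 1ULL */
}

void clr_intercept(struct vmcb *vmcb, int bit)
{
        vmcb->control.intercept &= ~(1ULL << bit);
}
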
629 return !!(svm->vmcb->control.int_ctl & V_GIF_ENABLE_MASK); in vgif_enabled()
635 svm->vmcb->control.int_ctl |= V_GIF_MASK; in enable_gif()
643 svm->vmcb->control.int_ctl &= ~V_GIF_MASK; in disable_gif()
651 return !!(svm->vmcb->control.int_ctl & V_GIF_MASK); in gif_set()
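
The vGIF helpers keep the global interrupt flag inside int_ctl: V_GIF_ENABLE_MASK says the CPU is virtualizing GIF for this guest, and V_GIF_MASK is the flag's current value. A sketch of just that vGIF path; the bit positions here are placeholders for the header definitions, and the full helpers also handle the case where vGIF is unavailable, which is omitted.

#include <stdint.h>
#include <stdbool.h>

/* Placeholder bit positions, standing in for the SVM header definitions. */
#define V_GIF_MASK        (1u << 9)     /* current GIF value */
#define V_GIF_ENABLE_MASK (1u << 25)    /* hardware tracks GIF in the VMCB */

struct vmcb_control_area { uint32_t int_ctl; };
struct vmcb { struct vmcb_control_area control; };

bool vgif_enabled(struct vmcb *vmcb)
{
        return !!(vmcb->control.int_ctl & V_GIF_ENABLE_MASK);
}

void enable_gif(struct vmcb *vmcb)
{
        vmcb->control.int_ctl |= V_GIF_MASK;    /* interrupts globally enabled for the guest */
}

void disable_gif(struct vmcb *vmcb)
{
        vmcb->control.int_ctl &= ~V_GIF_MASK;   /* guest runs with interrupts globally blocked */
}

bool gif_set(struct vmcb *vmcb)
{
        return !!(vmcb->control.int_ctl & V_GIF_MASK);
}
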
765 if (svm->vmcb->control.int_state & SVM_INTERRUPT_SHADOW_MASK) in svm_get_interrupt_shadow()
775 svm->vmcb->control.int_state &= ~SVM_INTERRUPT_SHADOW_MASK; in svm_set_interrupt_shadow()
777 svm->vmcb->control.int_state |= SVM_INTERRUPT_SHADOW_MASK; in svm_set_interrupt_shadow()
785 if (nrips && svm->vmcb->control.next_rip != 0) { in skip_emulated_instruction()
787 svm->next_rip = svm->vmcb->control.next_rip; in skip_emulated_instruction()
835 svm->vmcb->control.event_inj = nr in svm_queue_exception()
839 svm->vmcb->control.event_inj_err = error_code; in svm_queue_exception()
1153 svm->vmcb->control.virt_ext |= LBR_CTL_ENABLE_MASK; in svm_enable_lbrv()
1164 svm->vmcb->control.virt_ext &= ~LBR_CTL_ENABLE_MASK; in svm_disable_lbrv()
1273 struct vmcb_control_area *control = &svm->vmcb->control; in grow_ple_window() local
1274 int old = control->pause_filter_count; in grow_ple_window()
1276 control->pause_filter_count = __grow_ple_window(old, in grow_ple_window()
1281 if (control->pause_filter_count != old) { in grow_ple_window()
1284 control->pause_filter_count, old); in grow_ple_window()
1291 struct vmcb_control_area *control = &svm->vmcb->control; in shrink_ple_window() local
1292 int old = control->pause_filter_count; in shrink_ple_window()
1294 control->pause_filter_count = in shrink_ple_window()
1299 if (control->pause_filter_count != old) { in shrink_ple_window()
1302 control->pause_filter_count, old); in shrink_ple_window()
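
grow_ple_window() and shrink_ple_window() retune control->pause_filter_count after PAUSE exits and only mark the VMCB dirty and emit a tracepoint when the value actually changed. A standalone sketch of that adjust-clamp-compare flow; the tuning constants and clamp_ple() are made up here in place of the module parameters and kernel helpers.

#include <stdint.h>

/* Made-up tuning values; the real ones are module parameters. */
#define PLE_GROW    2        /* multiply on contention */
#define PLE_SHRINK  2        /* divide when the vCPU behaves */
#define PLE_MAX     65535    /* pause_filter_count is a 16-bit field */
#define PLE_MIN     16

struct vmcb_control_area { uint16_t pause_filter_count; };

static int clamp_ple(int val)
{
        if (val > PLE_MAX)
                return PLE_MAX;
        if (val < PLE_MIN)
                return PLE_MIN;
        return val;
}

void grow_ple_window(struct vmcb_control_area *control)
{
        int old = control->pause_filter_count;

        control->pause_filter_count = clamp_ple(old * PLE_GROW);
        if (control->pause_filter_count != old) {
                /* only then does the kernel mark the VMCB dirty and trace old/new */
        }
}

void shrink_ple_window(struct vmcb_control_area *control)
{
        int old = control->pause_filter_count;

        control->pause_filter_count = clamp_ple(old / PLE_SHRINK);
        if (control->pause_filter_count != old) {
                /* same dirty-marking and tracepoint on the shrink side */
        }
}
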
1496 return svm->nested.hsave->control.tsc_offset; in svm_read_l1_tsc_offset()
1508 g_tsc_offset = svm->vmcb->control.tsc_offset - in svm_write_l1_tsc_offset()
1509 svm->nested.hsave->control.tsc_offset; in svm_write_l1_tsc_offset()
1510 svm->nested.hsave->control.tsc_offset = offset; in svm_write_l1_tsc_offset()
1514 svm->vmcb->control.tsc_offset - g_tsc_offset, in svm_write_l1_tsc_offset()
1517 svm->vmcb->control.tsc_offset = offset + g_tsc_offset; in svm_write_l1_tsc_offset()
1520 return svm->vmcb->control.tsc_offset; in svm_write_l1_tsc_offset()
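
Lines 1496-1520 read and write L1's TSC offset while a nested guest may be running: the saved hsave control area holds L1's offset, g_tsc_offset is whatever extra offset L2 layered on top, and a new L1 value must keep that delta intact. A self-contained sketch of the arithmetic, using a stripped-down svm_state struct as a stand-in for vcpu_svm:

#include <stdint.h>
#include <stdbool.h>
#include <stdio.h>

struct vmcb_control_area { uint64_t tsc_offset; };

/* Stand-in for vcpu_svm: the active VMCB control plus the saved L1 control. */
struct svm_state {
        struct vmcb_control_area vmcb;    /* control area of the VMCB currently in use */
        struct vmcb_control_area hsave;   /* L1's control area, saved while L2 runs */
        bool guest_mode;                  /* true while the nested (L2) guest is active */
};

uint64_t read_l1_tsc_offset(const struct svm_state *s)
{
        /* While L2 runs, L1's offset lives in the saved control area. */
        return s->guest_mode ? s->hsave.tsc_offset : s->vmcb.tsc_offset;
}

uint64_t write_l1_tsc_offset(struct svm_state *s, uint64_t offset)
{
        uint64_t g_tsc_offset = 0;

        if (s->guest_mode) {
                /* delta that L2 layered on top of L1's old offset */
                g_tsc_offset = s->vmcb.tsc_offset - s->hsave.tsc_offset;
                s->hsave.tsc_offset = offset;               /* record the new L1 value */
        }

        s->vmcb.tsc_offset = offset + g_tsc_offset;         /* active VMCB keeps L2's delta */
        return s->vmcb.tsc_offset;
}

int main(void)
{
        /* L1 offset was 100, L2 added 50 on top; moving L1 to 200 must yield 250. */
        struct svm_state s = { .vmcb = { 150 }, .hsave = { 100 }, .guest_mode = true };

        printf("%llu\n", (unsigned long long)write_l1_tsc_offset(&s, 200));
        return 0;
}
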
1531 vmcb->control.avic_backing_page = bpa & AVIC_HPA_MASK; in avic_init_vmcb()
1532 vmcb->control.avic_logical_id = lpa & AVIC_HPA_MASK; in avic_init_vmcb()
1533 vmcb->control.avic_physical_id = ppa & AVIC_HPA_MASK; in avic_init_vmcb()
1534 vmcb->control.avic_physical_id |= AVIC_MAX_PHYSICAL_ID_COUNT; in avic_init_vmcb()
1535 vmcb->control.int_ctl |= AVIC_ENABLE_MASK; in avic_init_vmcb()
1540 struct vmcb_control_area *control = &svm->vmcb->control; in init_vmcb() local
1603 control->iopm_base_pa = __sme_set(iopm_base); in init_vmcb()
1604 control->msrpm_base_pa = __sme_set(__pa(svm->msrpm)); in init_vmcb()
1605 control->int_ctl = V_INTR_MASKING_MASK; in init_vmcb()
1644 control->nested_ctl |= SVM_NESTED_CTL_NP_ENABLE; in init_vmcb()
1659 control->pause_filter_count = pause_filter_count; in init_vmcb()
1661 control->pause_filter_thresh = pause_filter_thresh; in init_vmcb()
1677 svm->vmcb->control.virt_ext |= VIRTUAL_VMLOAD_VMSAVE_ENABLE_MASK; in init_vmcb()
1683 svm->vmcb->control.int_ctl |= V_GIF_ENABLE_MASK; in init_vmcb()
1687 svm->vmcb->control.nested_ctl |= SVM_NESTED_CTL_SEV_ENABLE; in init_vmcb()
2715 svm->vmcb->control.tlb_ctl = TLB_CONTROL_FLUSH_ALL_ASID; in new_asid()
2719 svm->vmcb->control.asid = sd->next_asid++; in new_asid()
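
new_asid() hands out the next ASID from a per-CPU pool and, per line 2715, requests a full TLB flush when the pool has to be recycled. A sketch of such an allocator under those assumptions; the pool bounds, the generation counter and the svm_cpu_data layout are simplified stand-ins.

#include <stdint.h>

enum tlb_ctl {
        TLB_CONTROL_DO_NOTHING     = 0,
        TLB_CONTROL_FLUSH_ALL_ASID = 1,   /* placeholder encodings */
};

struct vmcb_control_area {
        uint32_t asid;
        uint8_t  tlb_ctl;
};

/* Simplified per-CPU ASID pool. */
struct svm_cpu_data {
        uint32_t next_asid;
        uint32_t min_asid;
        uint32_t max_asid;
        uint32_t asid_generation;
};

void new_asid(struct vmcb_control_area *control, struct svm_cpu_data *sd)
{
        if (sd->next_asid > sd->max_asid) {
                /* pool exhausted: start a new generation and flush everything */
                sd->asid_generation++;
                sd->next_asid = sd->min_asid;
                control->tlb_ctl = TLB_CONTROL_FLUSH_ALL_ASID;
        }

        control->asid = sd->next_asid++;
}
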
2762 u64 fault_address = __sme_clr(svm->vmcb->control.exit_info_2); in pf_interception()
2763 u64 error_code = svm->vmcb->control.exit_info_1; in pf_interception()
2767 svm->vmcb->control.insn_bytes : NULL, in pf_interception()
2768 svm->vmcb->control.insn_len); in pf_interception()
2773 u64 fault_address = __sme_clr(svm->vmcb->control.exit_info_2); in npf_interception()
2774 u64 error_code = svm->vmcb->control.exit_info_1; in npf_interception()
2779 svm->vmcb->control.insn_bytes : NULL, in npf_interception()
2780 svm->vmcb->control.insn_len); in npf_interception()
2837 u32 error_code = svm->vmcb->control.exit_info_1; in gp_interception()
2939 u32 io_info = svm->vmcb->control.exit_info_1; /* address size bug? */ in io_interception()
2951 svm->next_rip = svm->vmcb->control.exit_info_2; in io_interception()
3008 svm->vmcb->control.nested_cr3 = __sme_set(root); in nested_svm_set_tdp_cr3()
3017 if (svm->vmcb->control.exit_code != SVM_EXIT_NPF) { in nested_svm_inject_npf_exit()
3022 svm->vmcb->control.exit_code = SVM_EXIT_NPF; in nested_svm_inject_npf_exit()
3023 svm->vmcb->control.exit_code_hi = 0; in nested_svm_inject_npf_exit()
3024 svm->vmcb->control.exit_info_1 = (1ULL << 32); in nested_svm_inject_npf_exit()
3025 svm->vmcb->control.exit_info_2 = fault->address; in nested_svm_inject_npf_exit()
3028 svm->vmcb->control.exit_info_1 &= ~0xffffffffULL; in nested_svm_inject_npf_exit()
3029 svm->vmcb->control.exit_info_1 |= fault->error_code; in nested_svm_inject_npf_exit()
3035 if (svm->vmcb->control.exit_info_1 & (2ULL << 32)) in nested_svm_inject_npf_exit()
3036 svm->vmcb->control.exit_info_1 &= ~1; in nested_svm_inject_npf_exit()
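
nested_svm_inject_npf_exit() builds the #VMEXIT(NPF) information for L1: exit_info_2 carries the faulting address, while exit_info_1 keeps the page-fault error code in its low 32 bits and NPF-specific flags in the high 32 bits. A sketch of just that exit_info_1 packing; the meanings of the high bits are assumptions read off the lines above.

#include <stdint.h>

/*
 * exit_info_1 layout as used above (assumed from the listing):
 *   bits 31:0  page-fault error code delivered to L1
 *   bit  32    what line 3024 seeds the field with when the exit is synthesized from scratch
 *   bit  33    fault occurred on a page-structure access
 */
uint64_t build_npf_exit_info_1(uint64_t exit_info_1, uint32_t error_code)
{
        exit_info_1 &= ~0xffffffffULL;      /* drop the previous error code */
        exit_info_1 |= error_code;

        if (exit_info_1 & (2ULL << 32))     /* page-structure fault ... */
                exit_info_1 &= ~1ULL;       /* ... never reports the present bit */

        return exit_info_1;
}
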
3090 svm->vmcb->control.exit_code = SVM_EXIT_EXCP_BASE + nr; in nested_svm_check_exception()
3091 svm->vmcb->control.exit_code_hi = 0; in nested_svm_check_exception()
3092 svm->vmcb->control.exit_info_1 = error_code; in nested_svm_check_exception()
3099 svm->vmcb->control.exit_info_2 = svm->vcpu.arch.apf.nested_apf_token; in nested_svm_check_exception()
3101 svm->vmcb->control.exit_info_2 = svm->vcpu.arch.exception.payload; in nested_svm_check_exception()
3103 svm->vmcb->control.exit_info_2 = svm->vcpu.arch.cr2; in nested_svm_check_exception()
3129 svm->vmcb->control.exit_code = SVM_EXIT_INTR; in nested_svm_intr()
3130 svm->vmcb->control.exit_info_1 = 0; in nested_svm_intr()
3131 svm->vmcb->control.exit_info_2 = 0; in nested_svm_intr()
3157 svm->vmcb->control.exit_code = SVM_EXIT_NMI; in nested_svm_nmi()
3173 port = svm->vmcb->control.exit_info_1 >> 16; in nested_svm_intercept_ioio()
3174 size = (svm->vmcb->control.exit_info_1 & SVM_IOIO_SIZE_MASK) >> in nested_svm_intercept_ioio()
3198 write = svm->vmcb->control.exit_info_1 & 1; in nested_svm_exit_handled_msr()
3240 u32 exit_code = svm->vmcb->control.exit_code; in nested_svm_exit_special()
3269 u32 exit_code = svm->vmcb->control.exit_code; in nested_svm_intercept()
3333 struct vmcb_control_area *dst = &dst_vmcb->control; in copy_vmcb_control_area()
3334 struct vmcb_control_area *from = &from_vmcb->control; in copy_vmcb_control_area()
3371 trace_kvm_nested_vmexit_inject(vmcb->control.exit_code, in nested_svm_vmexit()
3372 vmcb->control.exit_info_1, in nested_svm_vmexit()
3373 vmcb->control.exit_info_2, in nested_svm_vmexit()
3374 vmcb->control.exit_int_info, in nested_svm_vmexit()
3375 vmcb->control.exit_int_info_err, in nested_svm_vmexit()
3413 nested_vmcb->control.int_ctl = vmcb->control.int_ctl; in nested_svm_vmexit()
3414 nested_vmcb->control.int_vector = vmcb->control.int_vector; in nested_svm_vmexit()
3415 nested_vmcb->control.int_state = vmcb->control.int_state; in nested_svm_vmexit()
3416 nested_vmcb->control.exit_code = vmcb->control.exit_code; in nested_svm_vmexit()
3417 nested_vmcb->control.exit_code_hi = vmcb->control.exit_code_hi; in nested_svm_vmexit()
3418 nested_vmcb->control.exit_info_1 = vmcb->control.exit_info_1; in nested_svm_vmexit()
3419 nested_vmcb->control.exit_info_2 = vmcb->control.exit_info_2; in nested_svm_vmexit()
3420 nested_vmcb->control.exit_int_info = vmcb->control.exit_int_info; in nested_svm_vmexit()
3421 nested_vmcb->control.exit_int_info_err = vmcb->control.exit_int_info_err; in nested_svm_vmexit()
3424 nested_vmcb->control.next_rip = vmcb->control.next_rip; in nested_svm_vmexit()
3434 if (vmcb->control.event_inj & SVM_EVTINJ_VALID) { in nested_svm_vmexit()
3435 struct vmcb_control_area *nc = &nested_vmcb->control; in nested_svm_vmexit()
3437 nc->exit_int_info = vmcb->control.event_inj; in nested_svm_vmexit()
3438 nc->exit_int_info_err = vmcb->control.event_inj_err; in nested_svm_vmexit()
3441 nested_vmcb->control.tlb_ctl = 0; in nested_svm_vmexit()
3442 nested_vmcb->control.event_inj = 0; in nested_svm_vmexit()
3443 nested_vmcb->control.event_inj_err = 0; in nested_svm_vmexit()
3445 nested_vmcb->control.pause_filter_count = in nested_svm_vmexit()
3446 svm->vmcb->control.pause_filter_count; in nested_svm_vmexit()
3447 nested_vmcb->control.pause_filter_thresh = in nested_svm_vmexit()
3448 svm->vmcb->control.pause_filter_thresh; in nested_svm_vmexit()
3452 nested_vmcb->control.int_ctl &= ~V_INTR_MASKING_MASK; in nested_svm_vmexit()
3457 svm->vcpu.arch.tsc_offset = svm->vmcb->control.tsc_offset; in nested_svm_vmexit()
3485 svm->vmcb->control.exit_int_info = 0; in nested_svm_vmexit()
3534 svm->vmcb->control.msrpm_base_pa = __sme_set(__pa(svm->nested.msrpm)); in nested_svm_vmrun_msrpm()
3541 if ((vmcb->control.intercept & (1ULL << INTERCEPT_VMRUN)) == 0) in nested_vmcb_checks()
3544 if (vmcb->control.asid == 0) in nested_vmcb_checks()
3547 if ((vmcb->control.nested_ctl & SVM_NESTED_CTL_NP_ENABLE) && in nested_vmcb_checks()
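
nested_vmcb_checks() rejects an L1-supplied VMCB unless the VMRUN intercept is set, the ASID is non-zero, and nested paging is only requested when the host itself uses NPT. A sketch of those three consistency checks; the intercept bit and nested-control flag values are placeholders, and host_npt_enabled stands in for the kernel's npt_enabled state.

#include <stdint.h>
#include <stdbool.h>

/* Placeholder encodings standing in for the SVM header definitions. */
#define INTERCEPT_VMRUN            32
#define SVM_NESTED_CTL_NP_ENABLE   (1ULL << 0)

struct vmcb_control_area {
        uint64_t intercept;
        uint32_t asid;
        uint64_t nested_ctl;
};

bool nested_vmcb_checks(const struct vmcb_control_area *control, bool host_npt_enabled)
{
        /* L1 must at least intercept VMRUN, or nested VMRUN cannot be virtualized. */
        if ((control->intercept & (1ULL << INTERCEPT_VMRUN)) == 0)
                return false;

        /* ASID 0 is reserved for the host. */
        if (control->asid == 0)
                return false;

        /* L1 may only ask for nested paging if the host has NPT enabled itself. */
        if ((control->nested_ctl & SVM_NESTED_CTL_NP_ENABLE) && !host_npt_enabled)
                return false;

        return true;
}
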
3562 if (nested_vmcb->control.nested_ctl & SVM_NESTED_CTL_NP_ENABLE) { in enter_svm_guest_mode()
3563 svm->nested.nested_cr3 = nested_vmcb->control.nested_cr3; in enter_svm_guest_mode()
3600 svm->nested.vmcb_msrpm = nested_vmcb->control.msrpm_base_pa & ~0x0fffULL; in enter_svm_guest_mode()
3601 svm->nested.vmcb_iopm = nested_vmcb->control.iopm_base_pa & ~0x0fffULL; in enter_svm_guest_mode()
3604 svm->nested.intercept_cr = nested_vmcb->control.intercept_cr; in enter_svm_guest_mode()
3605 svm->nested.intercept_dr = nested_vmcb->control.intercept_dr; in enter_svm_guest_mode()
3606 svm->nested.intercept_exceptions = nested_vmcb->control.intercept_exceptions; in enter_svm_guest_mode()
3607 svm->nested.intercept = nested_vmcb->control.intercept; in enter_svm_guest_mode()
3611 svm->vmcb->control.int_ctl &= in enter_svm_guest_mode()
3614 svm->vmcb->control.int_ctl |= nested_vmcb->control.int_ctl & in enter_svm_guest_mode()
3617 if (nested_vmcb->control.int_ctl & V_INTR_MASKING_MASK) in enter_svm_guest_mode()
3631 svm->vcpu.arch.tsc_offset += nested_vmcb->control.tsc_offset; in enter_svm_guest_mode()
3632 svm->vmcb->control.tsc_offset = svm->vcpu.arch.tsc_offset; in enter_svm_guest_mode()
3634 svm->vmcb->control.virt_ext = nested_vmcb->control.virt_ext; in enter_svm_guest_mode()
3635 svm->vmcb->control.int_vector = nested_vmcb->control.int_vector; in enter_svm_guest_mode()
3636 svm->vmcb->control.int_state = nested_vmcb->control.int_state; in enter_svm_guest_mode()
3637 svm->vmcb->control.event_inj = nested_vmcb->control.event_inj; in enter_svm_guest_mode()
3638 svm->vmcb->control.event_inj_err = nested_vmcb->control.event_inj_err; in enter_svm_guest_mode()
3640 svm->vmcb->control.pause_filter_count = in enter_svm_guest_mode()
3641 nested_vmcb->control.pause_filter_count; in enter_svm_guest_mode()
3642 svm->vmcb->control.pause_filter_thresh = in enter_svm_guest_mode()
3643 nested_vmcb->control.pause_filter_thresh; in enter_svm_guest_mode()
3687 nested_vmcb->control.exit_code = SVM_EXIT_ERR; in nested_svm_vmrun()
3688 nested_vmcb->control.exit_code_hi = 0; in nested_svm_vmrun()
3689 nested_vmcb->control.exit_info_1 = 0; in nested_svm_vmrun()
3690 nested_vmcb->control.exit_info_2 = 0; in nested_svm_vmrun()
3699 nested_vmcb->control.int_ctl, in nested_svm_vmrun()
3700 nested_vmcb->control.event_inj, in nested_svm_vmrun()
3701 nested_vmcb->control.nested_ctl); in nested_svm_vmrun()
3703 trace_kvm_nested_intercepts(nested_vmcb->control.intercept_cr & 0xffff, in nested_svm_vmrun()
3704 nested_vmcb->control.intercept_cr >> 16, in nested_svm_vmrun()
3705 nested_vmcb->control.intercept_exceptions, in nested_svm_vmrun()
3706 nested_vmcb->control.intercept); in nested_svm_vmrun()
3739 svm->vmcb->control.exit_code = SVM_EXIT_ERR; in nested_svm_vmrun()
3740 svm->vmcb->control.exit_code_hi = 0; in nested_svm_vmrun()
3741 svm->vmcb->control.exit_info_1 = 0; in nested_svm_vmrun()
3742 svm->vmcb->control.exit_info_2 = 0; in nested_svm_vmrun()
3862 svm->vmcb->control.int_ctl &= ~V_IRQ_MASK; in clgi_interception()
3917 int int_type = svm->vmcb->control.exit_int_info & in task_switch_interception()
3919 int int_vec = svm->vmcb->control.exit_int_info & SVM_EVTINJ_VEC_MASK; in task_switch_interception()
3921 svm->vmcb->control.exit_int_info & SVM_EXITINTINFO_TYPE_MASK; in task_switch_interception()
3923 svm->vmcb->control.exit_int_info & SVM_EXITINTINFO_VALID; in task_switch_interception()
3927 tss_selector = (u16)svm->vmcb->control.exit_info_1; in task_switch_interception()
3929 if (svm->vmcb->control.exit_info_2 & in task_switch_interception()
3932 else if (svm->vmcb->control.exit_info_2 & in task_switch_interception()
3946 if (svm->vmcb->control.exit_info_2 & in task_switch_interception()
3950 (u32)svm->vmcb->control.exit_info_2; in task_switch_interception()
4003 kvm_mmu_invlpg(&svm->vcpu, svm->vmcb->control.exit_info_1); in invlpg_interception()
4045 svm->vmcb->control.exit_code = SVM_EXIT_CR0_SEL_WRITE; in check_selective_cr0_intercepted()
4063 if (unlikely((svm->vmcb->control.exit_info_1 & CR_VALID) == 0)) in cr_interception()
4066 reg = svm->vmcb->control.exit_info_1 & SVM_EXITINFO_REG_MASK; in cr_interception()
4067 if (svm->vmcb->control.exit_code == SVM_EXIT_CR0_SEL_WRITE) in cr_interception()
4070 cr = svm->vmcb->control.exit_code - SVM_EXIT_READ_CR0; in cr_interception()
4144 reg = svm->vmcb->control.exit_info_1 & SVM_EXITINFO_REG_MASK; in dr_interception()
4145 dr = svm->vmcb->control.exit_code - SVM_EXIT_READ_DR0; in dr_interception()
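
cr_interception() and dr_interception() recover everything they need from the exit data: exit_info_1 names the general-purpose register involved (SVM_EXITINFO_REG_MASK), and subtracting the class's base exit code (SVM_EXIT_READ_CR0 / SVM_EXIT_READ_DR0) from exit_code yields the CR or DR number, with the write exits assumed to follow the 16 read exits. A sketch of the CR decode under those assumptions; the constants, struct cr_access and decode_cr_exit() are invented for illustration.

#include <stdint.h>
#include <stdbool.h>
#include <stdio.h>

/* Placeholder exit-code layout: 16 read exits followed by 16 write exits. */
#define SVM_EXIT_READ_CR0      0x000
#define SVM_EXIT_WRITE_CR0     0x010
#define SVM_EXITINFO_REG_MASK  0x0f    /* GPR number in the low bits of exit_info_1 */

struct cr_access {
        int  cr;        /* which control register */
        int  reg;       /* which guest GPR holds/receives the value */
        bool write;
};

struct cr_access decode_cr_exit(uint32_t exit_code, uint64_t exit_info_1)
{
        struct cr_access a;
        int idx = exit_code - SVM_EXIT_READ_CR0;   /* 0-15 reads, 16-31 writes */

        a.write = idx >= 16;
        a.cr    = idx & 0x0f;
        a.reg   = exit_info_1 & SVM_EXITINFO_REG_MASK;
        return a;
}

int main(void)
{
        /* e.g. a write to CR4 sourced from register 3 */
        struct cr_access a = decode_cr_exit(SVM_EXIT_WRITE_CR0 + 4, 3);

        printf("cr%d %s via reg %d\n", a.cr, a.write ? "write" : "read", a.reg);
        return 0;
}
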
4489 if (svm->vmcb->control.exit_info_1) in msr_interception()
4499 svm->vmcb->control.int_ctl &= ~V_IRQ_MASK; in interrupt_window_interception()
4543 u32 icrh = svm->vmcb->control.exit_info_1 >> 32; in avic_incomplete_ipi_interception()
4544 u32 icrl = svm->vmcb->control.exit_info_1; in avic_incomplete_ipi_interception()
4545 u32 id = svm->vmcb->control.exit_info_2 >> 32; in avic_incomplete_ipi_interception()
4546 u32 index = svm->vmcb->control.exit_info_2 & 0xFF; in avic_incomplete_ipi_interception()
4724 u32 offset = svm->vmcb->control.exit_info_1 & in avic_unaccel_trap_write()
4780 u32 offset = svm->vmcb->control.exit_info_1 & in avic_unaccelerated_access_interception()
4782 u32 vector = svm->vmcb->control.exit_info_2 & in avic_unaccelerated_access_interception()
4784 bool write = (svm->vmcb->control.exit_info_1 >> 32) & in avic_unaccelerated_access_interception()
4873 struct vmcb_control_area *control = &svm->vmcb->control; in dump_vmcb() local
4882 pr_err("%-20s%04x\n", "cr_read:", control->intercept_cr & 0xffff); in dump_vmcb()
4883 pr_err("%-20s%04x\n", "cr_write:", control->intercept_cr >> 16); in dump_vmcb()
4884 pr_err("%-20s%04x\n", "dr_read:", control->intercept_dr & 0xffff); in dump_vmcb()
4885 pr_err("%-20s%04x\n", "dr_write:", control->intercept_dr >> 16); in dump_vmcb()
4886 pr_err("%-20s%08x\n", "exceptions:", control->intercept_exceptions); in dump_vmcb()
4887 pr_err("%-20s%016llx\n", "intercepts:", control->intercept); in dump_vmcb()
4888 pr_err("%-20s%d\n", "pause filter count:", control->pause_filter_count); in dump_vmcb()
4890 control->pause_filter_thresh); in dump_vmcb()
4891 pr_err("%-20s%016llx\n", "iopm_base_pa:", control->iopm_base_pa); in dump_vmcb()
4892 pr_err("%-20s%016llx\n", "msrpm_base_pa:", control->msrpm_base_pa); in dump_vmcb()
4893 pr_err("%-20s%016llx\n", "tsc_offset:", control->tsc_offset); in dump_vmcb()
4894 pr_err("%-20s%d\n", "asid:", control->asid); in dump_vmcb()
4895 pr_err("%-20s%d\n", "tlb_ctl:", control->tlb_ctl); in dump_vmcb()
4896 pr_err("%-20s%08x\n", "int_ctl:", control->int_ctl); in dump_vmcb()
4897 pr_err("%-20s%08x\n", "int_vector:", control->int_vector); in dump_vmcb()
4898 pr_err("%-20s%08x\n", "int_state:", control->int_state); in dump_vmcb()
4899 pr_err("%-20s%08x\n", "exit_code:", control->exit_code); in dump_vmcb()
4900 pr_err("%-20s%016llx\n", "exit_info1:", control->exit_info_1); in dump_vmcb()
4901 pr_err("%-20s%016llx\n", "exit_info2:", control->exit_info_2); in dump_vmcb()
4902 pr_err("%-20s%08x\n", "exit_int_info:", control->exit_int_info); in dump_vmcb()
4903 pr_err("%-20s%08x\n", "exit_int_info_err:", control->exit_int_info_err); in dump_vmcb()
4904 pr_err("%-20s%lld\n", "nested_ctl:", control->nested_ctl); in dump_vmcb()
4905 pr_err("%-20s%016llx\n", "nested_cr3:", control->nested_cr3); in dump_vmcb()
4906 pr_err("%-20s%016llx\n", "avic_vapic_bar:", control->avic_vapic_bar); in dump_vmcb()
4907 pr_err("%-20s%08x\n", "event_inj:", control->event_inj); in dump_vmcb()
4908 pr_err("%-20s%08x\n", "event_inj_err:", control->event_inj_err); in dump_vmcb()
4909 pr_err("%-20s%lld\n", "virt_ext:", control->virt_ext); in dump_vmcb()
4910 pr_err("%-20s%016llx\n", "next_rip:", control->next_rip); in dump_vmcb()
4911 pr_err("%-20s%016llx\n", "avic_backing_page:", control->avic_backing_page); in dump_vmcb()
4912 pr_err("%-20s%016llx\n", "avic_logical_id:", control->avic_logical_id); in dump_vmcb()
4913 pr_err("%-20s%016llx\n", "avic_physical_id:", control->avic_physical_id); in dump_vmcb()
4988 struct vmcb_control_area *control = &to_svm(vcpu)->vmcb->control; in svm_get_exit_info() local
4990 *info1 = control->exit_info_1; in svm_get_exit_info()
4991 *info2 = control->exit_info_2; in svm_get_exit_info()
4998 u32 exit_code = svm->vmcb->control.exit_code; in handle_exit()
5018 svm->vmcb->control.exit_info_1, in handle_exit()
5019 svm->vmcb->control.exit_info_2, in handle_exit()
5020 svm->vmcb->control.exit_int_info, in handle_exit()
5021 svm->vmcb->control.exit_int_info_err, in handle_exit()
5035 if (svm->vmcb->control.exit_code == SVM_EXIT_ERR) { in handle_exit()
5038 = svm->vmcb->control.exit_code; in handle_exit()
5043 if (is_external_interrupt(svm->vmcb->control.exit_int_info) && in handle_exit()
5049 __func__, svm->vmcb->control.exit_int_info, in handle_exit()
5082 svm->vmcb->control.asid = asid; in pre_sev_run()
5096 svm->vmcb->control.tlb_ctl = TLB_CONTROL_FLUSH_ASID; in pre_sev_run()
5118 svm->vmcb->control.event_inj = SVM_EVTINJ_VALID | SVM_EVTINJ_TYPE_NMI; in svm_inject_nmi()
5126 struct vmcb_control_area *control; in svm_inject_irq() local
5129 control = &svm->vmcb->control; in svm_inject_irq()
5130 control->int_vector = irq; in svm_inject_irq()
5131 control->int_ctl &= ~V_INTR_PRIO_MASK; in svm_inject_irq()
5132 control->int_ctl |= V_IRQ_MASK | in svm_inject_irq()
5144 svm->vmcb->control.event_inj = vcpu->arch.interrupt.nr | in svm_set_irq()
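
svm_inject_nmi(), svm_set_irq() and svm_queue_exception() (lines 835-839) all funnel through the same EVENTINJ encoding: the vector sits in the low bits of event_inj, a type field says NMI / external interrupt / exception, a valid bit arms the injection, and a separate event_inj_err carries the error code when one applies. A sketch of that encoding; the field positions are placeholders for the header definitions and the helper names are invented for the sketch.

#include <stdint.h>

/* Placeholder EVENTINJ field positions; the real masks live in the SVM headers. */
#define EVTINJ_VEC_MASK    0xff
#define EVTINJ_TYPE_INTR   (0u << 8)    /* external interrupt */
#define EVTINJ_TYPE_NMI    (2u << 8)
#define EVTINJ_TYPE_EXEPT  (3u << 8)    /* hardware exception */
#define EVTINJ_VALID_ERR   (1u << 11)   /* event_inj_err holds an error code */
#define EVTINJ_VALID       (1u << 31)

struct vmcb_control_area {
        uint32_t event_inj;
        uint32_t event_inj_err;
};

void inject_nmi(struct vmcb_control_area *control)
{
        control->event_inj = EVTINJ_VALID | EVTINJ_TYPE_NMI;    /* no vector or error code */
}

void inject_irq(struct vmcb_control_area *control, uint8_t vector)
{
        control->event_inj = vector | EVTINJ_VALID | EVTINJ_TYPE_INTR;
}

void queue_exception(struct vmcb_control_area *control, uint8_t nr,
                     int has_error_code, uint32_t error_code)
{
        control->event_inj = nr | EVTINJ_VALID | EVTINJ_TYPE_EXEPT
                           | (has_error_code ? EVTINJ_VALID_ERR : 0);
        control->event_inj_err = error_code;
}

The same layout explains lines 3434-3438 and 5661-5662: an event that was queued in event_inj but never delivered is copied verbatim into exit_int_info / exit_int_info_err so it can be re-injected later.
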
5195 vmcb->control.int_ctl |= AVIC_ENABLE_MASK; in svm_refresh_apicv_exec_ctrl()
5197 vmcb->control.int_ctl &= ~AVIC_ENABLE_MASK; in svm_refresh_apicv_exec_ctrl()
5442 ret = !(vmcb->control.int_state & SVM_INTERRUPT_SHADOW_MASK) && in svm_nmi_allowed()
5476 (vmcb->control.int_state & SVM_INTERRUPT_SHADOW_MASK)) in svm_interrupt_allowed()
5549 svm->vmcb->control.tlb_ctl = TLB_CONTROL_FLUSH_ASID; in svm_flush_tlb()
5558 invlpga(gva, svm->vmcb->control.asid); in svm_flush_tlb_gva()
5573 int cr8 = svm->vmcb->control.int_ctl & V_TPR_MASK; in sync_cr8_to_lapic()
5588 svm->vmcb->control.int_ctl &= ~V_TPR_MASK; in sync_lapic_to_cr8()
5589 svm->vmcb->control.int_ctl |= cr8 & V_TPR_MASK; in sync_lapic_to_cr8()
5596 u32 exitintinfo = svm->vmcb->control.exit_int_info; in svm_complete_interrupts()
5642 u32 err = svm->vmcb->control.exit_int_info_err; in svm_complete_interrupts()
5659 struct vmcb_control_area *control = &svm->vmcb->control; in svm_cancel_injection() local
5661 control->exit_int_info = control->event_inj; in svm_cancel_injection()
5662 control->exit_int_info_err = control->event_inj_err; in svm_cancel_injection()
5663 control->event_inj = 0; in svm_cancel_injection()
5688 if (svm->nmi_singlestep && svm->vmcb->control.event_inj) { in svm_vcpu_run()
5852 if (unlikely(svm->vmcb->control.exit_code == SVM_EXIT_NMI)) in svm_vcpu_run()
5860 if (unlikely(svm->vmcb->control.exit_code == SVM_EXIT_NMI)) in svm_vcpu_run()
5867 svm->vmcb->control.tlb_ctl = TLB_CONTROL_DO_NOTHING; in svm_vcpu_run()
5870 if (svm->vmcb->control.exit_code == SVM_EXIT_EXCP_BASE + PF_VECTOR) in svm_vcpu_run()
5882 if (unlikely(svm->vmcb->control.exit_code == in svm_vcpu_run()
5902 svm->vmcb->control.nested_cr3 = __sme_set(root); in set_tdp_cr3()
6186 vmcb->control.exit_info_1 = 1; in svm_check_intercept()
6188 vmcb->control.exit_info_1 = 0; in svm_check_intercept()
6225 vmcb->control.exit_info_1 = exit_info; in svm_check_intercept()
6226 vmcb->control.exit_info_2 = info->next_rip; in svm_check_intercept()
6236 vmcb->control.next_rip = info->next_rip; in svm_check_intercept()
6237 vmcb->control.exit_code = icpt_info.exit_code; in svm_check_intercept()
6249 if (to_svm(vcpu)->vmcb->control.exit_code == SVM_EXIT_INTR) in svm_handle_exit_irqoff()
6284 svm->vmcb->control.exit_code = SVM_EXIT_SMI; in svm_smi_allowed()
7255 (svm->vmcb->control.intercept & (1ULL << INTERCEPT_INIT)); in svm_apic_init_signal_blocked()