Lines Matching refs:vmcb

98 	struct vmcb *hsave;
101 u64 vmcb; member
134 struct vmcb *vmcb; member
247 static inline void mark_all_dirty(struct vmcb *vmcb) in mark_all_dirty() argument
249 vmcb->control.clean = 0; in mark_all_dirty()
252 static inline void mark_all_clean(struct vmcb *vmcb) in mark_all_clean() argument
254 vmcb->control.clean = ((1 << VMCB_DIRTY_MAX) - 1) in mark_all_clean()
258 static inline void mark_dirty(struct vmcb *vmcb, int bit) in mark_dirty() argument
260 vmcb->control.clean &= ~(1 << bit); in mark_dirty()
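
The helpers at 247-260 implement the VMCB clean-bits optimization: each bit tells the processor that the corresponding VMCB area is unchanged since the last VMRUN, so its cached copy may be reused. A minimal sketch of the pattern with simplified stand-in types; the bit names are the ones used elsewhere in this listing, their ordering here is illustrative, and the real mark_all_clean() additionally masks out an always-dirty set that the matched line above truncates.

#include <stdint.h>

/* Clean-bit indices seen elsewhere in this listing (ordering is illustrative). */
enum {
	VMCB_INTERCEPTS, VMCB_ASID, VMCB_INTR, VMCB_NPT, VMCB_CR,
	VMCB_DR, VMCB_DT, VMCB_SEG, VMCB_LBR,
	VMCB_DIRTY_MAX,
};

/* Simplified stand-ins for the kernel structures; only the clean field matters here. */
struct vmcb_control_area { uint32_t clean; };
struct vmcb { struct vmcb_control_area control; };

/* Force the CPU to reload every cached VMCB area on the next VMRUN. */
static inline void mark_all_dirty(struct vmcb *vmcb)
{
	vmcb->control.clean = 0;
}

/* After a VMRUN the cached state is in sync again; the real helper also
 * clears an always-dirty mask, which the matched line 254 truncates. */
static inline void mark_all_clean(struct vmcb *vmcb)
{
	vmcb->control.clean = (1u << VMCB_DIRTY_MAX) - 1;
}

/* Software modified one area: clear its clean bit so hardware re-reads it. */
static inline void mark_dirty(struct vmcb *vmcb, int bit)
{
	vmcb->control.clean &= ~(1u << bit);
}
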
273 mark_dirty(svm->vmcb, VMCB_INTERCEPTS); in recalc_intercepts()
278 c = &svm->vmcb->control; in recalc_intercepts()
288 static inline struct vmcb *get_host_vmcb(struct vcpu_svm *svm) in get_host_vmcb()
293 return svm->vmcb; in get_host_vmcb()
298 struct vmcb *vmcb = get_host_vmcb(svm); in set_cr_intercept() local
300 vmcb->control.intercept_cr |= (1U << bit); in set_cr_intercept()
307 struct vmcb *vmcb = get_host_vmcb(svm); in clr_cr_intercept() local
309 vmcb->control.intercept_cr &= ~(1U << bit); in clr_cr_intercept()
316 struct vmcb *vmcb = get_host_vmcb(svm); in is_cr_intercept() local
318 return vmcb->control.intercept_cr & (1U << bit); in is_cr_intercept()
323 struct vmcb *vmcb = get_host_vmcb(svm); in set_dr_intercepts() local
325 vmcb->control.intercept_dr = (1 << INTERCEPT_DR0_READ) in set_dr_intercepts()
347 struct vmcb *vmcb = get_host_vmcb(svm); in clr_dr_intercepts() local
349 vmcb->control.intercept_dr = 0; in clr_dr_intercepts()
356 struct vmcb *vmcb = get_host_vmcb(svm); in set_exception_intercept() local
358 vmcb->control.intercept_exceptions |= (1U << bit); in set_exception_intercept()
365 struct vmcb *vmcb = get_host_vmcb(svm); in clr_exception_intercept() local
367 vmcb->control.intercept_exceptions &= ~(1U << bit); in clr_exception_intercept()
374 struct vmcb *vmcb = get_host_vmcb(svm); in set_intercept() local
376 vmcb->control.intercept |= (1ULL << bit); in set_intercept()
383 struct vmcb *vmcb = get_host_vmcb(svm); in clr_intercept() local
385 vmcb->control.intercept &= ~(1ULL << bit); in clr_intercept()
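
The accessors at 288-385 all follow one pattern: pick the VMCB that L1 owns through get_host_vmcb() (line 293 shows the non-nested case; returning the host-state copy while a nested guest runs is an assumption here), flip the requested bit in the intercept_cr / intercept_dr / intercept_exceptions / intercept words, and let recalc_intercepts() (lines 273-278) merge the result into the active VMCB and mark VMCB_INTERCEPTS dirty. A condensed sketch with stand-in types:

#include <stdbool.h>
#include <stdint.h>

struct vmcb_control_area {
	uint32_t intercept_cr;
	uint64_t intercept;
	uint32_t clean;
};
struct vmcb { struct vmcb_control_area control; };

struct vcpu_svm {
	struct vmcb *vmcb;
	struct { struct vmcb *hsave; } nested;
	bool guest_mode;                 /* stand-in for is_guest_mode(&svm->vcpu) */
};

#define VMCB_INTERCEPTS 0
static inline void mark_dirty(struct vmcb *vmcb, int bit)
{
	vmcb->control.clean &= ~(1u << bit);
}

/* Merge L1 and nested intercepts into the active VMCB; details elided. */
static void recalc_intercepts(struct vcpu_svm *svm)
{
	mark_dirty(svm->vmcb, VMCB_INTERCEPTS);    /* line 273 of the listing */
	/* ... line 278: c = &svm->vmcb->control; then OR in the nested intercepts ... */
}

/* Pick the VMCB whose intercepts L1 actually owns: the host-state copy while a
 * nested guest is running (assumption), otherwise the active VMCB (line 293). */
static inline struct vmcb *get_host_vmcb(struct vcpu_svm *svm)
{
	if (svm->guest_mode)
		return svm->nested.hsave;
	return svm->vmcb;
}

static inline void set_cr_intercept(struct vcpu_svm *svm, int bit)
{
	struct vmcb *vmcb = get_host_vmcb(svm);

	vmcb->control.intercept_cr |= (1U << bit);
	recalc_intercepts(svm);
}

static inline void clr_intercept(struct vcpu_svm *svm, int bit)
{
	struct vmcb *vmcb = get_host_vmcb(svm);

	vmcb->control.intercept &= ~(1ULL << bit);
	recalc_intercepts(svm);
}
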
425 struct vmcb *current_vmcb;
494 to_svm(vcpu)->vmcb->save.efer = efer | EFER_SVME; in svm_set_efer()
495 mark_dirty(to_svm(vcpu)->vmcb, VMCB_CR); in svm_set_efer()
509 if (svm->vmcb->control.int_state & SVM_INTERRUPT_SHADOW_MASK) in svm_get_interrupt_shadow()
519 svm->vmcb->control.int_state &= ~SVM_INTERRUPT_SHADOW_MASK; in svm_set_interrupt_shadow()
521 svm->vmcb->control.int_state |= SVM_INTERRUPT_SHADOW_MASK; in svm_set_interrupt_shadow()
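
Lines 509-521 are the interrupt-shadow accessors. AMD hardware keeps a single shadow flag in int_state, so KVM collapses its STI/MOV-SS distinction into that one bit; the sketch below returns a plain boolean rather than the generic KVM shadow flags.

#include <stdint.h>

#define SVM_INTERRUPT_SHADOW_MASK 1    /* illustrative value */

struct vmcb_control_area { uint32_t int_state; };
struct vmcb { struct vmcb_control_area control; };
struct vcpu_svm { struct vmcb *vmcb; };

static uint32_t svm_get_interrupt_shadow(struct vcpu_svm *svm)
{
	/* Line 509: any shadow at all is reported; the real code translates this
	 * into KVM's generic STI/MOV-SS flags. */
	return (svm->vmcb->control.int_state & SVM_INTERRUPT_SHADOW_MASK) ? 1 : 0;
}

static void svm_set_interrupt_shadow(struct vcpu_svm *svm, int mask)
{
	/* Lines 519-521: clear the hardware bit, then set it again if any
	 * generic shadow flag is requested. */
	svm->vmcb->control.int_state &= ~SVM_INTERRUPT_SHADOW_MASK;
	if (mask)
		svm->vmcb->control.int_state |= SVM_INTERRUPT_SHADOW_MASK;
}
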
529 if (svm->vmcb->control.next_rip != 0) { in skip_emulated_instruction()
531 svm->next_rip = svm->vmcb->control.next_rip; in skip_emulated_instruction()
574 svm->int3_rip = rip + svm->vmcb->save.cs.base; in svm_queue_exception()
578 svm->vmcb->control.event_inj = nr in svm_queue_exception()
582 svm->vmcb->control.event_inj_err = error_code; in svm_queue_exception()
883 svm->vmcb->control.lbr_ctl = 1; in svm_enable_lbrv()
894 svm->vmcb->control.lbr_ctl = 0; in svm_disable_lbrv()
996 return svm->vmcb->control.tsc_offset; in svm_read_tsc_offset()
1005 g_tsc_offset = svm->vmcb->control.tsc_offset - in svm_write_tsc_offset()
1010 svm->vmcb->control.tsc_offset, in svm_write_tsc_offset()
1013 svm->vmcb->control.tsc_offset = offset + g_tsc_offset; in svm_write_tsc_offset()
1015 mark_dirty(svm->vmcb, VMCB_INTERCEPTS); in svm_write_tsc_offset()
1022 svm->vmcb->control.tsc_offset += adjustment; in svm_adjust_tsc_offset_guest()
1027 svm->vmcb->control.tsc_offset - adjustment, in svm_adjust_tsc_offset_guest()
1028 svm->vmcb->control.tsc_offset); in svm_adjust_tsc_offset_guest()
1030 mark_dirty(svm->vmcb, VMCB_INTERCEPTS); in svm_adjust_tsc_offset_guest()
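
Lines 996-1030 are the TSC-offset plumbing: the offset written into the active VMCB must preserve whatever extra delta a nested guest has already layered on top (g_tsc_offset), and the field shares the VMCB_INTERCEPTS clean bit. A sketch of the write path under those assumptions, with the hsave bookkeeping for guest mode taken as an assumption:

#include <stdbool.h>
#include <stdint.h>

struct vmcb_control_area { uint64_t tsc_offset; uint32_t clean; };
struct vmcb { struct vmcb_control_area control; };
struct vcpu_svm {
	struct vmcb *vmcb;
	struct { struct vmcb *hsave; } nested;
	bool guest_mode;                       /* stand-in for is_guest_mode() */
};

#define VMCB_INTERCEPTS 0
static inline void mark_dirty(struct vmcb *vmcb, int bit)
{
	vmcb->control.clean &= ~(1u << bit);
}

/* Write an L1 TSC offset.  While a nested guest runs, svm->vmcb holds L1's
 * offset plus L2's extra offset; keep that delta intact (lines 1005-1015). */
static void svm_write_tsc_offset(struct vcpu_svm *svm, uint64_t offset)
{
	uint64_t g_tsc_offset = 0;

	if (svm->guest_mode) {
		g_tsc_offset = svm->vmcb->control.tsc_offset -
			       svm->nested.hsave->control.tsc_offset;
		svm->nested.hsave->control.tsc_offset = offset;
	}

	svm->vmcb->control.tsc_offset = offset + g_tsc_offset;
	mark_dirty(svm->vmcb, VMCB_INTERCEPTS);   /* tsc_offset shares this clean bit */
}
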
1035 struct vmcb_control_area *control = &svm->vmcb->control; in init_vmcb()
1036 struct vmcb_save_area *save = &svm->vmcb->save; in init_vmcb()
1135 svm->nested.vmcb = 0; in init_vmcb()
1143 mark_all_dirty(svm->vmcb); in init_vmcb()
1213 svm->vmcb = page_address(page); in svm_create_vcpu()
1214 clear_page(svm->vmcb); in svm_create_vcpu()
1262 mark_all_dirty(svm->vmcb); in svm_vcpu_load()
1282 if (sd->current_vmcb != svm->vmcb) { in svm_vcpu_load()
1283 sd->current_vmcb = svm->vmcb; in svm_vcpu_load()
1310 return to_svm(vcpu)->vmcb->save.rflags; in svm_get_rflags()
1320 to_svm(vcpu)->vmcb->save.rflags = rflags; in svm_set_rflags()
1347 struct vmcb_save_area *save = &to_svm(vcpu)->vmcb->save; in svm_seg()
1434 var->dpl = to_svm(vcpu)->vmcb->save.cpl; in svm_get_segment()
1441 struct vmcb_save_area *save = &to_svm(vcpu)->vmcb->save; in svm_get_cpl()
1450 dt->size = svm->vmcb->save.idtr.limit; in svm_get_idt()
1451 dt->address = svm->vmcb->save.idtr.base; in svm_get_idt()
1458 svm->vmcb->save.idtr.limit = dt->size; in svm_set_idt()
1459 svm->vmcb->save.idtr.base = dt->address ; in svm_set_idt()
1460 mark_dirty(svm->vmcb, VMCB_DT); in svm_set_idt()
1467 dt->size = svm->vmcb->save.gdtr.limit; in svm_get_gdt()
1468 dt->address = svm->vmcb->save.gdtr.base; in svm_get_gdt()
1475 svm->vmcb->save.gdtr.limit = dt->size; in svm_set_gdt()
1476 svm->vmcb->save.gdtr.base = dt->address ; in svm_set_gdt()
1477 mark_dirty(svm->vmcb, VMCB_DT); in svm_set_gdt()
1495 u64 *hcr0 = &svm->vmcb->save.cr0; in update_cr0_intercept()
1503 mark_dirty(svm->vmcb, VMCB_CR); in update_cr0_intercept()
1522 svm->vmcb->save.efer |= EFER_LMA | EFER_LME; in svm_set_cr0()
1527 svm->vmcb->save.efer &= ~(EFER_LMA | EFER_LME); in svm_set_cr0()
1545 svm->vmcb->save.cr0 = cr0; in svm_set_cr0()
1546 mark_dirty(svm->vmcb, VMCB_CR); in svm_set_cr0()
1553 unsigned long old_cr4 = to_svm(vcpu)->vmcb->save.cr4; in svm_set_cr4()
1565 to_svm(vcpu)->vmcb->save.cr4 = cr4; in svm_set_cr4()
1566 mark_dirty(to_svm(vcpu)->vmcb, VMCB_CR); in svm_set_cr4()
1596 svm->vmcb->save.cpl = (var->dpl & 3); in svm_set_segment()
1598 mark_dirty(svm->vmcb, VMCB_SEG); in svm_set_segment()
1619 svm->vmcb->control.tlb_ctl = TLB_CONTROL_FLUSH_ALL_ASID; in new_asid()
1623 svm->vmcb->control.asid = sd->next_asid++; in new_asid()
1625 mark_dirty(svm->vmcb, VMCB_ASID); in new_asid()
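
Lines 1619-1625 come from the per-CPU ASID allocator: when the pool is exhausted, the whole TLB is flushed and the generation bumped; otherwise the vCPU simply takes the next ASID and marks VMCB_ASID dirty. A sketch assuming per-CPU fields named next_asid / max_asid / asid_generation:

#include <stdint.h>

#define TLB_CONTROL_FLUSH_ALL_ASID 1       /* illustrative value */
#define VMCB_ASID 1                        /* illustrative clean-bit index */

struct vmcb_control_area { uint8_t tlb_ctl; uint32_t asid; uint32_t clean; };
struct vmcb { struct vmcb_control_area control; };
struct vcpu_svm { struct vmcb *vmcb; uint32_t asid_generation; };

/* Per-physical-CPU ASID bookkeeping (field names assumed from context). */
struct svm_cpu_data { uint32_t max_asid, next_asid, asid_generation; };

static inline void mark_dirty(struct vmcb *vmcb, int bit)
{
	vmcb->control.clean &= ~(1u << bit);
}

static void new_asid(struct vcpu_svm *svm, struct svm_cpu_data *sd)
{
	if (sd->next_asid > sd->max_asid) {
		/* Pool exhausted: start a new generation and flush every ASID. */
		++sd->asid_generation;
		sd->next_asid = 1;
		svm->vmcb->control.tlb_ctl = TLB_CONTROL_FLUSH_ALL_ASID;  /* line 1619 */
	}

	svm->asid_generation = sd->asid_generation;
	svm->vmcb->control.asid = sd->next_asid++;                         /* line 1623 */

	mark_dirty(svm->vmcb, VMCB_ASID);                                   /* line 1625 */
}
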
1630 return to_svm(vcpu)->vmcb->save.dr6; in svm_get_dr6()
1637 svm->vmcb->save.dr6 = value; in svm_set_dr6()
1638 mark_dirty(svm->vmcb, VMCB_DR); in svm_set_dr6()
1650 vcpu->arch.dr7 = svm->vmcb->save.dr7; in svm_sync_dirty_debug_regs()
1660 svm->vmcb->save.dr7 = value; in svm_set_dr7()
1661 mark_dirty(svm->vmcb, VMCB_DR); in svm_set_dr7()
1666 u64 fault_address = svm->vmcb->control.exit_info_2; in pf_interception()
1672 error_code = svm->vmcb->control.exit_info_1; in pf_interception()
1678 svm->vmcb->control.insn_bytes, in pf_interception()
1679 svm->vmcb->control.insn_len); in pf_interception()
1711 svm->vmcb->save.rflags &= in db_interception()
1719 svm->vmcb->save.cs.base + svm->vmcb->save.rip; in db_interception()
1732 kvm_run->debug.arch.pc = svm->vmcb->save.cs.base + svm->vmcb->save.rip; in bp_interception()
1848 clear_page(svm->vmcb); in shutdown_interception()
1858 u32 io_info = svm->vmcb->control.exit_info_1; /* address size bug? */ in io_interception()
1870 svm->next_rip = svm->vmcb->control.exit_info_2; in io_interception()
1931 svm->vmcb->control.nested_cr3 = root; in nested_svm_set_tdp_cr3()
1932 mark_dirty(svm->vmcb, VMCB_NPT); in nested_svm_set_tdp_cr3()
1941 if (svm->vmcb->control.exit_code != SVM_EXIT_NPF) { in nested_svm_inject_npf_exit()
1946 svm->vmcb->control.exit_code = SVM_EXIT_NPF; in nested_svm_inject_npf_exit()
1947 svm->vmcb->control.exit_code_hi = 0; in nested_svm_inject_npf_exit()
1948 svm->vmcb->control.exit_info_1 = (1ULL << 32); in nested_svm_inject_npf_exit()
1949 svm->vmcb->control.exit_info_2 = fault->address; in nested_svm_inject_npf_exit()
1952 svm->vmcb->control.exit_info_1 &= ~0xffffffffULL; in nested_svm_inject_npf_exit()
1953 svm->vmcb->control.exit_info_1 |= fault->error_code; in nested_svm_inject_npf_exit()
1959 if (svm->vmcb->control.exit_info_1 & (2ULL << 32)) in nested_svm_inject_npf_exit()
1960 svm->vmcb->control.exit_info_1 &= ~1; in nested_svm_inject_npf_exit()
1991 if (svm->vmcb->save.cpl) { in nested_svm_check_permissions()
2007 svm->vmcb->control.exit_code = SVM_EXIT_EXCP_BASE + nr; in nested_svm_check_exception()
2008 svm->vmcb->control.exit_code_hi = 0; in nested_svm_check_exception()
2009 svm->vmcb->control.exit_info_1 = error_code; in nested_svm_check_exception()
2010 svm->vmcb->control.exit_info_2 = svm->vcpu.arch.cr2; in nested_svm_check_exception()
2039 svm->vmcb->control.exit_code = SVM_EXIT_INTR; in nested_svm_intr()
2040 svm->vmcb->control.exit_info_1 = 0; in nested_svm_intr()
2041 svm->vmcb->control.exit_info_2 = 0; in nested_svm_intr()
2051 trace_kvm_nested_intr_vmexit(svm->vmcb->save.rip); in nested_svm_intr()
2067 svm->vmcb->control.exit_code = SVM_EXIT_NMI; in nested_svm_nmi()
2109 port = svm->vmcb->control.exit_info_1 >> 16; in nested_svm_intercept_ioio()
2110 size = (svm->vmcb->control.exit_info_1 & SVM_IOIO_SIZE_MASK) >> in nested_svm_intercept_ioio()
2134 write = svm->vmcb->control.exit_info_1 & 1; in nested_svm_exit_handled_msr()
2151 u32 exit_code = svm->vmcb->control.exit_code; in nested_svm_exit_special()
2183 u32 exit_code = svm->vmcb->control.exit_code; in nested_svm_intercept()
2241 static inline void copy_vmcb_control_area(struct vmcb *dst_vmcb, struct vmcb *from_vmcb) in copy_vmcb_control_area()
2273 struct vmcb *nested_vmcb; in nested_svm_vmexit()
2274 struct vmcb *hsave = svm->nested.hsave; in nested_svm_vmexit()
2275 struct vmcb *vmcb = svm->vmcb; in nested_svm_vmexit() local
2278 trace_kvm_nested_vmexit_inject(vmcb->control.exit_code, in nested_svm_vmexit()
2279 vmcb->control.exit_info_1, in nested_svm_vmexit()
2280 vmcb->control.exit_info_2, in nested_svm_vmexit()
2281 vmcb->control.exit_int_info, in nested_svm_vmexit()
2282 vmcb->control.exit_int_info_err, in nested_svm_vmexit()
2285 nested_vmcb = nested_svm_map(svm, svm->nested.vmcb, &page); in nested_svm_vmexit()
2291 svm->nested.vmcb = 0; in nested_svm_vmexit()
2296 nested_vmcb->save.es = vmcb->save.es; in nested_svm_vmexit()
2297 nested_vmcb->save.cs = vmcb->save.cs; in nested_svm_vmexit()
2298 nested_vmcb->save.ss = vmcb->save.ss; in nested_svm_vmexit()
2299 nested_vmcb->save.ds = vmcb->save.ds; in nested_svm_vmexit()
2300 nested_vmcb->save.gdtr = vmcb->save.gdtr; in nested_svm_vmexit()
2301 nested_vmcb->save.idtr = vmcb->save.idtr; in nested_svm_vmexit()
2305 nested_vmcb->save.cr2 = vmcb->save.cr2; in nested_svm_vmexit()
2308 nested_vmcb->save.rip = vmcb->save.rip; in nested_svm_vmexit()
2309 nested_vmcb->save.rsp = vmcb->save.rsp; in nested_svm_vmexit()
2310 nested_vmcb->save.rax = vmcb->save.rax; in nested_svm_vmexit()
2311 nested_vmcb->save.dr7 = vmcb->save.dr7; in nested_svm_vmexit()
2312 nested_vmcb->save.dr6 = vmcb->save.dr6; in nested_svm_vmexit()
2313 nested_vmcb->save.cpl = vmcb->save.cpl; in nested_svm_vmexit()
2315 nested_vmcb->control.int_ctl = vmcb->control.int_ctl; in nested_svm_vmexit()
2316 nested_vmcb->control.int_vector = vmcb->control.int_vector; in nested_svm_vmexit()
2317 nested_vmcb->control.int_state = vmcb->control.int_state; in nested_svm_vmexit()
2318 nested_vmcb->control.exit_code = vmcb->control.exit_code; in nested_svm_vmexit()
2319 nested_vmcb->control.exit_code_hi = vmcb->control.exit_code_hi; in nested_svm_vmexit()
2320 nested_vmcb->control.exit_info_1 = vmcb->control.exit_info_1; in nested_svm_vmexit()
2321 nested_vmcb->control.exit_info_2 = vmcb->control.exit_info_2; in nested_svm_vmexit()
2322 nested_vmcb->control.exit_int_info = vmcb->control.exit_int_info; in nested_svm_vmexit()
2323 nested_vmcb->control.exit_int_info_err = vmcb->control.exit_int_info_err; in nested_svm_vmexit()
2326 nested_vmcb->control.next_rip = vmcb->control.next_rip; in nested_svm_vmexit()
2336 if (vmcb->control.event_inj & SVM_EVTINJ_VALID) { in nested_svm_vmexit()
2339 nc->exit_int_info = vmcb->control.event_inj; in nested_svm_vmexit()
2340 nc->exit_int_info_err = vmcb->control.event_inj_err; in nested_svm_vmexit()
2352 copy_vmcb_control_area(vmcb, hsave); in nested_svm_vmexit()
2360 svm->vmcb->save.es = hsave->save.es; in nested_svm_vmexit()
2361 svm->vmcb->save.cs = hsave->save.cs; in nested_svm_vmexit()
2362 svm->vmcb->save.ss = hsave->save.ss; in nested_svm_vmexit()
2363 svm->vmcb->save.ds = hsave->save.ds; in nested_svm_vmexit()
2364 svm->vmcb->save.gdtr = hsave->save.gdtr; in nested_svm_vmexit()
2365 svm->vmcb->save.idtr = hsave->save.idtr; in nested_svm_vmexit()
2371 svm->vmcb->save.cr3 = hsave->save.cr3; in nested_svm_vmexit()
2379 svm->vmcb->save.dr7 = 0; in nested_svm_vmexit()
2380 svm->vmcb->save.cpl = 0; in nested_svm_vmexit()
2381 svm->vmcb->control.exit_int_info = 0; in nested_svm_vmexit()
2383 mark_all_dirty(svm->vmcb); in nested_svm_vmexit()
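
Lines 2273-2383 are the body of nested_svm_vmexit(). The overall shape, under simplified types and with the long per-field copies abbreviated (the kernel copies each field explicitly rather than bulk-copying the save area), is roughly:

#include <stdint.h>

/* Simplified stand-ins: only the fields this sketch touches are declared, and
 * segment registers are folded into the bulk save-area copy. */
struct vmcb_save_area { uint64_t cr2, rip, rsp, rax, dr6, dr7; uint8_t cpl; };
struct vmcb_control_area {
	uint32_t exit_code;
	uint64_t exit_info_1, exit_info_2;
	uint32_t exit_int_info, exit_int_info_err;
	uint32_t event_inj, event_inj_err;
	uint32_t clean;
};
struct vmcb { struct vmcb_control_area control; struct vmcb_save_area save; };
struct vcpu_svm {
	struct vmcb *vmcb;
	struct { struct vmcb *hsave; uint64_t vmcb; } nested;
};

#define SVM_EVTINJ_VALID (1u << 31)

static void mark_all_dirty(struct vmcb *vmcb) { vmcb->control.clean = 0; }

/* The kernel copies the control area field by field; a struct assignment is a
 * shortcut here. */
static void copy_vmcb_control_area(struct vmcb *dst, struct vmcb *from)
{
	dst->control = from->control;
}

/* nested_svm_map() would map the guest page holding L1's VMCB (line 2285);
 * this sketch takes the mapped pointer as a parameter instead. */
static int nested_svm_vmexit_sketch(struct vcpu_svm *svm, struct vmcb *nested_vmcb)
{
	struct vmcb *hsave = svm->nested.hsave;
	struct vmcb *vmcb  = svm->vmcb;               /* currently holds L2 state */

	svm->nested.vmcb = 0;                         /* leaving guest mode, line 2291 */

	/* Hand L1 the exit it expects: L2 register state plus the exit_* fields
	 * (lines 2296-2326, abbreviated). */
	nested_vmcb->save = vmcb->save;
	nested_vmcb->control.exit_code         = vmcb->control.exit_code;
	nested_vmcb->control.exit_info_1       = vmcb->control.exit_info_1;
	nested_vmcb->control.exit_info_2       = vmcb->control.exit_info_2;
	nested_vmcb->control.exit_int_info     = vmcb->control.exit_int_info;
	nested_vmcb->control.exit_int_info_err = vmcb->control.exit_int_info_err;

	/* An injection that never completed is reported to L1 as exit_int_info
	 * (lines 2336-2340). */
	if (vmcb->control.event_inj & SVM_EVTINJ_VALID) {
		nested_vmcb->control.exit_int_info     = vmcb->control.event_inj;
		nested_vmcb->control.exit_int_info_err = vmcb->control.event_inj_err;
	}

	/* Restore L1: control area and saved state from hsave, clear leftover
	 * injection state, then force a full VMCB reload (lines 2352-2383). */
	copy_vmcb_control_area(vmcb, hsave);
	vmcb->save = hsave->save;
	vmcb->save.dr7 = 0;
	vmcb->save.cpl = 0;
	vmcb->control.exit_int_info = 0;

	mark_all_dirty(vmcb);
	return 0;
}
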
2430 svm->vmcb->control.msrpm_base_pa = __pa(svm->nested.msrpm); in nested_svm_vmrun_msrpm()
2435 static bool nested_vmcb_checks(struct vmcb *vmcb) in nested_vmcb_checks() argument
2437 if ((vmcb->control.intercept & (1ULL << INTERCEPT_VMRUN)) == 0) in nested_vmcb_checks()
2440 if (vmcb->control.asid == 0) in nested_vmcb_checks()
2443 if (vmcb->control.nested_ctl && !npt_enabled) in nested_vmcb_checks()
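
nested_vmcb_checks() at 2435-2443 is the sanity gate before a nested VMRUN is honoured; the three tests visible above appear to be the whole of the check in this version. A self-contained rendering:

#include <stdbool.h>
#include <stdint.h>

#define INTERCEPT_VMRUN 32              /* illustrative bit position */
static bool npt_enabled;                /* host-side nested-paging module flag */

struct vmcb_control_area { uint64_t intercept; uint32_t asid; uint64_t nested_ctl; };
struct vmcb { struct vmcb_control_area control; };

static bool nested_vmcb_checks(struct vmcb *vmcb)
{
	/* L1 must at least intercept VMRUN, otherwise nesting cannot work. */
	if ((vmcb->control.intercept & (1ULL << INTERCEPT_VMRUN)) == 0)
		return false;

	/* ASID 0 belongs to the host and must not be handed to a guest. */
	if (vmcb->control.asid == 0)
		return false;

	/* Nested paging can only be passed through if the host enabled NPT. */
	if (vmcb->control.nested_ctl && !npt_enabled)
		return false;

	return true;
}
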
2451 struct vmcb *nested_vmcb; in nested_svm_vmrun()
2452 struct vmcb *hsave = svm->nested.hsave; in nested_svm_vmrun()
2453 struct vmcb *vmcb = svm->vmcb; in nested_svm_vmrun() local
2457 vmcb_gpa = svm->vmcb->save.rax; in nested_svm_vmrun()
2459 nested_vmcb = nested_svm_map(svm, svm->vmcb->save.rax, &page); in nested_svm_vmrun()
2474 trace_kvm_nested_vmrun(svm->vmcb->save.rip, vmcb_gpa, in nested_svm_vmrun()
2493 hsave->save.es = vmcb->save.es; in nested_svm_vmrun()
2494 hsave->save.cs = vmcb->save.cs; in nested_svm_vmrun()
2495 hsave->save.ss = vmcb->save.ss; in nested_svm_vmrun()
2496 hsave->save.ds = vmcb->save.ds; in nested_svm_vmrun()
2497 hsave->save.gdtr = vmcb->save.gdtr; in nested_svm_vmrun()
2498 hsave->save.idtr = vmcb->save.idtr; in nested_svm_vmrun()
2504 hsave->save.rsp = vmcb->save.rsp; in nested_svm_vmrun()
2505 hsave->save.rax = vmcb->save.rax; in nested_svm_vmrun()
2507 hsave->save.cr3 = vmcb->save.cr3; in nested_svm_vmrun()
2511 copy_vmcb_control_area(hsave, vmcb); in nested_svm_vmrun()
2525 svm->vmcb->save.es = nested_vmcb->save.es; in nested_svm_vmrun()
2526 svm->vmcb->save.cs = nested_vmcb->save.cs; in nested_svm_vmrun()
2527 svm->vmcb->save.ss = nested_vmcb->save.ss; in nested_svm_vmrun()
2528 svm->vmcb->save.ds = nested_vmcb->save.ds; in nested_svm_vmrun()
2529 svm->vmcb->save.gdtr = nested_vmcb->save.gdtr; in nested_svm_vmrun()
2530 svm->vmcb->save.idtr = nested_vmcb->save.idtr; in nested_svm_vmrun()
2536 svm->vmcb->save.cr3 = nested_vmcb->save.cr3; in nested_svm_vmrun()
2544 svm->vmcb->save.cr2 = svm->vcpu.arch.cr2 = nested_vmcb->save.cr2; in nested_svm_vmrun()
2550 svm->vmcb->save.rax = nested_vmcb->save.rax; in nested_svm_vmrun()
2551 svm->vmcb->save.rsp = nested_vmcb->save.rsp; in nested_svm_vmrun()
2552 svm->vmcb->save.rip = nested_vmcb->save.rip; in nested_svm_vmrun()
2553 svm->vmcb->save.dr7 = nested_vmcb->save.dr7; in nested_svm_vmrun()
2554 svm->vmcb->save.dr6 = nested_vmcb->save.dr6; in nested_svm_vmrun()
2555 svm->vmcb->save.cpl = nested_vmcb->save.cpl; in nested_svm_vmrun()
2567 svm->vmcb->control.int_ctl = nested_vmcb->control.int_ctl & in nested_svm_vmrun()
2570 svm->vmcb->control.int_ctl |= V_INTR_MASKING_MASK; in nested_svm_vmrun()
2586 svm->vmcb->control.lbr_ctl = nested_vmcb->control.lbr_ctl; in nested_svm_vmrun()
2587 svm->vmcb->control.int_vector = nested_vmcb->control.int_vector; in nested_svm_vmrun()
2588 svm->vmcb->control.int_state = nested_vmcb->control.int_state; in nested_svm_vmrun()
2589 svm->vmcb->control.tsc_offset += nested_vmcb->control.tsc_offset; in nested_svm_vmrun()
2590 svm->vmcb->control.event_inj = nested_vmcb->control.event_inj; in nested_svm_vmrun()
2591 svm->vmcb->control.event_inj_err = nested_vmcb->control.event_inj_err; in nested_svm_vmrun()
2604 svm->nested.vmcb = vmcb_gpa; in nested_svm_vmrun()
2608 mark_all_dirty(svm->vmcb); in nested_svm_vmrun()
2613 static void nested_svm_vmloadsave(struct vmcb *from_vmcb, struct vmcb *to_vmcb) in nested_svm_vmloadsave()
2631 struct vmcb *nested_vmcb; in vmload_interception()
2637 nested_vmcb = nested_svm_map(svm, svm->vmcb->save.rax, &page); in vmload_interception()
2644 nested_svm_vmloadsave(nested_vmcb, svm->vmcb); in vmload_interception()
2652 struct vmcb *nested_vmcb; in vmsave_interception()
2658 nested_vmcb = nested_svm_map(svm, svm->vmcb->save.rax, &page); in vmsave_interception()
2665 nested_svm_vmloadsave(svm->vmcb, nested_vmcb); in vmsave_interception()
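
vmload_interception() and vmsave_interception() (2631-2665) both map the VMCB addressed by guest RAX and call nested_svm_vmloadsave() in opposite directions: VMLOAD copies from that VMCB into svm->vmcb (line 2644), VMSAVE copies out of it (line 2665). The copied fields are not visible in this listing; the set below is what the VMLOAD/VMSAVE instructions architecturally transfer, so treat it as an assumption.

#include <stdint.h>

/* Simplified stand-ins for the save-area fields VMLOAD/VMSAVE deal with. */
struct vmcb_seg { uint16_t selector, attrib; uint32_t limit; uint64_t base; };
struct vmcb_save_area {
	struct vmcb_seg fs, gs, tr, ldtr;
	uint64_t kernel_gs_base;
	uint64_t star, lstar, cstar, sfmask;          /* fast-syscall MSRs */
	uint64_t sysenter_cs, sysenter_esp, sysenter_eip;
};
struct vmcb { struct vmcb_save_area save; };

static void nested_svm_vmloadsave(struct vmcb *from_vmcb, struct vmcb *to_vmcb)
{
	to_vmcb->save.fs   = from_vmcb->save.fs;
	to_vmcb->save.gs   = from_vmcb->save.gs;
	to_vmcb->save.tr   = from_vmcb->save.tr;
	to_vmcb->save.ldtr = from_vmcb->save.ldtr;
	to_vmcb->save.kernel_gs_base = from_vmcb->save.kernel_gs_base;
	to_vmcb->save.star   = from_vmcb->save.star;
	to_vmcb->save.lstar  = from_vmcb->save.lstar;
	to_vmcb->save.cstar  = from_vmcb->save.cstar;
	to_vmcb->save.sfmask = from_vmcb->save.sfmask;
	to_vmcb->save.sysenter_cs  = from_vmcb->save.sysenter_cs;
	to_vmcb->save.sysenter_esp = from_vmcb->save.sysenter_esp;
	to_vmcb->save.sysenter_eip = from_vmcb->save.sysenter_eip;
}
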
2689 svm->vmcb->control.exit_code = SVM_EXIT_ERR; in vmrun_interception()
2690 svm->vmcb->control.exit_code_hi = 0; in vmrun_interception()
2691 svm->vmcb->control.exit_info_1 = 0; in vmrun_interception()
2692 svm->vmcb->control.exit_info_2 = 0; in vmrun_interception()
2725 svm->vmcb->control.int_ctl &= ~V_IRQ_MASK; in clgi_interception()
2727 mark_dirty(svm->vmcb, VMCB_INTR); in clgi_interception()
2736 trace_kvm_invlpga(svm->vmcb->save.rip, kvm_register_read(&svm->vcpu, VCPU_REGS_RCX), in invlpga_interception()
2749 trace_kvm_skinit(svm->vmcb->save.rip, kvm_register_read(&svm->vcpu, VCPU_REGS_RAX)); in skinit_interception()
2778 int int_type = svm->vmcb->control.exit_int_info & in task_switch_interception()
2780 int int_vec = svm->vmcb->control.exit_int_info & SVM_EVTINJ_VEC_MASK; in task_switch_interception()
2782 svm->vmcb->control.exit_int_info & SVM_EXITINTINFO_TYPE_MASK; in task_switch_interception()
2784 svm->vmcb->control.exit_int_info & SVM_EXITINTINFO_VALID; in task_switch_interception()
2788 tss_selector = (u16)svm->vmcb->control.exit_info_1; in task_switch_interception()
2790 if (svm->vmcb->control.exit_info_2 & in task_switch_interception()
2793 else if (svm->vmcb->control.exit_info_2 & in task_switch_interception()
2807 if (svm->vmcb->control.exit_info_2 & in task_switch_interception()
2811 (u32)svm->vmcb->control.exit_info_2; in task_switch_interception()
2864 kvm_mmu_invlpg(&svm->vcpu, svm->vmcb->control.exit_info_1); in invlpg_interception()
2904 svm->vmcb->control.exit_code = SVM_EXIT_CR0_SEL_WRITE; in check_selective_cr0_intercepted()
2922 if (unlikely((svm->vmcb->control.exit_info_1 & CR_VALID) == 0)) in cr_interception()
2925 reg = svm->vmcb->control.exit_info_1 & SVM_EXITINFO_REG_MASK; in cr_interception()
2926 if (svm->vmcb->control.exit_code == SVM_EXIT_CR0_SEL_WRITE) in cr_interception()
2929 cr = svm->vmcb->control.exit_code - SVM_EXIT_READ_CR0; in cr_interception()
3005 reg = svm->vmcb->control.exit_info_1 & SVM_EXITINFO_REG_MASK; in dr_interception()
3006 dr = svm->vmcb->control.exit_code - SVM_EXIT_READ_DR0; in dr_interception()
3043 struct vmcb *vmcb = get_host_vmcb(to_svm(vcpu)); in svm_read_l1_tsc() local
3044 return vmcb->control.tsc_offset + host_tsc; in svm_read_l1_tsc()
3053 msr_info->data = svm->vmcb->control.tsc_offset + in svm_get_msr()
3059 msr_info->data = svm->vmcb->save.star; in svm_get_msr()
3063 msr_info->data = svm->vmcb->save.lstar; in svm_get_msr()
3066 msr_info->data = svm->vmcb->save.cstar; in svm_get_msr()
3069 msr_info->data = svm->vmcb->save.kernel_gs_base; in svm_get_msr()
3072 msr_info->data = svm->vmcb->save.sfmask; in svm_get_msr()
3076 msr_info->data = svm->vmcb->save.sysenter_cs; in svm_get_msr()
3090 msr_info->data = svm->vmcb->save.dbgctl; in svm_get_msr()
3093 msr_info->data = svm->vmcb->save.br_from; in svm_get_msr()
3096 msr_info->data = svm->vmcb->save.br_to; in svm_get_msr()
3099 msr_info->data = svm->vmcb->save.last_excp_from; in svm_get_msr()
3102 msr_info->data = svm->vmcb->save.last_excp_to; in svm_get_msr()
3192 svm->vmcb->save.g_pat = data; in svm_set_msr()
3193 mark_dirty(svm->vmcb, VMCB_NPT); in svm_set_msr()
3252 svm->vmcb->save.star = data; in svm_set_msr()
3256 svm->vmcb->save.lstar = data; in svm_set_msr()
3259 svm->vmcb->save.cstar = data; in svm_set_msr()
3262 svm->vmcb->save.kernel_gs_base = data; in svm_set_msr()
3265 svm->vmcb->save.sfmask = data; in svm_set_msr()
3269 svm->vmcb->save.sysenter_cs = data; in svm_set_msr()
3273 svm->vmcb->save.sysenter_eip = data; in svm_set_msr()
3277 svm->vmcb->save.sysenter_esp = data; in svm_set_msr()
3288 svm->vmcb->save.dbgctl = data; in svm_set_msr()
3289 mark_dirty(svm->vmcb, VMCB_LBR); in svm_set_msr()
3332 if (svm->vmcb->control.exit_info_1) in msr_interception()
3342 svm->vmcb->control.int_ctl &= ~V_IRQ_MASK; in interrupt_window_interception()
3343 mark_dirty(svm->vmcb, VMCB_INTR); in interrupt_window_interception()
3440 struct vmcb_control_area *control = &svm->vmcb->control; in dump_vmcb()
3441 struct vmcb_save_area *save = &svm->vmcb->save; in dump_vmcb()
3544 struct vmcb_control_area *control = &to_svm(vcpu)->vmcb->control; in svm_get_exit_info()
3554 u32 exit_code = svm->vmcb->control.exit_code; in handle_exit()
3559 vcpu->arch.cr0 = svm->vmcb->save.cr0; in handle_exit()
3561 vcpu->arch.cr3 = svm->vmcb->save.cr3; in handle_exit()
3573 trace_kvm_nested_vmexit(svm->vmcb->save.rip, exit_code, in handle_exit()
3574 svm->vmcb->control.exit_info_1, in handle_exit()
3575 svm->vmcb->control.exit_info_2, in handle_exit()
3576 svm->vmcb->control.exit_int_info, in handle_exit()
3577 svm->vmcb->control.exit_int_info_err, in handle_exit()
3591 if (svm->vmcb->control.exit_code == SVM_EXIT_ERR) { in handle_exit()
3594 = svm->vmcb->control.exit_code; in handle_exit()
3600 if (is_external_interrupt(svm->vmcb->control.exit_int_info) && in handle_exit()
3606 __func__, svm->vmcb->control.exit_int_info, in handle_exit()
3643 svm->vmcb->control.event_inj = SVM_EVTINJ_VALID | SVM_EVTINJ_TYPE_NMI; in svm_inject_nmi()
3653 control = &svm->vmcb->control; in svm_inject_irq()
3658 mark_dirty(svm->vmcb, VMCB_INTR); in svm_inject_irq()
3670 svm->vmcb->control.event_inj = vcpu->arch.interrupt.nr | in svm_set_irq()
3713 struct vmcb *vmcb = svm->vmcb; in svm_nmi_allowed() local
3715 ret = !(vmcb->control.int_state & SVM_INTERRUPT_SHADOW_MASK) && in svm_nmi_allowed()
3745 struct vmcb *vmcb = svm->vmcb; in svm_interrupt_allowed() local
3749 (vmcb->control.int_state & SVM_INTERRUPT_SHADOW_MASK)) in svm_interrupt_allowed()
3789 svm->vmcb->save.rflags |= (X86_EFLAGS_TF | X86_EFLAGS_RF); in enable_nmi_window()
3802 svm->vmcb->control.tlb_ctl = TLB_CONTROL_FLUSH_ASID; in svm_flush_tlb()
3819 int cr8 = svm->vmcb->control.int_ctl & V_TPR_MASK; in sync_cr8_to_lapic()
3833 svm->vmcb->control.int_ctl &= ~V_TPR_MASK; in sync_lapic_to_cr8()
3834 svm->vmcb->control.int_ctl |= cr8 & V_TPR_MASK; in sync_lapic_to_cr8()
3841 u32 exitintinfo = svm->vmcb->control.exit_int_info; in svm_complete_interrupts()
3887 u32 err = svm->vmcb->control.exit_int_info_err; in svm_complete_interrupts()
3904 struct vmcb_control_area *control = &svm->vmcb->control; in svm_cancel_injection()
3916 svm->vmcb->save.rax = vcpu->arch.regs[VCPU_REGS_RAX]; in svm_vcpu_run()
3917 svm->vmcb->save.rsp = vcpu->arch.regs[VCPU_REGS_RSP]; in svm_vcpu_run()
3918 svm->vmcb->save.rip = vcpu->arch.regs[VCPU_REGS_RIP]; in svm_vcpu_run()
3931 svm->vmcb->save.cr2 = vcpu->arch.cr2; in svm_vcpu_run()
4011 [vmcb]"i"(offsetof(struct vcpu_svm, vmcb_pa)), in svm_vcpu_run()
4073 vcpu->arch.cr2 = svm->vmcb->save.cr2; in svm_vcpu_run()
4074 vcpu->arch.regs[VCPU_REGS_RAX] = svm->vmcb->save.rax; in svm_vcpu_run()
4075 vcpu->arch.regs[VCPU_REGS_RSP] = svm->vmcb->save.rsp; in svm_vcpu_run()
4076 vcpu->arch.regs[VCPU_REGS_RIP] = svm->vmcb->save.rip; in svm_vcpu_run()
4078 if (unlikely(svm->vmcb->control.exit_code == SVM_EXIT_NMI)) in svm_vcpu_run()
4085 if (unlikely(svm->vmcb->control.exit_code == SVM_EXIT_NMI)) in svm_vcpu_run()
4092 svm->vmcb->control.tlb_ctl = TLB_CONTROL_DO_NOTHING; in svm_vcpu_run()
4095 if (svm->vmcb->control.exit_code == SVM_EXIT_EXCP_BASE + PF_VECTOR) in svm_vcpu_run()
4107 if (unlikely(svm->vmcb->control.exit_code == in svm_vcpu_run()
4111 mark_all_clean(svm->vmcb); in svm_vcpu_run()
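
In svm_vcpu_run() (3916-4111) only RAX, RSP and RIP live in the VMCB; they are written in before the VMRUN (along with CR2, line 3931) and read back after it, every other GPR being handled by the inline-assembly wrapper the listing elides. Once hardware state and VMCB agree again, mark_all_clean() re-arms the clean bits. A sketch of just that bookkeeping:

#include <stdint.h>

enum { VCPU_REGS_RAX, VCPU_REGS_RSP, VCPU_REGS_RIP, NR_VCPU_REGS };  /* illustrative */

struct vmcb_save_area { uint64_t rax, rsp, rip, cr2; };
struct vmcb_control_area { uint32_t clean; };
struct vmcb { struct vmcb_control_area control; struct vmcb_save_area save; };

struct kvm_vcpu_arch { uint64_t regs[NR_VCPU_REGS]; uint64_t cr2; };
struct vcpu_svm { struct vmcb *vmcb; };

#define VMCB_DIRTY_MAX 9                           /* illustrative, see earlier sketch */
static void mark_all_clean(struct vmcb *vmcb)
{
	vmcb->control.clean = (1u << VMCB_DIRTY_MAX) - 1;
}

static void svm_vcpu_run_sketch(struct vcpu_svm *svm, struct kvm_vcpu_arch *arch)
{
	/* Lines 3916-3931: seed the VMCB with the registers VMRUN loads itself. */
	svm->vmcb->save.rax = arch->regs[VCPU_REGS_RAX];
	svm->vmcb->save.rsp = arch->regs[VCPU_REGS_RSP];
	svm->vmcb->save.rip = arch->regs[VCPU_REGS_RIP];
	svm->vmcb->save.cr2 = arch->cr2;

	/* ... clgi; inline asm doing VMLOAD/VMRUN/VMSAVE; stgi (line 4011 ff.) ... */

	/* Lines 4073-4076: pull the guest's values back out after the exit. */
	arch->cr2                 = svm->vmcb->save.cr2;
	arch->regs[VCPU_REGS_RAX] = svm->vmcb->save.rax;
	arch->regs[VCPU_REGS_RSP] = svm->vmcb->save.rsp;
	arch->regs[VCPU_REGS_RIP] = svm->vmcb->save.rip;

	/* Line 4111: hardware and VMCB are in sync, so everything is clean again. */
	mark_all_clean(svm->vmcb);
}
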
4118 svm->vmcb->save.cr3 = root; in svm_set_cr3()
4119 mark_dirty(svm->vmcb, VMCB_CR); in svm_set_cr3()
4127 svm->vmcb->control.nested_cr3 = root; in set_tdp_cr3()
4128 mark_dirty(svm->vmcb, VMCB_NPT); in set_tdp_cr3()
4131 svm->vmcb->save.cr3 = kvm_read_cr3(vcpu); in set_tdp_cr3()
4132 mark_dirty(svm->vmcb, VMCB_CR); in set_tdp_cr3()
4329 struct vmcb *vmcb = svm->vmcb; in svm_check_intercept() local
4382 vmcb->control.exit_info_1 = 1; in svm_check_intercept()
4384 vmcb->control.exit_info_1 = 0; in svm_check_intercept()
4420 vmcb->control.exit_info_1 = exit_info; in svm_check_intercept()
4421 vmcb->control.exit_info_2 = info->next_rip; in svm_check_intercept()
4431 vmcb->control.next_rip = info->next_rip; in svm_check_intercept()
4432 vmcb->control.exit_code = icpt_info.exit_code; in svm_check_intercept()