Lines matching full:save in the KVM SVM code
751 to_svm(vcpu)->vmcb->save.efer = efer | EFER_SVME; in svm_set_efer()
833 svm->int3_rip = rip + svm->vmcb->save.cs.base; in svm_queue_exception()
1175 svm->vmcb->save.rflags &= ~X86_EFLAGS_TF; in disable_nmi_singlestep()
1177 svm->vmcb->save.rflags &= ~X86_EFLAGS_RF; in disable_nmi_singlestep()
1532 struct vmcb_save_area *save = &svm->vmcb->save; in init_vmcb() local
1597 init_seg(&save->es); in init_vmcb()
1598 init_seg(&save->ss); in init_vmcb()
1599 init_seg(&save->ds); in init_vmcb()
1600 init_seg(&save->fs); in init_vmcb()
1601 init_seg(&save->gs); in init_vmcb()
1603 save->cs.selector = 0xf000; in init_vmcb()
1604 save->cs.base = 0xffff0000; in init_vmcb()
1606 save->cs.attrib = SVM_SELECTOR_READ_MASK | SVM_SELECTOR_P_MASK | in init_vmcb()
1608 save->cs.limit = 0xffff; in init_vmcb()
1610 save->gdtr.limit = 0xffff; in init_vmcb()
1611 save->idtr.limit = 0xffff; in init_vmcb()
1613 init_sys_seg(&save->ldtr, SEG_TYPE_LDT); in init_vmcb()
1614 init_sys_seg(&save->tr, SEG_TYPE_BUSY_TSS16); in init_vmcb()
1617 save->dr6 = 0xffff0ff0; in init_vmcb()
1619 save->rip = 0x0000fff0; in init_vmcb()
1620 svm->vcpu.arch.regs[VCPU_REGS_RIP] = save->rip; in init_vmcb()
1623 * svm_set_cr0() sets PG and WP and clears NW and CD on save->cr0. in init_vmcb()
1629 save->cr4 = X86_CR4_PAE; in init_vmcb()
1639 save->g_pat = svm->vcpu.arch.pat; in init_vmcb()
1640 save->cr3 = 0; in init_vmcb()
1641 save->cr4 = 0; in init_vmcb()
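
The init_vmcb() matches above seed the VMCB state-save area with the architectural x86 reset state: CS selector 0xf000 with base 0xffff0000, 16-bit limits of 0xffff for CS and the descriptor tables, DR6 = 0xffff0ff0 and RIP = 0xfff0, so the first instruction fetch lands at the 0xfffffff0 reset vector. Below is a minimal, self-contained sketch of that pattern; the struct layout and names (seg_sketch, vmcb_save_sketch) are illustrative assumptions, not the kernel's vmcb_save_area.

/* Illustrative sketch only: a trimmed-down stand-in for the VMCB state-save
 * area, seeded with the x86 reset values visible in the init_vmcb() matches
 * above.  The struct layout is an assumption for demonstration purposes. */
#include <stdint.h>
#include <stdio.h>

struct seg_sketch {
	uint16_t selector;
	uint16_t attrib;
	uint32_t limit;
	uint64_t base;
};

struct vmcb_save_sketch {
	struct seg_sketch cs, gdtr, idtr;
	uint64_t dr6, rip;
};

static void init_save_sketch(struct vmcb_save_sketch *save)
{
	/* Real-mode reset state: code fetches start at 0xffff0000 + 0xfff0. */
	save->cs.selector = 0xf000;
	save->cs.base     = 0xffff0000;
	save->cs.limit    = 0xffff;

	/* Descriptor tables start with 16-bit limits and base 0. */
	save->gdtr.limit = 0xffff;
	save->idtr.limit = 0xffff;

	save->dr6 = 0xffff0ff0;	/* architectural DR6 reset value */
	save->rip = 0x0000fff0;	/* reset vector offset within CS */
}

int main(void)
{
	struct vmcb_save_sketch save = { 0 };

	init_save_sketch(&save);
	printf("reset PC = %#llx\n",
	       (unsigned long long)(save.cs.base + save.rip));
	return 0;
}
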
2350 unsigned long rflags = svm->vmcb->save.rflags; in svm_get_rflags()
2372 to_svm(vcpu)->vmcb->save.rflags = rflags; in svm_set_rflags()
2399 struct vmcb_save_area *save = &to_svm(vcpu)->vmcb->save; in svm_seg() local
2402 case VCPU_SREG_CS: return &save->cs; in svm_seg()
2403 case VCPU_SREG_DS: return &save->ds; in svm_seg()
2404 case VCPU_SREG_ES: return &save->es; in svm_seg()
2405 case VCPU_SREG_FS: return &save->fs; in svm_seg()
2406 case VCPU_SREG_GS: return &save->gs; in svm_seg()
2407 case VCPU_SREG_SS: return &save->ss; in svm_seg()
2408 case VCPU_SREG_TR: return &save->tr; in svm_seg()
2409 case VCPU_SREG_LDTR: return &save->ldtr; in svm_seg()
2486 var->dpl = to_svm(vcpu)->vmcb->save.cpl; in svm_get_segment()
2493 struct vmcb_save_area *save = &to_svm(vcpu)->vmcb->save; in svm_get_cpl() local
2495 return save->cpl; in svm_get_cpl()
2502 dt->size = svm->vmcb->save.idtr.limit; in svm_get_idt()
2503 dt->address = svm->vmcb->save.idtr.base; in svm_get_idt()
2510 svm->vmcb->save.idtr.limit = dt->size; in svm_set_idt()
2511 svm->vmcb->save.idtr.base = dt->address ; in svm_set_idt()
2519 dt->size = svm->vmcb->save.gdtr.limit; in svm_get_gdt()
2520 dt->address = svm->vmcb->save.gdtr.base; in svm_get_gdt()
2527 svm->vmcb->save.gdtr.limit = dt->size; in svm_set_gdt()
2528 svm->vmcb->save.gdtr.base = dt->address ; in svm_set_gdt()
2547 u64 *hcr0 = &svm->vmcb->save.cr0; in update_cr0_intercept()
2571 svm->vmcb->save.efer |= EFER_LMA | EFER_LME; in svm_set_cr0()
2576 svm->vmcb->save.efer &= ~(EFER_LMA | EFER_LME); in svm_set_cr0()
2592 svm->vmcb->save.cr0 = cr0; in svm_set_cr0()
2600 unsigned long old_cr4 = to_svm(vcpu)->vmcb->save.cr4; in svm_set_cr4()
2612 to_svm(vcpu)->vmcb->save.cr4 = cr4; in svm_set_cr4()
2643 svm->vmcb->save.cpl = (var->dpl & 3); in svm_set_segment()
2677 return to_svm(vcpu)->vmcb->save.dr6; in svm_get_dr6()
2684 svm->vmcb->save.dr6 = value; in svm_set_dr6()
2697 vcpu->arch.dr7 = svm->vmcb->save.dr7; in svm_sync_dirty_debug_regs()
2707 svm->vmcb->save.dr7 = value; in svm_set_dr7()
2756 svm->vmcb->save.cs.base + svm->vmcb->save.rip; in db_interception()
2769 kvm_run->debug.arch.pc = svm->vmcb->save.cs.base + svm->vmcb->save.rip; in bp_interception()
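
The db_interception()/bp_interception() matches report the guest program counter to userspace as the CS base plus the saved RIP, the same cs.base + rip arithmetic used for int3_rip in svm_queue_exception() at line 833. A tiny sketch of that address computation (guest_pc() is a hypothetical helper, not a kernel function):

/* Sketch: how the debug/breakpoint handlers in the matches above form the
 * guest PC they report. */
#include <stdint.h>
#include <stdio.h>

static uint64_t guest_pc(uint64_t cs_base, uint64_t rip)
{
	/* Linear address of the trapping instruction: segment base + RIP. */
	return cs_base + rip;
}

int main(void)
{
	/* e.g. the real-mode reset state from init_vmcb(): 0xffff0000 + 0xfff0 */
	printf("pc = %#llx\n", (unsigned long long)guest_pc(0xffff0000, 0xfff0));
	return 0;
}
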
3019 if (svm->vmcb->save.cpl) { in nested_svm_check_permissions()
3061 /* This function returns true if it is save to enable the irq window */
3093 trace_kvm_nested_intr_vmexit(svm->vmcb->save.rip); in nested_svm_intr()
3100 /* This function returns true if it is save to enable the nmi window */
3364 nested_vmcb->save.es = vmcb->save.es; in nested_svm_vmexit()
3365 nested_vmcb->save.cs = vmcb->save.cs; in nested_svm_vmexit()
3366 nested_vmcb->save.ss = vmcb->save.ss; in nested_svm_vmexit()
3367 nested_vmcb->save.ds = vmcb->save.ds; in nested_svm_vmexit()
3368 nested_vmcb->save.gdtr = vmcb->save.gdtr; in nested_svm_vmexit()
3369 nested_vmcb->save.idtr = vmcb->save.idtr; in nested_svm_vmexit()
3370 nested_vmcb->save.efer = svm->vcpu.arch.efer; in nested_svm_vmexit()
3371 nested_vmcb->save.cr0 = kvm_read_cr0(&svm->vcpu); in nested_svm_vmexit()
3372 nested_vmcb->save.cr3 = kvm_read_cr3(&svm->vcpu); in nested_svm_vmexit()
3373 nested_vmcb->save.cr2 = vmcb->save.cr2; in nested_svm_vmexit()
3374 nested_vmcb->save.cr4 = svm->vcpu.arch.cr4; in nested_svm_vmexit()
3375 nested_vmcb->save.rflags = kvm_get_rflags(&svm->vcpu); in nested_svm_vmexit()
3376 nested_vmcb->save.rip = vmcb->save.rip; in nested_svm_vmexit()
3377 nested_vmcb->save.rsp = vmcb->save.rsp; in nested_svm_vmexit()
3378 nested_vmcb->save.rax = vmcb->save.rax; in nested_svm_vmexit()
3379 nested_vmcb->save.dr7 = vmcb->save.dr7; in nested_svm_vmexit()
3380 nested_vmcb->save.dr6 = vmcb->save.dr6; in nested_svm_vmexit()
3381 nested_vmcb->save.cpl = vmcb->save.cpl; in nested_svm_vmexit()
3428 /* Restore selected save entries */ in nested_svm_vmexit()
3429 svm->vmcb->save.es = hsave->save.es; in nested_svm_vmexit()
3430 svm->vmcb->save.cs = hsave->save.cs; in nested_svm_vmexit()
3431 svm->vmcb->save.ss = hsave->save.ss; in nested_svm_vmexit()
3432 svm->vmcb->save.ds = hsave->save.ds; in nested_svm_vmexit()
3433 svm->vmcb->save.gdtr = hsave->save.gdtr; in nested_svm_vmexit()
3434 svm->vmcb->save.idtr = hsave->save.idtr; in nested_svm_vmexit()
3435 kvm_set_rflags(&svm->vcpu, hsave->save.rflags); in nested_svm_vmexit()
3436 svm_set_efer(&svm->vcpu, hsave->save.efer); in nested_svm_vmexit()
3437 svm_set_cr0(&svm->vcpu, hsave->save.cr0 | X86_CR0_PE); in nested_svm_vmexit()
3438 svm_set_cr4(&svm->vcpu, hsave->save.cr4); in nested_svm_vmexit()
3440 svm->vmcb->save.cr3 = hsave->save.cr3; in nested_svm_vmexit()
3441 svm->vcpu.arch.cr3 = hsave->save.cr3; in nested_svm_vmexit()
3443 (void)kvm_set_cr3(&svm->vcpu, hsave->save.cr3); in nested_svm_vmexit()
3445 kvm_register_write(&svm->vcpu, VCPU_REGS_RAX, hsave->save.rax); in nested_svm_vmexit()
3446 kvm_register_write(&svm->vcpu, VCPU_REGS_RSP, hsave->save.rsp); in nested_svm_vmexit()
3447 kvm_register_write(&svm->vcpu, VCPU_REGS_RIP, hsave->save.rip); in nested_svm_vmexit()
3448 svm->vmcb->save.dr7 = 0; in nested_svm_vmexit()
3449 svm->vmcb->save.cpl = 0; in nested_svm_vmexit()
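
The nested_svm_vmexit() matches follow a two-step shuffle: the exiting L2 register and segment state is first copied from the active VMCB into the nested VMCB that L1 will inspect (lines 3364-3381), then L1's own state, stashed in hsave at VMRUN time, is copied back into the active VMCB with dr7 and cpl forced to 0 (lines 3429-3449); the enter_svm_guest_mode() and nested_svm_vmrun() matches further down perform the mirror-image copies on the way in. A hedged, self-contained sketch of the exit-side flow, using an assumed stand-in struct rather than the real vmcb_save_area:

/* Sketch of the #VMEXIT save-area shuffle seen above.  All types and field
 * names are illustrative assumptions; only the direction of the copies
 * mirrors the matched kernel lines. */
#include <stdint.h>

struct save_sketch {
	uint64_t efer, cr0, cr3, cr4;
	uint64_t rip, rsp, rax, rflags;
	uint64_t dr6, dr7;
	uint8_t  cpl;
};

struct vmcb_sketch {
	struct save_sketch save;
};

/* Step 1: export the exiting L2 state into the nested VMCB that the L1
 * hypervisor will inspect. */
static void export_l2_state(struct vmcb_sketch *nested, const struct vmcb_sketch *active)
{
	nested->save = active->save;
}

/* Step 2: restore L1's own state, snapshotted at VMRUN time, into the
 * active VMCB; dr7 and cpl go back to a clean state. */
static void restore_l1_state(struct vmcb_sketch *active, const struct vmcb_sketch *hsave)
{
	active->save = hsave->save;
	active->save.dr7 = 0;
	active->save.cpl = 0;
}

int main(void)
{
	struct vmcb_sketch active = { .save = { .rip = 0x1000, .cpl = 3 } };
	struct vmcb_sketch nested = { 0 }, hsave = { 0 };

	export_l2_state(&nested, &active);	/* L2 state becomes visible to L1 */
	restore_l1_state(&active, &hsave);	/* L1 resumes with its own state */
	return 0;
}
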
3534 svm->vmcb->save.es = nested_vmcb->save.es; in enter_svm_guest_mode()
3535 svm->vmcb->save.cs = nested_vmcb->save.cs; in enter_svm_guest_mode()
3536 svm->vmcb->save.ss = nested_vmcb->save.ss; in enter_svm_guest_mode()
3537 svm->vmcb->save.ds = nested_vmcb->save.ds; in enter_svm_guest_mode()
3538 svm->vmcb->save.gdtr = nested_vmcb->save.gdtr; in enter_svm_guest_mode()
3539 svm->vmcb->save.idtr = nested_vmcb->save.idtr; in enter_svm_guest_mode()
3540 kvm_set_rflags(&svm->vcpu, nested_vmcb->save.rflags); in enter_svm_guest_mode()
3541 svm_set_efer(&svm->vcpu, nested_vmcb->save.efer); in enter_svm_guest_mode()
3542 svm_set_cr0(&svm->vcpu, nested_vmcb->save.cr0); in enter_svm_guest_mode()
3543 svm_set_cr4(&svm->vcpu, nested_vmcb->save.cr4); in enter_svm_guest_mode()
3545 svm->vmcb->save.cr3 = nested_vmcb->save.cr3; in enter_svm_guest_mode()
3546 svm->vcpu.arch.cr3 = nested_vmcb->save.cr3; in enter_svm_guest_mode()
3548 (void)kvm_set_cr3(&svm->vcpu, nested_vmcb->save.cr3); in enter_svm_guest_mode()
3553 svm->vmcb->save.cr2 = svm->vcpu.arch.cr2 = nested_vmcb->save.cr2; in enter_svm_guest_mode()
3554 kvm_register_write(&svm->vcpu, VCPU_REGS_RAX, nested_vmcb->save.rax); in enter_svm_guest_mode()
3555 kvm_register_write(&svm->vcpu, VCPU_REGS_RSP, nested_vmcb->save.rsp); in enter_svm_guest_mode()
3556 kvm_register_write(&svm->vcpu, VCPU_REGS_RIP, nested_vmcb->save.rip); in enter_svm_guest_mode()
3559 svm->vmcb->save.rax = nested_vmcb->save.rax; in enter_svm_guest_mode()
3560 svm->vmcb->save.rsp = nested_vmcb->save.rsp; in enter_svm_guest_mode()
3561 svm->vmcb->save.rip = nested_vmcb->save.rip; in enter_svm_guest_mode()
3562 svm->vmcb->save.dr7 = nested_vmcb->save.dr7; in enter_svm_guest_mode()
3563 svm->vmcb->save.dr6 = nested_vmcb->save.dr6; in enter_svm_guest_mode()
3564 svm->vmcb->save.cpl = nested_vmcb->save.cpl; in enter_svm_guest_mode()
3632 vmcb_gpa = svm->vmcb->save.rax; in nested_svm_vmrun()
3634 nested_vmcb = nested_svm_map(svm, svm->vmcb->save.rax, &page); in nested_svm_vmrun()
3649 trace_kvm_nested_vmrun(svm->vmcb->save.rip, vmcb_gpa, in nested_svm_vmrun()
3650 nested_vmcb->save.rip, in nested_svm_vmrun()
3665 * Save the old vmcb, so we don't need to pick what we save, but can in nested_svm_vmrun()
3668 hsave->save.es = vmcb->save.es; in nested_svm_vmrun()
3669 hsave->save.cs = vmcb->save.cs; in nested_svm_vmrun()
3670 hsave->save.ss = vmcb->save.ss; in nested_svm_vmrun()
3671 hsave->save.ds = vmcb->save.ds; in nested_svm_vmrun()
3672 hsave->save.gdtr = vmcb->save.gdtr; in nested_svm_vmrun()
3673 hsave->save.idtr = vmcb->save.idtr; in nested_svm_vmrun()
3674 hsave->save.efer = svm->vcpu.arch.efer; in nested_svm_vmrun()
3675 hsave->save.cr0 = kvm_read_cr0(&svm->vcpu); in nested_svm_vmrun()
3676 hsave->save.cr4 = svm->vcpu.arch.cr4; in nested_svm_vmrun()
3677 hsave->save.rflags = kvm_get_rflags(&svm->vcpu); in nested_svm_vmrun()
3678 hsave->save.rip = kvm_rip_read(&svm->vcpu); in nested_svm_vmrun()
3679 hsave->save.rsp = vmcb->save.rsp; in nested_svm_vmrun()
3680 hsave->save.rax = vmcb->save.rax; in nested_svm_vmrun()
3682 hsave->save.cr3 = vmcb->save.cr3; in nested_svm_vmrun()
3684 hsave->save.cr3 = kvm_read_cr3(&svm->vcpu); in nested_svm_vmrun()
3695 to_vmcb->save.fs = from_vmcb->save.fs; in nested_svm_vmloadsave()
3696 to_vmcb->save.gs = from_vmcb->save.gs; in nested_svm_vmloadsave()
3697 to_vmcb->save.tr = from_vmcb->save.tr; in nested_svm_vmloadsave()
3698 to_vmcb->save.ldtr = from_vmcb->save.ldtr; in nested_svm_vmloadsave()
3699 to_vmcb->save.kernel_gs_base = from_vmcb->save.kernel_gs_base; in nested_svm_vmloadsave()
3700 to_vmcb->save.star = from_vmcb->save.star; in nested_svm_vmloadsave()
3701 to_vmcb->save.lstar = from_vmcb->save.lstar; in nested_svm_vmloadsave()
3702 to_vmcb->save.cstar = from_vmcb->save.cstar; in nested_svm_vmloadsave()
3703 to_vmcb->save.sfmask = from_vmcb->save.sfmask; in nested_svm_vmloadsave()
3704 to_vmcb->save.sysenter_cs = from_vmcb->save.sysenter_cs; in nested_svm_vmloadsave()
3705 to_vmcb->save.sysenter_esp = from_vmcb->save.sysenter_esp; in nested_svm_vmloadsave()
3706 to_vmcb->save.sysenter_eip = from_vmcb->save.sysenter_eip; in nested_svm_vmloadsave()
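
nested_svm_vmloadsave() copies exactly the save-area subset that the hardware VMLOAD/VMSAVE instructions cover: FS, GS, TR, LDTR, KERNEL_GS_BASE, the STAR/LSTAR/CSTAR/SFMASK syscall MSRs and the SYSENTER registers. The same helper serves both directions, with vmload_interception() and vmsave_interception() merely swapping source and destination. A sketch of that copy-only-these-fields helper, with an assumed simplified layout:

/* Illustrative only: the field subset below matches what the
 * nested_svm_vmloadsave() lines above copy; the structs are simplified
 * stand-ins for the real VMCB state-save area. */
#include <stdint.h>

struct seg_sketch { uint16_t selector; uint64_t base; };

struct save_sketch {
	struct seg_sketch fs, gs, tr, ldtr;
	uint64_t kernel_gs_base;
	uint64_t star, lstar, cstar, sfmask;
	uint64_t sysenter_cs, sysenter_esp, sysenter_eip;
};

/* One helper, two call sites: VMLOAD copies guest->active, VMSAVE copies
 * active->guest, just with 'to' and 'from' swapped. */
static void vmloadsave_sketch(struct save_sketch *to, const struct save_sketch *from)
{
	to->fs   = from->fs;
	to->gs   = from->gs;
	to->tr   = from->tr;
	to->ldtr = from->ldtr;
	to->kernel_gs_base = from->kernel_gs_base;
	to->star   = from->star;
	to->lstar  = from->lstar;
	to->cstar  = from->cstar;
	to->sfmask = from->sfmask;
	to->sysenter_cs  = from->sysenter_cs;
	to->sysenter_esp = from->sysenter_esp;
	to->sysenter_eip = from->sysenter_eip;
}

int main(void)
{
	struct save_sketch a = { .star = 0xdeadbeef }, b = { 0 };

	vmloadsave_sketch(&b, &a);	/* b now carries a's VMLOAD/VMSAVE state */
	return (int)(b.star != a.star);
}
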
3718 nested_vmcb = nested_svm_map(svm, svm->vmcb->save.rax, &page); in vmload_interception()
3740 nested_vmcb = nested_svm_map(svm, svm->vmcb->save.rax, &page); in vmsave_interception()
3758 /* Save rip after vmrun instruction */ in vmrun_interception()
3830 trace_kvm_invlpga(svm->vmcb->save.rip, kvm_register_read(&svm->vcpu, VCPU_REGS_RCX), in invlpga_interception()
3842 trace_kvm_skinit(svm->vmcb->save.rip, kvm_register_read(&svm->vcpu, VCPU_REGS_RAX)); in skinit_interception()
4159 msr_info->data = svm->vmcb->save.star; in svm_get_msr()
4163 msr_info->data = svm->vmcb->save.lstar; in svm_get_msr()
4166 msr_info->data = svm->vmcb->save.cstar; in svm_get_msr()
4169 msr_info->data = svm->vmcb->save.kernel_gs_base; in svm_get_msr()
4172 msr_info->data = svm->vmcb->save.sfmask; in svm_get_msr()
4176 msr_info->data = svm->vmcb->save.sysenter_cs; in svm_get_msr()
4195 msr_info->data = svm->vmcb->save.dbgctl; in svm_get_msr()
4198 msr_info->data = svm->vmcb->save.br_from; in svm_get_msr()
4201 msr_info->data = svm->vmcb->save.br_to; in svm_get_msr()
4204 msr_info->data = svm->vmcb->save.last_excp_from; in svm_get_msr()
4207 msr_info->data = svm->vmcb->save.last_excp_to; in svm_get_msr()
4315 svm->vmcb->save.g_pat = data; in svm_set_msr()
4373 svm->vmcb->save.star = data; in svm_set_msr()
4377 svm->vmcb->save.lstar = data; in svm_set_msr()
4380 svm->vmcb->save.cstar = data; in svm_set_msr()
4383 svm->vmcb->save.kernel_gs_base = data; in svm_set_msr()
4386 svm->vmcb->save.sfmask = data; in svm_set_msr()
4390 svm->vmcb->save.sysenter_cs = data; in svm_set_msr()
4394 svm->vmcb->save.sysenter_eip = data; in svm_set_msr()
4398 svm->vmcb->save.sysenter_esp = data; in svm_set_msr()
4421 svm->vmcb->save.dbgctl = data; in svm_set_msr()
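
The svm_get_msr()/svm_set_msr() matches show that several guest MSRs (STAR, LSTAR, CSTAR, SFMASK, KERNEL_GS_BASE, the SYSENTER registers, DEBUGCTL and the last-branch/last-exception records) have no separate software copy: reads and writes are serviced directly from the corresponding VMCB save-area fields, which the CPU loads and stores around VMRUN. A minimal sketch of that dispatch shape; the MSR indices are the architectural ones, the struct is an assumption:

/* Sketch: MSR accesses backed directly by save-area fields, as in the
 * svm_get_msr()/svm_set_msr() matches above. */
#include <stdint.h>
#include <stddef.h>

#define MSR_STAR           0xc0000081
#define MSR_LSTAR          0xc0000082
#define MSR_CSTAR          0xc0000083
#define MSR_SYSCALL_MASK   0xc0000084	/* a.k.a. SFMASK */
#define MSR_KERNEL_GS_BASE 0xc0000102

struct save_sketch {
	uint64_t star, lstar, cstar, sfmask, kernel_gs_base;
};

/* Return a pointer to the save-area slot that backs the MSR, or NULL if the
 * MSR is handled elsewhere. */
static uint64_t *msr_slot(struct save_sketch *save, uint32_t msr)
{
	switch (msr) {
	case MSR_STAR:           return &save->star;
	case MSR_LSTAR:          return &save->lstar;
	case MSR_CSTAR:          return &save->cstar;
	case MSR_SYSCALL_MASK:   return &save->sfmask;
	case MSR_KERNEL_GS_BASE: return &save->kernel_gs_base;
	default:                 return NULL;
	}
}

int main(void)
{
	struct save_sketch save = { 0 };
	uint64_t *slot = msr_slot(&save, MSR_LSTAR);

	if (slot)
		*slot = 0xffffffff81000000ull;	/* the "wrmsr" lands in the VMCB */
	return 0;
}
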
4875 struct vmcb_save_area *save = &svm->vmcb->save; in dump_vmcb() local
4910 pr_err("VMCB State Save Area:\n"); in dump_vmcb()
4913 save->es.selector, save->es.attrib, in dump_vmcb()
4914 save->es.limit, save->es.base); in dump_vmcb()
4917 save->cs.selector, save->cs.attrib, in dump_vmcb()
4918 save->cs.limit, save->cs.base); in dump_vmcb()
4921 save->ss.selector, save->ss.attrib, in dump_vmcb()
4922 save->ss.limit, save->ss.base); in dump_vmcb()
4925 save->ds.selector, save->ds.attrib, in dump_vmcb()
4926 save->ds.limit, save->ds.base); in dump_vmcb()
4929 save->fs.selector, save->fs.attrib, in dump_vmcb()
4930 save->fs.limit, save->fs.base); in dump_vmcb()
4933 save->gs.selector, save->gs.attrib, in dump_vmcb()
4934 save->gs.limit, save->gs.base); in dump_vmcb()
4937 save->gdtr.selector, save->gdtr.attrib, in dump_vmcb()
4938 save->gdtr.limit, save->gdtr.base); in dump_vmcb()
4941 save->ldtr.selector, save->ldtr.attrib, in dump_vmcb()
4942 save->ldtr.limit, save->ldtr.base); in dump_vmcb()
4945 save->idtr.selector, save->idtr.attrib, in dump_vmcb()
4946 save->idtr.limit, save->idtr.base); in dump_vmcb()
4949 save->tr.selector, save->tr.attrib, in dump_vmcb()
4950 save->tr.limit, save->tr.base); in dump_vmcb()
4952 save->cpl, save->efer); in dump_vmcb()
4954 "cr0:", save->cr0, "cr2:", save->cr2); in dump_vmcb()
4956 "cr3:", save->cr3, "cr4:", save->cr4); in dump_vmcb()
4958 "dr6:", save->dr6, "dr7:", save->dr7); in dump_vmcb()
4960 "rip:", save->rip, "rflags:", save->rflags); in dump_vmcb()
4962 "rsp:", save->rsp, "rax:", save->rax); in dump_vmcb()
4964 "star:", save->star, "lstar:", save->lstar); in dump_vmcb()
4966 "cstar:", save->cstar, "sfmask:", save->sfmask); in dump_vmcb()
4968 "kernel_gs_base:", save->kernel_gs_base, in dump_vmcb()
4969 "sysenter_cs:", save->sysenter_cs); in dump_vmcb()
4971 "sysenter_esp:", save->sysenter_esp, in dump_vmcb()
4972 "sysenter_eip:", save->sysenter_eip); in dump_vmcb()
4974 "gpat:", save->g_pat, "dbgctl:", save->dbgctl); in dump_vmcb()
4976 "br_from:", save->br_from, "br_to:", save->br_to); in dump_vmcb()
4978 "excp_from:", save->last_excp_from, in dump_vmcb()
4979 "excp_to:", save->last_excp_to); in dump_vmcb()
4999 vcpu->arch.cr0 = svm->vmcb->save.cr0; in handle_exit()
5001 vcpu->arch.cr3 = svm->vmcb->save.cr3; in handle_exit()
5013 trace_kvm_nested_vmexit(svm->vmcb->save.rip, exit_code, in handle_exit()
5520 svm->vmcb->save.rflags |= (X86_EFLAGS_TF | X86_EFLAGS_RF); in enable_nmi_window()
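
enable_nmi_window() here and disable_nmi_singlestep() at lines 1175/1177 bracket one technique: with NMIs blocked in the guest, KVM sets TF and RF in the saved RFLAGS so the guest single-steps exactly one instruction and traps with #DB, at which point the flags are cleared and the pending NMI can be injected. A hedged sketch of that flag dance on a bare RFLAGS value (the bit positions are architectural; the helper names are illustrative):

/* Sketch of the single-step NMI-window technique visible in the matches:
 * set TF|RF before resuming, clear them again once the #DB arrives. */
#include <stdint.h>

#define X86_EFLAGS_TF (1u << 8)		/* trap flag: #DB after one instruction */
#define X86_EFLAGS_RF (1u << 16)	/* resume flag: suppress instruction breakpoint */

static uint64_t open_nmi_window(uint64_t rflags)
{
	return rflags | X86_EFLAGS_TF | X86_EFLAGS_RF;
}

static uint64_t close_nmi_window(uint64_t rflags)
{
	return rflags & ~(uint64_t)(X86_EFLAGS_TF | X86_EFLAGS_RF);
}

int main(void)
{
	uint64_t rflags = 0x2;	/* reserved bit 1 is always set */

	rflags = open_nmi_window(rflags);
	rflags = close_nmi_window(rflags);
	return (int)(rflags != 0x2);
}
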
5660 svm->vmcb->save.rax = vcpu->arch.regs[VCPU_REGS_RAX]; in svm_vcpu_run()
5661 svm->vmcb->save.rsp = vcpu->arch.regs[VCPU_REGS_RSP]; in svm_vcpu_run()
5662 svm->vmcb->save.rip = vcpu->arch.regs[VCPU_REGS_RIP]; in svm_vcpu_run()
5691 svm->vmcb->save.cr2 = vcpu->arch.cr2; in svm_vcpu_run()
5733 /* Save guest registers, load host registers */ in svm_vcpu_run()
5812 * SPEC_CTRL MSR it may have left it on; save the value and in svm_vcpu_run()
5814 * it to the atomic save/restore list. Especially as the former in svm_vcpu_run()
5819 * save it. in svm_vcpu_run()
5823 * save it. in svm_vcpu_run()
5834 vcpu->arch.cr2 = svm->vmcb->save.cr2; in svm_vcpu_run()
5835 vcpu->arch.regs[VCPU_REGS_RAX] = svm->vmcb->save.rax; in svm_vcpu_run()
5836 vcpu->arch.regs[VCPU_REGS_RSP] = svm->vmcb->save.rsp; in svm_vcpu_run()
5837 vcpu->arch.regs[VCPU_REGS_RIP] = svm->vmcb->save.rip; in svm_vcpu_run()
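
The svm_vcpu_run() matches show which state travels through the save area on every run: RAX, RSP and RIP are written into the VMCB just before VMRUN (lines 5660-5662), CR2 as well (line 5691), and all four are read back after the exit (lines 5834-5837); the remaining general-purpose registers are handled by the VMRUN assembly sequence. A sketch of that sync-in/sync-out pattern around a placeholder for the real VMRUN:

/* Sketch of the pre/post-VMRUN register sync in svm_vcpu_run() above.
 * run_guest_placeholder() stands in for the real VMRUN path and is purely
 * illustrative. */
#include <stdint.h>

struct save_sketch { uint64_t rax, rsp, rip, cr2; };
struct vcpu_sketch { uint64_t rax, rsp, rip, cr2; };

static void run_guest_placeholder(struct save_sketch *save)
{
	/* Hardware would run the guest here and update the save area on exit;
	 * simulate a one-byte instruction being executed. */
	save->rip += 1;
}

static void vcpu_run_sketch(struct vcpu_sketch *vcpu, struct save_sketch *save)
{
	/* Sync in: only RAX/RSP/RIP/CR2 go through the save area. */
	save->rax = vcpu->rax;
	save->rsp = vcpu->rsp;
	save->rip = vcpu->rip;
	save->cr2 = vcpu->cr2;

	run_guest_placeholder(save);

	/* Sync out: pick up whatever the guest left behind. */
	vcpu->cr2 = save->cr2;
	vcpu->rax = save->rax;
	vcpu->rsp = save->rsp;
	vcpu->rip = save->rip;
}

int main(void)
{
	struct vcpu_sketch vcpu = { .rip = 0x1000 };
	struct save_sketch save = { 0 };

	vcpu_run_sketch(&vcpu, &save);
	return (int)(vcpu.rip != 0x1001);
}
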
5881 svm->vmcb->save.cr3 = __sme_set(root); in svm_set_cr3()
5893 svm->vmcb->save.cr3 = kvm_read_cr3(vcpu); in set_tdp_cr3()
6277 svm->vmcb->save.rax = vcpu->arch.regs[VCPU_REGS_RAX]; in svm_pre_enter_smm()
6278 svm->vmcb->save.rsp = vcpu->arch.regs[VCPU_REGS_RSP]; in svm_pre_enter_smm()
6279 svm->vmcb->save.rip = vcpu->arch.regs[VCPU_REGS_RIP]; in svm_pre_enter_smm()