Lines Matching full:save
100 kvm_init_shadow_npt_mmu(vcpu, X86_CR0_PG, hsave->save.cr4, hsave->save.efer, in nested_svm_init_mmu_context()
259 * to avoid TOC/TOU races. For these save area checks in nested_vmcb_check_save()
264 if ((vmcb12->save.efer & EFER_SVME) == 0) in nested_vmcb_check_save()
267 if (((vmcb12->save.cr0 & X86_CR0_CD) == 0) && (vmcb12->save.cr0 & X86_CR0_NW)) in nested_vmcb_check_save()
270 if (!kvm_dr6_valid(vmcb12->save.dr6) || !kvm_dr7_valid(vmcb12->save.dr7)) in nested_vmcb_check_save()
273 vmcb12_lma = (vmcb12->save.efer & EFER_LME) && (vmcb12->save.cr0 & X86_CR0_PG); in nested_vmcb_check_save()
276 if (!(vmcb12->save.cr4 & X86_CR4_PAE) || in nested_vmcb_check_save()
277 !(vmcb12->save.cr0 & X86_CR0_PE) || in nested_vmcb_check_save()
278 (vmcb12->save.cr3 & vcpu->arch.cr3_lm_rsvd_bits)) in nested_vmcb_check_save()
281 if (kvm_valid_cr4(&svm->vcpu, vmcb12->save.cr4)) in nested_vmcb_check_save()
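
The nested_vmcb_check_save() hits above are the consistency checks applied to the vmcb12 save area before a nested VMRUN is allowed to proceed: EFER.SVME must remain set, CR0.NW=1 with CR0.CD=0 is rejected, DR6/DR7 must be architecturally valid, and a long-mode guest (EFER.LME and CR0.PG both set) additionally needs CR4.PAE, CR0.PE and a CR3 with no reserved bits. A minimal standalone sketch of the same conditions; dr_valid() and cr4_valid() are hypothetical stand-ins for KVM's kvm_dr6_valid(), kvm_dr7_valid() and CR4 validity check.

/* Pared-down save area carrying only the fields these checks look at. */
struct save_area {
	unsigned long long efer, cr0, cr3, cr4, dr6, dr7;
};

#define EFER_SVME	(1ULL << 12)
#define EFER_LME	(1ULL << 8)
#define X86_CR0_PE	(1ULL << 0)
#define X86_CR0_NW	(1ULL << 29)
#define X86_CR0_CD	(1ULL << 30)
#define X86_CR0_PG	(1ULL << 31)
#define X86_CR4_PAE	(1ULL << 5)

/* Hypothetical validators; KVM uses kvm_dr6_valid()/kvm_dr7_valid() and its CR4 check. */
static int dr_valid(unsigned long long v)  { return (v >> 32) == 0; }
static int cr4_valid(unsigned long long v) { (void)v; return 1; }

static int nested_save_area_ok(const struct save_area *s, unsigned long long cr3_rsvd)
{
	int lma = (s->efer & EFER_LME) && (s->cr0 & X86_CR0_PG);

	if (!(s->efer & EFER_SVME))
		return 0;			/* the guest must keep SVME enabled */
	if (!(s->cr0 & X86_CR0_CD) && (s->cr0 & X86_CR0_NW))
		return 0;			/* NW=1 with CD=0 is an illegal CR0 combination */
	if (!dr_valid(s->dr6) || !dr_valid(s->dr7))
		return 0;
	if (lma && (!(s->cr4 & X86_CR4_PAE) || !(s->cr0 & X86_CR0_PE) ||
		    (s->cr3 & cr3_rsvd)))
		return 0;			/* long mode needs PAE, PE and a clean CR3 */
	return cr4_valid(s->cr4);
}
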
402 svm->vmcb->save.es = vmcb12->save.es; in nested_prepare_vmcb_save()
403 svm->vmcb->save.cs = vmcb12->save.cs; in nested_prepare_vmcb_save()
404 svm->vmcb->save.ss = vmcb12->save.ss; in nested_prepare_vmcb_save()
405 svm->vmcb->save.ds = vmcb12->save.ds; in nested_prepare_vmcb_save()
406 svm->vmcb->save.gdtr = vmcb12->save.gdtr; in nested_prepare_vmcb_save()
407 svm->vmcb->save.idtr = vmcb12->save.idtr; in nested_prepare_vmcb_save()
408 kvm_set_rflags(&svm->vcpu, vmcb12->save.rflags); in nested_prepare_vmcb_save()
415 svm_set_efer(&svm->vcpu, vmcb12->save.efer | EFER_SVME); in nested_prepare_vmcb_save()
417 svm_set_cr0(&svm->vcpu, vmcb12->save.cr0); in nested_prepare_vmcb_save()
418 svm_set_cr4(&svm->vcpu, vmcb12->save.cr4); in nested_prepare_vmcb_save()
419 svm->vmcb->save.cr2 = svm->vcpu.arch.cr2 = vmcb12->save.cr2; in nested_prepare_vmcb_save()
420 kvm_rax_write(&svm->vcpu, vmcb12->save.rax); in nested_prepare_vmcb_save()
421 kvm_rsp_write(&svm->vcpu, vmcb12->save.rsp); in nested_prepare_vmcb_save()
422 kvm_rip_write(&svm->vcpu, vmcb12->save.rip); in nested_prepare_vmcb_save()
425 svm->vmcb->save.rax = vmcb12->save.rax; in nested_prepare_vmcb_save()
426 svm->vmcb->save.rsp = vmcb12->save.rsp; in nested_prepare_vmcb_save()
427 svm->vmcb->save.rip = vmcb12->save.rip; in nested_prepare_vmcb_save()
428 svm->vmcb->save.dr7 = vmcb12->save.dr7; in nested_prepare_vmcb_save()
429 svm->vcpu.arch.dr6 = vmcb12->save.dr6; in nested_prepare_vmcb_save()
430 svm->vmcb->save.cpl = vmcb12->save.cpl; in nested_prepare_vmcb_save()
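
The nested_prepare_vmcb_save() hits show the two-speed copy used when vmcb12 state is loaded into the VMCB that will actually run: segment and descriptor-table registers are copied verbatim, while RFLAGS, EFER, CR0 and CR4 go through kvm_set_rflags(), svm_set_efer(), svm_set_cr0() and svm_set_cr4() so KVM can apply their side effects, with EFER.SVME forced on for the L2 guest. A rough, self-contained illustration of that split, with the setters reduced to hypothetical stubs.

struct seg { unsigned short sel, attrib; unsigned int limit; unsigned long long base; };

struct vmcb_state {
	struct seg es, cs, ss, ds, gdtr, idtr;
	unsigned long long efer, cr0, cr2, cr4, rflags, rax, rsp, rip, dr6, dr7;
	unsigned char cpl;
};

#define EFER_SVME (1ULL << 12)

/* Hypothetical stubs for kvm_set_rflags()/svm_set_efer()/svm_set_cr0()/svm_set_cr4(). */
static void set_rflags(struct vmcb_state *c, unsigned long long v) { c->rflags = v; }
static void set_efer(struct vmcb_state *c, unsigned long long v)   { c->efer = v; }
static void set_cr0(struct vmcb_state *c, unsigned long long v)    { c->cr0 = v; }
static void set_cr4(struct vmcb_state *c, unsigned long long v)    { c->cr4 = v; }

static void prepare_save(struct vmcb_state *cur, const struct vmcb_state *v12)
{
	/* Segment and descriptor-table state: plain structure copies. */
	cur->es = v12->es;     cur->cs = v12->cs;
	cur->ss = v12->ss;     cur->ds = v12->ds;
	cur->gdtr = v12->gdtr; cur->idtr = v12->idtr;

	/* Control state: routed through setters, with SVME forced on for L2. */
	set_rflags(cur, v12->rflags);
	set_efer(cur, v12->efer | EFER_SVME);
	set_cr0(cur, v12->cr0);
	set_cr4(cur, v12->cr4);

	/* Remaining scalar state copied directly. */
	cur->cr2 = v12->cr2;
	cur->rax = v12->rax;   cur->rsp = v12->rsp;   cur->rip = v12->rip;
	cur->dr7 = v12->dr7;   cur->dr6 = v12->dr6;   cur->cpl = v12->cpl;
}
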
479 ret = nested_svm_load_cr3(&svm->vcpu, vmcb12->save.cr3, in enter_svm_guest_mode()
506 vmcb12_gpa = svm->vmcb->save.rax; in nested_svm_vmrun()
533 trace_kvm_nested_vmrun(svm->vmcb->save.rip, vmcb12_gpa, in nested_svm_vmrun()
534 vmcb12->save.rip, in nested_svm_vmrun()
551 * Save the old vmcb, so we don't need to pick what we save, but can in nested_svm_vmrun()
554 hsave->save.es = vmcb->save.es; in nested_svm_vmrun()
555 hsave->save.cs = vmcb->save.cs; in nested_svm_vmrun()
556 hsave->save.ss = vmcb->save.ss; in nested_svm_vmrun()
557 hsave->save.ds = vmcb->save.ds; in nested_svm_vmrun()
558 hsave->save.gdtr = vmcb->save.gdtr; in nested_svm_vmrun()
559 hsave->save.idtr = vmcb->save.idtr; in nested_svm_vmrun()
560 hsave->save.efer = svm->vcpu.arch.efer; in nested_svm_vmrun()
561 hsave->save.cr0 = kvm_read_cr0(&svm->vcpu); in nested_svm_vmrun()
562 hsave->save.cr4 = svm->vcpu.arch.cr4; in nested_svm_vmrun()
563 hsave->save.rflags = kvm_get_rflags(&svm->vcpu); in nested_svm_vmrun()
564 hsave->save.rip = kvm_rip_read(&svm->vcpu); in nested_svm_vmrun()
565 hsave->save.rsp = vmcb->save.rsp; in nested_svm_vmrun()
566 hsave->save.rax = vmcb->save.rax; in nested_svm_vmrun()
568 hsave->save.cr3 = vmcb->save.cr3; in nested_svm_vmrun()
570 hsave->save.cr3 = kvm_read_cr3(&svm->vcpu); in nested_svm_vmrun()
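
The nested_svm_vmrun() hits are the host-state snapshot taken into hsave right before switching to L2; as the comment above says, everything is saved so the whole state can be restored on VMEXIT without picking fields. The only conditional piece visible here is CR3, which appears to depend on whether nested paging is enabled in this kernel version. A small, hedged sketch of just that choice, with npt_enabled and the two source values passed in as plain parameters.

struct host_save { unsigned long long cr3; };

/* Hypothetical reduction of the CR3 snapshot taken while filling hsave at VMRUN. */
static void snapshot_host_cr3(struct host_save *hsave,
			      unsigned long long vmcb_cr3,	/* current vmcb->save.cr3 */
			      unsigned long long vcpu_cr3,	/* kvm_read_cr3() result  */
			      int npt_enabled)
{
	/*
	 * With nested paging the VMCB carries the guest's own CR3; without it,
	 * the VMCB CR3 points at KVM's shadow page tables, so the architectural
	 * value has to be read back from the vCPU instead.
	 */
	hsave->cr3 = npt_enabled ? vmcb_cr3 : vcpu_cr3;
}
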
600 to_vmcb->save.fs = from_vmcb->save.fs; in nested_svm_vmloadsave()
601 to_vmcb->save.gs = from_vmcb->save.gs; in nested_svm_vmloadsave()
602 to_vmcb->save.tr = from_vmcb->save.tr; in nested_svm_vmloadsave()
603 to_vmcb->save.ldtr = from_vmcb->save.ldtr; in nested_svm_vmloadsave()
604 to_vmcb->save.kernel_gs_base = from_vmcb->save.kernel_gs_base; in nested_svm_vmloadsave()
605 to_vmcb->save.star = from_vmcb->save.star; in nested_svm_vmloadsave()
606 to_vmcb->save.lstar = from_vmcb->save.lstar; in nested_svm_vmloadsave()
607 to_vmcb->save.cstar = from_vmcb->save.cstar; in nested_svm_vmloadsave()
608 to_vmcb->save.sfmask = from_vmcb->save.sfmask; in nested_svm_vmloadsave()
609 to_vmcb->save.sysenter_cs = from_vmcb->save.sysenter_cs; in nested_svm_vmloadsave()
610 to_vmcb->save.sysenter_esp = from_vmcb->save.sysenter_esp; in nested_svm_vmloadsave()
611 to_vmcb->save.sysenter_eip = from_vmcb->save.sysenter_eip; in nested_svm_vmloadsave()
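
nested_svm_vmloadsave() is the helper used by the VMLOAD/VMSAVE intercept handlers, and the hits enumerate exactly the save-area fields those instructions transfer: FS, GS, TR, LDTR, KernelGsBase, the STAR/LSTAR/CSTAR/SFMASK syscall MSRs and the three SYSENTER MSRs. Because it is a field-for-field copy, a sketch can express it as a single structure copy over that subset (the layout below is illustrative, not the real struct vmcb_save_area).

struct seg { unsigned short sel, attrib; unsigned int limit; unsigned long long base; };

/* Illustrative grouping of the state VMLOAD/VMSAVE move between VMCB and CPU. */
struct vmloadsave_state {
	struct seg fs, gs, tr, ldtr;
	unsigned long long kernel_gs_base;
	unsigned long long star, lstar, cstar, sfmask;
	unsigned long long sysenter_cs, sysenter_esp, sysenter_eip;
};

static void vmloadsave_copy(struct vmloadsave_state *to,
			    const struct vmloadsave_state *from)
{
	*to = *from;	/* every field in this subset is copied verbatim */
}
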
643 vmcb12->save.es = vmcb->save.es; in nested_svm_vmexit()
644 vmcb12->save.cs = vmcb->save.cs; in nested_svm_vmexit()
645 vmcb12->save.ss = vmcb->save.ss; in nested_svm_vmexit()
646 vmcb12->save.ds = vmcb->save.ds; in nested_svm_vmexit()
647 vmcb12->save.gdtr = vmcb->save.gdtr; in nested_svm_vmexit()
648 vmcb12->save.idtr = vmcb->save.idtr; in nested_svm_vmexit()
649 vmcb12->save.efer = svm->vcpu.arch.efer; in nested_svm_vmexit()
650 vmcb12->save.cr0 = kvm_read_cr0(&svm->vcpu); in nested_svm_vmexit()
651 vmcb12->save.cr3 = kvm_read_cr3(&svm->vcpu); in nested_svm_vmexit()
652 vmcb12->save.cr2 = vmcb->save.cr2; in nested_svm_vmexit()
653 vmcb12->save.cr4 = svm->vcpu.arch.cr4; in nested_svm_vmexit()
654 vmcb12->save.rflags = kvm_get_rflags(&svm->vcpu); in nested_svm_vmexit()
655 vmcb12->save.rip = kvm_rip_read(&svm->vcpu); in nested_svm_vmexit()
656 vmcb12->save.rsp = kvm_rsp_read(&svm->vcpu); in nested_svm_vmexit()
657 vmcb12->save.rax = kvm_rax_read(&svm->vcpu); in nested_svm_vmexit()
658 vmcb12->save.dr7 = vmcb->save.dr7; in nested_svm_vmexit()
659 vmcb12->save.dr6 = svm->vcpu.arch.dr6; in nested_svm_vmexit()
660 vmcb12->save.cpl = vmcb->save.cpl; in nested_svm_vmexit()
695 /* Restore selected save entries */ in nested_svm_vmexit()
696 svm->vmcb->save.es = hsave->save.es; in nested_svm_vmexit()
697 svm->vmcb->save.cs = hsave->save.cs; in nested_svm_vmexit()
698 svm->vmcb->save.ss = hsave->save.ss; in nested_svm_vmexit()
699 svm->vmcb->save.ds = hsave->save.ds; in nested_svm_vmexit()
700 svm->vmcb->save.gdtr = hsave->save.gdtr; in nested_svm_vmexit()
701 svm->vmcb->save.idtr = hsave->save.idtr; in nested_svm_vmexit()
702 kvm_set_rflags(&svm->vcpu, hsave->save.rflags); in nested_svm_vmexit()
703 svm_set_efer(&svm->vcpu, hsave->save.efer); in nested_svm_vmexit()
704 svm_set_cr0(&svm->vcpu, hsave->save.cr0 | X86_CR0_PE); in nested_svm_vmexit()
705 svm_set_cr4(&svm->vcpu, hsave->save.cr4); in nested_svm_vmexit()
706 kvm_rax_write(&svm->vcpu, hsave->save.rax); in nested_svm_vmexit()
707 kvm_rsp_write(&svm->vcpu, hsave->save.rsp); in nested_svm_vmexit()
708 kvm_rip_write(&svm->vcpu, hsave->save.rip); in nested_svm_vmexit()
709 svm->vmcb->save.dr7 = 0; in nested_svm_vmexit()
710 svm->vmcb->save.cpl = 0; in nested_svm_vmexit()
726 rc = nested_svm_load_cr3(&svm->vcpu, hsave->save.cr3, false); in nested_svm_vmexit()
731 svm->vmcb->save.cr3 = hsave->save.cr3; in nested_svm_vmexit()
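
The nested_svm_vmexit() hits fall into two halves: the L2 guest state is first written back into vmcb12 for the L1 hypervisor to inspect, then the host state snapshotted at VMRUN is restored from hsave, again with control registers going through the setters. Two details stand out in the restore: CR0 is OR-ed with X86_CR0_PE (the L1 hypervisor that executed VMRUN was necessarily in protected mode), and DR7 and CPL are reset to 0 rather than restored. A condensed sketch of that second half, with the setters again reduced to hypothetical stubs.

#define X86_CR0_PE (1ULL << 0)

struct host_regs { unsigned long long efer, cr0, cr4, dr7; unsigned char cpl; };

/* Hypothetical stubs for svm_set_efer()/svm_set_cr0()/svm_set_cr4(). */
static void set_efer(unsigned long long *dst, unsigned long long v) { *dst = v; }
static void set_cr0(unsigned long long *dst, unsigned long long v)  { *dst = v; }
static void set_cr4(unsigned long long *dst, unsigned long long v)  { *dst = v; }

static void restore_host_on_vmexit(struct host_regs *cur, const struct host_regs *hsave)
{
	set_efer(&cur->efer, hsave->efer);
	set_cr0(&cur->cr0, hsave->cr0 | X86_CR0_PE);	/* L1 executed VMRUN, so it was in protected mode */
	set_cr4(&cur->cr4, hsave->cr4);
	cur->dr7 = 0;	/* debug state is not carried back from L2 */
	cur->cpl = 0;	/* VMEXIT resumes the L1 hypervisor at CPL 0 */
}
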
917 if (svm->vmcb->save.cpl) { in nested_svm_check_permissions()
986 trace_kvm_nested_intr_vmexit(svm->vmcb->save.rip); in nested_svm_intr()
1143 if (copy_to_user(&user_vmcb->save, &svm->nested.hsave->save, in svm_get_nested_state()
1144 sizeof(user_vmcb->save))) in svm_get_nested_state()
1160 struct vmcb_save_area *save; in svm_set_nested_state() local
1202 save = kzalloc(sizeof(*save), GFP_KERNEL_ACCOUNT); in svm_set_nested_state()
1203 if (!ctl || !save) in svm_set_nested_state()
1209 if (copy_from_user(save, &user_vmcb->save, sizeof(*save))) in svm_set_nested_state()
1229 if (!(save->cr0 & X86_CR0_PG)) in svm_set_nested_state()
1231 if (!(save->efer & EFER_SVME)) in svm_set_nested_state()
1236 * come from the nested save state. Guest state is already in svm_set_nested_state()
1237 * in the registers, the save area of the nested state instead in svm_set_nested_state()
1241 hsave->save = *save; in svm_set_nested_state()
1253 kfree(save); in svm_set_nested_state()
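
The final hits come from the KVM_GET_NESTED_STATE/KVM_SET_NESTED_STATE paths: svm_get_nested_state() copies the saved host state (hsave->save) out to the userspace vmcb image, while svm_set_nested_state() allocates a temporary vmcb_save_area, copies it in from userspace, rejects saved host state with CR0.PG or EFER.SVME clear, installs it as hsave->save and frees the temporary buffer. A userspace-flavoured sketch of that shape, with calloc/free standing in for kzalloc/kfree and memcpy for copy_from_user().

#include <stdlib.h>
#include <string.h>

#define X86_CR0_PG (1ULL << 31)
#define EFER_SVME  (1ULL << 12)

/* Illustrative subset of struct vmcb_save_area, just enough for the sanity checks. */
struct save_blob { unsigned long long cr0, efer; /* ... */ };

static int set_nested_save(struct save_blob *hsave_save, const struct save_blob *user_save)
{
	struct save_blob *save = calloc(1, sizeof(*save));	/* kzalloc() stand-in */
	int ret = -1;

	if (!save)
		return -1;
	memcpy(save, user_save, sizeof(*save));			/* copy_from_user() stand-in */

	/* Sanity checks mirrored from the listing: CR0.PG and EFER.SVME must be set. */
	if (!(save->cr0 & X86_CR0_PG))
		goto out_free;
	if (!(save->efer & EFER_SVME))
		goto out_free;

	*hsave_save = *save;	/* install as the host-state snapshot (hsave->save) */
	ret = 0;
out_free:
	free(save);
	return ret;
}
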