
Lines Matching refs:vmcs12

58 #define SHADOW_FIELD_RO(x, y) { x, offsetof(struct vmcs12, y) },
65 #define SHADOW_FIELD_RW(x, y) { x, offsetof(struct vmcs12, y) },
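The two SHADOW_FIELD_* macros above build the read-only and read/write shadow-field tables: each entry pairs a hardware VMCS field encoding with the offset of the matching member inside struct vmcs12, which is what lets copy_shadow_to_vmcs12() and copy_vmcs12_to_shadow() (lines 1374-1422 below) walk a table instead of naming every field. Below is a minimal standalone sketch of the same offsetof-table idea, using simplified stand-in types and illustrative encodings rather than the kernel's definitions:

    #include <stddef.h>
    #include <stdio.h>
    #include <string.h>

    /* Simplified stand-in for struct vmcs12; every mirrored field is natural width here. */
    struct vmcs12_demo {
            unsigned long guest_rip;
            unsigned long guest_rsp;
            unsigned long exception_bitmap;
    };

    struct shadow_field {
            unsigned int encoding;  /* VMCS field encoding (illustrative values) */
            size_t offset;          /* offset of the mirror member in struct vmcs12_demo */
    };

    #define SHADOW_FIELD(enc, member) { (enc), offsetof(struct vmcs12_demo, member) }

    static const struct shadow_field shadow_fields[] = {
            SHADOW_FIELD(0x681e, guest_rip),
            SHADOW_FIELD(0x681c, guest_rsp),
            SHADOW_FIELD(0x4004, exception_bitmap),
    };

    /* Table-driven "write any field by encoding", in the spirit of vmcs12_write_any(). */
    static void demo_write_any(struct vmcs12_demo *v, unsigned int enc, unsigned long val)
    {
            for (size_t i = 0; i < sizeof(shadow_fields) / sizeof(shadow_fields[0]); i++)
                    if (shadow_fields[i].encoding == enc)
                            memcpy((char *)v + shadow_fields[i].offset, &val, sizeof(val));
    }

    int main(void)
    {
            struct vmcs12_demo v = { 0 };

            demo_write_any(&v, 0x681e, 0xffff0000ul);
            printf("guest_rip = %#lx\n", v.guest_rip);
            return 0;
    }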
329 struct vmcs12 *vmcs12 = get_vmcs12(vcpu); in nested_ept_inject_page_fault() local
344 vmcs12->guest_physical_address = fault->address; in nested_ept_inject_page_fault()
371 static bool nested_vmx_is_page_fault_vmexit(struct vmcs12 *vmcs12, in nested_vmx_is_page_fault_vmexit() argument
376 bit = (vmcs12->exception_bitmap & (1u << PF_VECTOR)) != 0; in nested_vmx_is_page_fault_vmexit()
378 (error_code & vmcs12->page_fault_error_code_mask) != in nested_vmx_is_page_fault_vmexit()
379 vmcs12->page_fault_error_code_match; in nested_vmx_is_page_fault_vmexit()
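nested_vmx_is_page_fault_vmexit() (lines 371-379) decides whether a page fault taken by L2 must be reflected to L1: bit 14 (PF_VECTOR) of L1's exception bitmap gives the baseline answer, and the error-code mask/match pair can flip it for specific error codes. The fragments above show only the two sub-conditions; the sketch below combines them with XOR, which is how the SDM's mask/match semantics read, so treat it as a hedged reconstruction rather than the kernel's literal body:

    #include <stdbool.h>
    #include <stdint.h>
    #include <stdio.h>

    #define PF_VECTOR 14

    /* Only the three vmcs12 fields this predicate reads; the real struct is far larger. */
    struct pf_controls {
            uint32_t exception_bitmap;
            uint32_t page_fault_error_code_mask;
            uint32_t page_fault_error_code_match;
    };

    /* True if a #PF with this error code should become a VM exit to L1. */
    static bool is_page_fault_vmexit(const struct pf_controls *c, uint16_t error_code)
    {
            bool bit = (c->exception_bitmap & (1u << PF_VECTOR)) != 0;
            bool inequality = (error_code & c->page_fault_error_code_mask) !=
                              c->page_fault_error_code_match;

            /* The mask/match pair inverts the exception-bitmap decision when they disagree. */
            return inequality ^ bit;
    }

    int main(void)
    {
            struct pf_controls c = { 1u << PF_VECTOR, 0, 0 };   /* intercept every #PF */

            printf("%d\n", is_page_fault_vmexit(&c, 0x2));      /* prints 1 */
            return 0;
    }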
390 struct vmcs12 *vmcs12 = get_vmcs12(vcpu); in nested_vmx_check_exception() local
400 if (nested_vmx_is_page_fault_vmexit(vmcs12, in nested_vmx_check_exception()
405 } else if (vmcs12->exception_bitmap & (1u << nr)) { in nested_vmx_check_exception()
425 struct vmcs12 *vmcs12 = get_vmcs12(vcpu); in vmx_inject_page_fault_nested() local
429 if (nested_vmx_is_page_fault_vmexit(vmcs12, fault->error_code) && in vmx_inject_page_fault_nested()
431 vmcs12->vm_exit_intr_error_code = fault->error_code; in vmx_inject_page_fault_nested()
447 struct vmcs12 *vmcs12) in nested_vmx_check_io_bitmap_controls() argument
449 if (!nested_cpu_has(vmcs12, CPU_BASED_USE_IO_BITMAPS)) in nested_vmx_check_io_bitmap_controls()
452 if (CC(!page_address_valid(vcpu, vmcs12->io_bitmap_a)) || in nested_vmx_check_io_bitmap_controls()
453 CC(!page_address_valid(vcpu, vmcs12->io_bitmap_b))) in nested_vmx_check_io_bitmap_controls()
460 struct vmcs12 *vmcs12) in nested_vmx_check_msr_bitmap_controls() argument
462 if (!nested_cpu_has(vmcs12, CPU_BASED_USE_MSR_BITMAPS)) in nested_vmx_check_msr_bitmap_controls()
465 if (CC(!page_address_valid(vcpu, vmcs12->msr_bitmap))) in nested_vmx_check_msr_bitmap_controls()
472 struct vmcs12 *vmcs12) in nested_vmx_check_tpr_shadow_controls() argument
474 if (!nested_cpu_has(vmcs12, CPU_BASED_TPR_SHADOW)) in nested_vmx_check_tpr_shadow_controls()
477 if (CC(!page_address_valid(vcpu, vmcs12->virtual_apic_page_addr))) in nested_vmx_check_tpr_shadow_controls()
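The three helpers above (io-bitmap, MSR-bitmap and TPR-shadow checks, lines 447-477) share one shape: if L1 did not enable the corresponding execution control there is nothing to validate; otherwise the guest-physical address L1 programmed must be page aligned and below the vCPU's physical-address width, with CC() recording which consistency check failed. A stripped-down sketch of that pattern follows; page_address_valid() here is a simplified stand-in for the kernel helper of the same name:

    #include <stdbool.h>
    #include <stdint.h>
    #include <errno.h>

    #define DEMO_PAGE_SIZE 4096ull

    /* Stand-in: page aligned and representable within the vCPU's MAXPHYADDR. */
    static bool page_address_valid(uint64_t gpa, unsigned int maxphyaddr)
    {
            return (gpa % DEMO_PAGE_SIZE) == 0 && (gpa >> maxphyaddr) == 0;
    }

    struct io_bitmap_controls {
            bool     use_io_bitmaps;        /* CPU_BASED_USE_IO_BITMAPS in vmcs12 */
            uint64_t io_bitmap_a;
            uint64_t io_bitmap_b;
    };

    /* Same shape as nested_vmx_check_io_bitmap_controls(): 0 if consistent, -EINVAL if not. */
    static int check_io_bitmap_controls(const struct io_bitmap_controls *c,
                                        unsigned int maxphyaddr)
    {
            if (!c->use_io_bitmaps)
                    return 0;

            if (!page_address_valid(c->io_bitmap_a, maxphyaddr) ||
                !page_address_valid(c->io_bitmap_b, maxphyaddr))
                    return -EINVAL;

            return 0;
    }

    int main(void)
    {
            struct io_bitmap_controls c = { true, 0x1000, 0x2000 };

            return check_io_bitmap_controls(&c, 36);    /* both addresses valid -> 0 */
    }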
563 struct vmcs12 *vmcs12) in nested_vmx_prepare_msr_bitmap() argument
572 !nested_cpu_has(vmcs12, CPU_BASED_USE_MSR_BITMAPS)) in nested_vmx_prepare_msr_bitmap()
575 if (kvm_vcpu_map(vcpu, gpa_to_gfn(vmcs12->msr_bitmap), map)) in nested_vmx_prepare_msr_bitmap()
587 if (nested_cpu_has_virt_x2apic_mode(vmcs12)) { in nested_vmx_prepare_msr_bitmap()
588 if (nested_cpu_has_apic_reg_virt(vmcs12)) { in nested_vmx_prepare_msr_bitmap()
607 if (nested_cpu_has_vid(vmcs12)) { in nested_vmx_prepare_msr_bitmap()
660 struct vmcs12 *vmcs12) in nested_cache_shadow_vmcs12() argument
663 struct vmcs12 *shadow; in nested_cache_shadow_vmcs12()
665 if (!nested_cpu_has_shadow_vmcs(vmcs12) || in nested_cache_shadow_vmcs12()
666 vmcs12->vmcs_link_pointer == -1ull) in nested_cache_shadow_vmcs12()
671 if (kvm_vcpu_map(vcpu, gpa_to_gfn(vmcs12->vmcs_link_pointer), &map)) in nested_cache_shadow_vmcs12()
679 struct vmcs12 *vmcs12) in nested_flush_cached_shadow_vmcs12() argument
683 if (!nested_cpu_has_shadow_vmcs(vmcs12) || in nested_flush_cached_shadow_vmcs12()
684 vmcs12->vmcs_link_pointer == -1ull) in nested_flush_cached_shadow_vmcs12()
687 kvm_write_guest(vmx->vcpu.kvm, vmcs12->vmcs_link_pointer, in nested_flush_cached_shadow_vmcs12()
707 struct vmcs12 *vmcs12) in nested_vmx_check_apic_access_controls() argument
709 if (nested_cpu_has2(vmcs12, SECONDARY_EXEC_VIRTUALIZE_APIC_ACCESSES) && in nested_vmx_check_apic_access_controls()
710 CC(!page_address_valid(vcpu, vmcs12->apic_access_addr))) in nested_vmx_check_apic_access_controls()
717 struct vmcs12 *vmcs12) in nested_vmx_check_apicv_controls() argument
719 if (!nested_cpu_has_virt_x2apic_mode(vmcs12) && in nested_vmx_check_apicv_controls()
720 !nested_cpu_has_apic_reg_virt(vmcs12) && in nested_vmx_check_apicv_controls()
721 !nested_cpu_has_vid(vmcs12) && in nested_vmx_check_apicv_controls()
722 !nested_cpu_has_posted_intr(vmcs12)) in nested_vmx_check_apicv_controls()
729 if (CC(nested_cpu_has_virt_x2apic_mode(vmcs12) && in nested_vmx_check_apicv_controls()
730 nested_cpu_has2(vmcs12, SECONDARY_EXEC_VIRTUALIZE_APIC_ACCESSES))) in nested_vmx_check_apicv_controls()
737 if (CC(nested_cpu_has_vid(vmcs12) && !nested_exit_on_intr(vcpu))) in nested_vmx_check_apicv_controls()
747 if (nested_cpu_has_posted_intr(vmcs12) && in nested_vmx_check_apicv_controls()
748 (CC(!nested_cpu_has_vid(vmcs12)) || in nested_vmx_check_apicv_controls()
750 CC((vmcs12->posted_intr_nv & 0xff00)) || in nested_vmx_check_apicv_controls()
751 CC((vmcs12->posted_intr_desc_addr & 0x3f)) || in nested_vmx_check_apicv_controls()
752 CC((vmcs12->posted_intr_desc_addr >> cpuid_maxphyaddr(vcpu))))) in nested_vmx_check_apicv_controls()
756 if (CC(!nested_cpu_has(vmcs12, CPU_BASED_TPR_SHADOW))) in nested_vmx_check_apicv_controls()
778 struct vmcs12 *vmcs12) in nested_vmx_check_exit_msr_switch_controls() argument
781 vmcs12->vm_exit_msr_load_count, in nested_vmx_check_exit_msr_switch_controls()
782 vmcs12->vm_exit_msr_load_addr)) || in nested_vmx_check_exit_msr_switch_controls()
784 vmcs12->vm_exit_msr_store_count, in nested_vmx_check_exit_msr_switch_controls()
785 vmcs12->vm_exit_msr_store_addr))) in nested_vmx_check_exit_msr_switch_controls()
792 struct vmcs12 *vmcs12) in nested_vmx_check_entry_msr_switch_controls() argument
795 vmcs12->vm_entry_msr_load_count, in nested_vmx_check_entry_msr_switch_controls()
796 vmcs12->vm_entry_msr_load_addr))) in nested_vmx_check_entry_msr_switch_controls()
803 struct vmcs12 *vmcs12) in nested_vmx_check_pml_controls() argument
805 if (!nested_cpu_has_pml(vmcs12)) in nested_vmx_check_pml_controls()
808 if (CC(!nested_cpu_has_ept(vmcs12)) || in nested_vmx_check_pml_controls()
809 CC(!page_address_valid(vcpu, vmcs12->pml_address))) in nested_vmx_check_pml_controls()
816 struct vmcs12 *vmcs12) in nested_vmx_check_unrestricted_guest_controls() argument
818 if (CC(nested_cpu_has2(vmcs12, SECONDARY_EXEC_UNRESTRICTED_GUEST) && in nested_vmx_check_unrestricted_guest_controls()
819 !nested_cpu_has_ept(vmcs12))) in nested_vmx_check_unrestricted_guest_controls()
825 struct vmcs12 *vmcs12) in nested_vmx_check_mode_based_ept_exec_controls() argument
827 if (CC(nested_cpu_has2(vmcs12, SECONDARY_EXEC_MODE_BASED_EPT_EXEC) && in nested_vmx_check_mode_based_ept_exec_controls()
828 !nested_cpu_has_ept(vmcs12))) in nested_vmx_check_mode_based_ept_exec_controls()
834 struct vmcs12 *vmcs12) in nested_vmx_check_shadow_vmcs_controls() argument
836 if (!nested_cpu_has_shadow_vmcs(vmcs12)) in nested_vmx_check_shadow_vmcs_controls()
839 if (CC(!page_address_valid(vcpu, vmcs12->vmread_bitmap)) || in nested_vmx_check_shadow_vmcs_controls()
840 CC(!page_address_valid(vcpu, vmcs12->vmwrite_bitmap))) in nested_vmx_check_shadow_vmcs_controls()
1036 struct vmcs12 *vmcs12 = get_vmcs12(vcpu); in nested_has_guest_tlb_tag() local
1038 return nested_cpu_has_ept(vmcs12) || in nested_has_guest_tlb_tag()
1039 (nested_cpu_has_vpid(vmcs12) && to_vmx(vcpu)->nested.vpid02); in nested_has_guest_tlb_tag()
1374 struct vmcs12 *vmcs12 = get_vmcs12(&vmx->vcpu); in copy_shadow_to_vmcs12() local
1389 vmcs12_write_any(vmcs12, field.encoding, field.offset, val); in copy_shadow_to_vmcs12()
1409 struct vmcs12 *vmcs12 = get_vmcs12(&vmx->vcpu); in copy_vmcs12_to_shadow() local
1422 val = vmcs12_read_any(vmcs12, field.encoding, in copy_vmcs12_to_shadow()
1434 struct vmcs12 *vmcs12 = vmx->nested.cached_vmcs12; in copy_enlightened_to_vmcs12() local
1438 vmcs12->tpr_threshold = evmcs->tpr_threshold; in copy_enlightened_to_vmcs12()
1439 vmcs12->guest_rip = evmcs->guest_rip; in copy_enlightened_to_vmcs12()
1443 vmcs12->guest_rsp = evmcs->guest_rsp; in copy_enlightened_to_vmcs12()
1444 vmcs12->guest_rflags = evmcs->guest_rflags; in copy_enlightened_to_vmcs12()
1445 vmcs12->guest_interruptibility_info = in copy_enlightened_to_vmcs12()
1451 vmcs12->cpu_based_vm_exec_control = in copy_enlightened_to_vmcs12()
1457 vmcs12->exception_bitmap = evmcs->exception_bitmap; in copy_enlightened_to_vmcs12()
1462 vmcs12->vm_entry_controls = evmcs->vm_entry_controls; in copy_enlightened_to_vmcs12()
1467 vmcs12->vm_entry_intr_info_field = in copy_enlightened_to_vmcs12()
1469 vmcs12->vm_entry_exception_error_code = in copy_enlightened_to_vmcs12()
1471 vmcs12->vm_entry_instruction_len = in copy_enlightened_to_vmcs12()
1477 vmcs12->host_ia32_pat = evmcs->host_ia32_pat; in copy_enlightened_to_vmcs12()
1478 vmcs12->host_ia32_efer = evmcs->host_ia32_efer; in copy_enlightened_to_vmcs12()
1479 vmcs12->host_cr0 = evmcs->host_cr0; in copy_enlightened_to_vmcs12()
1480 vmcs12->host_cr3 = evmcs->host_cr3; in copy_enlightened_to_vmcs12()
1481 vmcs12->host_cr4 = evmcs->host_cr4; in copy_enlightened_to_vmcs12()
1482 vmcs12->host_ia32_sysenter_esp = evmcs->host_ia32_sysenter_esp; in copy_enlightened_to_vmcs12()
1483 vmcs12->host_ia32_sysenter_eip = evmcs->host_ia32_sysenter_eip; in copy_enlightened_to_vmcs12()
1484 vmcs12->host_rip = evmcs->host_rip; in copy_enlightened_to_vmcs12()
1485 vmcs12->host_ia32_sysenter_cs = evmcs->host_ia32_sysenter_cs; in copy_enlightened_to_vmcs12()
1486 vmcs12->host_es_selector = evmcs->host_es_selector; in copy_enlightened_to_vmcs12()
1487 vmcs12->host_cs_selector = evmcs->host_cs_selector; in copy_enlightened_to_vmcs12()
1488 vmcs12->host_ss_selector = evmcs->host_ss_selector; in copy_enlightened_to_vmcs12()
1489 vmcs12->host_ds_selector = evmcs->host_ds_selector; in copy_enlightened_to_vmcs12()
1490 vmcs12->host_fs_selector = evmcs->host_fs_selector; in copy_enlightened_to_vmcs12()
1491 vmcs12->host_gs_selector = evmcs->host_gs_selector; in copy_enlightened_to_vmcs12()
1492 vmcs12->host_tr_selector = evmcs->host_tr_selector; in copy_enlightened_to_vmcs12()
1497 vmcs12->pin_based_vm_exec_control = in copy_enlightened_to_vmcs12()
1499 vmcs12->vm_exit_controls = evmcs->vm_exit_controls; in copy_enlightened_to_vmcs12()
1500 vmcs12->secondary_vm_exec_control = in copy_enlightened_to_vmcs12()
1506 vmcs12->io_bitmap_a = evmcs->io_bitmap_a; in copy_enlightened_to_vmcs12()
1507 vmcs12->io_bitmap_b = evmcs->io_bitmap_b; in copy_enlightened_to_vmcs12()
1512 vmcs12->msr_bitmap = evmcs->msr_bitmap; in copy_enlightened_to_vmcs12()
1517 vmcs12->guest_es_base = evmcs->guest_es_base; in copy_enlightened_to_vmcs12()
1518 vmcs12->guest_cs_base = evmcs->guest_cs_base; in copy_enlightened_to_vmcs12()
1519 vmcs12->guest_ss_base = evmcs->guest_ss_base; in copy_enlightened_to_vmcs12()
1520 vmcs12->guest_ds_base = evmcs->guest_ds_base; in copy_enlightened_to_vmcs12()
1521 vmcs12->guest_fs_base = evmcs->guest_fs_base; in copy_enlightened_to_vmcs12()
1522 vmcs12->guest_gs_base = evmcs->guest_gs_base; in copy_enlightened_to_vmcs12()
1523 vmcs12->guest_ldtr_base = evmcs->guest_ldtr_base; in copy_enlightened_to_vmcs12()
1524 vmcs12->guest_tr_base = evmcs->guest_tr_base; in copy_enlightened_to_vmcs12()
1525 vmcs12->guest_gdtr_base = evmcs->guest_gdtr_base; in copy_enlightened_to_vmcs12()
1526 vmcs12->guest_idtr_base = evmcs->guest_idtr_base; in copy_enlightened_to_vmcs12()
1527 vmcs12->guest_es_limit = evmcs->guest_es_limit; in copy_enlightened_to_vmcs12()
1528 vmcs12->guest_cs_limit = evmcs->guest_cs_limit; in copy_enlightened_to_vmcs12()
1529 vmcs12->guest_ss_limit = evmcs->guest_ss_limit; in copy_enlightened_to_vmcs12()
1530 vmcs12->guest_ds_limit = evmcs->guest_ds_limit; in copy_enlightened_to_vmcs12()
1531 vmcs12->guest_fs_limit = evmcs->guest_fs_limit; in copy_enlightened_to_vmcs12()
1532 vmcs12->guest_gs_limit = evmcs->guest_gs_limit; in copy_enlightened_to_vmcs12()
1533 vmcs12->guest_ldtr_limit = evmcs->guest_ldtr_limit; in copy_enlightened_to_vmcs12()
1534 vmcs12->guest_tr_limit = evmcs->guest_tr_limit; in copy_enlightened_to_vmcs12()
1535 vmcs12->guest_gdtr_limit = evmcs->guest_gdtr_limit; in copy_enlightened_to_vmcs12()
1536 vmcs12->guest_idtr_limit = evmcs->guest_idtr_limit; in copy_enlightened_to_vmcs12()
1537 vmcs12->guest_es_ar_bytes = evmcs->guest_es_ar_bytes; in copy_enlightened_to_vmcs12()
1538 vmcs12->guest_cs_ar_bytes = evmcs->guest_cs_ar_bytes; in copy_enlightened_to_vmcs12()
1539 vmcs12->guest_ss_ar_bytes = evmcs->guest_ss_ar_bytes; in copy_enlightened_to_vmcs12()
1540 vmcs12->guest_ds_ar_bytes = evmcs->guest_ds_ar_bytes; in copy_enlightened_to_vmcs12()
1541 vmcs12->guest_fs_ar_bytes = evmcs->guest_fs_ar_bytes; in copy_enlightened_to_vmcs12()
1542 vmcs12->guest_gs_ar_bytes = evmcs->guest_gs_ar_bytes; in copy_enlightened_to_vmcs12()
1543 vmcs12->guest_ldtr_ar_bytes = evmcs->guest_ldtr_ar_bytes; in copy_enlightened_to_vmcs12()
1544 vmcs12->guest_tr_ar_bytes = evmcs->guest_tr_ar_bytes; in copy_enlightened_to_vmcs12()
1545 vmcs12->guest_es_selector = evmcs->guest_es_selector; in copy_enlightened_to_vmcs12()
1546 vmcs12->guest_cs_selector = evmcs->guest_cs_selector; in copy_enlightened_to_vmcs12()
1547 vmcs12->guest_ss_selector = evmcs->guest_ss_selector; in copy_enlightened_to_vmcs12()
1548 vmcs12->guest_ds_selector = evmcs->guest_ds_selector; in copy_enlightened_to_vmcs12()
1549 vmcs12->guest_fs_selector = evmcs->guest_fs_selector; in copy_enlightened_to_vmcs12()
1550 vmcs12->guest_gs_selector = evmcs->guest_gs_selector; in copy_enlightened_to_vmcs12()
1551 vmcs12->guest_ldtr_selector = evmcs->guest_ldtr_selector; in copy_enlightened_to_vmcs12()
1552 vmcs12->guest_tr_selector = evmcs->guest_tr_selector; in copy_enlightened_to_vmcs12()
1557 vmcs12->tsc_offset = evmcs->tsc_offset; in copy_enlightened_to_vmcs12()
1558 vmcs12->virtual_apic_page_addr = evmcs->virtual_apic_page_addr; in copy_enlightened_to_vmcs12()
1559 vmcs12->xss_exit_bitmap = evmcs->xss_exit_bitmap; in copy_enlightened_to_vmcs12()
1564 vmcs12->cr0_guest_host_mask = evmcs->cr0_guest_host_mask; in copy_enlightened_to_vmcs12()
1565 vmcs12->cr4_guest_host_mask = evmcs->cr4_guest_host_mask; in copy_enlightened_to_vmcs12()
1566 vmcs12->cr0_read_shadow = evmcs->cr0_read_shadow; in copy_enlightened_to_vmcs12()
1567 vmcs12->cr4_read_shadow = evmcs->cr4_read_shadow; in copy_enlightened_to_vmcs12()
1568 vmcs12->guest_cr0 = evmcs->guest_cr0; in copy_enlightened_to_vmcs12()
1569 vmcs12->guest_cr3 = evmcs->guest_cr3; in copy_enlightened_to_vmcs12()
1570 vmcs12->guest_cr4 = evmcs->guest_cr4; in copy_enlightened_to_vmcs12()
1571 vmcs12->guest_dr7 = evmcs->guest_dr7; in copy_enlightened_to_vmcs12()
1576 vmcs12->host_fs_base = evmcs->host_fs_base; in copy_enlightened_to_vmcs12()
1577 vmcs12->host_gs_base = evmcs->host_gs_base; in copy_enlightened_to_vmcs12()
1578 vmcs12->host_tr_base = evmcs->host_tr_base; in copy_enlightened_to_vmcs12()
1579 vmcs12->host_gdtr_base = evmcs->host_gdtr_base; in copy_enlightened_to_vmcs12()
1580 vmcs12->host_idtr_base = evmcs->host_idtr_base; in copy_enlightened_to_vmcs12()
1581 vmcs12->host_rsp = evmcs->host_rsp; in copy_enlightened_to_vmcs12()
1586 vmcs12->ept_pointer = evmcs->ept_pointer; in copy_enlightened_to_vmcs12()
1587 vmcs12->virtual_processor_id = evmcs->virtual_processor_id; in copy_enlightened_to_vmcs12()
1592 vmcs12->vmcs_link_pointer = evmcs->vmcs_link_pointer; in copy_enlightened_to_vmcs12()
1593 vmcs12->guest_ia32_debugctl = evmcs->guest_ia32_debugctl; in copy_enlightened_to_vmcs12()
1594 vmcs12->guest_ia32_pat = evmcs->guest_ia32_pat; in copy_enlightened_to_vmcs12()
1595 vmcs12->guest_ia32_efer = evmcs->guest_ia32_efer; in copy_enlightened_to_vmcs12()
1596 vmcs12->guest_pdptr0 = evmcs->guest_pdptr0; in copy_enlightened_to_vmcs12()
1597 vmcs12->guest_pdptr1 = evmcs->guest_pdptr1; in copy_enlightened_to_vmcs12()
1598 vmcs12->guest_pdptr2 = evmcs->guest_pdptr2; in copy_enlightened_to_vmcs12()
1599 vmcs12->guest_pdptr3 = evmcs->guest_pdptr3; in copy_enlightened_to_vmcs12()
1600 vmcs12->guest_pending_dbg_exceptions = in copy_enlightened_to_vmcs12()
1602 vmcs12->guest_sysenter_esp = evmcs->guest_sysenter_esp; in copy_enlightened_to_vmcs12()
1603 vmcs12->guest_sysenter_eip = evmcs->guest_sysenter_eip; in copy_enlightened_to_vmcs12()
1604 vmcs12->guest_bndcfgs = evmcs->guest_bndcfgs; in copy_enlightened_to_vmcs12()
1605 vmcs12->guest_activity_state = evmcs->guest_activity_state; in copy_enlightened_to_vmcs12()
1606 vmcs12->guest_sysenter_cs = evmcs->guest_sysenter_cs; in copy_enlightened_to_vmcs12()
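The long run of assignments in copy_enlightened_to_vmcs12() (lines 1438-1606) mirrors fields from Hyper-V's enlightened VMCS into the cached vmcs12. In the kernel these assignments are grouped and guarded by the evmcs "clean fields" bitmap so unchanged groups can be skipped; the reverse direction appears further down as copy_vmcs12_to_enlightened(). A minimal sketch of the clean-fields gating idea, with illustrative group names rather than the Hyper-V TLFS definitions:

    #include <stdint.h>

    /* Illustrative clean-field bits; the real ones come from the Hyper-V TLFS. */
    #define CLEAN_FIELD_GUEST_BASIC   (1u << 0)
    #define CLEAN_FIELD_GUEST_GRP1    (1u << 1)

    struct evmcs_demo {
            uint32_t clean_fields;
            uint64_t guest_rip;
            uint64_t guest_rsp;
            uint64_t guest_rflags;
    };

    struct vmcs12_demo {
            uint64_t guest_rip;
            uint64_t guest_rsp;
            uint64_t guest_rflags;
    };

    /*
     * Copy only the groups the guest marked dirty: a set clean-field bit means the
     * corresponding group is unchanged since the last sync and may be skipped.
     */
    static void copy_enlightened_to_vmcs12_demo(struct vmcs12_demo *v,
                                                const struct evmcs_demo *e)
    {
            if (!(e->clean_fields & CLEAN_FIELD_GUEST_BASIC))
                    v->guest_rip = e->guest_rip;

            if (!(e->clean_fields & CLEAN_FIELD_GUEST_GRP1)) {
                    v->guest_rsp = e->guest_rsp;
                    v->guest_rflags = e->guest_rflags;
                    /* ... the rest of the group ... */
            }
    }

    int main(void)
    {
            struct evmcs_demo e = { CLEAN_FIELD_GUEST_BASIC, 0x1000, 0x2000, 0x2 };
            struct vmcs12_demo v = { 0 };

            copy_enlightened_to_vmcs12_demo(&v, &e);    /* rip skipped, rsp/rflags copied */
            return (v.guest_rip == 0 && v.guest_rsp == 0x2000) ? 0 : 1;
    }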
1654 struct vmcs12 *vmcs12 = vmx->nested.cached_vmcs12; in copy_vmcs12_to_enlightened() local
1725 evmcs->guest_es_selector = vmcs12->guest_es_selector; in copy_vmcs12_to_enlightened()
1726 evmcs->guest_cs_selector = vmcs12->guest_cs_selector; in copy_vmcs12_to_enlightened()
1727 evmcs->guest_ss_selector = vmcs12->guest_ss_selector; in copy_vmcs12_to_enlightened()
1728 evmcs->guest_ds_selector = vmcs12->guest_ds_selector; in copy_vmcs12_to_enlightened()
1729 evmcs->guest_fs_selector = vmcs12->guest_fs_selector; in copy_vmcs12_to_enlightened()
1730 evmcs->guest_gs_selector = vmcs12->guest_gs_selector; in copy_vmcs12_to_enlightened()
1731 evmcs->guest_ldtr_selector = vmcs12->guest_ldtr_selector; in copy_vmcs12_to_enlightened()
1732 evmcs->guest_tr_selector = vmcs12->guest_tr_selector; in copy_vmcs12_to_enlightened()
1734 evmcs->guest_es_limit = vmcs12->guest_es_limit; in copy_vmcs12_to_enlightened()
1735 evmcs->guest_cs_limit = vmcs12->guest_cs_limit; in copy_vmcs12_to_enlightened()
1736 evmcs->guest_ss_limit = vmcs12->guest_ss_limit; in copy_vmcs12_to_enlightened()
1737 evmcs->guest_ds_limit = vmcs12->guest_ds_limit; in copy_vmcs12_to_enlightened()
1738 evmcs->guest_fs_limit = vmcs12->guest_fs_limit; in copy_vmcs12_to_enlightened()
1739 evmcs->guest_gs_limit = vmcs12->guest_gs_limit; in copy_vmcs12_to_enlightened()
1740 evmcs->guest_ldtr_limit = vmcs12->guest_ldtr_limit; in copy_vmcs12_to_enlightened()
1741 evmcs->guest_tr_limit = vmcs12->guest_tr_limit; in copy_vmcs12_to_enlightened()
1742 evmcs->guest_gdtr_limit = vmcs12->guest_gdtr_limit; in copy_vmcs12_to_enlightened()
1743 evmcs->guest_idtr_limit = vmcs12->guest_idtr_limit; in copy_vmcs12_to_enlightened()
1745 evmcs->guest_es_ar_bytes = vmcs12->guest_es_ar_bytes; in copy_vmcs12_to_enlightened()
1746 evmcs->guest_cs_ar_bytes = vmcs12->guest_cs_ar_bytes; in copy_vmcs12_to_enlightened()
1747 evmcs->guest_ss_ar_bytes = vmcs12->guest_ss_ar_bytes; in copy_vmcs12_to_enlightened()
1748 evmcs->guest_ds_ar_bytes = vmcs12->guest_ds_ar_bytes; in copy_vmcs12_to_enlightened()
1749 evmcs->guest_fs_ar_bytes = vmcs12->guest_fs_ar_bytes; in copy_vmcs12_to_enlightened()
1750 evmcs->guest_gs_ar_bytes = vmcs12->guest_gs_ar_bytes; in copy_vmcs12_to_enlightened()
1751 evmcs->guest_ldtr_ar_bytes = vmcs12->guest_ldtr_ar_bytes; in copy_vmcs12_to_enlightened()
1752 evmcs->guest_tr_ar_bytes = vmcs12->guest_tr_ar_bytes; in copy_vmcs12_to_enlightened()
1754 evmcs->guest_es_base = vmcs12->guest_es_base; in copy_vmcs12_to_enlightened()
1755 evmcs->guest_cs_base = vmcs12->guest_cs_base; in copy_vmcs12_to_enlightened()
1756 evmcs->guest_ss_base = vmcs12->guest_ss_base; in copy_vmcs12_to_enlightened()
1757 evmcs->guest_ds_base = vmcs12->guest_ds_base; in copy_vmcs12_to_enlightened()
1758 evmcs->guest_fs_base = vmcs12->guest_fs_base; in copy_vmcs12_to_enlightened()
1759 evmcs->guest_gs_base = vmcs12->guest_gs_base; in copy_vmcs12_to_enlightened()
1760 evmcs->guest_ldtr_base = vmcs12->guest_ldtr_base; in copy_vmcs12_to_enlightened()
1761 evmcs->guest_tr_base = vmcs12->guest_tr_base; in copy_vmcs12_to_enlightened()
1762 evmcs->guest_gdtr_base = vmcs12->guest_gdtr_base; in copy_vmcs12_to_enlightened()
1763 evmcs->guest_idtr_base = vmcs12->guest_idtr_base; in copy_vmcs12_to_enlightened()
1765 evmcs->guest_ia32_pat = vmcs12->guest_ia32_pat; in copy_vmcs12_to_enlightened()
1766 evmcs->guest_ia32_efer = vmcs12->guest_ia32_efer; in copy_vmcs12_to_enlightened()
1768 evmcs->guest_pdptr0 = vmcs12->guest_pdptr0; in copy_vmcs12_to_enlightened()
1769 evmcs->guest_pdptr1 = vmcs12->guest_pdptr1; in copy_vmcs12_to_enlightened()
1770 evmcs->guest_pdptr2 = vmcs12->guest_pdptr2; in copy_vmcs12_to_enlightened()
1771 evmcs->guest_pdptr3 = vmcs12->guest_pdptr3; in copy_vmcs12_to_enlightened()
1774 vmcs12->guest_pending_dbg_exceptions; in copy_vmcs12_to_enlightened()
1775 evmcs->guest_sysenter_esp = vmcs12->guest_sysenter_esp; in copy_vmcs12_to_enlightened()
1776 evmcs->guest_sysenter_eip = vmcs12->guest_sysenter_eip; in copy_vmcs12_to_enlightened()
1778 evmcs->guest_activity_state = vmcs12->guest_activity_state; in copy_vmcs12_to_enlightened()
1779 evmcs->guest_sysenter_cs = vmcs12->guest_sysenter_cs; in copy_vmcs12_to_enlightened()
1781 evmcs->guest_cr0 = vmcs12->guest_cr0; in copy_vmcs12_to_enlightened()
1782 evmcs->guest_cr3 = vmcs12->guest_cr3; in copy_vmcs12_to_enlightened()
1783 evmcs->guest_cr4 = vmcs12->guest_cr4; in copy_vmcs12_to_enlightened()
1784 evmcs->guest_dr7 = vmcs12->guest_dr7; in copy_vmcs12_to_enlightened()
1786 evmcs->guest_physical_address = vmcs12->guest_physical_address; in copy_vmcs12_to_enlightened()
1788 evmcs->vm_instruction_error = vmcs12->vm_instruction_error; in copy_vmcs12_to_enlightened()
1789 evmcs->vm_exit_reason = vmcs12->vm_exit_reason; in copy_vmcs12_to_enlightened()
1790 evmcs->vm_exit_intr_info = vmcs12->vm_exit_intr_info; in copy_vmcs12_to_enlightened()
1791 evmcs->vm_exit_intr_error_code = vmcs12->vm_exit_intr_error_code; in copy_vmcs12_to_enlightened()
1792 evmcs->idt_vectoring_info_field = vmcs12->idt_vectoring_info_field; in copy_vmcs12_to_enlightened()
1793 evmcs->idt_vectoring_error_code = vmcs12->idt_vectoring_error_code; in copy_vmcs12_to_enlightened()
1794 evmcs->vm_exit_instruction_len = vmcs12->vm_exit_instruction_len; in copy_vmcs12_to_enlightened()
1795 evmcs->vmx_instruction_info = vmcs12->vmx_instruction_info; in copy_vmcs12_to_enlightened()
1797 evmcs->exit_qualification = vmcs12->exit_qualification; in copy_vmcs12_to_enlightened()
1799 evmcs->guest_linear_address = vmcs12->guest_linear_address; in copy_vmcs12_to_enlightened()
1800 evmcs->guest_rsp = vmcs12->guest_rsp; in copy_vmcs12_to_enlightened()
1801 evmcs->guest_rflags = vmcs12->guest_rflags; in copy_vmcs12_to_enlightened()
1804 vmcs12->guest_interruptibility_info; in copy_vmcs12_to_enlightened()
1805 evmcs->cpu_based_vm_exec_control = vmcs12->cpu_based_vm_exec_control; in copy_vmcs12_to_enlightened()
1806 evmcs->vm_entry_controls = vmcs12->vm_entry_controls; in copy_vmcs12_to_enlightened()
1807 evmcs->vm_entry_intr_info_field = vmcs12->vm_entry_intr_info_field; in copy_vmcs12_to_enlightened()
1809 vmcs12->vm_entry_exception_error_code; in copy_vmcs12_to_enlightened()
1810 evmcs->vm_entry_instruction_len = vmcs12->vm_entry_instruction_len; in copy_vmcs12_to_enlightened()
1812 evmcs->guest_rip = vmcs12->guest_rip; in copy_vmcs12_to_enlightened()
1814 evmcs->guest_bndcfgs = vmcs12->guest_bndcfgs; in copy_vmcs12_to_enlightened()
1888 struct vmcs12 *vmcs12 = get_vmcs12(vcpu); in nested_vmx_handle_enlightened_vmptrld() local
1889 memset(vmcs12, 0, sizeof(*vmcs12)); in nested_vmx_handle_enlightened_vmptrld()
1890 vmcs12->hdr.revision_id = VMCS12_REVISION; in nested_vmx_handle_enlightened_vmptrld()
1966 static u64 nested_vmx_calc_efer(struct vcpu_vmx *vmx, struct vmcs12 *vmcs12) in nested_vmx_calc_efer() argument
1969 (vmcs12->vm_entry_controls & VM_ENTRY_LOAD_IA32_EFER)) in nested_vmx_calc_efer()
1970 return vmcs12->guest_ia32_efer; in nested_vmx_calc_efer()
1971 else if (vmcs12->vm_entry_controls & VM_ENTRY_IA32E_MODE) in nested_vmx_calc_efer()
2035 struct vmcs12 *vmcs12) in prepare_vmcs02_early_rare() argument
2042 if (nested_cpu_has_vpid(vmcs12) && vmx->nested.vpid02) in prepare_vmcs02_early_rare()
2049 static void prepare_vmcs02_early(struct vcpu_vmx *vmx, struct vmcs12 *vmcs12) in prepare_vmcs02_early() argument
2052 u64 guest_efer = nested_vmx_calc_efer(vmx, vmcs12); in prepare_vmcs02_early()
2055 prepare_vmcs02_early_rare(vmx, vmcs12); in prepare_vmcs02_early()
2061 exec_control |= (vmcs12->pin_based_vm_exec_control & in prepare_vmcs02_early()
2066 if (nested_cpu_has_posted_intr(vmcs12)) in prepare_vmcs02_early()
2067 vmx->nested.posted_intr_nv = vmcs12->posted_intr_nv; in prepare_vmcs02_early()
2079 exec_control |= vmcs12->cpu_based_vm_exec_control; in prepare_vmcs02_early()
2082 vmcs_write32(TPR_THRESHOLD, vmcs12->tpr_threshold); in prepare_vmcs02_early()
2122 if (nested_cpu_has(vmcs12, in prepare_vmcs02_early()
2124 vmcs12_exec_ctrl = vmcs12->secondary_vm_exec_control & in prepare_vmcs02_early()
2137 (vmcs12->guest_cr4 & X86_CR4_UMIP)) in prepare_vmcs02_early()
2142 vmcs12->guest_intr_status); in prepare_vmcs02_early()
2155 exec_control = (vmcs12->vm_entry_controls | vmx_vmentry_ctrl()) & in prepare_vmcs02_early()
2182 vmcs12->vm_entry_intr_info_field); in prepare_vmcs02_early()
2184 vmcs12->vm_entry_exception_error_code); in prepare_vmcs02_early()
2186 vmcs12->vm_entry_instruction_len); in prepare_vmcs02_early()
2188 vmcs12->guest_interruptibility_info); in prepare_vmcs02_early()
2190 !(vmcs12->guest_interruptibility_info & GUEST_INTR_STATE_NMI); in prepare_vmcs02_early()
2196 static void prepare_vmcs02_rare(struct vcpu_vmx *vmx, struct vmcs12 *vmcs12) in prepare_vmcs02_rare() argument
2202 vmcs_write16(GUEST_ES_SELECTOR, vmcs12->guest_es_selector); in prepare_vmcs02_rare()
2203 vmcs_write16(GUEST_CS_SELECTOR, vmcs12->guest_cs_selector); in prepare_vmcs02_rare()
2204 vmcs_write16(GUEST_SS_SELECTOR, vmcs12->guest_ss_selector); in prepare_vmcs02_rare()
2205 vmcs_write16(GUEST_DS_SELECTOR, vmcs12->guest_ds_selector); in prepare_vmcs02_rare()
2206 vmcs_write16(GUEST_FS_SELECTOR, vmcs12->guest_fs_selector); in prepare_vmcs02_rare()
2207 vmcs_write16(GUEST_GS_SELECTOR, vmcs12->guest_gs_selector); in prepare_vmcs02_rare()
2208 vmcs_write16(GUEST_LDTR_SELECTOR, vmcs12->guest_ldtr_selector); in prepare_vmcs02_rare()
2209 vmcs_write16(GUEST_TR_SELECTOR, vmcs12->guest_tr_selector); in prepare_vmcs02_rare()
2210 vmcs_write32(GUEST_ES_LIMIT, vmcs12->guest_es_limit); in prepare_vmcs02_rare()
2211 vmcs_write32(GUEST_CS_LIMIT, vmcs12->guest_cs_limit); in prepare_vmcs02_rare()
2212 vmcs_write32(GUEST_SS_LIMIT, vmcs12->guest_ss_limit); in prepare_vmcs02_rare()
2213 vmcs_write32(GUEST_DS_LIMIT, vmcs12->guest_ds_limit); in prepare_vmcs02_rare()
2214 vmcs_write32(GUEST_FS_LIMIT, vmcs12->guest_fs_limit); in prepare_vmcs02_rare()
2215 vmcs_write32(GUEST_GS_LIMIT, vmcs12->guest_gs_limit); in prepare_vmcs02_rare()
2216 vmcs_write32(GUEST_LDTR_LIMIT, vmcs12->guest_ldtr_limit); in prepare_vmcs02_rare()
2217 vmcs_write32(GUEST_TR_LIMIT, vmcs12->guest_tr_limit); in prepare_vmcs02_rare()
2218 vmcs_write32(GUEST_GDTR_LIMIT, vmcs12->guest_gdtr_limit); in prepare_vmcs02_rare()
2219 vmcs_write32(GUEST_IDTR_LIMIT, vmcs12->guest_idtr_limit); in prepare_vmcs02_rare()
2220 vmcs_write32(GUEST_CS_AR_BYTES, vmcs12->guest_cs_ar_bytes); in prepare_vmcs02_rare()
2221 vmcs_write32(GUEST_SS_AR_BYTES, vmcs12->guest_ss_ar_bytes); in prepare_vmcs02_rare()
2222 vmcs_write32(GUEST_ES_AR_BYTES, vmcs12->guest_es_ar_bytes); in prepare_vmcs02_rare()
2223 vmcs_write32(GUEST_DS_AR_BYTES, vmcs12->guest_ds_ar_bytes); in prepare_vmcs02_rare()
2224 vmcs_write32(GUEST_FS_AR_BYTES, vmcs12->guest_fs_ar_bytes); in prepare_vmcs02_rare()
2225 vmcs_write32(GUEST_GS_AR_BYTES, vmcs12->guest_gs_ar_bytes); in prepare_vmcs02_rare()
2226 vmcs_write32(GUEST_LDTR_AR_BYTES, vmcs12->guest_ldtr_ar_bytes); in prepare_vmcs02_rare()
2227 vmcs_write32(GUEST_TR_AR_BYTES, vmcs12->guest_tr_ar_bytes); in prepare_vmcs02_rare()
2228 vmcs_writel(GUEST_ES_BASE, vmcs12->guest_es_base); in prepare_vmcs02_rare()
2229 vmcs_writel(GUEST_CS_BASE, vmcs12->guest_cs_base); in prepare_vmcs02_rare()
2230 vmcs_writel(GUEST_SS_BASE, vmcs12->guest_ss_base); in prepare_vmcs02_rare()
2231 vmcs_writel(GUEST_DS_BASE, vmcs12->guest_ds_base); in prepare_vmcs02_rare()
2232 vmcs_writel(GUEST_FS_BASE, vmcs12->guest_fs_base); in prepare_vmcs02_rare()
2233 vmcs_writel(GUEST_GS_BASE, vmcs12->guest_gs_base); in prepare_vmcs02_rare()
2234 vmcs_writel(GUEST_LDTR_BASE, vmcs12->guest_ldtr_base); in prepare_vmcs02_rare()
2235 vmcs_writel(GUEST_TR_BASE, vmcs12->guest_tr_base); in prepare_vmcs02_rare()
2236 vmcs_writel(GUEST_GDTR_BASE, vmcs12->guest_gdtr_base); in prepare_vmcs02_rare()
2237 vmcs_writel(GUEST_IDTR_BASE, vmcs12->guest_idtr_base); in prepare_vmcs02_rare()
2244 vmcs_write32(GUEST_SYSENTER_CS, vmcs12->guest_sysenter_cs); in prepare_vmcs02_rare()
2246 vmcs12->guest_pending_dbg_exceptions); in prepare_vmcs02_rare()
2247 vmcs_writel(GUEST_SYSENTER_ESP, vmcs12->guest_sysenter_esp); in prepare_vmcs02_rare()
2248 vmcs_writel(GUEST_SYSENTER_EIP, vmcs12->guest_sysenter_eip); in prepare_vmcs02_rare()
2255 vmcs_write64(GUEST_PDPTR0, vmcs12->guest_pdptr0); in prepare_vmcs02_rare()
2256 vmcs_write64(GUEST_PDPTR1, vmcs12->guest_pdptr1); in prepare_vmcs02_rare()
2257 vmcs_write64(GUEST_PDPTR2, vmcs12->guest_pdptr2); in prepare_vmcs02_rare()
2258 vmcs_write64(GUEST_PDPTR3, vmcs12->guest_pdptr3); in prepare_vmcs02_rare()
2262 (vmcs12->vm_entry_controls & VM_ENTRY_LOAD_BNDCFGS)) in prepare_vmcs02_rare()
2263 vmcs_write64(GUEST_BNDCFGS, vmcs12->guest_bndcfgs); in prepare_vmcs02_rare()
2266 if (nested_cpu_has_xsaves(vmcs12)) in prepare_vmcs02_rare()
2267 vmcs_write64(XSS_EXIT_BITMAP, vmcs12->xss_exit_bitmap); in prepare_vmcs02_rare()
2284 enable_ept ? vmcs12->page_fault_error_code_mask : 0); in prepare_vmcs02_rare()
2286 enable_ept ? vmcs12->page_fault_error_code_match : 0); in prepare_vmcs02_rare()
2289 vmcs_write64(EOI_EXIT_BITMAP0, vmcs12->eoi_exit_bitmap0); in prepare_vmcs02_rare()
2290 vmcs_write64(EOI_EXIT_BITMAP1, vmcs12->eoi_exit_bitmap1); in prepare_vmcs02_rare()
2291 vmcs_write64(EOI_EXIT_BITMAP2, vmcs12->eoi_exit_bitmap2); in prepare_vmcs02_rare()
2292 vmcs_write64(EOI_EXIT_BITMAP3, vmcs12->eoi_exit_bitmap3); in prepare_vmcs02_rare()
2312 static int prepare_vmcs02(struct kvm_vcpu *vcpu, struct vmcs12 *vmcs12, in prepare_vmcs02() argument
2320 prepare_vmcs02_rare(vmx, vmcs12); in prepare_vmcs02()
2329 (vmcs12->vm_entry_controls & VM_ENTRY_LOAD_DEBUG_CONTROLS)) { in prepare_vmcs02()
2330 kvm_set_dr(vcpu, 7, vmcs12->guest_dr7); in prepare_vmcs02()
2331 vmcs_write64(GUEST_IA32_DEBUGCTL, vmcs12->guest_ia32_debugctl); in prepare_vmcs02()
2337 !(vmcs12->vm_entry_controls & VM_ENTRY_LOAD_BNDCFGS))) in prepare_vmcs02()
2339 vmx_set_rflags(vcpu, vmcs12->guest_rflags); in prepare_vmcs02()
2346 vcpu->arch.cr0_guest_owned_bits &= ~vmcs12->cr0_guest_host_mask; in prepare_vmcs02()
2350 (vmcs12->vm_entry_controls & VM_ENTRY_LOAD_IA32_PAT)) { in prepare_vmcs02()
2351 vmcs_write64(GUEST_IA32_PAT, vmcs12->guest_ia32_pat); in prepare_vmcs02()
2352 vcpu->arch.pat = vmcs12->guest_ia32_pat; in prepare_vmcs02()
2371 if (nested_cpu_has_vpid(vmcs12) && nested_has_guest_tlb_tag(vcpu)) { in prepare_vmcs02()
2372 if (vmcs12->virtual_processor_id != vmx->nested.last_vpid) { in prepare_vmcs02()
2373 vmx->nested.last_vpid = vmcs12->virtual_processor_id; in prepare_vmcs02()
2389 if (nested_cpu_has_ept(vmcs12)) in prepare_vmcs02()
2391 else if (nested_cpu_has2(vmcs12, in prepare_vmcs02()
2403 vmx_set_cr0(vcpu, vmcs12->guest_cr0); in prepare_vmcs02()
2404 vmcs_writel(CR0_READ_SHADOW, nested_read_cr0(vmcs12)); in prepare_vmcs02()
2406 vmx_set_cr4(vcpu, vmcs12->guest_cr4); in prepare_vmcs02()
2407 vmcs_writel(CR4_READ_SHADOW, nested_read_cr4(vmcs12)); in prepare_vmcs02()
2409 vcpu->arch.efer = nested_vmx_calc_efer(vmx, vmcs12); in prepare_vmcs02()
2424 if (nested_vmx_load_cr3(vcpu, vmcs12->guest_cr3, nested_cpu_has_ept(vmcs12), in prepare_vmcs02()
2436 vmcs_writel(GUEST_CR3, vmcs12->guest_cr3); in prepare_vmcs02()
2439 if (load_guest_pdptrs_vmcs12 && nested_cpu_has_ept(vmcs12) && in prepare_vmcs02()
2441 vmcs_write64(GUEST_PDPTR0, vmcs12->guest_pdptr0); in prepare_vmcs02()
2442 vmcs_write64(GUEST_PDPTR1, vmcs12->guest_pdptr1); in prepare_vmcs02()
2443 vmcs_write64(GUEST_PDPTR2, vmcs12->guest_pdptr2); in prepare_vmcs02()
2444 vmcs_write64(GUEST_PDPTR3, vmcs12->guest_pdptr3); in prepare_vmcs02()
2450 kvm_rsp_write(vcpu, vmcs12->guest_rsp); in prepare_vmcs02()
2451 kvm_rip_write(vcpu, vmcs12->guest_rip); in prepare_vmcs02()
2455 static int nested_vmx_check_nmi_controls(struct vmcs12 *vmcs12) in nested_vmx_check_nmi_controls() argument
2457 if (CC(!nested_cpu_has_nmi_exiting(vmcs12) && in nested_vmx_check_nmi_controls()
2458 nested_cpu_has_virtual_nmis(vmcs12))) in nested_vmx_check_nmi_controls()
2461 if (CC(!nested_cpu_has_virtual_nmis(vmcs12) && in nested_vmx_check_nmi_controls()
2462 nested_cpu_has(vmcs12, CPU_BASED_NMI_WINDOW_EXITING))) in nested_vmx_check_nmi_controls()
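nested_vmx_check_nmi_controls() (lines 2455-2462) enforces two consistency rules between the controls L1 programmed: "virtual NMIs" is only legal together with "NMI exiting", and "NMI-window exiting" is only legal together with "virtual NMIs". The same rules as a standalone predicate, returning 0/-EINVAL in the kernel's convention:

    #include <stdbool.h>
    #include <errno.h>

    /*
     * Virtual NMIs require NMI exiting; NMI-window exiting requires virtual NMIs.
     * Returns 0 when the three controls are consistent, -EINVAL otherwise.
     */
    static int check_nmi_controls(bool nmi_exiting, bool virtual_nmis,
                                  bool nmi_window_exiting)
    {
            if (!nmi_exiting && virtual_nmis)
                    return -EINVAL;

            if (!virtual_nmis && nmi_window_exiting)
                    return -EINVAL;

            return 0;
    }

    int main(void)
    {
            /* NMI-window exiting without virtual NMIs must be rejected. */
            return check_nmi_controls(true, false, true) == -EINVAL ? 0 : 1;
    }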
2508 struct vmcs12 *vmcs12) in nested_check_vm_execution_controls() argument
2512 if (CC(!vmx_control_verify(vmcs12->pin_based_vm_exec_control, in nested_check_vm_execution_controls()
2515 CC(!vmx_control_verify(vmcs12->cpu_based_vm_exec_control, in nested_check_vm_execution_controls()
2520 if (nested_cpu_has(vmcs12, CPU_BASED_ACTIVATE_SECONDARY_CONTROLS) && in nested_check_vm_execution_controls()
2521 CC(!vmx_control_verify(vmcs12->secondary_vm_exec_control, in nested_check_vm_execution_controls()
2526 if (CC(vmcs12->cr3_target_count > nested_cpu_vmx_misc_cr3_count(vcpu)) || in nested_check_vm_execution_controls()
2527 nested_vmx_check_io_bitmap_controls(vcpu, vmcs12) || in nested_check_vm_execution_controls()
2528 nested_vmx_check_msr_bitmap_controls(vcpu, vmcs12) || in nested_check_vm_execution_controls()
2529 nested_vmx_check_tpr_shadow_controls(vcpu, vmcs12) || in nested_check_vm_execution_controls()
2530 nested_vmx_check_apic_access_controls(vcpu, vmcs12) || in nested_check_vm_execution_controls()
2531 nested_vmx_check_apicv_controls(vcpu, vmcs12) || in nested_check_vm_execution_controls()
2532 nested_vmx_check_nmi_controls(vmcs12) || in nested_check_vm_execution_controls()
2533 nested_vmx_check_pml_controls(vcpu, vmcs12) || in nested_check_vm_execution_controls()
2534 nested_vmx_check_unrestricted_guest_controls(vcpu, vmcs12) || in nested_check_vm_execution_controls()
2535 nested_vmx_check_mode_based_ept_exec_controls(vcpu, vmcs12) || in nested_check_vm_execution_controls()
2536 nested_vmx_check_shadow_vmcs_controls(vcpu, vmcs12) || in nested_check_vm_execution_controls()
2537 CC(nested_cpu_has_vpid(vmcs12) && !vmcs12->virtual_processor_id)) in nested_check_vm_execution_controls()
2540 if (!nested_cpu_has_preemption_timer(vmcs12) && in nested_check_vm_execution_controls()
2541 nested_cpu_has_save_preemption_timer(vmcs12)) in nested_check_vm_execution_controls()
2544 if (nested_cpu_has_ept(vmcs12) && in nested_check_vm_execution_controls()
2545 CC(!valid_ept_address(vcpu, vmcs12->ept_pointer))) in nested_check_vm_execution_controls()
2548 if (nested_cpu_has_vmfunc(vmcs12)) { in nested_check_vm_execution_controls()
2549 if (CC(vmcs12->vm_function_control & in nested_check_vm_execution_controls()
2553 if (nested_cpu_has_eptp_switching(vmcs12)) { in nested_check_vm_execution_controls()
2554 if (CC(!nested_cpu_has_ept(vmcs12)) || in nested_check_vm_execution_controls()
2555 CC(!page_address_valid(vcpu, vmcs12->eptp_list_address))) in nested_check_vm_execution_controls()
2567 struct vmcs12 *vmcs12) in nested_check_vm_exit_controls() argument
2571 if (CC(!vmx_control_verify(vmcs12->vm_exit_controls, in nested_check_vm_exit_controls()
2574 CC(nested_vmx_check_exit_msr_switch_controls(vcpu, vmcs12))) in nested_check_vm_exit_controls()
2584 struct vmcs12 *vmcs12) in nested_check_vm_entry_controls() argument
2588 if (CC(!vmx_control_verify(vmcs12->vm_entry_controls, in nested_check_vm_entry_controls()
2599 if (vmcs12->vm_entry_intr_info_field & INTR_INFO_VALID_MASK) { in nested_check_vm_entry_controls()
2600 u32 intr_info = vmcs12->vm_entry_intr_info_field; in nested_check_vm_entry_controls()
2605 bool urg = nested_cpu_has2(vmcs12, in nested_check_vm_entry_controls()
2607 bool prot_mode = !urg || vmcs12->guest_cr0 & X86_CR0_PE; in nested_check_vm_entry_controls()
2630 vmcs12->vm_entry_exception_error_code & GENMASK(31, 16))) in nested_check_vm_entry_controls()
2642 if (CC(vmcs12->vm_entry_instruction_len > 15) || in nested_check_vm_entry_controls()
2643 CC(vmcs12->vm_entry_instruction_len == 0 && in nested_check_vm_entry_controls()
2649 if (nested_vmx_check_entry_msr_switch_controls(vcpu, vmcs12)) in nested_check_vm_entry_controls()
2656 struct vmcs12 *vmcs12) in nested_vmx_check_controls() argument
2658 if (nested_check_vm_execution_controls(vcpu, vmcs12) || in nested_vmx_check_controls()
2659 nested_check_vm_exit_controls(vcpu, vmcs12) || in nested_vmx_check_controls()
2660 nested_check_vm_entry_controls(vcpu, vmcs12)) in nested_vmx_check_controls()
2667 struct vmcs12 *vmcs12) in nested_vmx_check_host_state() argument
2671 if (CC(!nested_host_cr0_valid(vcpu, vmcs12->host_cr0)) || in nested_vmx_check_host_state()
2672 CC(!nested_host_cr4_valid(vcpu, vmcs12->host_cr4)) || in nested_vmx_check_host_state()
2673 CC(!nested_cr3_valid(vcpu, vmcs12->host_cr3))) in nested_vmx_check_host_state()
2676 if (CC(is_noncanonical_address(vmcs12->host_ia32_sysenter_esp, vcpu)) || in nested_vmx_check_host_state()
2677 CC(is_noncanonical_address(vmcs12->host_ia32_sysenter_eip, vcpu))) in nested_vmx_check_host_state()
2680 if ((vmcs12->vm_exit_controls & VM_EXIT_LOAD_IA32_PAT) && in nested_vmx_check_host_state()
2681 CC(!kvm_pat_valid(vmcs12->host_ia32_pat))) in nested_vmx_check_host_state()
2691 if (CC(!(vmcs12->vm_exit_controls & VM_EXIT_HOST_ADDR_SPACE_SIZE)) || in nested_vmx_check_host_state()
2692 CC(!(vmcs12->host_cr4 & X86_CR4_PAE))) in nested_vmx_check_host_state()
2695 if (CC(vmcs12->vm_exit_controls & VM_EXIT_HOST_ADDR_SPACE_SIZE) || in nested_vmx_check_host_state()
2696 CC(vmcs12->vm_entry_controls & VM_ENTRY_IA32E_MODE) || in nested_vmx_check_host_state()
2697 CC(vmcs12->host_cr4 & X86_CR4_PCIDE) || in nested_vmx_check_host_state()
2698 CC((vmcs12->host_rip) >> 32)) in nested_vmx_check_host_state()
2702 if (CC(vmcs12->host_cs_selector & (SEGMENT_RPL_MASK | SEGMENT_TI_MASK)) || in nested_vmx_check_host_state()
2703 CC(vmcs12->host_ss_selector & (SEGMENT_RPL_MASK | SEGMENT_TI_MASK)) || in nested_vmx_check_host_state()
2704 CC(vmcs12->host_ds_selector & (SEGMENT_RPL_MASK | SEGMENT_TI_MASK)) || in nested_vmx_check_host_state()
2705 CC(vmcs12->host_es_selector & (SEGMENT_RPL_MASK | SEGMENT_TI_MASK)) || in nested_vmx_check_host_state()
2706 CC(vmcs12->host_fs_selector & (SEGMENT_RPL_MASK | SEGMENT_TI_MASK)) || in nested_vmx_check_host_state()
2707 CC(vmcs12->host_gs_selector & (SEGMENT_RPL_MASK | SEGMENT_TI_MASK)) || in nested_vmx_check_host_state()
2708 CC(vmcs12->host_tr_selector & (SEGMENT_RPL_MASK | SEGMENT_TI_MASK)) || in nested_vmx_check_host_state()
2709 CC(vmcs12->host_cs_selector == 0) || in nested_vmx_check_host_state()
2710 CC(vmcs12->host_tr_selector == 0) || in nested_vmx_check_host_state()
2711 CC(vmcs12->host_ss_selector == 0 && !ia32e)) in nested_vmx_check_host_state()
2715 if (CC(is_noncanonical_address(vmcs12->host_fs_base, vcpu)) || in nested_vmx_check_host_state()
2716 CC(is_noncanonical_address(vmcs12->host_gs_base, vcpu)) || in nested_vmx_check_host_state()
2717 CC(is_noncanonical_address(vmcs12->host_gdtr_base, vcpu)) || in nested_vmx_check_host_state()
2718 CC(is_noncanonical_address(vmcs12->host_idtr_base, vcpu)) || in nested_vmx_check_host_state()
2719 CC(is_noncanonical_address(vmcs12->host_tr_base, vcpu)) || in nested_vmx_check_host_state()
2720 CC(is_noncanonical_address(vmcs12->host_rip, vcpu))) in nested_vmx_check_host_state()
2730 if (vmcs12->vm_exit_controls & VM_EXIT_LOAD_IA32_EFER) { in nested_vmx_check_host_state()
2731 if (CC(!kvm_valid_efer(vcpu, vmcs12->host_ia32_efer)) || in nested_vmx_check_host_state()
2732 CC(ia32e != !!(vmcs12->host_ia32_efer & EFER_LMA)) || in nested_vmx_check_host_state()
2733 CC(ia32e != !!(vmcs12->host_ia32_efer & EFER_LME))) in nested_vmx_check_host_state()
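The host-state check at lines 2730-2733 ties the host EFER that will be loaded on VM exit to the "host address-space size" exit control: when VM_EXIT_LOAD_IA32_EFER is set, EFER.LMA and EFER.LME must both equal the ia32e bit (and the EFER value itself must pass kvm_valid_efer(), omitted here). A hedged standalone version of just that rule:

    #include <stdbool.h>
    #include <stdint.h>
    #include <errno.h>

    #define EFER_LME (1ull << 8)
    #define EFER_LMA (1ull << 10)

    /* 0 if the host EFER agrees with the "host address-space size" control, else -EINVAL. */
    static int check_host_efer(bool load_efer, bool ia32e, uint64_t host_efer)
    {
            if (!load_efer)
                    return 0;

            if (ia32e != !!(host_efer & EFER_LMA) ||
                ia32e != !!(host_efer & EFER_LME))
                    return -EINVAL;

            return 0;
    }

    int main(void)
    {
            /* 64-bit host (ia32e set) with EFER.LMA/LME set is consistent. */
            return check_host_efer(true, true, EFER_LMA | EFER_LME);
    }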
2741 struct vmcs12 *vmcs12) in nested_vmx_check_vmcs_link_ptr() argument
2744 struct vmcs12 *shadow; in nested_vmx_check_vmcs_link_ptr()
2747 if (vmcs12->vmcs_link_pointer == -1ull) in nested_vmx_check_vmcs_link_ptr()
2750 if (CC(!page_address_valid(vcpu, vmcs12->vmcs_link_pointer))) in nested_vmx_check_vmcs_link_ptr()
2753 if (CC(kvm_vcpu_map(vcpu, gpa_to_gfn(vmcs12->vmcs_link_pointer), &map))) in nested_vmx_check_vmcs_link_ptr()
2759 CC(shadow->hdr.shadow_vmcs != nested_cpu_has_shadow_vmcs(vmcs12))) in nested_vmx_check_vmcs_link_ptr()
2769 static int nested_check_guest_non_reg_state(struct vmcs12 *vmcs12) in nested_check_guest_non_reg_state() argument
2771 if (CC(vmcs12->guest_activity_state != GUEST_ACTIVITY_ACTIVE && in nested_check_guest_non_reg_state()
2772 vmcs12->guest_activity_state != GUEST_ACTIVITY_HLT)) in nested_check_guest_non_reg_state()
2779 struct vmcs12 *vmcs12, in nested_vmx_check_guest_state() argument
2782 bool ia32e = !!(vmcs12->vm_entry_controls & VM_ENTRY_IA32E_MODE); in nested_vmx_check_guest_state()
2786 if (CC(!nested_guest_cr0_valid(vcpu, vmcs12->guest_cr0)) || in nested_vmx_check_guest_state()
2787 CC(!nested_guest_cr4_valid(vcpu, vmcs12->guest_cr4))) in nested_vmx_check_guest_state()
2790 if ((vmcs12->vm_entry_controls & VM_ENTRY_LOAD_IA32_PAT) && in nested_vmx_check_guest_state()
2791 CC(!kvm_pat_valid(vmcs12->guest_ia32_pat))) in nested_vmx_check_guest_state()
2794 if (nested_vmx_check_vmcs_link_ptr(vcpu, vmcs12)) { in nested_vmx_check_guest_state()
2799 if (CC((vmcs12->guest_cr0 & (X86_CR0_PG | X86_CR0_PE)) == X86_CR0_PG)) in nested_vmx_check_guest_state()
2802 if (CC(ia32e && !(vmcs12->guest_cr4 & X86_CR4_PAE)) || in nested_vmx_check_guest_state()
2803 CC(ia32e && !(vmcs12->guest_cr0 & X86_CR0_PG))) in nested_vmx_check_guest_state()
2816 (vmcs12->vm_entry_controls & VM_ENTRY_LOAD_IA32_EFER)) { in nested_vmx_check_guest_state()
2817 if (CC(!kvm_valid_efer(vcpu, vmcs12->guest_ia32_efer)) || in nested_vmx_check_guest_state()
2818 CC(ia32e != !!(vmcs12->guest_ia32_efer & EFER_LMA)) || in nested_vmx_check_guest_state()
2819 CC(((vmcs12->guest_cr0 & X86_CR0_PG) && in nested_vmx_check_guest_state()
2820 ia32e != !!(vmcs12->guest_ia32_efer & EFER_LME)))) in nested_vmx_check_guest_state()
2824 if ((vmcs12->vm_entry_controls & VM_ENTRY_LOAD_BNDCFGS) && in nested_vmx_check_guest_state()
2825 (CC(is_noncanonical_address(vmcs12->guest_bndcfgs & PAGE_MASK, vcpu)) || in nested_vmx_check_guest_state()
2826 CC((vmcs12->guest_bndcfgs & MSR_IA32_BNDCFGS_RSVD)))) in nested_vmx_check_guest_state()
2829 if (nested_check_guest_non_reg_state(vmcs12)) in nested_vmx_check_guest_state()
2914 struct vmcs12 *vmcs12);
2918 struct vmcs12 *vmcs12 = get_vmcs12(vcpu); in nested_get_vmcs12_pages() local
2924 if (nested_cpu_has2(vmcs12, SECONDARY_EXEC_VIRTUALIZE_APIC_ACCESSES)) { in nested_get_vmcs12_pages()
2935 page = kvm_vcpu_gpa_to_page(vcpu, vmcs12->apic_access_addr); in nested_get_vmcs12_pages()
2951 if (nested_cpu_has(vmcs12, CPU_BASED_TPR_SHADOW)) { in nested_get_vmcs12_pages()
2954 if (!kvm_vcpu_map(vcpu, gpa_to_gfn(vmcs12->virtual_apic_page_addr), map)) { in nested_get_vmcs12_pages()
2956 } else if (nested_cpu_has(vmcs12, CPU_BASED_CR8_LOAD_EXITING) && in nested_get_vmcs12_pages()
2957 nested_cpu_has(vmcs12, CPU_BASED_CR8_STORE_EXITING) && in nested_get_vmcs12_pages()
2958 !nested_cpu_has2(vmcs12, SECONDARY_EXEC_VIRTUALIZE_APIC_ACCESSES)) { in nested_get_vmcs12_pages()
2977 if (nested_cpu_has_posted_intr(vmcs12)) { in nested_get_vmcs12_pages()
2980 if (!kvm_vcpu_map(vcpu, gpa_to_gfn(vmcs12->posted_intr_desc_addr), map)) { in nested_get_vmcs12_pages()
2983 offset_in_page(vmcs12->posted_intr_desc_addr)); in nested_get_vmcs12_pages()
2985 pfn_to_hpa(map->pfn) + offset_in_page(vmcs12->posted_intr_desc_addr)); in nested_get_vmcs12_pages()
2988 if (nested_vmx_prepare_msr_bitmap(vcpu, vmcs12)) in nested_get_vmcs12_pages()
3026 struct vmcs12 *vmcs12);
3042 struct vmcs12 *vmcs12 = get_vmcs12(vcpu); in nested_vmx_enter_non_root_mode() local
3053 !(vmcs12->vm_entry_controls & VM_ENTRY_LOAD_DEBUG_CONTROLS)) in nested_vmx_enter_non_root_mode()
3057 !(vmcs12->vm_entry_controls & VM_ENTRY_LOAD_BNDCFGS))) in nested_vmx_enter_non_root_mode()
3081 prepare_vmcs02_early(vmx, vmcs12); in nested_vmx_enter_non_root_mode()
3094 if (nested_vmx_check_guest_state(vcpu, vmcs12, &exit_qual)) in nested_vmx_enter_non_root_mode()
3099 if (vmcs12->cpu_based_vm_exec_control & CPU_BASED_USE_TSC_OFFSETTING) in nested_vmx_enter_non_root_mode()
3100 vcpu->arch.tsc_offset += vmcs12->tsc_offset; in nested_vmx_enter_non_root_mode()
3102 if (prepare_vmcs02(vcpu, vmcs12, &exit_qual)) in nested_vmx_enter_non_root_mode()
3108 vmcs12->vm_entry_msr_load_addr, in nested_vmx_enter_non_root_mode()
3109 vmcs12->vm_entry_msr_load_count); in nested_vmx_enter_non_root_mode()
3146 if (nested_cpu_has_preemption_timer(vmcs12)) in nested_vmx_enter_non_root_mode()
3163 if (vmcs12->cpu_based_vm_exec_control & CPU_BASED_USE_TSC_OFFSETTING) in nested_vmx_enter_non_root_mode()
3164 vcpu->arch.tsc_offset -= vmcs12->tsc_offset; in nested_vmx_enter_non_root_mode()
3173 load_vmcs12_host_state(vcpu, vmcs12); in nested_vmx_enter_non_root_mode()
3174 vmcs12->vm_exit_reason = exit_reason | VMX_EXIT_REASONS_FAILED_VMENTRY; in nested_vmx_enter_non_root_mode()
3175 vmcs12->exit_qualification = exit_qual; in nested_vmx_enter_non_root_mode()
3187 struct vmcs12 *vmcs12; in nested_vmx_run() local
3201 vmcs12 = get_vmcs12(vcpu); in nested_vmx_run()
3209 if (vmcs12->hdr.shadow_vmcs) in nested_vmx_run()
3215 vmcs12->launch_state = !launch; in nested_vmx_run()
3234 if (vmcs12->launch_state == launch) in nested_vmx_run()
3239 if (nested_vmx_check_controls(vcpu, vmcs12)) in nested_vmx_run()
3242 if (nested_vmx_check_host_state(vcpu, vmcs12)) in nested_vmx_run()
3267 nested_cache_shadow_vmcs12(vcpu, vmcs12); in nested_vmx_run()
3274 if ((vmcs12->guest_activity_state == GUEST_ACTIVITY_HLT) && in nested_vmx_run()
3275 !(vmcs12->vm_entry_intr_info_field & INTR_INFO_VALID_MASK) && in nested_vmx_run()
3276 !(vmcs12->cpu_based_vm_exec_control & CPU_BASED_NMI_WINDOW_EXITING) && in nested_vmx_run()
3277 !((vmcs12->cpu_based_vm_exec_control & CPU_BASED_INTR_WINDOW_EXITING) && in nested_vmx_run()
3278 (vmcs12->guest_rflags & X86_EFLAGS_IF))) { in nested_vmx_run()
3312 vmcs12_guest_cr0(struct kvm_vcpu *vcpu, struct vmcs12 *vmcs12) in vmcs12_guest_cr0() argument
3316 /*2*/ (vmcs12->guest_cr0 & vmcs12->cr0_guest_host_mask) | in vmcs12_guest_cr0()
3317 /*3*/ (vmcs_readl(CR0_READ_SHADOW) & ~(vmcs12->cr0_guest_host_mask | in vmcs12_guest_cr0()
3322 vmcs12_guest_cr4(struct kvm_vcpu *vcpu, struct vmcs12 *vmcs12) in vmcs12_guest_cr4() argument
3326 /*2*/ (vmcs12->guest_cr4 & vmcs12->cr4_guest_host_mask) | in vmcs12_guest_cr4()
3327 /*3*/ (vmcs_readl(CR4_READ_SHADOW) & ~(vmcs12->cr4_guest_host_mask | in vmcs12_guest_cr4()
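vmcs12_guest_cr0() and vmcs12_guest_cr4() (lines 3312-3327) reconstruct the CR0/CR4 value L1 should observe on VM exit by merging three sources according to who owns each bit: bits L0 intercepts come from the vCPU's tracked value, bits L1 asked to own via cr*_guest_host_mask come from vmcs12, and the remaining guest-owned bits come from the hardware read shadow. Below is a small sketch of that three-way merge, abstracted away from the vCPU bookkeeping; the kernel's exact ownership masks also fold in L0's own guest/host mask:

    #include <stdint.h>
    #include <stdio.h>

    /*
     * trapped:     value tracked by the hypervisor, used for bits in l0_mask
     * l1_value:    vmcs12's value, used for bits in l1_mask
     * read_shadow: hardware read shadow, used for everything neither mask covers
     */
    static uint64_t merge_guest_cr(uint64_t trapped, uint64_t l0_mask,
                                   uint64_t l1_value, uint64_t l1_mask,
                                   uint64_t read_shadow)
    {
            return (trapped & l0_mask) |
                   (l1_value & l1_mask) |
                   (read_shadow & ~(l0_mask | l1_mask));
    }

    int main(void)
    {
            /* Bit 0 owned by L0, bit 1 owned by L1, bit 2 left to the guest. */
            uint64_t cr0 = merge_guest_cr(0x1, 0x1, 0x2, 0x2, 0x4);

            printf("%#llx\n", (unsigned long long)cr0);     /* prints 0x7 */
            return 0;
    }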
3332 struct vmcs12 *vmcs12) in vmcs12_save_pending_event() argument
3342 vmcs12->vm_exit_instruction_len = in vmcs12_save_pending_event()
3350 vmcs12->idt_vectoring_error_code = in vmcs12_save_pending_event()
3354 vmcs12->idt_vectoring_info_field = idt_vectoring; in vmcs12_save_pending_event()
3356 vmcs12->idt_vectoring_info_field = in vmcs12_save_pending_event()
3364 vmcs12->vm_entry_instruction_len = in vmcs12_save_pending_event()
3369 vmcs12->idt_vectoring_info_field = idt_vectoring; in vmcs12_save_pending_event()
3376 struct vmcs12 *vmcs12 = get_vmcs12(vcpu); in nested_mark_vmcs12_pages_dirty() local
3384 if (nested_cpu_has(vmcs12, CPU_BASED_TPR_SHADOW)) { in nested_mark_vmcs12_pages_dirty()
3385 gfn = vmcs12->virtual_apic_page_addr >> PAGE_SHIFT; in nested_mark_vmcs12_pages_dirty()
3389 if (nested_cpu_has_posted_intr(vmcs12)) { in nested_mark_vmcs12_pages_dirty()
3390 gfn = vmcs12->posted_intr_desc_addr >> PAGE_SHIFT; in nested_mark_vmcs12_pages_dirty()
3431 struct vmcs12 *vmcs12 = get_vmcs12(vcpu); in nested_vmx_inject_exception_vmexit() local
3445 vmcs12->vm_exit_intr_error_code = (u16)vcpu->arch.exception.error_code; in nested_vmx_inject_exception_vmexit()
3454 if (!(vmcs12->idt_vectoring_info_field & VECTORING_INFO_VALID_MASK) && in nested_vmx_inject_exception_vmexit()
3581 struct vmcs12 *vmcs12) in sync_vmcs02_to_vmcs12_rare() argument
3585 vmcs12->guest_es_selector = vmcs_read16(GUEST_ES_SELECTOR); in sync_vmcs02_to_vmcs12_rare()
3586 vmcs12->guest_cs_selector = vmcs_read16(GUEST_CS_SELECTOR); in sync_vmcs02_to_vmcs12_rare()
3587 vmcs12->guest_ss_selector = vmcs_read16(GUEST_SS_SELECTOR); in sync_vmcs02_to_vmcs12_rare()
3588 vmcs12->guest_ds_selector = vmcs_read16(GUEST_DS_SELECTOR); in sync_vmcs02_to_vmcs12_rare()
3589 vmcs12->guest_fs_selector = vmcs_read16(GUEST_FS_SELECTOR); in sync_vmcs02_to_vmcs12_rare()
3590 vmcs12->guest_gs_selector = vmcs_read16(GUEST_GS_SELECTOR); in sync_vmcs02_to_vmcs12_rare()
3591 vmcs12->guest_ldtr_selector = vmcs_read16(GUEST_LDTR_SELECTOR); in sync_vmcs02_to_vmcs12_rare()
3592 vmcs12->guest_tr_selector = vmcs_read16(GUEST_TR_SELECTOR); in sync_vmcs02_to_vmcs12_rare()
3593 vmcs12->guest_es_limit = vmcs_read32(GUEST_ES_LIMIT); in sync_vmcs02_to_vmcs12_rare()
3594 vmcs12->guest_cs_limit = vmcs_read32(GUEST_CS_LIMIT); in sync_vmcs02_to_vmcs12_rare()
3595 vmcs12->guest_ss_limit = vmcs_read32(GUEST_SS_LIMIT); in sync_vmcs02_to_vmcs12_rare()
3596 vmcs12->guest_ds_limit = vmcs_read32(GUEST_DS_LIMIT); in sync_vmcs02_to_vmcs12_rare()
3597 vmcs12->guest_fs_limit = vmcs_read32(GUEST_FS_LIMIT); in sync_vmcs02_to_vmcs12_rare()
3598 vmcs12->guest_gs_limit = vmcs_read32(GUEST_GS_LIMIT); in sync_vmcs02_to_vmcs12_rare()
3599 vmcs12->guest_ldtr_limit = vmcs_read32(GUEST_LDTR_LIMIT); in sync_vmcs02_to_vmcs12_rare()
3600 vmcs12->guest_tr_limit = vmcs_read32(GUEST_TR_LIMIT); in sync_vmcs02_to_vmcs12_rare()
3601 vmcs12->guest_gdtr_limit = vmcs_read32(GUEST_GDTR_LIMIT); in sync_vmcs02_to_vmcs12_rare()
3602 vmcs12->guest_idtr_limit = vmcs_read32(GUEST_IDTR_LIMIT); in sync_vmcs02_to_vmcs12_rare()
3603 vmcs12->guest_es_ar_bytes = vmcs_read32(GUEST_ES_AR_BYTES); in sync_vmcs02_to_vmcs12_rare()
3604 vmcs12->guest_ds_ar_bytes = vmcs_read32(GUEST_DS_AR_BYTES); in sync_vmcs02_to_vmcs12_rare()
3605 vmcs12->guest_fs_ar_bytes = vmcs_read32(GUEST_FS_AR_BYTES); in sync_vmcs02_to_vmcs12_rare()
3606 vmcs12->guest_gs_ar_bytes = vmcs_read32(GUEST_GS_AR_BYTES); in sync_vmcs02_to_vmcs12_rare()
3607 vmcs12->guest_ldtr_ar_bytes = vmcs_read32(GUEST_LDTR_AR_BYTES); in sync_vmcs02_to_vmcs12_rare()
3608 vmcs12->guest_tr_ar_bytes = vmcs_read32(GUEST_TR_AR_BYTES); in sync_vmcs02_to_vmcs12_rare()
3609 vmcs12->guest_es_base = vmcs_readl(GUEST_ES_BASE); in sync_vmcs02_to_vmcs12_rare()
3610 vmcs12->guest_cs_base = vmcs_readl(GUEST_CS_BASE); in sync_vmcs02_to_vmcs12_rare()
3611 vmcs12->guest_ss_base = vmcs_readl(GUEST_SS_BASE); in sync_vmcs02_to_vmcs12_rare()
3612 vmcs12->guest_ds_base = vmcs_readl(GUEST_DS_BASE); in sync_vmcs02_to_vmcs12_rare()
3613 vmcs12->guest_fs_base = vmcs_readl(GUEST_FS_BASE); in sync_vmcs02_to_vmcs12_rare()
3614 vmcs12->guest_gs_base = vmcs_readl(GUEST_GS_BASE); in sync_vmcs02_to_vmcs12_rare()
3615 vmcs12->guest_ldtr_base = vmcs_readl(GUEST_LDTR_BASE); in sync_vmcs02_to_vmcs12_rare()
3616 vmcs12->guest_tr_base = vmcs_readl(GUEST_TR_BASE); in sync_vmcs02_to_vmcs12_rare()
3617 vmcs12->guest_gdtr_base = vmcs_readl(GUEST_GDTR_BASE); in sync_vmcs02_to_vmcs12_rare()
3618 vmcs12->guest_idtr_base = vmcs_readl(GUEST_IDTR_BASE); in sync_vmcs02_to_vmcs12_rare()
3619 vmcs12->guest_pending_dbg_exceptions = in sync_vmcs02_to_vmcs12_rare()
3622 vmcs12->guest_bndcfgs = vmcs_read64(GUEST_BNDCFGS); in sync_vmcs02_to_vmcs12_rare()
3628 struct vmcs12 *vmcs12) in copy_vmcs02_to_vmcs12_rare() argument
3643 sync_vmcs02_to_vmcs12_rare(vcpu, vmcs12); in copy_vmcs02_to_vmcs12_rare()
3656 static void sync_vmcs02_to_vmcs12(struct kvm_vcpu *vcpu, struct vmcs12 *vmcs12) in sync_vmcs02_to_vmcs12() argument
3661 sync_vmcs02_to_vmcs12_rare(vcpu, vmcs12); in sync_vmcs02_to_vmcs12()
3665 vmcs12->guest_cr0 = vmcs12_guest_cr0(vcpu, vmcs12); in sync_vmcs02_to_vmcs12()
3666 vmcs12->guest_cr4 = vmcs12_guest_cr4(vcpu, vmcs12); in sync_vmcs02_to_vmcs12()
3668 vmcs12->guest_rsp = kvm_rsp_read(vcpu); in sync_vmcs02_to_vmcs12()
3669 vmcs12->guest_rip = kvm_rip_read(vcpu); in sync_vmcs02_to_vmcs12()
3670 vmcs12->guest_rflags = vmcs_readl(GUEST_RFLAGS); in sync_vmcs02_to_vmcs12()
3672 vmcs12->guest_cs_ar_bytes = vmcs_read32(GUEST_CS_AR_BYTES); in sync_vmcs02_to_vmcs12()
3673 vmcs12->guest_ss_ar_bytes = vmcs_read32(GUEST_SS_AR_BYTES); in sync_vmcs02_to_vmcs12()
3675 vmcs12->guest_sysenter_cs = vmcs_read32(GUEST_SYSENTER_CS); in sync_vmcs02_to_vmcs12()
3676 vmcs12->guest_sysenter_esp = vmcs_readl(GUEST_SYSENTER_ESP); in sync_vmcs02_to_vmcs12()
3677 vmcs12->guest_sysenter_eip = vmcs_readl(GUEST_SYSENTER_EIP); in sync_vmcs02_to_vmcs12()
3679 vmcs12->guest_interruptibility_info = in sync_vmcs02_to_vmcs12()
3683 vmcs12->guest_activity_state = GUEST_ACTIVITY_HLT; in sync_vmcs02_to_vmcs12()
3685 vmcs12->guest_activity_state = GUEST_ACTIVITY_ACTIVE; in sync_vmcs02_to_vmcs12()
3687 if (nested_cpu_has_preemption_timer(vmcs12) && in sync_vmcs02_to_vmcs12()
3688 vmcs12->vm_exit_controls & VM_EXIT_SAVE_VMX_PREEMPTION_TIMER) in sync_vmcs02_to_vmcs12()
3689 vmcs12->vmx_preemption_timer_value = in sync_vmcs02_to_vmcs12()
3701 vmcs12->guest_cr3 = vmcs_readl(GUEST_CR3); in sync_vmcs02_to_vmcs12()
3702 if (nested_cpu_has_ept(vmcs12) && is_pae_paging(vcpu)) { in sync_vmcs02_to_vmcs12()
3703 vmcs12->guest_pdptr0 = vmcs_read64(GUEST_PDPTR0); in sync_vmcs02_to_vmcs12()
3704 vmcs12->guest_pdptr1 = vmcs_read64(GUEST_PDPTR1); in sync_vmcs02_to_vmcs12()
3705 vmcs12->guest_pdptr2 = vmcs_read64(GUEST_PDPTR2); in sync_vmcs02_to_vmcs12()
3706 vmcs12->guest_pdptr3 = vmcs_read64(GUEST_PDPTR3); in sync_vmcs02_to_vmcs12()
3710 vmcs12->guest_linear_address = vmcs_readl(GUEST_LINEAR_ADDRESS); in sync_vmcs02_to_vmcs12()
3712 if (nested_cpu_has_vid(vmcs12)) in sync_vmcs02_to_vmcs12()
3713 vmcs12->guest_intr_status = vmcs_read16(GUEST_INTR_STATUS); in sync_vmcs02_to_vmcs12()
3715 vmcs12->vm_entry_controls = in sync_vmcs02_to_vmcs12()
3716 (vmcs12->vm_entry_controls & ~VM_ENTRY_IA32E_MODE) | in sync_vmcs02_to_vmcs12()
3719 if (vmcs12->vm_exit_controls & VM_EXIT_SAVE_DEBUG_CONTROLS) in sync_vmcs02_to_vmcs12()
3720 kvm_get_dr(vcpu, 7, (unsigned long *)&vmcs12->guest_dr7); in sync_vmcs02_to_vmcs12()
3722 if (vmcs12->vm_exit_controls & VM_EXIT_SAVE_IA32_EFER) in sync_vmcs02_to_vmcs12()
3723 vmcs12->guest_ia32_efer = vcpu->arch.efer; in sync_vmcs02_to_vmcs12()
3737 static void prepare_vmcs12(struct kvm_vcpu *vcpu, struct vmcs12 *vmcs12, in prepare_vmcs12() argument
3742 vmcs12->vm_exit_reason = exit_reason; in prepare_vmcs12()
3743 vmcs12->exit_qualification = exit_qualification; in prepare_vmcs12()
3750 if (!(vmcs12->vm_exit_reason & VMX_EXIT_REASONS_FAILED_VMENTRY)) { in prepare_vmcs12()
3751 vmcs12->launch_state = 1; in prepare_vmcs12()
3755 vmcs12->vm_entry_intr_info_field &= ~INTR_INFO_VALID_MASK; in prepare_vmcs12()
3761 vmcs12->idt_vectoring_info_field = 0; in prepare_vmcs12()
3762 vmcs12_save_pending_event(vcpu, vmcs12); in prepare_vmcs12()
3764 vmcs12->vm_exit_intr_info = exit_intr_info; in prepare_vmcs12()
3765 vmcs12->vm_exit_instruction_len = vmcs_read32(VM_EXIT_INSTRUCTION_LEN); in prepare_vmcs12()
3766 vmcs12->vmx_instruction_info = vmcs_read32(VMX_INSTRUCTION_INFO); in prepare_vmcs12()
3775 vmcs12->vm_exit_msr_store_addr, in prepare_vmcs12()
3776 vmcs12->vm_exit_msr_store_count)) in prepare_vmcs12()
3792 struct vmcs12 *vmcs12) in load_vmcs12_host_state() argument
3797 if (vmcs12->vm_exit_controls & VM_EXIT_LOAD_IA32_EFER) in load_vmcs12_host_state()
3798 vcpu->arch.efer = vmcs12->host_ia32_efer; in load_vmcs12_host_state()
3799 else if (vmcs12->vm_exit_controls & VM_EXIT_HOST_ADDR_SPACE_SIZE) in load_vmcs12_host_state()
3805 kvm_rsp_write(vcpu, vmcs12->host_rsp); in load_vmcs12_host_state()
3806 kvm_rip_write(vcpu, vmcs12->host_rip); in load_vmcs12_host_state()
3818 vmx_set_cr0(vcpu, vmcs12->host_cr0); in load_vmcs12_host_state()
3822 vmx_set_cr4(vcpu, vmcs12->host_cr4); in load_vmcs12_host_state()
3830 if (nested_vmx_load_cr3(vcpu, vmcs12->host_cr3, false, &entry_failure_code)) in load_vmcs12_host_state()
3851 (!nested_cpu_has_vpid(vmcs12) || !nested_has_guest_tlb_tag(vcpu))) { in load_vmcs12_host_state()
3855 vmcs_write32(GUEST_SYSENTER_CS, vmcs12->host_ia32_sysenter_cs); in load_vmcs12_host_state()
3856 vmcs_writel(GUEST_SYSENTER_ESP, vmcs12->host_ia32_sysenter_esp); in load_vmcs12_host_state()
3857 vmcs_writel(GUEST_SYSENTER_EIP, vmcs12->host_ia32_sysenter_eip); in load_vmcs12_host_state()
3858 vmcs_writel(GUEST_IDTR_BASE, vmcs12->host_idtr_base); in load_vmcs12_host_state()
3859 vmcs_writel(GUEST_GDTR_BASE, vmcs12->host_gdtr_base); in load_vmcs12_host_state()
3864 if (vmcs12->vm_exit_controls & VM_EXIT_CLEAR_BNDCFGS) in load_vmcs12_host_state()
3867 if (vmcs12->vm_exit_controls & VM_EXIT_LOAD_IA32_PAT) { in load_vmcs12_host_state()
3868 vmcs_write64(GUEST_IA32_PAT, vmcs12->host_ia32_pat); in load_vmcs12_host_state()
3869 vcpu->arch.pat = vmcs12->host_ia32_pat; in load_vmcs12_host_state()
3871 if (vmcs12->vm_exit_controls & VM_EXIT_LOAD_IA32_PERF_GLOBAL_CTRL) in load_vmcs12_host_state()
3873 vmcs12->host_ia32_perf_global_ctrl); in load_vmcs12_host_state()
3880 .selector = vmcs12->host_cs_selector, in load_vmcs12_host_state()
3886 if (vmcs12->vm_exit_controls & VM_EXIT_HOST_ADDR_SPACE_SIZE) in load_vmcs12_host_state()
3900 seg.selector = vmcs12->host_ds_selector; in load_vmcs12_host_state()
3902 seg.selector = vmcs12->host_es_selector; in load_vmcs12_host_state()
3904 seg.selector = vmcs12->host_ss_selector; in load_vmcs12_host_state()
3906 seg.selector = vmcs12->host_fs_selector; in load_vmcs12_host_state()
3907 seg.base = vmcs12->host_fs_base; in load_vmcs12_host_state()
3909 seg.selector = vmcs12->host_gs_selector; in load_vmcs12_host_state()
3910 seg.base = vmcs12->host_gs_base; in load_vmcs12_host_state()
3913 .base = vmcs12->host_tr_base, in load_vmcs12_host_state()
3915 .selector = vmcs12->host_tr_selector, in load_vmcs12_host_state()
3927 if (nested_vmx_load_msr(vcpu, vmcs12->vm_exit_msr_load_addr, in load_vmcs12_host_state()
3928 vmcs12->vm_exit_msr_load_count)) in load_vmcs12_host_state()
3957 struct vmcs12 *vmcs12 = get_vmcs12(vcpu); in nested_vmx_restore_host_state() local
3965 if (vmcs12->vm_entry_controls & VM_ENTRY_LOAD_DEBUG_CONTROLS) { in nested_vmx_restore_host_state()
4019 for (i = 0; i < vmcs12->vm_entry_msr_load_count; i++) { in nested_vmx_restore_host_state()
4020 gpa = vmcs12->vm_entry_msr_load_addr + (i * sizeof(g)); in nested_vmx_restore_host_state()
4028 for (j = 0; j < vmcs12->vm_exit_msr_load_count; j++) { in nested_vmx_restore_host_state()
4029 gpa = vmcs12->vm_exit_msr_load_addr + (j * sizeof(h)); in nested_vmx_restore_host_state()
4072 struct vmcs12 *vmcs12 = get_vmcs12(vcpu); in nested_vmx_vmexit() local
4079 if (nested_cpu_has_preemption_timer(vmcs12)) in nested_vmx_vmexit()
4082 if (vmcs12->cpu_based_vm_exec_control & CPU_BASED_USE_TSC_OFFSETTING) in nested_vmx_vmexit()
4083 vcpu->arch.tsc_offset -= vmcs12->tsc_offset; in nested_vmx_vmexit()
4086 sync_vmcs02_to_vmcs12(vcpu, vmcs12); in nested_vmx_vmexit()
4089 prepare_vmcs12(vcpu, vmcs12, exit_reason, exit_intr_info, in nested_vmx_vmexit()
4101 nested_flush_cached_shadow_vmcs12(vcpu, vmcs12); in nested_vmx_vmexit()
4149 } else if (!nested_cpu_has_ept(vmcs12) && in nested_vmx_vmexit()
4150 nested_cpu_has2(vmcs12, in nested_vmx_vmexit()
4181 vmcs12->vm_exit_intr_info = irq | in nested_vmx_vmexit()
4186 trace_kvm_nested_vmexit_inject(vmcs12->vm_exit_reason, in nested_vmx_vmexit()
4187 vmcs12->exit_qualification, in nested_vmx_vmexit()
4188 vmcs12->idt_vectoring_info_field, in nested_vmx_vmexit()
4189 vmcs12->vm_exit_intr_info, in nested_vmx_vmexit()
4190 vmcs12->vm_exit_intr_error_code, in nested_vmx_vmexit()
4193 load_vmcs12_host_state(vcpu, vmcs12); in nested_vmx_vmexit()
4593 vmptr + offsetof(struct vmcs12, in handle_vmclear()
4625 struct vmcs12 *vmcs12 = is_guest_mode(vcpu) ? get_shadow_vmcs12(vcpu) in handle_vmread() local
4651 copy_vmcs02_to_vmcs12_rare(vcpu, vmcs12); in handle_vmread()
4654 field_value = vmcs12_read_any(vmcs12, field, offset); in handle_vmread()
4720 struct vmcs12 *vmcs12 = is_guest_mode(vcpu) ? get_shadow_vmcs12(vcpu) in handle_vmwrite() local
4772 copy_vmcs02_to_vmcs12_rare(vcpu, vmcs12); in handle_vmwrite()
4785 vmcs12_write_any(vmcs12, field, offset, field_value); in handle_vmwrite()
4852 struct vmcs12 *new_vmcs12; in handle_vmptrld()
5054 struct vmcs12 *vmcs12) in nested_vmx_eptp_switching() argument
5061 if (!nested_cpu_has_eptp_switching(vmcs12) || in nested_vmx_eptp_switching()
5062 !nested_cpu_has_ept(vmcs12)) in nested_vmx_eptp_switching()
5069 if (kvm_vcpu_read_guest_page(vcpu, vmcs12->eptp_list_address >> PAGE_SHIFT, in nested_vmx_eptp_switching()
5079 if (vmcs12->ept_pointer != address) { in nested_vmx_eptp_switching()
5086 vmcs12->ept_pointer = address; in nested_vmx_eptp_switching()
5101 struct vmcs12 *vmcs12; in handle_vmfunc() local
5114 vmcs12 = get_vmcs12(vcpu); in handle_vmfunc()
5115 if (!(vmcs12->vm_function_control & BIT_ULL(function))) in handle_vmfunc()
5120 if (nested_vmx_eptp_switching(vcpu, vmcs12)) in handle_vmfunc()
5142 struct vmcs12 *vmcs12 = get_vmcs12(vcpu); in nested_vmx_check_io_bitmaps() local
5151 bitmap = vmcs12->io_bitmap_a; in nested_vmx_check_io_bitmaps()
5153 bitmap = vmcs12->io_bitmap_b; in nested_vmx_check_io_bitmaps()
5173 struct vmcs12 *vmcs12) in nested_vmx_exit_handled_io() argument
5179 if (!nested_cpu_has(vmcs12, CPU_BASED_USE_IO_BITMAPS)) in nested_vmx_exit_handled_io()
5180 return nested_cpu_has(vmcs12, CPU_BASED_UNCOND_IO_EXITING); in nested_vmx_exit_handled_io()
5197 struct vmcs12 *vmcs12, u32 exit_reason) in nested_vmx_exit_handled_msr() argument
5202 if (!nested_cpu_has(vmcs12, CPU_BASED_USE_MSR_BITMAPS)) in nested_vmx_exit_handled_msr()
5210 bitmap = vmcs12->msr_bitmap; in nested_vmx_exit_handled_msr()
5234 struct vmcs12 *vmcs12) in nested_vmx_exit_handled_cr() argument
5247 if (vmcs12->cr0_guest_host_mask & in nested_vmx_exit_handled_cr()
5248 (val ^ vmcs12->cr0_read_shadow)) in nested_vmx_exit_handled_cr()
5252 if ((vmcs12->cr3_target_count >= 1 && in nested_vmx_exit_handled_cr()
5253 vmcs12->cr3_target_value0 == val) || in nested_vmx_exit_handled_cr()
5254 (vmcs12->cr3_target_count >= 2 && in nested_vmx_exit_handled_cr()
5255 vmcs12->cr3_target_value1 == val) || in nested_vmx_exit_handled_cr()
5256 (vmcs12->cr3_target_count >= 3 && in nested_vmx_exit_handled_cr()
5257 vmcs12->cr3_target_value2 == val) || in nested_vmx_exit_handled_cr()
5258 (vmcs12->cr3_target_count >= 4 && in nested_vmx_exit_handled_cr()
5259 vmcs12->cr3_target_value3 == val)) in nested_vmx_exit_handled_cr()
5261 if (nested_cpu_has(vmcs12, CPU_BASED_CR3_LOAD_EXITING)) in nested_vmx_exit_handled_cr()
5265 if (vmcs12->cr4_guest_host_mask & in nested_vmx_exit_handled_cr()
5266 (vmcs12->cr4_read_shadow ^ val)) in nested_vmx_exit_handled_cr()
5270 if (nested_cpu_has(vmcs12, CPU_BASED_CR8_LOAD_EXITING)) in nested_vmx_exit_handled_cr()
5276 if ((vmcs12->cr0_guest_host_mask & X86_CR0_TS) && in nested_vmx_exit_handled_cr()
5277 (vmcs12->cr0_read_shadow & X86_CR0_TS)) in nested_vmx_exit_handled_cr()
5283 if (vmcs12->cpu_based_vm_exec_control & in nested_vmx_exit_handled_cr()
5288 if (vmcs12->cpu_based_vm_exec_control & in nested_vmx_exit_handled_cr()
5300 if (vmcs12->cr0_guest_host_mask & 0xe & in nested_vmx_exit_handled_cr()
5301 (val ^ vmcs12->cr0_read_shadow)) in nested_vmx_exit_handled_cr()
5303 if ((vmcs12->cr0_guest_host_mask & 0x1) && in nested_vmx_exit_handled_cr()
5304 !(vmcs12->cr0_read_shadow & 0x1) && in nested_vmx_exit_handled_cr()
5313 struct vmcs12 *vmcs12, gpa_t bitmap) in nested_vmx_exit_handled_vmcs_access() argument
5319 if (!nested_cpu_has_shadow_vmcs(vmcs12)) in nested_vmx_exit_handled_vmcs_access()
5345 struct vmcs12 *vmcs12 = get_vmcs12(vcpu); in nested_vmx_exit_reflected() local
5390 return vmcs12->exception_bitmap & in nested_vmx_exit_reflected()
5397 return nested_cpu_has(vmcs12, CPU_BASED_INTR_WINDOW_EXITING); in nested_vmx_exit_reflected()
5399 return nested_cpu_has(vmcs12, CPU_BASED_NMI_WINDOW_EXITING); in nested_vmx_exit_reflected()
5405 return nested_cpu_has(vmcs12, CPU_BASED_HLT_EXITING); in nested_vmx_exit_reflected()
5409 return nested_cpu_has(vmcs12, CPU_BASED_INVLPG_EXITING); in nested_vmx_exit_reflected()
5411 return nested_cpu_has(vmcs12, CPU_BASED_RDPMC_EXITING); in nested_vmx_exit_reflected()
5413 return nested_cpu_has2(vmcs12, SECONDARY_EXEC_RDRAND_EXITING); in nested_vmx_exit_reflected()
5415 return nested_cpu_has2(vmcs12, SECONDARY_EXEC_RDSEED_EXITING); in nested_vmx_exit_reflected()
5417 return nested_cpu_has(vmcs12, CPU_BASED_RDTSC_EXITING); in nested_vmx_exit_reflected()
5419 return nested_vmx_exit_handled_vmcs_access(vcpu, vmcs12, in nested_vmx_exit_reflected()
5420 vmcs12->vmread_bitmap); in nested_vmx_exit_reflected()
5422 return nested_vmx_exit_handled_vmcs_access(vcpu, vmcs12, in nested_vmx_exit_reflected()
5423 vmcs12->vmwrite_bitmap); in nested_vmx_exit_reflected()
5435 return nested_vmx_exit_handled_cr(vcpu, vmcs12); in nested_vmx_exit_reflected()
5437 return nested_cpu_has(vmcs12, CPU_BASED_MOV_DR_EXITING); in nested_vmx_exit_reflected()
5439 return nested_vmx_exit_handled_io(vcpu, vmcs12); in nested_vmx_exit_reflected()
5441 return nested_cpu_has2(vmcs12, SECONDARY_EXEC_DESC); in nested_vmx_exit_reflected()
5444 return nested_vmx_exit_handled_msr(vcpu, vmcs12, exit_reason); in nested_vmx_exit_reflected()
5448 return nested_cpu_has(vmcs12, CPU_BASED_MWAIT_EXITING); in nested_vmx_exit_reflected()
5450 return nested_cpu_has(vmcs12, CPU_BASED_MONITOR_TRAP_FLAG); in nested_vmx_exit_reflected()
5452 return nested_cpu_has(vmcs12, CPU_BASED_MONITOR_EXITING); in nested_vmx_exit_reflected()
5454 return nested_cpu_has(vmcs12, CPU_BASED_PAUSE_EXITING) || in nested_vmx_exit_reflected()
5455 nested_cpu_has2(vmcs12, in nested_vmx_exit_reflected()
5460 return nested_cpu_has(vmcs12, CPU_BASED_TPR_SHADOW); in nested_vmx_exit_reflected()
5488 nested_cpu_has2(vmcs12, SECONDARY_EXEC_ENABLE_INVPCID) && in nested_vmx_exit_reflected()
5489 nested_cpu_has(vmcs12, CPU_BASED_INVLPG_EXITING); in nested_vmx_exit_reflected()
5491 return nested_cpu_has2(vmcs12, SECONDARY_EXEC_WBINVD_EXITING); in nested_vmx_exit_reflected()
5501 return nested_cpu_has2(vmcs12, SECONDARY_EXEC_XSAVES); in nested_vmx_exit_reflected()
5515 return nested_cpu_has2(vmcs12, in nested_vmx_exit_reflected()
5528 struct vmcs12 *vmcs12; in vmx_get_nested_state() local
5543 vmcs12 = get_vmcs12(vcpu); in vmx_get_nested_state()
5551 kvm_state.size += sizeof(user_vmx_nested_state->vmcs12); in vmx_get_nested_state()
5557 nested_cpu_has_shadow_vmcs(vmcs12) && in vmx_get_nested_state()
5558 vmcs12->vmcs_link_pointer != -1ull) in vmx_get_nested_state()
5593 sync_vmcs02_to_vmcs12(vcpu, vmcs12); in vmx_get_nested_state()
5594 sync_vmcs02_to_vmcs12_rare(vcpu, vmcs12); in vmx_get_nested_state()
5605 BUILD_BUG_ON(sizeof(user_vmx_nested_state->vmcs12) < VMCS12_SIZE); in vmx_get_nested_state()
5612 if (copy_to_user(user_vmx_nested_state->vmcs12, vmcs12, VMCS12_SIZE)) in vmx_get_nested_state()
5615 if (nested_cpu_has_shadow_vmcs(vmcs12) && in vmx_get_nested_state()
5616 vmcs12->vmcs_link_pointer != -1ull) { in vmx_get_nested_state()
5643 struct vmcs12 *vmcs12; in vmx_set_nested_state() local
5716 if (kvm_state->size < sizeof(*kvm_state) + sizeof(*vmcs12)) in vmx_set_nested_state()
5743 vmcs12 = get_vmcs12(vcpu); in vmx_set_nested_state()
5744 if (copy_from_user(vmcs12, user_vmx_nested_state->vmcs12, sizeof(*vmcs12))) in vmx_set_nested_state()
5747 if (vmcs12->hdr.revision_id != VMCS12_REVISION) in vmx_set_nested_state()
5757 if (nested_cpu_has_shadow_vmcs(vmcs12) && in vmx_set_nested_state()
5758 vmcs12->vmcs_link_pointer != -1ull) { in vmx_set_nested_state()
5759 struct vmcs12 *shadow_vmcs12 = get_shadow_vmcs12(vcpu); in vmx_set_nested_state()
5763 sizeof(user_vmx_nested_state->vmcs12) + sizeof(*shadow_vmcs12)) in vmx_set_nested_state()
5778 if (nested_vmx_check_controls(vcpu, vmcs12) || in vmx_set_nested_state()
5779 nested_vmx_check_host_state(vcpu, vmcs12) || in vmx_set_nested_state()
5780 nested_vmx_check_guest_state(vcpu, vmcs12, &exit_qual)) in vmx_set_nested_state()