Lines Matching refs:vcpu_vmx

621 static inline int __find_msr_index(struct vcpu_vmx *vmx, u32 msr)  in __find_msr_index()
631 struct shared_msr_entry *find_msr_entry(struct vcpu_vmx *vmx, u32 msr) in find_msr_entry()
723 static bool vmx_segment_cache_test_set(struct vcpu_vmx *vmx, unsigned seg, in vmx_segment_cache_test_set()
738 static u16 vmx_read_guest_seg_selector(struct vcpu_vmx *vmx, unsigned seg) in vmx_read_guest_seg_selector()
747 static ulong vmx_read_guest_seg_base(struct vcpu_vmx *vmx, unsigned seg) in vmx_read_guest_seg_base()
756 static u32 vmx_read_guest_seg_limit(struct vcpu_vmx *vmx, unsigned seg) in vmx_read_guest_seg_limit()
765 static u32 vmx_read_guest_seg_ar(struct vcpu_vmx *vmx, unsigned seg) in vmx_read_guest_seg_ar()
831 static void clear_atomic_switch_msr_special(struct vcpu_vmx *vmx, in clear_atomic_switch_msr_special()
849 static void clear_atomic_switch_msr(struct vcpu_vmx *vmx, unsigned msr) in clear_atomic_switch_msr()
889 static void add_atomic_switch_msr_special(struct vcpu_vmx *vmx, in add_atomic_switch_msr_special()
901 static void add_atomic_switch_msr(struct vcpu_vmx *vmx, unsigned msr, in add_atomic_switch_msr()
967 static bool update_transition_efer(struct vcpu_vmx *vmx, int efer_offset) in update_transition_efer()
1072 static void pt_guest_enter(struct vcpu_vmx *vmx) in pt_guest_enter()
1089 static void pt_guest_exit(struct vcpu_vmx *vmx) in pt_guest_exit()
1132 struct vcpu_vmx *vmx = to_vmx(vcpu); in vmx_prepare_switch_to_guest()
1197 static void vmx_prepare_switch_to_host(struct vcpu_vmx *vmx) in vmx_prepare_switch_to_host()
1237 static u64 vmx_read_guest_kernel_gs_base(struct vcpu_vmx *vmx) in vmx_read_guest_kernel_gs_base()
1246 static void vmx_write_guest_kernel_gs_base(struct vcpu_vmx *vmx, u64 data) in vmx_write_guest_kernel_gs_base()
1314 struct vcpu_vmx *vmx = to_vmx(vcpu); in vmx_vcpu_load_vmcs()
1381 struct vcpu_vmx *vmx = to_vmx(vcpu); in vmx_vcpu_load()
1483 struct vcpu_vmx *vmx = to_vmx(vcpu); in vmx_rtit_ctl_check()
1596 struct vcpu_vmx *vmx = to_vmx(vcpu); in vmx_queue_exception()
1644 static void move_msr_up(struct vcpu_vmx *vmx, int from, int to) in move_msr_up()
1658 static void setup_msrs(struct vcpu_vmx *vmx) in setup_msrs()
1767 struct vcpu_vmx *vmx = to_vmx(vcpu); in vmx_get_msr()
1907 struct vcpu_vmx *vmx = to_vmx(vcpu); in vmx_set_msr()
2674 struct vcpu_vmx *vmx = to_vmx(vcpu); in enter_pmode()
2747 struct vcpu_vmx *vmx = to_vmx(vcpu); in enter_rmode()
2795 struct vcpu_vmx *vmx = to_vmx(vcpu); in vmx_set_efer()
2914 struct vcpu_vmx *vmx = to_vmx(vcpu); in ept_update_paging_mode_cr0()
2938 struct vcpu_vmx *vmx = to_vmx(vcpu); in vmx_set_cr0()
3031 struct vcpu_vmx *vmx = to_vmx(vcpu); in vmx_set_cr4()
3106 struct vcpu_vmx *vmx = to_vmx(vcpu); in vmx_get_segment()
3153 struct vcpu_vmx *vmx = to_vmx(vcpu); in vmx_get_cpl()
3185 struct vcpu_vmx *vmx = to_vmx(vcpu); in vmx_set_segment()
3724 struct vcpu_vmx *vmx = to_vmx(vcpu); in vmx_update_msr_bitmap()
3738 void pt_update_intercept_for_msr(struct vcpu_vmx *vmx) in pt_update_intercept_for_msr()
3767 struct vcpu_vmx *vmx = to_vmx(vcpu); in vmx_guest_apic_has_interrupt()
3827 struct vcpu_vmx *vmx = to_vmx(vcpu); in vmx_deliver_nested_posted_interrupt()
3853 struct vcpu_vmx *vmx = to_vmx(vcpu); in vmx_deliver_posted_interrupt()
3877 void vmx_set_constant_host_state(struct vcpu_vmx *vmx) in vmx_set_constant_host_state()
3934 void set_cr4_guest_host_mask(struct vcpu_vmx *vmx) in set_cr4_guest_host_mask()
3945 u32 vmx_pin_based_exec_ctrl(struct vcpu_vmx *vmx) in vmx_pin_based_exec_ctrl()
3963 struct vcpu_vmx *vmx = to_vmx(vcpu); in vmx_refresh_apicv_exec_ctrl()
3981 u32 vmx_exec_control(struct vcpu_vmx *vmx) in vmx_exec_control()
4008 static void vmx_compute_secondary_exec_control(struct vcpu_vmx *vmx) in vmx_compute_secondary_exec_control()
4167 static void vmx_vcpu_setup(struct vcpu_vmx *vmx) in vmx_vcpu_setup()
4276 struct vcpu_vmx *vmx = to_vmx(vcpu); in vmx_vcpu_reset()
4391 struct vcpu_vmx *vmx = to_vmx(vcpu); in vmx_inject_irq()
4419 struct vcpu_vmx *vmx = to_vmx(vcpu); in vmx_inject_nmi()
4450 struct vcpu_vmx *vmx = to_vmx(vcpu); in vmx_get_nmi_mask()
4464 struct vcpu_vmx *vmx = to_vmx(vcpu); in vmx_set_nmi_mask()
4611 struct vcpu_vmx *vmx = to_vmx(vcpu); in handle_exception_nmi()
5107 struct vcpu_vmx *vmx = to_vmx(vcpu); in handle_task_switch()
5236 struct vcpu_vmx *vmx = to_vmx(vcpu); in handle_invalid_guest_state()
5290 struct vcpu_vmx *vmx = to_vmx(vcpu); in grow_ple_window()
5306 struct vcpu_vmx *vmx = to_vmx(vcpu); in shrink_ple_window()
5526 struct vcpu_vmx *vmx = to_vmx(vcpu); in handle_preemption_timer()
5623 static void vmx_destroy_pml_buffer(struct vcpu_vmx *vmx) in vmx_destroy_pml_buffer()
5633 struct vcpu_vmx *vmx = to_vmx(vcpu); in vmx_flush_pml_buffer()
5861 struct vcpu_vmx *vmx = to_vmx(vcpu); in vmx_handle_exit()
6049 struct vcpu_vmx *vmx = to_vmx(vcpu); in vmx_set_virtual_apic_mode()
6150 struct vcpu_vmx *vmx = to_vmx(vcpu); in vmx_sync_pir_to_irr()
6207 struct vcpu_vmx *vmx = to_vmx(vcpu); in vmx_apicv_post_state_restore()
6213 static void handle_exception_nmi_irqoff(struct vcpu_vmx *vmx) in handle_exception_nmi_irqoff()
6281 struct vcpu_vmx *vmx = to_vmx(vcpu); in vmx_handle_exit_irqoff()
6313 static void vmx_recover_nmi_blocking(struct vcpu_vmx *vmx) in vmx_recover_nmi_blocking()
6410 static void vmx_complete_interrupts(struct vcpu_vmx *vmx) in vmx_complete_interrupts()
6427 static void atomic_switch_perf_msrs(struct vcpu_vmx *vmx) in atomic_switch_perf_msrs()
6445 static void atomic_switch_umwait_control_msr(struct vcpu_vmx *vmx) in atomic_switch_umwait_control_msr()
6464 struct vcpu_vmx *vmx = to_vmx(vcpu); in vmx_update_hv_timer()
6488 void vmx_update_host_rsp(struct vcpu_vmx *vmx, unsigned long host_rsp) in vmx_update_host_rsp()
6496 bool __vmx_vcpu_run(struct vcpu_vmx *vmx, unsigned long *regs, bool launched);
6500 struct vcpu_vmx *vmx = to_vmx(vcpu); in vmx_vcpu_run()
6688 struct vcpu_vmx *vmx = to_vmx(vcpu); in vmx_free_vcpu()
6705 struct vcpu_vmx *vmx; in vmx_create_vcpu()
6709 BUILD_BUG_ON_MSG(offsetof(struct vcpu_vmx, vcpu) != 0, in vmx_create_vcpu()
6944 static void vmcs_set_secondary_exec_control(struct vcpu_vmx *vmx) in vmcs_set_secondary_exec_control()
6970 struct vcpu_vmx *vmx = to_vmx(vcpu); in nested_vmx_cr_fixed1_bits_update()
7009 struct vcpu_vmx *vmx = to_vmx(vcpu); in nested_vmx_entry_exit_ctls_update()
7026 struct vcpu_vmx *vmx = to_vmx(vcpu); in update_intel_pt_cfg()
7095 struct vcpu_vmx *vmx = to_vmx(vcpu); in vmx_cpuid_update()
7174 struct vcpu_vmx *vmx; in vmx_set_hv_timer()
7248 struct vcpu_vmx *vmx = to_vmx(vcpu); in vmx_write_pml_buffer()
7533 struct vcpu_vmx *vmx = to_vmx(vcpu); in vmx_pre_enter_smm()
7547 struct vcpu_vmx *vmx = to_vmx(vcpu); in vmx_pre_leave_smm()
7992 r = kvm_init(&vmx_x86_ops, sizeof(struct vcpu_vmx), in vmx_init()
7993 __alignof__(struct vcpu_vmx), THIS_MODULE); in vmx_init()
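
Nearly every hit in the listing above opens with the same idiom, struct vcpu_vmx *vmx = to_vmx(vcpu), and two of the hits explain why it is cheap: the BUILD_BUG_ON_MSG at 6709 pins the embedded struct kvm_vcpu to offset 0 of struct vcpu_vmx, and the kvm_init() call at 7992-7993 tells the generic KVM code to allocate vcpus with the size and alignment of struct vcpu_vmx. The following is a minimal sketch of that container_of()-style accessor pattern, not the real kernel definitions; the struct fields shown are placeholders, and only the conversion itself and the offset-0 invariant are taken from the lines listed.

#include <stddef.h>

struct kvm_vcpu {
	int vcpu_id;            /* placeholder field, not the real layout */
};

struct vcpu_vmx {
	struct kvm_vcpu vcpu;   /* embedded at offset 0 (cf. the check at 6709) */
	unsigned long host_rsp; /* illustrative VMX-private field */
};

/* Recover the containing vcpu_vmx from the generic kvm_vcpu pointer. */
static inline struct vcpu_vmx *to_vmx(struct kvm_vcpu *vcpu)
{
	return (struct vcpu_vmx *)((char *)vcpu -
				   offsetof(struct vcpu_vmx, vcpu));
}

Because the offset is zero, the subtraction reduces to a pointer cast, which is why to_vmx(vcpu) can appear at the top of hot paths such as vmx_vcpu_run() and vmx_handle_exit() without cost: the generic code only ever sees struct kvm_vcpu pointers, while the VMX code downcasts them to reach its private state.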