Lines Matching refs:hv_vcpu
168 struct kvm_vcpu_hv *hv_vcpu = vcpu_to_hv_vcpu(vcpu); in kvm_hv_notify_acked_sint() local
175 for (idx = 0; idx < ARRAY_SIZE(hv_vcpu->stimer); idx++) { in kvm_hv_notify_acked_sint()
176 stimer = &hv_vcpu->stimer[idx]; in kvm_hv_notify_acked_sint()
193 struct kvm_vcpu_hv *hv_vcpu = &vcpu->arch.hyperv; in synic_exit() local
195 hv_vcpu->exit.type = KVM_EXIT_HYPERV_SYNIC; in synic_exit()
196 hv_vcpu->exit.u.synic.msr = msr; in synic_exit()
197 hv_vcpu->exit.u.synic.control = synic->control; in synic_exit()
198 hv_vcpu->exit.u.synic.evt_page = synic->evt_page; in synic_exit()
199 hv_vcpu->exit.u.synic.msg_page = synic->msg_page; in synic_exit()
300 struct kvm_vcpu_hv *hv_vcpu = &vcpu->arch.hyperv; in syndbg_exit() local
302 hv_vcpu->exit.type = KVM_EXIT_HYPERV_SYNDBG; in syndbg_exit()
303 hv_vcpu->exit.u.syndbg.msr = msr; in syndbg_exit()
304 hv_vcpu->exit.u.syndbg.control = syndbg->control.control; in syndbg_exit()
305 hv_vcpu->exit.u.syndbg.send_page = syndbg->control.send_page; in syndbg_exit()
306 hv_vcpu->exit.u.syndbg.recv_page = syndbg->control.recv_page; in syndbg_exit()
307 hv_vcpu->exit.u.syndbg.pending_page = syndbg->control.pending_page; in syndbg_exit()
812 struct kvm_vcpu_hv *hv_vcpu = vcpu_to_hv_vcpu(vcpu); in kvm_hv_process_stimers() local
817 for (i = 0; i < ARRAY_SIZE(hv_vcpu->stimer); i++) in kvm_hv_process_stimers()
818 if (test_and_clear_bit(i, hv_vcpu->stimer_pending_bitmap)) { in kvm_hv_process_stimers()
819 stimer = &hv_vcpu->stimer[i]; in kvm_hv_process_stimers()
842 struct kvm_vcpu_hv *hv_vcpu = vcpu_to_hv_vcpu(vcpu); in kvm_hv_vcpu_uninit() local
845 for (i = 0; i < ARRAY_SIZE(hv_vcpu->stimer); i++) in kvm_hv_vcpu_uninit()
846 stimer_cleanup(&hv_vcpu->stimer[i]); in kvm_hv_vcpu_uninit()
893 struct kvm_vcpu_hv *hv_vcpu = vcpu_to_hv_vcpu(vcpu); in kvm_hv_vcpu_init() local
896 synic_init(&hv_vcpu->synic); in kvm_hv_vcpu_init()
898 bitmap_zero(hv_vcpu->stimer_pending_bitmap, HV_SYNIC_STIMER_COUNT); in kvm_hv_vcpu_init()
899 for (i = 0; i < ARRAY_SIZE(hv_vcpu->stimer); i++) in kvm_hv_vcpu_init()
900 stimer_init(&hv_vcpu->stimer[i], i); in kvm_hv_vcpu_init()
905 struct kvm_vcpu_hv *hv_vcpu = vcpu_to_hv_vcpu(vcpu); in kvm_hv_vcpu_postcreate() local
907 hv_vcpu->vp_index = kvm_vcpu_get_idx(vcpu); in kvm_hv_vcpu_postcreate()
1227 struct kvm_vcpu_hv *hv_vcpu = &vcpu->arch.hyperv; in kvm_hv_set_msr() local
1238 if (new_vp_index == hv_vcpu->vp_index) in kvm_hv_set_msr()
1247 if (hv_vcpu->vp_index == vcpu_idx) in kvm_hv_set_msr()
1252 hv_vcpu->vp_index = new_vp_index; in kvm_hv_set_msr()
1260 hv_vcpu->hv_vapic = data; in kvm_hv_set_msr()
1277 hv_vcpu->hv_vapic = data; in kvm_hv_set_msr()
1294 hv_vcpu->runtime_offset = data - current_task_runtime_100ns(); in kvm_hv_set_msr()
1390 struct kvm_vcpu_hv *hv_vcpu = &vcpu->arch.hyperv; in kvm_hv_get_msr() local
1394 data = hv_vcpu->vp_index; in kvm_hv_get_msr()
1403 data = hv_vcpu->hv_vapic; in kvm_hv_get_msr()
1406 data = current_task_runtime_100ns() + hv_vcpu->runtime_offset; in kvm_hv_get_msr()
1505 struct kvm_vcpu_hv *hv_vcpu = &current_vcpu->arch.hyperv; in kvm_hv_flush_tlb() local
1565 cpumask_clear(&hv_vcpu->tlb_flush); in kvm_hv_flush_tlb()
1578 NULL, vcpu_mask, &hv_vcpu->tlb_flush); in kvm_hv_flush_tlb()