Lines Matching refs:hv_vcpu
168 struct kvm_vcpu_hv *hv_vcpu = vcpu_to_hv_vcpu(vcpu); in kvm_hv_notify_acked_sint() local
175 for (idx = 0; idx < ARRAY_SIZE(hv_vcpu->stimer); idx++) { in kvm_hv_notify_acked_sint()
176 stimer = &hv_vcpu->stimer[idx]; in kvm_hv_notify_acked_sint()
193 struct kvm_vcpu_hv *hv_vcpu = &vcpu->arch.hyperv; in synic_exit() local
195 hv_vcpu->exit.type = KVM_EXIT_HYPERV_SYNIC; in synic_exit()
196 hv_vcpu->exit.u.synic.msr = msr; in synic_exit()
197 hv_vcpu->exit.u.synic.control = synic->control; in synic_exit()
198 hv_vcpu->exit.u.synic.evt_page = synic->evt_page; in synic_exit()
199 hv_vcpu->exit.u.synic.msg_page = synic->msg_page; in synic_exit()
297 struct kvm_vcpu_hv *hv_vcpu = &vcpu->arch.hyperv; in syndbg_exit() local
299 hv_vcpu->exit.type = KVM_EXIT_HYPERV_SYNDBG; in syndbg_exit()
300 hv_vcpu->exit.u.syndbg.msr = msr; in syndbg_exit()
301 hv_vcpu->exit.u.syndbg.control = syndbg->control.control; in syndbg_exit()
302 hv_vcpu->exit.u.syndbg.send_page = syndbg->control.send_page; in syndbg_exit()
303 hv_vcpu->exit.u.syndbg.recv_page = syndbg->control.recv_page; in syndbg_exit()
304 hv_vcpu->exit.u.syndbg.pending_page = syndbg->control.pending_page; in syndbg_exit()
804 struct kvm_vcpu_hv *hv_vcpu = vcpu_to_hv_vcpu(vcpu); in kvm_hv_process_stimers() local
809 for (i = 0; i < ARRAY_SIZE(hv_vcpu->stimer); i++) in kvm_hv_process_stimers()
810 if (test_and_clear_bit(i, hv_vcpu->stimer_pending_bitmap)) { in kvm_hv_process_stimers()
811 stimer = &hv_vcpu->stimer[i]; in kvm_hv_process_stimers()
834 struct kvm_vcpu_hv *hv_vcpu = vcpu_to_hv_vcpu(vcpu); in kvm_hv_vcpu_uninit() local
837 for (i = 0; i < ARRAY_SIZE(hv_vcpu->stimer); i++) in kvm_hv_vcpu_uninit()
838 stimer_cleanup(&hv_vcpu->stimer[i]); in kvm_hv_vcpu_uninit()
885 struct kvm_vcpu_hv *hv_vcpu = vcpu_to_hv_vcpu(vcpu); in kvm_hv_vcpu_init() local
888 synic_init(&hv_vcpu->synic); in kvm_hv_vcpu_init()
890 bitmap_zero(hv_vcpu->stimer_pending_bitmap, HV_SYNIC_STIMER_COUNT); in kvm_hv_vcpu_init()
891 for (i = 0; i < ARRAY_SIZE(hv_vcpu->stimer); i++) in kvm_hv_vcpu_init()
892 stimer_init(&hv_vcpu->stimer[i], i); in kvm_hv_vcpu_init()
897 struct kvm_vcpu_hv *hv_vcpu = vcpu_to_hv_vcpu(vcpu); in kvm_hv_vcpu_postcreate() local
899 hv_vcpu->vp_index = kvm_vcpu_get_idx(vcpu); in kvm_hv_vcpu_postcreate()
1219 struct kvm_vcpu_hv *hv_vcpu = &vcpu->arch.hyperv; in kvm_hv_set_msr() local
1230 if (new_vp_index == hv_vcpu->vp_index) in kvm_hv_set_msr()
1239 if (hv_vcpu->vp_index == vcpu_idx) in kvm_hv_set_msr()
1244 hv_vcpu->vp_index = new_vp_index; in kvm_hv_set_msr()
1252 hv_vcpu->hv_vapic = data; in kvm_hv_set_msr()
1269 hv_vcpu->hv_vapic = data; in kvm_hv_set_msr()
1286 hv_vcpu->runtime_offset = data - current_task_runtime_100ns(); in kvm_hv_set_msr()
1382 struct kvm_vcpu_hv *hv_vcpu = &vcpu->arch.hyperv; in kvm_hv_get_msr() local
1386 data = hv_vcpu->vp_index; in kvm_hv_get_msr()
1395 data = hv_vcpu->hv_vapic; in kvm_hv_get_msr()
1398 data = current_task_runtime_100ns() + hv_vcpu->runtime_offset; in kvm_hv_get_msr()
1497 struct kvm_vcpu_hv *hv_vcpu = &current_vcpu->arch.hyperv; in kvm_hv_flush_tlb() local
1557 cpumask_clear(&hv_vcpu->tlb_flush); in kvm_hv_flush_tlb()
1568 NULL, vcpu_mask, &hv_vcpu->tlb_flush); in kvm_hv_flush_tlb()