Lines matching references to `hc` (the struct kvm_hv_hcall hypercall-arguments context) in arch/x86/kvm/hyperv.c:
In kvm_hv_flush_tlb():

1752  static u64 kvm_hv_flush_tlb(struct kvm_vcpu *vcpu, struct kvm_hv_hcall *hc, bool ex)
1769          if (hc->fast) {
1770                  flush.address_space = hc->ingpa;
1771                  flush.flags = hc->outgpa;
1772                  flush.processor_mask = sse128_lo(hc->xmm[0]);
1774          if (unlikely(kvm_read_guest(kvm, hc->ingpa,
1795          if (hc->fast) {
1796                  flush_ex.address_space = hc->ingpa;
1797                  flush_ex.flags = hc->outgpa;
1799                          &hc->xmm[0], sizeof(hc->xmm[0]));
1801          if (unlikely(kvm_read_guest(kvm, hc->ingpa, &flush_ex,
1821          if (hc->fast) {
1825                  sparse_banks[i] = sse128_lo(hc->xmm[i / 2 + 1]);
1826                  sparse_banks[i + 1] = sse128_hi(hc->xmm[i / 2 + 1]);
1829                  gpa = hc->ingpa + offsetof(struct hv_tlb_flush_ex,
1855                  ((u64)hc->rep_cnt << HV_HYPERCALL_REP_COMP_OFFSET);
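The split at 1769/1774 (repeated at 1795/1801 for the _EX form) is the heart of the fast-hypercall path: with hc->fast set, the flush arguments already arrived in registers (GPRs plus XMM0 for the processor mask), so the handler copies them straight out of the kvm_hv_hcall context instead of reading guest memory. Line 1855 shows how a rep hypercall reports completion, folding the processed rep count into bits 43:32 of the result. Below is a standalone sketch of both patterns; the types are simplified, and read_guest() is a toy stand-in for kvm_read_guest(), so all names here are illustrative rather than the kernel's:

    #include <stdint.h>
    #include <stdio.h>
    #include <string.h>

    #define HV_STATUS_SUCCESS                 0
    #define HV_STATUS_INVALID_HYPERCALL_INPUT 3
    #define HV_HYPERCALL_REP_COMP_OFFSET      32

    /* Input block of the non-EX flush hypercall, per the TLFS. */
    struct hv_tlb_flush {
        uint64_t address_space;
        uint64_t flags;
        uint64_t processor_mask;
    };

    /* Reduced stand-in for struct kvm_hv_hcall. */
    struct hcall {
        int      fast;
        uint64_t ingpa;   /* input GPA, or first fast argument   */
        uint64_t outgpa;  /* output GPA, or second fast argument */
        uint64_t xmm0_lo; /* low 64 bits of XMM0 (fast form)     */
        uint16_t rep_cnt;
    };

    static uint8_t guest_mem[4096]; /* toy "guest physical memory" */

    static int read_guest(uint64_t gpa, void *dst, size_t len)
    {
        if (gpa + len > sizeof(guest_mem))
            return -1; /* unmapped GPA */
        memcpy(dst, guest_mem + gpa, len);
        return 0;
    }

    static uint64_t flush_tlb(const struct hcall *hc)
    {
        struct hv_tlb_flush flush;

        if (hc->fast) {
            /* Arguments arrived in registers: no memory access. */
            flush.address_space  = hc->ingpa;
            flush.flags          = hc->outgpa;
            flush.processor_mask = hc->xmm0_lo;
        } else if (read_guest(hc->ingpa, &flush, sizeof(flush))) {
            return HV_STATUS_INVALID_HYPERCALL_INPUT;
        }

        /* ... a real handler would flush for flush.processor_mask ... */
        (void)flush;

        /* Success, with all reps reported complete in bits 43:32. */
        return (uint64_t)HV_STATUS_SUCCESS |
               ((uint64_t)hc->rep_cnt << HV_HYPERCALL_REP_COMP_OFFSET);
    }

    int main(void)
    {
        struct hcall hc = { .fast = 1, .ingpa = 0, .outgpa = 0,
                            .xmm0_lo = 0x5, .rep_cnt = 4 };
        printf("result = %#llx\n", (unsigned long long)flush_tlb(&hc));
        return 0;
    }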
In kvm_hv_send_ipi():

1877  static u64 kvm_hv_send_ipi(struct kvm_vcpu *vcpu, struct kvm_hv_hcall *hc, bool ex)
1892          if (!hc->fast) {
1893                  if (unlikely(kvm_read_guest(kvm, hc->ingpa, &send_ipi,
1900                  if (unlikely(hc->ingpa >> 32 != 0))
1902                  sparse_banks[0] = hc->outgpa;
1903                  vector = (u32)hc->ingpa;
1910          if (unlikely(kvm_read_guest(kvm, hc->ingpa, &send_ipi_ex,
1930                          hc->ingpa + offsetof(struct hv_send_ipi_ex,
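Lines 1900-1903 show the fast form of HVCALL_SEND_IPI: rather than pointing at an input page, the first argument carries the interrupt vector in its low 32 bits (the upper 32 are reserved and must be zero) and the second argument carries the target processor mask directly. A minimal sketch of that unpacking; the function name and harness are illustrative:

    #include <stdint.h>
    #include <stdio.h>

    #define HV_STATUS_SUCCESS                 0
    #define HV_STATUS_INVALID_HYPERCALL_INPUT 3

    /* Fast HVCALL_SEND_IPI: arg1 = vector (low 32 bits, rest
     * reserved-zero), arg2 = target processor mask. */
    static uint64_t send_ipi_fast(uint64_t arg1, uint64_t arg2)
    {
        uint32_t vector;
        uint64_t cpu_mask;

        if (arg1 >> 32 != 0) /* reserved bits must be clear */
            return HV_STATUS_INVALID_HYPERCALL_INPUT;

        vector   = (uint32_t)arg1;
        cpu_mask = arg2;

        /* ... deliver 'vector' to every vCPU set in 'cpu_mask' ... */
        printf("IPI vector %u -> mask %#llx\n",
               vector, (unsigned long long)cpu_mask);
        return HV_STATUS_SUCCESS;
    }

    int main(void)
    {
        send_ipi_fast(0x31, 0x0f); /* vector 0x31 to vCPUs 0-3 */
        return 0;
    }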
In kvm_hvcall_signal_event():

2047  static u16 kvm_hvcall_signal_event(struct kvm_vcpu *vcpu, struct kvm_hv_hcall *hc)
2052          if (unlikely(!hc->fast)) {
2054                  gpa_t gpa = hc->ingpa;
2056                  if ((gpa & (__alignof__(hc->ingpa) - 1)) ||
2057                      offset_in_page(gpa) + sizeof(hc->ingpa) > PAGE_SIZE)
2061                          &hc->ingpa, sizeof(hc->ingpa));
2071          if (hc->ingpa & 0xffff00000000ULL)
2074          if (hc->ingpa & ~KVM_HYPERV_CONN_ID_MASK)
2079          eventfd = idr_find(&hv->conn_to_evt, hc->ingpa);
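On the slow path (2052-2061) the connection ID has to be fetched from guest memory first, so the handler validates the parameter GPA before reading it: natural u64 alignment, and the 8-byte read must not cross a page boundary. A self-contained version of those two tests; PAGE_SIZE is written out numerically here and the helper name is mine, only the checks themselves come from the listing:

    #include <assert.h>
    #include <stdint.h>

    #define PAGE_SIZE 4096ULL

    /* Mirror of the checks at 2056-2057: the 8-byte parameter must be
     * naturally aligned and must lie entirely within one guest page. */
    static int param_gpa_is_valid(uint64_t gpa)
    {
        uint64_t offset = gpa & (PAGE_SIZE - 1);   /* offset_in_page() */

        if (gpa & (sizeof(uint64_t) - 1))          /* misaligned */
            return 0;
        if (offset + sizeof(uint64_t) > PAGE_SIZE) /* crosses a page */
            return 0;
        return 1;
    }

    int main(void)
    {
        assert(param_gpa_is_valid(0x1000));  /* aligned, in one page */
        assert(!param_gpa_is_valid(0x1005)); /* misaligned */
        return 0;
    }

Strictly, an 8-byte object that passes the alignment test can never cross a 4 KiB page, so the second test is belt-and-suspenders; the kernel keeps both anyway, which makes the intent explicit and survives a change of either size.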
In is_xmm_fast_hypercall():

2088  static bool is_xmm_fast_hypercall(struct kvm_hv_hcall *hc)
2090          switch (hc->code) {

In kvm_hv_hypercall_read_xmm():

2101  static void kvm_hv_hypercall_read_xmm(struct kvm_hv_hcall *hc)
2107                  _kvm_read_sse_reg(reg, &hc->xmm[reg]);
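These two helpers carry the XMM fast-hypercall extension: is_xmm_fast_hypercall() whitelists the call codes whose fast form passes extra input in XMM registers (the TLB-flush variants above), and kvm_hv_hypercall_read_xmm() snapshots those registers into hc->xmm so the per-call handlers consume register and memory inputs through the same structure. Per the TLFS, an XMM fast hypercall may carry up to six 128-bit input values in XMM0-XMM5, which is what the sse128_lo()/sse128_hi() accessors in kvm_hv_flush_tlb() pick apart.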
In kvm_hv_hypercall():

2161          struct kvm_hv_hcall hc;
2175                  hc.param = kvm_rcx_read(vcpu);
2176                  hc.ingpa = kvm_rdx_read(vcpu);
2177                  hc.outgpa = kvm_r8_read(vcpu);
2181                  hc.param = ((u64)kvm_rdx_read(vcpu) << 32) |
2183                  hc.ingpa = ((u64)kvm_rbx_read(vcpu) << 32) |
2185                  hc.outgpa = ((u64)kvm_rdi_read(vcpu) << 32) |
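The two branches reflect the TLFS calling convention: a 64-bit guest passes the hypercall input value and the two parameters in RCX, RDX and R8, while a 32-bit guest splits each 64-bit quantity across a register pair (EDX:EAX, EBX:ECX, EDI:ESI), which is why the 32-bit arm at 2181-2185 assembles every field from a shifted high half OR'd with a masked low half on the elided continuation lines.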
2189          hc.code = hc.param & 0xffff;
2190          hc.fast = !!(hc.param & HV_HYPERCALL_FAST_BIT);
2191          hc.rep_cnt = (hc.param >> HV_HYPERCALL_REP_COMP_OFFSET) & 0xfff;
2192          hc.rep_idx = (hc.param >> HV_HYPERCALL_REP_START_OFFSET) & 0xfff;
2193          hc.rep = !!(hc.rep_cnt || hc.rep_idx);
2195          trace_kvm_hv_hypercall(hc.code, hc.fast, hc.rep_cnt, hc.rep_idx,
2196                                 hc.ingpa, hc.outgpa);
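Lines 2189-2193 decode the hypercall input value, whose layout the TLFS fixes: call code in bits 15:0, the fast-input flag in bit 16, rep count in bits 43:32 and the starting rep index in bits 59:48 (hence HV_HYPERCALL_REP_COMP_OFFSET = 32 and HV_HYPERCALL_REP_START_OFFSET = 48). A standalone sketch of that decode with the constants written out; the struct and the harness are illustrative:

    #include <stdint.h>
    #include <stdio.h>

    #define HV_HYPERCALL_FAST_BIT         (1ULL << 16)
    #define HV_HYPERCALL_REP_COMP_OFFSET  32
    #define HV_HYPERCALL_REP_START_OFFSET 48

    struct hcall_fields {
        unsigned int code;    /* bits 15:0  - call code           */
        unsigned int fast;    /* bit  16    - register-based args */
        unsigned int rep_cnt; /* bits 43:32 - total rep count     */
        unsigned int rep_idx; /* bits 59:48 - starting rep index  */
        unsigned int rep;
    };

    static struct hcall_fields decode_hcall_input(uint64_t param)
    {
        struct hcall_fields f = {
            .code    = param & 0xffff,
            .fast    = !!(param & HV_HYPERCALL_FAST_BIT),
            .rep_cnt = (param >> HV_HYPERCALL_REP_COMP_OFFSET) & 0xfff,
            .rep_idx = (param >> HV_HYPERCALL_REP_START_OFFSET) & 0xfff,
        };
        f.rep = f.rep_cnt || f.rep_idx;
        return f;
    }

    int main(void)
    {
        /* HVCALL_FLUSH_VIRTUAL_ADDRESS_LIST (0x0003), fast, 4 reps. */
        uint64_t param = 0x0003 | HV_HYPERCALL_FAST_BIT |
                         (4ULL << HV_HYPERCALL_REP_COMP_OFFSET);
        struct hcall_fields f = decode_hcall_input(param);

        printf("code=%#x fast=%u rep_cnt=%u rep_idx=%u rep=%u\n",
               f.code, f.fast, f.rep_cnt, f.rep_idx, f.rep);
        return 0;
    }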
2198          if (unlikely(!hv_check_hypercall_access(hv_vcpu, hc.code))) {
2203          if (hc.fast && is_xmm_fast_hypercall(&hc)) {
2211                  kvm_hv_hypercall_read_xmm(&hc);
2214          switch (hc.code) {
2216                  if (unlikely(hc.rep)) {
2223                  if (unlikely(hc.rep)) {
2227                  ret = kvm_hvcall_signal_event(vcpu, &hc);
2233                  if (unlikely(hc.rep || !to_hv_synic(vcpu)->active)) {
2239                  vcpu->run->hyperv.u.hcall.input = hc.param;
2240                  vcpu->run->hyperv.u.hcall.params[0] = hc.ingpa;
2241                  vcpu->run->hyperv.u.hcall.params[1] = hc.outgpa;
2246                  if (unlikely(!hc.rep_cnt || hc.rep_idx)) {
2250                  ret = kvm_hv_flush_tlb(vcpu, &hc, false);
2253                  if (unlikely(hc.rep)) {
2257                  ret = kvm_hv_flush_tlb(vcpu, &hc, false);
2260                  if (unlikely(!hc.rep_cnt || hc.rep_idx)) {
2264                  ret = kvm_hv_flush_tlb(vcpu, &hc, true);
2267                  if (unlikely(hc.rep)) {
2271                  ret = kvm_hv_flush_tlb(vcpu, &hc, true);
2274                  if (unlikely(hc.rep)) {
2278                  ret = kvm_hv_send_ipi(vcpu, &hc, false);
2281                  if (unlikely(hc.fast || hc.rep)) {
2285                  ret = kvm_hv_send_ipi(vcpu, &hc, true);
2289                  if (unlikely(hc.fast)) {
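The guards in this switch encode each call's rep semantics: simple hypercalls (signal-event, post-message, the ADDRESS_SPACE flushes at 2253/2267, send-IPI) reject any rep encoding outright, while the genuine rep hypercalls (the ADDRESS_LIST flushes at 2246/2260) require a non-zero hc.rep_cnt and a zero hc.rep_idx, since the handlers complete every rep in one pass and never resume a partially executed call; a mismatch fails the call with HV_STATUS_INVALID_HYPERCALL_INPUT. Note also that in this version the EX form of send-IPI (2281) rejects the fast flag entirely, and the case at 2289 rejects it for yet another code.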
2308          vcpu->run->hyperv.u.hcall.input = hc.param;
2309          vcpu->run->hyperv.u.hcall.params[0] = hc.ingpa;
2310          vcpu->run->hyperv.u.hcall.params[1] = hc.outgpa;
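The two places that fill vcpu->run->hyperv.u.hcall (2239-2241 and 2308-2310) are not duplicates: both stage the original input value and parameters so the hypercall can be completed in userspace via a KVM_EXIT_HYPERV exit, which is how KVM forwards calls it does not finish in the kernel, such as HVCALL_POST_MESSAGE destined for a userspace SynIC consumer.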