Lines matching refs:hc (references to the struct kvm_hv_hcall argument hc in arch/x86/kvm/hyperv.c)

1752 static u64 kvm_hv_flush_tlb(struct kvm_vcpu *vcpu, struct kvm_hv_hcall *hc, bool ex)  in kvm_hv_flush_tlb()  argument
1768 if (hc->fast) { in kvm_hv_flush_tlb()
1769 flush.address_space = hc->ingpa; in kvm_hv_flush_tlb()
1770 flush.flags = hc->outgpa; in kvm_hv_flush_tlb()
1771 flush.processor_mask = sse128_lo(hc->xmm[0]); in kvm_hv_flush_tlb()
1773 if (unlikely(kvm_read_guest(kvm, hc->ingpa, in kvm_hv_flush_tlb()
1794 if (hc->fast) { in kvm_hv_flush_tlb()
1795 flush_ex.address_space = hc->ingpa; in kvm_hv_flush_tlb()
1796 flush_ex.flags = hc->outgpa; in kvm_hv_flush_tlb()
1798 &hc->xmm[0], sizeof(hc->xmm[0])); in kvm_hv_flush_tlb()
1800 if (unlikely(kvm_read_guest(kvm, hc->ingpa, &flush_ex, in kvm_hv_flush_tlb()
1820 if (hc->fast) { in kvm_hv_flush_tlb()
1824 sparse_banks[i] = sse128_lo(hc->xmm[i / 2 + 1]); in kvm_hv_flush_tlb()
1825 sparse_banks[i + 1] = sse128_hi(hc->xmm[i / 2 + 1]); in kvm_hv_flush_tlb()
1828 gpa = hc->ingpa + offsetof(struct hv_tlb_flush_ex, in kvm_hv_flush_tlb()
1855 ((u64)hc->rep_cnt << HV_HYPERCALL_REP_COMP_OFFSET); in kvm_hv_flush_tlb()
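
Note: in kvm_hv_flush_tlb(), the fast path (1768-1771, 1794-1798) pulls the flush arguments straight out of the hypercall registers and XMM state, while the slow path (1773, 1800) must kvm_read_guest() them from the GPA in hc->ingpa. For the Ex variants, the variable-length sparse bank array is unpacked two 64-bit banks per XMM register starting at xmm[1] (1824-1825), since xmm[0] already carried the fixed header; line 1828 is the slow-path equivalent, reading the banks at an offsetof()-computed GPA, and 1855 folds the count of completed reps back into the result. A minimal stand-alone sketch of the XMM unpacking, with a hypothetical sse128_t modeled as two 64-bit halves:

    #include <stdint.h>
    #include <stdio.h>

    /* Hypothetical stand-in for the kernel's sse128_t: one XMM register
     * modeled as two 64-bit halves with lo/hi accessors. */
    typedef struct { uint64_t lo, hi; } sse128_t;

    static uint64_t sse128_lo(sse128_t r) { return r.lo; }
    static uint64_t sse128_hi(sse128_t r) { return r.hi; }

    /* Unpack sparse VP-set banks the way the fast *Ex paths do: banks
     * 0/1 come from xmm[1], banks 2/3 from xmm[2], and so on. */
    static void unpack_sparse_banks(const sse128_t *xmm, uint64_t *banks,
                                    int bank_cnt)
    {
        for (int i = 0; i < bank_cnt; i += 2) {
            banks[i] = sse128_lo(xmm[i / 2 + 1]);
            if (i + 1 < bank_cnt)
                banks[i + 1] = sse128_hi(xmm[i / 2 + 1]);
        }
    }

    int main(void)
    {
        sse128_t xmm[3] = { { 0, 0 }, { 0x1, 0x2 }, { 0x4, 0x8 } };
        uint64_t banks[4];

        unpack_sparse_banks(xmm, banks, 4);
        for (int i = 0; i < 4; i++)
            printf("bank[%d] = %#llx\n", i, (unsigned long long)banks[i]);
        return 0;
    }
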
1877 static u64 kvm_hv_send_ipi(struct kvm_vcpu *vcpu, struct kvm_hv_hcall *hc, bool ex) in kvm_hv_send_ipi() argument
1892 if (!hc->fast) { in kvm_hv_send_ipi()
1893 if (unlikely(kvm_read_guest(kvm, hc->ingpa, &send_ipi, in kvm_hv_send_ipi()
1900 if (unlikely(hc->ingpa >> 32 != 0)) in kvm_hv_send_ipi()
1902 sparse_banks[0] = hc->outgpa; in kvm_hv_send_ipi()
1903 vector = (u32)hc->ingpa; in kvm_hv_send_ipi()
1910 if (unlikely(kvm_read_guest(kvm, hc->ingpa, &send_ipi_ex, in kvm_hv_send_ipi()
1932 hc->ingpa + offsetof(struct hv_send_ipi_ex, in kvm_hv_send_ipi()
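
Note: kvm_hv_send_ipi() follows the same split. The non-fast path (1892-1893) reads a struct hv_send_ipi from guest memory; the fast path packs everything into the two operand registers, with the vector in the low 32 bits of hc->ingpa (upper bits must be zero, 1900) and the processor mask in hc->outgpa, which becomes sparse_banks[0] (1902-1903). The Ex variant (1910, 1932) mirrors the flush-Ex pattern: fixed header first, then the sparse bank array fetched at an offsetof()-computed GPA. A sketch of the fast operand decode; the function name and error convention are illustrative, not kernel API:

    #include <stdint.h>
    #include <stdbool.h>

    /* Fast-IPI operand layout as shown above: vector in the low 32 bits
     * of the first operand, processor mask in the second. */
    static bool decode_fast_ipi(uint64_t ingpa, uint64_t outgpa,
                                uint32_t *vector, uint64_t *mask)
    {
        if (ingpa >> 32)        /* reserved upper bits must be zero */
            return false;
        *vector = (uint32_t)ingpa;
        *mask = outgpa;         /* sparse_banks[0] in the kernel code */
        return true;
    }
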
2050 static u16 kvm_hvcall_signal_event(struct kvm_vcpu *vcpu, struct kvm_hv_hcall *hc) in kvm_hvcall_signal_event() argument
2055 if (unlikely(!hc->fast)) { in kvm_hvcall_signal_event()
2057 gpa_t gpa = hc->ingpa; in kvm_hvcall_signal_event()
2059 if ((gpa & (__alignof__(hc->ingpa) - 1)) || in kvm_hvcall_signal_event()
2060 offset_in_page(gpa) + sizeof(hc->ingpa) > PAGE_SIZE) in kvm_hvcall_signal_event()
2064 &hc->ingpa, sizeof(hc->ingpa)); in kvm_hvcall_signal_event()
2074 if (hc->ingpa & 0xffff00000000ULL) in kvm_hvcall_signal_event()
2077 if (hc->ingpa & ~KVM_HYPERV_CONN_ID_MASK) in kvm_hvcall_signal_event()
2082 eventfd = idr_find(&hv->conn_to_evt, hc->ingpa); in kvm_hvcall_signal_event()
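
Note: kvm_hvcall_signal_event() touches guest memory only on the slow path (2055-2064), and then only after checking that the parameter GPA is naturally aligned and does not cross a page boundary (2059-2060); the 8-byte connection ID it reads overwrites hc->ingpa. Validation then proceeds in two steps with distinct failure statuses (2074, 2077) before the idr lookup (2082). A compact model of those checks; the value of KVM_HYPERV_CONN_ID_MASK below is an assumption, only the check structure comes from the listing:

    #include <stdint.h>

    /* Assumed to mirror KVM_HYPERV_CONN_ID_MASK. */
    #define CONN_ID_MASK 0x3fffffffULL

    /* 0 = ok, -1 = invalid hypercall input (bits 32-47, the flag-number
     * field, must be zero), -2 = invalid connection ID (remaining
     * reserved bits must be zero), matching the two rejections above. */
    static int check_conn_id(uint64_t param, uint32_t *conn_id)
    {
        if (param & 0xffff00000000ULL)
            return -1;
        if (param & ~CONN_ID_MASK)
            return -2;
        *conn_id = (uint32_t)param;
        return 0;
    }
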
2091 static bool is_xmm_fast_hypercall(struct kvm_hv_hcall *hc) in is_xmm_fast_hypercall() argument
2093 switch (hc->code) { in is_xmm_fast_hypercall()
2104 static void kvm_hv_hypercall_read_xmm(struct kvm_hv_hcall *hc) in kvm_hv_hypercall_read_xmm() argument
2110 _kvm_read_sse_reg(reg, &hc->xmm[reg]); in kvm_hv_hypercall_read_xmm()
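
Note: is_xmm_fast_hypercall() (2091-2093) is a whitelist keyed on hc->code: only the TLB-flush and IPI hypercalls may carry their input block in XMM registers, and kvm_hv_hypercall_read_xmm() (2104-2110) snapshots those registers into hc->xmm[] before dispatch. A sketch of the whitelist; the case list is reconstructed from the well-known Hyper-V TLFS call numbers, since the switch body itself is elided from the listing:

    #include <stdint.h>
    #include <stdbool.h>

    /* Hyper-V TLFS hypercall numbers for the XMM-capable calls
     * (reconstructed, not taken from the listing above). */
    enum {
        HVCALL_FLUSH_VIRTUAL_ADDRESS_SPACE    = 0x0002,
        HVCALL_FLUSH_VIRTUAL_ADDRESS_LIST     = 0x0003,
        HVCALL_SEND_IPI                       = 0x000b,
        HVCALL_FLUSH_VIRTUAL_ADDRESS_SPACE_EX = 0x0013,
        HVCALL_FLUSH_VIRTUAL_ADDRESS_LIST_EX  = 0x0014,
        HVCALL_SEND_IPI_EX                    = 0x0015,
    };

    static bool is_xmm_fast_code(uint16_t code)
    {
        switch (code) {
        case HVCALL_FLUSH_VIRTUAL_ADDRESS_SPACE:
        case HVCALL_FLUSH_VIRTUAL_ADDRESS_LIST:
        case HVCALL_FLUSH_VIRTUAL_ADDRESS_SPACE_EX:
        case HVCALL_FLUSH_VIRTUAL_ADDRESS_LIST_EX:
        case HVCALL_SEND_IPI:
        case HVCALL_SEND_IPI_EX:
            return true;
        default:
            return false;
        }
    }
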
2164 struct kvm_hv_hcall hc; in kvm_hv_hypercall() local
2178 hc.param = kvm_rcx_read(vcpu); in kvm_hv_hypercall()
2179 hc.ingpa = kvm_rdx_read(vcpu); in kvm_hv_hypercall()
2180 hc.outgpa = kvm_r8_read(vcpu); in kvm_hv_hypercall()
2184 hc.param = ((u64)kvm_rdx_read(vcpu) << 32) | in kvm_hv_hypercall()
2186 hc.ingpa = ((u64)kvm_rbx_read(vcpu) << 32) | in kvm_hv_hypercall()
2188 hc.outgpa = ((u64)kvm_rdi_read(vcpu) << 32) | in kvm_hv_hypercall()
2192 hc.code = hc.param & 0xffff; in kvm_hv_hypercall()
2193 hc.fast = !!(hc.param & HV_HYPERCALL_FAST_BIT); in kvm_hv_hypercall()
2194 hc.rep_cnt = (hc.param >> HV_HYPERCALL_REP_COMP_OFFSET) & 0xfff; in kvm_hv_hypercall()
2195 hc.rep_idx = (hc.param >> HV_HYPERCALL_REP_START_OFFSET) & 0xfff; in kvm_hv_hypercall()
2196 hc.rep = !!(hc.rep_cnt || hc.rep_idx); in kvm_hv_hypercall()
2198 trace_kvm_hv_hypercall(hc.code, hc.fast, hc.rep_cnt, hc.rep_idx, in kvm_hv_hypercall()
2199 hc.ingpa, hc.outgpa); in kvm_hv_hypercall()
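
Note: lines 2178-2188 show the two guest calling conventions. On 64-bit guests the hypercall input value, input GPA and output GPA arrive in RCX, RDX and R8; on 32-bit guests each 64-bit quantity is split across a register pair (EDX:EAX, EBX:ECX, EDI:ESI). Lines 2192-2196 then crack the input value into its fields. A stand-alone model of that decode; the bit positions follow the TLFS macros named in the listing (fast flag in bit 16, rep count in bits 32-43, rep start index in bits 48-59):

    #include <stdint.h>
    #include <stdbool.h>

    #define HV_HYPERCALL_FAST_BIT         (1ULL << 16)
    #define HV_HYPERCALL_REP_COMP_OFFSET  32
    #define HV_HYPERCALL_REP_START_OFFSET 48

    struct hcall {
        uint16_t code;     /* bits 0-15: call code */
        bool     fast;     /* bit 16: register-based (fast) call */
        uint16_t rep_cnt;  /* bits 32-43: rep count */
        uint16_t rep_idx;  /* bits 48-59: rep start index */
        bool     rep;      /* any rep field nonzero */
    };

    static struct hcall decode_hcall(uint64_t param)
    {
        struct hcall hc;

        hc.code    = param & 0xffff;
        hc.fast    = !!(param & HV_HYPERCALL_FAST_BIT);
        hc.rep_cnt = (param >> HV_HYPERCALL_REP_COMP_OFFSET) & 0xfff;
        hc.rep_idx = (param >> HV_HYPERCALL_REP_START_OFFSET) & 0xfff;
        hc.rep     = hc.rep_cnt || hc.rep_idx;
        return hc;
    }
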
2201 if (unlikely(!hv_check_hypercall_access(hv_vcpu, hc.code))) { in kvm_hv_hypercall()
2206 if (hc.fast && is_xmm_fast_hypercall(&hc)) { in kvm_hv_hypercall()
2214 kvm_hv_hypercall_read_xmm(&hc); in kvm_hv_hypercall()
2217 switch (hc.code) { in kvm_hv_hypercall()
2219 if (unlikely(hc.rep)) { in kvm_hv_hypercall()
2226 if (unlikely(hc.rep)) { in kvm_hv_hypercall()
2230 ret = kvm_hvcall_signal_event(vcpu, &hc); in kvm_hv_hypercall()
2236 if (unlikely(hc.rep || !to_hv_synic(vcpu)->active)) { in kvm_hv_hypercall()
2242 vcpu->run->hyperv.u.hcall.input = hc.param; in kvm_hv_hypercall()
2243 vcpu->run->hyperv.u.hcall.params[0] = hc.ingpa; in kvm_hv_hypercall()
2244 vcpu->run->hyperv.u.hcall.params[1] = hc.outgpa; in kvm_hv_hypercall()
2249 if (unlikely(!hc.rep_cnt || hc.rep_idx)) { in kvm_hv_hypercall()
2253 ret = kvm_hv_flush_tlb(vcpu, &hc, false); in kvm_hv_hypercall()
2256 if (unlikely(hc.rep)) { in kvm_hv_hypercall()
2260 ret = kvm_hv_flush_tlb(vcpu, &hc, false); in kvm_hv_hypercall()
2263 if (unlikely(!hc.rep_cnt || hc.rep_idx)) { in kvm_hv_hypercall()
2267 ret = kvm_hv_flush_tlb(vcpu, &hc, true); in kvm_hv_hypercall()
2270 if (unlikely(hc.rep)) { in kvm_hv_hypercall()
2274 ret = kvm_hv_flush_tlb(vcpu, &hc, true); in kvm_hv_hypercall()
2277 if (unlikely(hc.rep)) { in kvm_hv_hypercall()
2281 ret = kvm_hv_send_ipi(vcpu, &hc, false); in kvm_hv_hypercall()
2284 if (unlikely(hc.fast || hc.rep)) { in kvm_hv_hypercall()
2288 ret = kvm_hv_send_ipi(vcpu, &hc, true); in kvm_hv_hypercall()
2292 if (unlikely(hc.fast)) { in kvm_hv_hypercall()
2311 vcpu->run->hyperv.u.hcall.input = hc.param; in kvm_hv_hypercall()
2312 vcpu->run->hyperv.u.hcall.params[0] = hc.ingpa; in kvm_hv_hypercall()
2313 vcpu->run->hyperv.u.hcall.params[1] = hc.outgpa; in kvm_hv_hypercall()
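
Note: the dispatch switch (2217 onward) enforces per-call rep rules before anything else: pure rep calls such as the address-list flushes require rep_cnt != 0 and rep_idx == 0 (2249, 2263), non-rep calls reject any rep bits (2219, 2226, 2256, 2270, 2277), and HVCALL_SEND_IPI_EX additionally rejects the fast form (2284), as do the debug-data calls (2292). Two paths bounce out to userspace with hc.param and the two GPAs copied into the run structure: the signal-event/post-message path (2242-2244) and the debug path (2311-2313). A sketch of the VMM side of that exit, using the kvm_run fields from the uapi header; the handling shown is illustrative:

    #include <stdint.h>
    #include <stdio.h>
    #include <linux/kvm.h>

    /* Userspace counterpart of the exit filled at 2311-2313: consume the
     * input value and the two parameter GPAs, then store the Hyper-V
     * status in hyperv.u.hcall.result before re-entering the guest. */
    static void handle_hyperv_hcall(struct kvm_run *run)
    {
        uint64_t input   = run->hyperv.u.hcall.input;
        uint64_t in_gpa  = run->hyperv.u.hcall.params[0];
        uint64_t out_gpa = run->hyperv.u.hcall.params[1];

        fprintf(stderr, "hv hcall %#llx in=%#llx out=%#llx\n",
                (unsigned long long)input,
                (unsigned long long)in_gpa,
                (unsigned long long)out_gpa);

        run->hyperv.u.hcall.result = 0;  /* HV_STATUS_SUCCESS */
    }
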