Lines matching refs:curr (references to the local variable curr in Xen's HVM emulation code, xen/arch/x86/hvm/emulate.c; each entry lists the source line number, the matching code, and the enclosing function, with declaration sites tagged "local")

93 struct vcpu *curr = current; in set_context_data() local
95 if ( curr->arch.vm_event ) in set_context_data()
98 min(size, curr->arch.vm_event->emul.read.size); in set_context_data()
100 memcpy(buffer, curr->arch.vm_event->emul.read.data, safe_size); in set_context_data()
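
The set_context_data() matches above show a clamp-then-copy pattern: the number of bytes copied out of the vm_event read buffer is bounded by min() of the caller's size and whatever the introspection client supplied. A minimal sketch of that pattern follows; the struct and helper names are illustrative stand-ins, not Xen's real vm_event layout, and the zero-fill of the tail is an assumption of the sketch rather than something visible in the listing.

/* Illustrative sketch of the clamped copy in set_context_data(); the
 * struct below is a stand-in, not Xen's vm_event layout. */
#include <string.h>

#define EMUL_READ_MAX 16

struct emul_read_sketch {
    unsigned int size;                 /* bytes the vm_event client supplied */
    unsigned char data[EMUL_READ_MAX];
};

static unsigned int min_uint(unsigned int a, unsigned int b)
{
    return a < b ? a : b;
}

/* Fill 'buffer' with introspection-supplied data, never reading past what
 * the client actually provided; zeroing the tail is an assumption here. */
static void set_context_data_sketch(void *buffer, unsigned int size,
                                    const struct emul_read_sketch *read)
{
    unsigned int safe_size = min_uint(size, read->size);

    memcpy(buffer, read->data, safe_size);
    memset((unsigned char *)buffer + safe_size, 0, size - safe_size);
}
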
160 struct vcpu *curr = current; in hvmemul_do_io() local
161 struct domain *currd = curr->domain; in hvmemul_do_io()
162 struct hvm_vcpu_io *vio = &curr->arch.hvm.hvm_io; in hvmemul_do_io()
578 struct vcpu *curr = current; in hvmemul_map_linear_addr() local
617 res = hvm_translate_get_page(curr, addr, true, pfec, in hvmemul_map_linear_addr()
670 if ( unlikely(curr->arch.vm_event) && in hvmemul_map_linear_addr()
671 curr->arch.vm_event->send_event && in hvmemul_map_linear_addr()
754 struct vcpu *curr = current; in hvmemul_linear_to_phys() local
765 if ( !(curr->arch.hvm.guest_cr[0] & X86_CR0_PG) ) in hvmemul_linear_to_phys()
785 else if ( (pfn = paging_gva_to_gfn(curr, addr, &pfec)) == gfn_x(INVALID_GFN) ) in hvmemul_linear_to_phys()
800 npfn = paging_gva_to_gfn(curr, addr, &pfec); in hvmemul_linear_to_phys()
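
The hvmemul_linear_to_phys() matches show the translation keying off CR0.PG: with paging disabled the linear address is used directly, otherwise paging_gva_to_gfn() performs the walk and INVALID_GFN signals a fault to forward to the guest. Below is a hedged sketch of that branch structure only; gva_to_gfn_stub() and linear_to_phys_sketch() are hypothetical stand-ins, and the per-page loop suggested by the npfn re-translation at line 800 is not reproduced.

/* Illustrative sketch of the branch structure in hvmemul_linear_to_phys();
 * the stub below identity-maps and is not Xen's paging_gva_to_gfn(). */
#include <stdint.h>
#include <stdbool.h>

#define PAGE_SHIFT   12
#define INVALID_GFN  (~0UL)
#define X86_CR0_PG   (1UL << 31)

/* Hypothetical page-walk stub standing in for paging_gva_to_gfn(). */
static unsigned long gva_to_gfn_stub(unsigned long addr, uint32_t *pfec)
{
    (void)pfec;
    return addr >> PAGE_SHIFT;   /* pretend identity mapping */
}

/* Translate a linear address to a frame number; returns false on a
 * translation fault that the caller must turn into a guest #PF. */
static bool linear_to_phys_sketch(unsigned long cr0, unsigned long addr,
                                  uint32_t *pfec, unsigned long *pfn)
{
    if ( !(cr0 & X86_CR0_PG) )
    {
        /* Paging disabled: linear addresses are already physical. */
        *pfn = addr >> PAGE_SHIFT;
        return true;
    }

    *pfn = gva_to_gfn_stub(addr, pfec);
    return *pfn != INVALID_GFN;
}
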
1599 struct vcpu *curr = current; in hvmemul_cmpxchg() local
1602 struct hvm_vcpu_io *vio = &curr->arch.hvm.hvm_io; in hvmemul_cmpxchg()
1825 struct vcpu *curr = current; in hvmemul_rep_movs() local
1826 struct hvm_vcpu_io *vio = &curr->arch.hvm.hvm_io; in hvmemul_rep_movs()
1886 get_gfn_query_unlocked(curr->domain, sgpa >> PAGE_SHIFT, &sp2mt); in hvmemul_rep_movs()
1887 get_gfn_query_unlocked(curr->domain, dgpa >> PAGE_SHIFT, &dp2mt); in hvmemul_rep_movs()
1945 unsigned int token = hvmemul_cache_disable(curr); in hvmemul_rep_movs()
1953 hvmemul_cache_restore(curr, token); in hvmemul_rep_movs()
1957 rc = hvm_copy_to_guest_phys(dgpa, buf, bytes, curr); in hvmemul_rep_movs()
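
In hvmemul_rep_movs() the buffered write-back to the destination (line 1957) is bracketed by hvmemul_cache_disable()/hvmemul_cache_restore() using a saved token (lines 1945 and 1953). The sketch below shows only that disable/do-work/restore shape with hypothetical types; it is not Xen's emulation-cache implementation.

/* Illustrative sketch of the disable/copy/restore bracket seen around the
 * rep-MOVS write-back; the cache type and helpers are hypothetical. */
struct emul_cache_sketch {
    unsigned int num_ents;   /* 0 is treated as "disabled" here */
};

/* Disable the cache, returning a token that encodes the prior state. */
static unsigned int cache_disable_sketch(struct emul_cache_sketch *c)
{
    unsigned int token = c->num_ents;

    c->num_ents = 0;
    return token;
}

static void cache_restore_sketch(struct emul_cache_sketch *c,
                                 unsigned int token)
{
    c->num_ents = token;
}

/* Run one guest-physical copy with the cache out of the way. */
static int copy_uncached_sketch(struct emul_cache_sketch *c,
                                int (*do_copy)(void *arg), void *arg)
{
    unsigned int token = cache_disable_sketch(c);
    int rc = do_copy(arg);

    cache_restore_sketch(c, token);
    return rc;
}
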
1994 struct vcpu *curr = current; in hvmemul_rep_stos() local
1995 struct hvm_vcpu_io *vio = &curr->arch.hvm.hvm_io; in hvmemul_rep_stos()
2028 get_gfn_query_unlocked(curr->domain, gpa >> PAGE_SHIFT, &p2mt); in hvmemul_rep_stos()
2077 rc = hvm_copy_to_guest_phys(gpa, buf, bytes, curr); in hvmemul_rep_stos()
2362 struct vcpu *curr = current; in hvmemul_get_fpu() local
2364 if ( !curr->fpu_dirtied ) in hvmemul_get_fpu()
2368 const typeof(curr->arch.xsave_area->fpu_sse) *fpu_ctxt = in hvmemul_get_fpu()
2369 curr->arch.fpu_ctxt; in hvmemul_get_fpu()
2383 curr->fpu_initialised = true; in hvmemul_get_fpu()
2384 curr->fpu_dirtied = true; in hvmemul_get_fpu()
2405 struct vcpu *curr = current; in hvmemul_put_fpu() local
2409 typeof(curr->arch.xsave_area->fpu_sse) *fpu_ctxt = curr->arch.fpu_ctxt; in hvmemul_put_fpu()
2411 int mode = hvm_guest_x86_mode(curr); in hvmemul_put_fpu()
2476 if ( curr->arch.fully_eager_fpu ) in hvmemul_put_fpu()
2477 vcpu_restore_fpu_nonlazy(curr, false); in hvmemul_put_fpu()
2480 curr->fpu_dirtied = false; in hvmemul_put_fpu()
2482 alternative_vcall(hvm_funcs.fpu_leave, curr); in hvmemul_put_fpu()
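
The hvmemul_get_fpu()/hvmemul_put_fpu() matches revolve around the fpu_dirtied/fpu_initialised flags: get_fpu loads saved state on first use and marks the FPU live, while put_fpu either reloads immediately for fully-eager vCPUs or clears fpu_dirtied and calls the fpu_leave hook. The sketch below illustrates that bookkeeping under the assumption (suggested by the gap between lines 2477 and 2480) that the non-eager path groups the last two statements in an else branch; all names are stand-ins, not Xen's vcpu layout.

/* Illustrative sketch of the dirty-flag bookkeeping visible in the
 * hvmemul_get_fpu()/hvmemul_put_fpu() matches; hypothetical types. */
#include <stdbool.h>

struct vcpu_sketch {
    bool fpu_initialised;
    bool fpu_dirtied;
    bool fully_eager_fpu;
};

/* Hypothetical hooks standing in for the real restore/leave paths. */
static void restore_fpu_sketch(struct vcpu_sketch *v) { (void)v; }
static void fpu_leave_sketch(struct vcpu_sketch *v)   { (void)v; }

/* Hand the FPU to the emulator: load state on first use, then mark it live. */
static void get_fpu_sketch(struct vcpu_sketch *v)
{
    if ( !v->fpu_dirtied )
        restore_fpu_sketch(v);   /* first touch since the last save */

    v->fpu_initialised = true;
    v->fpu_dirtied = true;
}

/* Take the FPU back: eager vCPUs reload immediately, lazy ones defer and
 * let the next fault/context switch bring the state back. */
static void put_fpu_sketch(struct vcpu_sketch *v)
{
    if ( v->fully_eager_fpu )
        restore_fpu_sketch(v);
    else
    {
        v->fpu_dirtied = false;
        fpu_leave_sketch(v);
    }
}
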
2625 struct vcpu *curr = current; in _hvm_emulate_one() local
2627 struct hvm_vcpu_io *vio = &curr->arch.hvm.hvm_io; in _hvm_emulate_one()
2666 hvmemul_cache_disable(curr); in _hvm_emulate_one()
2705 curr, new_intr_shadow); in _hvm_emulate_one()
2709 !hvm_local_events_need_delivery(curr) ) in _hvm_emulate_one()
2788 struct vcpu *curr = current; in hvm_emulate_one_vm_event() local
2789 struct hvm_vcpu_io *vio = &curr->arch.hvm.hvm_io; in hvm_emulate_one_vm_event()
2792 sizeof(curr->arch.vm_event->emul.insn.data)); in hvm_emulate_one_vm_event()
2800 memcpy(vio->mmio_insn, curr->arch.vm_event->emul.insn.data, in hvm_emulate_one_vm_event()
2840 struct vcpu *curr = current; in hvm_emulate_init_once() local
2845 alternative_call(hvm_funcs.get_interrupt_shadow, curr); in hvm_emulate_init_once()
2851 hvmemul_ctxt->ctxt.cpuid = curr->domain->arch.cpuid; in hvm_emulate_init_once()
2860 struct vcpu *curr = current; in hvm_emulate_init_per_insn() local
2862 hvmemul_ctxt->ctxt.lma = hvm_long_mode_active(curr); in hvm_emulate_init_per_insn()
2883 hvm_get_insn_bytes(curr, hvmemul_ctxt->insn_buf)) ) in hvm_emulate_init_per_insn()
2959 struct vcpu *curr = current; in hvm_dump_emulation_state() local
2960 const char *mode_str = guest_x86_mode_to_str(hvm_guest_x86_mode(curr)); in hvm_dump_emulation_state()
2965 loglvl, prefix, rc, curr, mode_str, cs->sel, in hvm_dump_emulation_state()
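
A pattern common to every function in this listing is caching the per-CPU current pointer in a local struct vcpu *curr at the top of the function and going through the local for all subsequent field accesses. The self-contained sketch below shows the idiom with a trivial stand-in accessor in place of Xen's current macro.

/* Illustrative sketch of the "cache current once" idiom; get_current_sketch()
 * is a hypothetical stand-in for Xen's per-CPU 'current' macro. */
#include <stdio.h>

struct domain_sketch {
    int domain_id;
};

struct vcpu_sketch {
    int vcpu_id;
    struct domain_sketch *domain;
};

/* Trivial stand-in for the per-CPU accessor. */
static struct domain_sketch dom0  = { .domain_id = 0 };
static struct vcpu_sketch   vcpu0 = { .vcpu_id = 0, .domain = &dom0 };

static struct vcpu_sketch *get_current_sketch(void)
{
    return &vcpu0;
}

static void handler_sketch(void)
{
    /* Read the per-CPU pointer once... */
    struct vcpu_sketch *curr = get_current_sketch();

    /* ...then go through the local for every later access, which is both
     * cheaper and easier to read than repeating the accessor. */
    printf("d%dv%d\n", curr->domain->domain_id, curr->vcpu_id);
}

int main(void)
{
    handler_sketch();
    return 0;
}
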