
Searched refs:v (results 1–25 of 393), sorted by relevance


/xen/xen/include/asm-x86/hvm/
hvm.h
755 #define is_viridian_vcpu(v) ((void)(v), false)
758 #define hvm_get_guest_time(v) ((void)(v), 0)
764 #define hvm_paging_enabled(v) ((void)(v), false)
765 #define hvm_wp_enabled(v) ((void)(v), false)
766 #define hvm_pcid_enabled(v) ((void)(v), false)
767 #define hvm_pae_enabled(v) ((void)(v), false)
768 #define hvm_smep_enabled(v) ((void)(v), false)
769 #define hvm_smap_enabled(v) ((void)(v), false)
770 #define hvm_nx_enabled(v) ((void)(v), false)
771 #define hvm_pku_enabled(v) ((void)(v), false)
[all …]
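
The hvm.h hits above appear to be the stub definitions compiled when HVM support is configured out: each macro evaluates its argument through the comma operator, so callers still type-check and no unused-variable warning fires, then yields a constant. A minimal, self-contained sketch of that idiom; CONFIG_FROBNICATE and frob_enabled() are hypothetical names, not anything in the Xen tree:

#include <stdbool.h>
#include <stdio.h>

#ifdef CONFIG_FROBNICATE
bool frob_enabled(const int *v);     /* real implementation elsewhere */
#else
/* Stub build: evaluate the argument, then yield a constant. */
#define frob_enabled(v) ((void)(v), false)
#endif

int main(void)
{
    int vcpu_id = 3;

    printf("%d\n", frob_enabled(&vcpu_id));   /* prints 0 in the stub build */
    return 0;
}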

nestedhvm.h
45 void nestedhvm_vcpu_destroy(struct vcpu *v);
46 void nestedhvm_vcpu_reset(struct vcpu *v);
48 #define nestedhvm_vcpu_enter_guestmode(v) \
49     vcpu_nestedhvm(v).nv_guestmode = 1
50 #define nestedhvm_vcpu_exit_guestmode(v) \
51     vcpu_nestedhvm(v).nv_guestmode = 0
72 #define nestedhvm_paging_mode_hap(v) (!!nhvm_vmcx_hap_enabled(v))
80 if ( !nestedhvm_enabled(v->domain) || in nestedhvm_is_n2()
82 !nestedhvm_paging_mode_hap(v) ) in nestedhvm_is_n2()
85 return nestedhvm_vcpu_in_guestmode(v); in nestedhvm_is_n2()
[all …]
/xen/xen/arch/x86/
i387.c
35 xrstor(v, mask); in fpu_xrstor()
43 const typeof(v->arch.xsave_area->fpu_sse) *fpu_ctxt = v->arch.fpu_ctxt; in fpu_fxrstor()
148 xsave(v, mask); in fpu_xsave()
156 typeof(v->arch.xsave_area->fpu_sse) *fpu_ctxt = v->arch.fpu_ctxt; in fpu_fxsave()
210 if ( !v->arch.fully_eager_fpu && !v->arch.nonlazy_xstate_used ) in vcpu_restore_fpu_nonlazy()
223 if ( v->arch.fully_eager_fpu || (v->arch.xsave_area && xstate_all(v)) ) in vcpu_restore_fpu_nonlazy()
234 need_stts = is_pv_vcpu(v) && (v->arch.pv.ctrlreg[0] & X86_CR0_TS); in vcpu_restore_fpu_nonlazy()
277 if ( !v->fpu_dirtied && !v->arch.nonlazy_xstate_used ) in _vcpu_save_fpu()
286 fpu_xsave(v); in _vcpu_save_fpu()
318 v->arch.fpu_ctxt = &v->arch.xsave_area->fpu_sse; in vcpu_init_fpu()
[all …]

domain.c
179 cpumask_set_cpu(v->processor, v->domain->dirty_cpumask); in startup_cpu_idle_loop()
180 write_atomic(&v->dirty_cpu, v->processor); in startup_cpu_idle_loop()
350 return v; in alloc_vcpu_struct()
361 memset(&v->arch.user_regs, 0, sizeof(v->arch.user_regs)); in arch_vcpu_regs_init()
364 memset(v->arch.dr, 0, sizeof(v->arch.dr)); in arch_vcpu_regs_init()
899 vcpu_setup_fpu(v, v->arch.xsave_area, in arch_set_info_guest()
940 v->arch.pv.iopl = v->arch.user_regs.eflags & X86_EFLAGS_IOPL; in arch_set_info_guest()
956 v->arch.pv.ldt_base = v->arch.pv.ldt_ents in arch_set_info_guest()
1019 v->arch.pv.ctrlreg[4] = pv_fixup_guest_cr4(v, v->arch.pv.ctrlreg[4]); in arch_set_info_guest()
1021 memset(v->arch.dr, 0, sizeof(v->arch.dr)); in arch_set_info_guest()
[all …]
/xen/xen/arch/x86/hvm/vmx/
vvmx.c
203 struct vcpu *v; in nvmx_domain_relinquish_resources()
1113 hvm_set_guest_pat(v, get_vvmcs(v, GUEST_PAT)); in load_shadow_guest_state()
1122 hvm_set_tsc_offset(v, v->arch.hvm.cache_tsc_offset, 0); in load_shadow_guest_state()
1236 if ( nvmx_ept_enabled(v) && hvm_pae_enabled(v) && in virtual_vmentry()
1284 set_vvmcs(v, GUEST_CR4, v->arch.hvm.guest_cr[4]); in sync_vvmcs_guest_state()
1327 hvm_set_guest_pat(v, get_vvmcs(v, HOST_PAT)); in load_vvmcs_host_state()
1336 hvm_set_tsc_offset(v, v->arch.hvm.cache_tsc_offset, 0); in load_vvmcs_host_state()
1435 sync_vvmcs_ro(v); in virtual_vmexit()
1439 if ( nvmx_ept_enabled(v) && hvm_pae_enabled(v) && in virtual_vmexit()
2360 vmx_vmcs_enter(v); in nvmx_hap_walk_L1_p2m()
[all …]

vmx.c
321 struct vcpu *v; in vmx_pi_hooks_assign()
358 struct vcpu *v; in vmx_pi_hooks_deassign()
532 nvmx_update_exec_control(v, v->arch.hvm.vmx.exec_control); in vmx_update_cpu_exec_control()
878 vmx_save_dr(v); in vmx_ctxt_switch_from()
1488 v->arch.hvm.hw_cr[4] |= v->arch.hvm.guest_cr[4]; in vmx_update_guest_cr()
1555 v->arch.hvm.vmx.cr4_host_mask |= get_vvmcs(v, in vmx_update_guest_cr()
2059 vcpu_kick(v); in vmx_deliver_posted_intr()
3684 v->arch.hvm.guest_cr[4] &= v->arch.hvm.vmx.cr4_host_mask; in vmx_vmexit_handler()
3685 v->arch.hvm.guest_cr[4] |= (v->arch.hvm.hw_cr[4] & in vmx_vmexit_handler()
3689 if ( vmx_unrestricted_guest(v) || hvm_paging_enabled(v) ) in vmx_vmexit_handler()
[all …]

intr.c
113 vmx_update_cpu_exec_control(v); in vmx_enable_intr_window()
155 if ( nestedhvm_vcpu_in_guestmode(v) ) in nvmx_intr_blocked()
190 if ( nestedhvm_vcpu_in_guestmode(v) ) in nvmx_intr_intercept()
206 pt_intr_post(v, intack); in nvmx_intr_intercept()
242 struct vcpu *v = current; in vmx_intr_assist()
251 vmx_update_cpu_exec_control(v); in vmx_intr_assist()
256 if ( unlikely(v->arch.vm_event) && v->arch.vm_event->sync_event ) in vmx_intr_assist()
266 pt_vector = pt_update_irq(v); in vmx_intr_assist()
399 vmx_sync_exit_bitmap(v); in vmx_intr_assist()
401 pt_intr_post(v, intack); in vmx_intr_assist()
[all …]

vmcs.c
791 struct vcpu *v;
809 if ( fv->v == v ) in vmx_vmcs_try_enter()
824 fv->v = v; in vmx_vmcs_try_enter()
847 BUG_ON(fv->v != v); in vmx_vmcs_exit()
1374 ASSERT(v == current || !vcpu_runnable(v)); in vmx_find_msr()
1415 ASSERT(v == current || !vcpu_runnable(v)); in vmx_add_msr()
1526 ASSERT(v == current || !vcpu_runnable(v)); in vmx_del_msr()
1608 v->arch.hvm.vmx.pml_pg = v->domain->arch.paging.alloc_page(v->domain); in vmx_vcpu_enable_pml()
1643 v->domain->arch.paging.free_page(v->domain, v->arch.hvm.vmx.pml_pg); in vmx_vcpu_disable_pml()
1652 ASSERT((v == current) || (!vcpu_runnable(v) && !v->is_running)); in vmx_vcpu_flush_pml_buffer()
[all …]
/xen/xen/arch/arm/
vtimer.c
49 vgic_inject_irq(t->v->domain, t->v, t->irq, true); in phys_timer_expired()
59 vgic_inject_irq(t->v->domain, t->v, t->irq, true); in virt_timer_expired()
113 t->v = v; in vcpu_vtimer_init()
115 t = &v->arch.virt_timer; in vcpu_vtimer_init()
121 t->v = v; in vcpu_vtimer_init()
147 set_timer(&v->arch.virt_timer.timer, ticks_to_ns(v->arch.virt_timer.cval + in virt_timer_save()
157 migrate_timer(&v->arch.virt_timer.timer, v->processor); in virt_timer_restore()
158 migrate_timer(&v->arch.phys_timer.timer, v->processor); in virt_timer_restore()
366 vgic_inject_irq(v->domain, v, vtimer->irq, level); in vtimer_update_irq()
391 vtimer_update_irq(v, &v->arch.virt_timer, in vtimer_update_irqs()
[all …]
/xen/xen/arch/x86/hvm/
vpt.c
208 v->arch.hvm.guest_time = hvm_get_guest_time(v); in pt_freeze_time()
219 hvm_set_guest_time(v, v->arch.hvm.guest_time); in pt_thaw_time()
231 pt_vcpu_lock(v); in pt_save_timer()
247 pt_vcpu_lock(v); in pt_restore_timer()
258 pt_thaw_time(v); in pt_restore_timer()
321 pt_vcpu_lock(v); in pt_update_irq()
453 pt_vcpu_lock(v); in pt_intr_post()
478 pt_vcpu_lock(v); in pt_migrate()
518 pt->vcpu = v; in create_periodic_time()
583 pt->vcpu = v; in pt_adjust_vcpu()
[all …]

vm_event.c
31 ASSERT(v == current); in hvm_vm_event_set_registers()
33 if ( unlikely(v->arch.vm_event->set_gprs) ) in hvm_vm_event_set_registers()
46 regs->r8 = v->arch.vm_event->gprs.r8; in hvm_vm_event_set_registers()
47 regs->r9 = v->arch.vm_event->gprs.r9; in hvm_vm_event_set_registers()
58 v->arch.vm_event->set_gprs = false; in hvm_vm_event_set_registers()
62 void hvm_vm_event_do_resume(struct vcpu *v) in hvm_vm_event_do_resume()
66 ASSERT(v->arch.vm_event); in hvm_vm_event_do_resume()
68 hvm_vm_event_set_registers(v); in hvm_vm_event_do_resume()
70 w = &v->arch.vm_event->write_data; in hvm_vm_event_do_resume()
93 v->arch.vm_event->emulate_flags = 0; in hvm_vm_event_do_resume()
[all …]
/xen/xen/include/asm-x86/
atomic.h
92 return read_atomic(&v->counter); in atomic_read()
95 static inline int _atomic_read(atomic_t v) in _atomic_read()
97 return v.counter; in _atomic_read()
102 write_atomic(&v->counter, i); in atomic_set()
107 v->counter = i; in _atomic_set()
112 return cmpxchg(&v->counter, old, new); in atomic_cmpxchg()
154 static inline void atomic_inc(atomic_t *v) in atomic_inc()
164 return atomic_add_return(1, v); in atomic_inc_return()
190 return atomic_sub_return(1, v); in atomic_dec_return()
223 c = atomic_read(v); in atomic_add_unless()
[all …]
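
The hit at line 223 above ("c = atomic_read(v);") is the entry to the retry loop conventionally used for atomic_add_unless(): add a to *v unless it currently holds u, and return the value observed. A self-contained sketch of that loop shape; the demo_* names are stand-ins backed by C11 atomics so the example runs outside Xen, with demo_atomic_cmpxchg() mimicking the contract of the atomic_cmpxchg() shown at line 112:

#include <stdatomic.h>
#include <stdio.h>

typedef struct { atomic_int counter; } demo_atomic_t;

static int demo_atomic_read(demo_atomic_t *v)
{
    return atomic_load(&v->counter);
}

/* Returns the value seen in *v: 'old' on success, the fresher value on
 * failure, matching the usual cmpxchg() contract. */
static int demo_atomic_cmpxchg(demo_atomic_t *v, int old, int new)
{
    atomic_compare_exchange_strong(&v->counter, &old, new);
    return old;
}

static int demo_atomic_add_unless(demo_atomic_t *v, int a, int u)
{
    int c = demo_atomic_read(v);

    while ( c != u )
    {
        int old = demo_atomic_cmpxchg(v, c, c + a);

        if ( old == c )
            break;          /* our update landed */
        c = old;            /* lost a race: retry with the fresh value */
    }
    return c;               /* old value; == u means nothing was added */
}

int main(void)
{
    demo_atomic_t v = { 5 };

    demo_atomic_add_unless(&v, 1, 0);       /* 5 != 0: v becomes 6 */
    demo_atomic_add_unless(&v, 1, 6);       /* 6 == 6: unchanged   */
    printf("%d\n", demo_atomic_read(&v));   /* 6 */
    return 0;
}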

paging.h
236 #define paging_get_hostmode(v) ((v)->arch.paging.mode)
237 #define paging_get_nestedmode(v) ((v)->arch.paging.nestedmode)
253 struct vcpu *v = current; in paging_fault()
254 return paging_get_hostmode(v)->page_fault(v, va, regs); in paging_fault()
289 return paging_get_hostmode(v)->p2m_ga_to_gfn(v, p2m, cr3, ga, pfec, in paging_ga_to_gfn_cr3()
298 paging_get_hostmode(v)->update_cr3(v, 1, noflush); in paging_update_cr3()
306 paging_get_hostmode(v)->update_paging_modes(v); in paging_update_paging_modes()
320 if ( unlikely(paging_mode_shadow(v->domain)) && paging_get_hostmode(v) ) in paging_write_guest_entry()
321 return paging_get_hostmode(v)->shadow.write_guest_entry(v, p, new, in paging_write_guest_entry()
338 if ( unlikely(paging_mode_shadow(v->domain)) && paging_get_hostmode(v) ) in paging_cmpxchg_guest_entry()
[all …]
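
Every paging.h wrapper above has the same shape: fetch the vcpu's current paging-mode structure and dispatch through a function pointer stored in it. A self-contained sketch of that dispatch pattern; every demo_* name is hypothetical, standing in for (v)->arch.paging.mode and its ->page_fault() hook:

#include <stdbool.h>
#include <stdio.h>

struct demo_vcpu;

struct demo_paging_mode {
    bool (*page_fault)(struct demo_vcpu *v, unsigned long va);
};

struct demo_vcpu {
    const struct demo_paging_mode *mode;   /* cf. (v)->arch.paging.mode */
};

static bool demo_shadow_page_fault(struct demo_vcpu *v, unsigned long va)
{
    printf("vcpu %p: shadow page fault at %#lx\n", (void *)v, va);
    return true;
}

static const struct demo_paging_mode demo_shadow_mode = {
    .page_fault = demo_shadow_page_fault,
};

/* cf. paging_fault(): look up the current mode, then dispatch. */
static bool demo_paging_fault(struct demo_vcpu *v, unsigned long va)
{
    return v->mode->page_fault(v, va);
}

int main(void)
{
    struct demo_vcpu v = { .mode = &demo_shadow_mode };

    return demo_paging_fault(&v, 0xdeadb000UL) ? 0 : 1;
}
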
/xen/xen/arch/x86/pv/
domain.c
139 v->arch.guest_table_user = v->arch.guest_table; in setup_compat_l4()
223 struct vcpu *v; in switch_compat()
238 for_each_vcpu( d, v ) in switch_compat()
268 return create_perdomain_mapping(v->domain, GDT_VIRT_START(v), in pv_create_gdt_ldt_l1tab()
276 destroy_perdomain_mapping(v->domain, GDT_VIRT_START(v), in pv_destroy_gdt_ldt_l1tab()
315 v->arch.pv.ctrlreg[4] = pv_fixup_guest_cr4(v, 0); in pv_vcpu_initialise()
418 update_cr3(v); in _toggle_guest_pt()
428 cr3 = v->arch.cr3; in _toggle_guest_pt()
436 if ( v->arch.pv.need_update_runstate_area && update_runstate_area(v) ) in _toggle_guest_pt()
440 update_secondary_system_time(v, &v->arch.pv.pending_system_time) ) in _toggle_guest_pt()
[all …]
/xen/xen/arch/x86/mm/
altp2m.c
26 if ( v != current ) in altp2m_vcpu_initialise()
27 vcpu_pause(v); in altp2m_vcpu_initialise()
29 vcpu_altp2m(v).p2midx = 0; in altp2m_vcpu_initialise()
32 altp2m_vcpu_update_p2m(v); in altp2m_vcpu_initialise()
34 if ( v != current ) in altp2m_vcpu_initialise()
35 vcpu_unpause(v); in altp2m_vcpu_initialise()
43 if ( v != current ) in altp2m_vcpu_destroy()
44 vcpu_pause(v); in altp2m_vcpu_destroy()
49 altp2m_vcpu_disable_ve(v); in altp2m_vcpu_destroy()
54 if ( v != current ) in altp2m_vcpu_destroy()
[all …]
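
The altp2m.c hits show the standard bracket for touching another vcpu's state: pause the target first unless it is the caller itself, mutate, then unpause (Xen's vcpu_pause() waits until the target is off-CPU). A toy, self-contained sketch of the bracket; all demo_* names are hypothetical, and the pause here is a flag rather than a real descheduling:

#include <stdbool.h>
#include <stdio.h>

struct demo_vcpu { int id; bool paused; };

static struct demo_vcpu *demo_current;      /* stand-in for 'current' */

static void demo_vcpu_pause(struct demo_vcpu *v)   { v->paused = true;  }
static void demo_vcpu_unpause(struct demo_vcpu *v) { v->paused = false; }

/* cf. altp2m_vcpu_initialise(): pause remote targets around the update. */
static void demo_set_p2midx(struct demo_vcpu *v, unsigned int idx)
{
    if ( v != demo_current )
        demo_vcpu_pause(v);

    printf("vcpu%d: p2midx <- %u (paused=%d)\n", v->id, idx, v->paused);

    if ( v != demo_current )
        demo_vcpu_unpause(v);
}

int main(void)
{
    struct demo_vcpu a = { 0, false }, b = { 1, false };

    demo_current = &a;
    demo_set_p2midx(&a, 0);   /* self: no pause needed            */
    demo_set_p2midx(&b, 0);   /* remote: paused around the update */
    return 0;
}
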
/xen/xen/common/
domain.c
87 struct vcpu *v; in __domain_finalise_shutdown()
127 v->vcpu_info = ((v->vcpu_id < XEN_LEGACY_MAX_VCPUS) in vcpu_info_reset()
140 struct vcpu *v; in vcpu_create()
159 v->domain = d; in vcpu_create()
203 return v; in vcpu_create()
209 vcpu_destroy(v); in vcpu_create()
703 struct vcpu *v; in domain_kill()
998 vcpu_pause(v); in vcpu_pause_by_systemcontroller()
1196 vcpu_pause(v); in vcpu_reset()
1256 if ( (v != current) && !(v->pause_flags & VPF_down) ) in map_vcpu_info()
[all …]
/xen/xen/arch/x86/hvm/svm/
svm.c
170 struct vcpu *v; in svm_enable_msr_interception()
523 ? &vcpu_nestedhvm(v).nv_n2asid : &v->arch.hvm.n1asid); in svm_update_guest_cr()
643 ASSERT((v == current) || !vcpu_runnable(v)); in svm_get_segment_register()
688 ASSERT((v == current) || !vcpu_runnable(v)); in svm_set_segment_register()
952 svm_save_dr(v); in svm_ctxt_switch_from()
980 svm_restore_dr(v); in svm_ctxt_switch_to()
1000 if ( nestedhvm_enabled(v->domain) && nestedhvm_vcpu_in_guestmode(v) ) in svm_do_resume()
1034 hvm_do_resume(v); in svm_do_resume()
1353 eventinj.v = true; in svm_inject_event()
1431 return v->arch.hvm.svm.vmcb->event_inj.v; in svm_event_pending()
[all …]

intr.c
42 static void svm_inject_nmi(struct vcpu *v) in svm_inject_nmi()
49 event.v = true; in svm_inject_nmi()
53 ASSERT(!vmcb->event_inj.v); in svm_inject_nmi()
70 event.v = true; in svm_inject_extint()
74 ASSERT(!vmcb->event_inj.v); in svm_inject_extint()
136 struct vcpu *v = current; in svm_intr_assist()
142 if ( unlikely(v->arch.vm_event) && v->arch.vm_event->sync_event ) in svm_intr_assist()
146 pt_update_irq(v); in svm_intr_assist()
162 if ( nestedhvm_enabled(v->domain) && nestedhvm_vcpu_in_guestmode(v) ) in svm_intr_assist()
211 svm_inject_nmi(v); in svm_intr_assist()
[all …]

nestedsvm.c
38 vcpu_nestedsvm(v).ns_gif = 0; in nestedsvm_vcpu_clgi()
46 vcpu_nestedsvm(v).ns_gif = 1; in nestedsvm_vcpu_stgi()
71 ASSERT(vvmcx_valid(v)); in nestedsvm_vmcb_map()
124 nsvm_vcpu_destroy(v); in nsvm_vcpu_initialise()
190 nestedsvm_vcpu_stgi(v); in nsvm_vcpu_reset()
308 hvm_update_guest_cr(v, 2); in nsvm_vcpu_hostrestore()
410 ASSERT(v != NULL); in nestedsvm_vmcb_set_nestedp2m()
733 nestedsvm_vcpu_stgi(v); in nsvm_vcpu_vmentry()
1505 nv = &vcpu_nestedhvm(v); in nsvm_vcpu_switch()
1618 nestedsvm_vcpu_stgi(v); in svm_vmexit_do_stgi()
[all …]
/xen/xen/include/asm-arm/arm32/
atomic.h
24 prefetchw(&v->counter); in atomic_add()
32 : "r" (&v->counter), "Ir" (i) in atomic_add()
42 prefetchw(&v->counter); in atomic_add_return()
51 : "r" (&v->counter), "Ir" (i) in atomic_add_return()
64 prefetchw(&v->counter); in atomic_sub()
72 : "r" (&v->counter), "Ir" (i) in atomic_sub()
82 prefetchw(&v->counter); in atomic_sub_return()
91 : "r" (&v->counter), "Ir" (i) in atomic_sub_return()
104 prefetchw(&v->counter); in atomic_and()
112 : "r" (&v->counter), "Ir" (m) in atomic_and()
[all …]
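
The arm32 atomic.h hits are fragments of LDREX/STREX loops: load-exclusive, modify, store-exclusive, and loop whenever the exclusive store reports that another CPU intervened. The same loop shape expressed portably with a C11 compare-exchange, purely as illustration (demo_* names are stand-ins; the real implementation is the inline asm quoted above):

#include <stdatomic.h>
#include <stdio.h>

typedef struct { atomic_int counter; } demo_atomic_t;

static int demo_atomic_add_return(int i, demo_atomic_t *v)
{
    int old = atomic_load_explicit(&v->counter, memory_order_relaxed);
    int new;

    do {
        /* cf. LDREX..ADD..STREX: compute the update, then try to
         * publish it; on contention the exchange fails, refreshes
         * 'old', and we loop, just as a failed STREX branches back. */
        new = old + i;
    } while ( !atomic_compare_exchange_weak(&v->counter, &old, new) );

    return new;
}

int main(void)
{
    demo_atomic_t v = { 5 };

    printf("%d\n", demo_atomic_add_return(3, &v));   /* 8 */
    return 0;
}
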
/xen/xen/arch/x86/cpu/mcheck/
vmce.c
55 void vmce_init_vcpu(struct vcpu *v) in vmce_init_vcpu()
65 v->arch.vmce.mcg_status = 0; in vmce_init_vcpu()
117 v, bank, *val); in bank_mce_rdmsr()
371 struct vcpu *v; in vmce_load_vcpu_ctxt()
402 struct vcpu *v; in inject_vmce()
405 for_each_vcpu ( d, v ) in inject_vmce()
419 vcpu_kick(v); in inject_vmce()
492 for_each_vcpu ( d, v ) in fill_vmsr_data()
494 if ( !v->vcpu_id ) in fill_vmsr_data()
548 struct vcpu *v; in vmce_enable_mca_cap()
[all …]
/xen/xen/include/xen/
atomic.h
35 static inline int atomic_read(const atomic_t *v);
43 static inline int _atomic_read(atomic_t v);
52 static inline void atomic_set(atomic_t *v, int i);
61 static inline void _atomic_set(atomic_t *v, int i);
93 static inline void atomic_add(int i, atomic_t *v);
111 static inline void atomic_sub(int i, atomic_t *v);
139 static inline void atomic_inc(atomic_t *v);
147 static inline int atomic_inc_return(atomic_t *v);
157 static inline int atomic_inc_and_test(atomic_t *v);
165 static inline void atomic_dec(atomic_t *v);
[all …]
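
xen/atomic.h declares the architecture-independent counter API whose bodies appear elsewhere in these results (the x86 and arm32 atomic.h hits above). A small usage sketch in the typical reference-count style; the demo_* definitions are stand-ins backed by C11 atomics so the example compiles on its own:

#include <stdatomic.h>
#include <stdio.h>

typedef struct { atomic_int counter; } demo_atomic_t;

static int  demo_atomic_read(demo_atomic_t *v)       { return atomic_load(&v->counter); }
static void demo_atomic_set(demo_atomic_t *v, int i) { atomic_store(&v->counter, i); }
static void demo_atomic_inc(demo_atomic_t *v)        { atomic_fetch_add(&v->counter, 1); }
static int  demo_atomic_dec_return(demo_atomic_t *v) { return atomic_fetch_sub(&v->counter, 1) - 1; }

int main(void)
{
    demo_atomic_t refs;

    demo_atomic_set(&refs, 1);                  /* one initial holder */
    demo_atomic_inc(&refs);                     /* take a second ref  */

    if ( demo_atomic_dec_return(&refs) == 0 )   /* drop one: 1 left   */
        printf("last reference dropped\n");

    printf("refs now: %d\n", demo_atomic_read(&refs));   /* refs now: 1 */
    return 0;
}
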
/xen/xen/arch/arm/arm32/
vfp.c
6 void vfp_save_state(struct vcpu *v) in vfp_save_state()
8 v->arch.vfp.fpexc = READ_CP32(FPEXC); in vfp_save_state()
12 v->arch.vfp.fpscr = READ_CP32(FPSCR); in vfp_save_state()
18 if ( v->arch.vfp.fpexc & FPEXC_FP2V ) in vfp_save_state()
26 : "=Q" (*v->arch.vfp.fpregs1) : "r" (v->arch.vfp.fpregs1)); in vfp_save_state()
33 : "=Q" (*v->arch.vfp.fpregs2) : "r" (v->arch.vfp.fpregs2)); in vfp_save_state()
39 void vfp_restore_state(struct vcpu *v) in vfp_restore_state()
46 : : "Q" (*v->arch.vfp.fpregs1), "r" (v->arch.vfp.fpregs1)); in vfp_restore_state()
52 : : "Q" (*v->arch.vfp.fpregs2), "r" (v->arch.vfp.fpregs2)); in vfp_restore_state()
54 if ( v->arch.vfp.fpexc & FPEXC_EX ) in vfp_restore_state()
[all …]
/xen/xen/include/asm-x86/hvm/svm/
nestedsvm.h
93 #define vcpu_nestedsvm(v) (vcpu_nestedhvm(v).u.nsvm)
96 #define nsvm_efer_svm_enabled(v) \
97 (!!((v)->arch.hvm.guest_efer & EFER_SVME))
100 void nestedsvm_vmexit_defer(struct vcpu *v,
110 void nsvm_vcpu_destroy(struct vcpu *v);
111 int nsvm_vcpu_initialise(struct vcpu *v);
112 int nsvm_vcpu_reset(struct vcpu *v);
115 uint64_t nsvm_vcpu_hostcr3(struct vcpu *v);
118 bool_t nsvm_vmcb_hap_enabled(struct vcpu *v);
119 enum hvm_intblk nsvm_intr_blocked(struct vcpu *v);
[all …]
/xen/xen/common/sched/
core.c
156 return (v && v->new_state == RUNSTATE_running) ? v : idle_vcpu[cpu]; in sched_unit2vcpu_cpu()
245 !test_bit(v->vcpu_id, v->domain->poll_mask) ) in vcpu_urgent_count_update()
285 v->runstate.time[v->runstate.state] += delta; in vcpu_runstate_change()
853 while ( !vcpu_runnable(v) && v->is_running ) in vcpu_sleep_sync()
1466 sched_yield(vcpu_scheduler(v), v->sched_unit); in vcpu_yield()
2063 migrate_timer(&v->periodic_timer, v->processor); in vcpu_periodic_timer_work_locked()
2356 if ( vcpu_runnable(v) == v->is_running ) in sched_force_context_switch()
2374 v->new_state = vcpu_runstate_blocked(v); in sched_force_context_switch()
2375 vcpu_runstate_change(v, v->new_state, now); in sched_force_context_switch()
2433 if ( v && v->force_context_switch ) in sched_wait_rendezvous_in()
[all …]
