Searched refs:vcpu_is_preempted (Results 1 – 18 of 18) sorted by relevance
21 #define vcpu_is_preempted vcpu_is_preempted macro
22 static inline bool vcpu_is_preempted(int cpu) in vcpu_is_preempted() function
92 #define vcpu_is_preempted vcpu_is_preempted macro
93 static inline bool vcpu_is_preempted(int cpu) in vcpu_is_preempted() function
59 #define vcpu_is_preempted vcpu_is_preempted macro
60 static inline bool vcpu_is_preempted(long cpu) in vcpu_is_preempted() function
260 struct paravirt_callee_save vcpu_is_preempted; member
613 return PVOP_ALT_CALLEE1(bool, lock.vcpu_is_preempted, cpu, in pv_vcpu_is_preempted()
32 return pv_ops.lock.vcpu_is_preempted.func == in pv_is_native_vcpu_is_preempted()
622 if (vcpu_is_preempted(cpu)) { in kvm_smp_send_call_func_ipi()
1080 pv_ops.lock.vcpu_is_preempted = in kvm_spinlock_init()
409 .lock.vcpu_is_preempted =
84 pv_ops.lock.vcpu_is_preempted = PV_CALLEE_SAVE(hv_vcpu_is_preempted); in hv_init_spinlocks()
25 #define vcpu_is_preempted arch_vcpu_is_preempted macro
144 pv_ops.lock.vcpu_is_preempted = PV_CALLEE_SAVE(xen_vcpu_stolen); in xen_init_spinlocks()
144 vcpu_is_preempted(node_cpu(node->prev)))) in osq_lock()
371 vcpu_is_preempted(task_cpu(owner))) { in mutex_spin_on_owner()
413 retval = owner->on_cpu && !vcpu_is_preempted(task_cpu(owner)); in mutex_can_spin_on_owner()
1384 vcpu_is_preempted(task_cpu(owner))) { in rtmutex_spin_on_owner()
667 return owner->on_cpu && !vcpu_is_preempted(task_cpu(owner)); in owner_on_cpu()
189 if (!vcpu_is_preempted(tcpu->cpu)) in __diag_time_slice_end_directed()
2159 #ifndef vcpu_is_preempted
2160 static inline bool vcpu_is_preempted(int cpu) in vcpu_is_preempted() function
7003 if (vcpu_is_preempted(cpu)) in available_idle_cpu()
Completed in 64 milliseconds