Searched refs:schedule_lock (Results 1 – 6 of 6) sorted by relevance
  46  spinlock_t *schedule_lock,                                  member
 214  spinlock_t *lock = get_sched_res(cpu)->schedule_lock; \
 224  if ( likely(lock == get_sched_res(cpu)->schedule_lock) ) \
 234  ASSERT(lock == get_sched_res(cpu)->schedule_lock); \
 263  spinlock_t *lock = get_sched_res(cpu)->schedule_lock;       in sched_lock()
 267  if ( lock == get_sched_res(cpu)->schedule_lock )            in sched_lock()
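The hits above (the backslash-continued macro body and the sched_lock() helper) point at a recurring idiom in this code: each CPU's scheduling resource holds a pointer to its current schedule_lock, and that pointer can be retargeted while another CPU is trying to take the lock, so the lock has to be re-checked after it has been acquired. The sketch below is illustrative only, not the code from the tree; sched_lock_cpu() is a hypothetical name, and spinlock_t/spin_lock()/get_sched_res() are taken as given from the hits.

/*
 * Illustrative sketch of the acquire-then-recheck loop implied by the
 * hits at 214/224/234 and 263/267.  Simplified types; not the real code.
 */
struct sched_resource {
    spinlock_t *schedule_lock;   /* may be repointed at runtime */
    /* ... */
};

static inline spinlock_t *sched_lock_cpu(unsigned int cpu)   /* hypothetical name */
{
    for ( ; ; )
    {
        spinlock_t *lock = get_sched_res(cpu)->schedule_lock;

        spin_lock(lock);

        /* Still the right lock?  If so, we hold the CPU's schedule lock. */
        if ( likely(lock == get_sched_res(cpu)->schedule_lock) )
            return lock;

        /* The pointer was switched underneath us: drop the lock and retry. */
        spin_unlock(lock);
    }
}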
 268  ASSERT(spin_is_locked(get_sched_res(v->processor)->schedule_lock));  in vcpu_runstate_change()
 812  ASSERT(spin_is_locked(get_sched_res(v->processor)->schedule_lock));  in vcpu_sleep_nosync_locked()
1019  old_lock = get_sched_res(old_cpu)->schedule_lock;                    in sched_unit_migrate_finish()
1020  new_lock = get_sched_res(new_cpu)->schedule_lock;                    in sched_unit_migrate_finish()
1025  if ( old_lock == get_sched_res(old_cpu)->schedule_lock )             in sched_unit_migrate_finish()
1032  (new_lock == get_sched_res(new_cpu)->schedule_lock) &&               in sched_unit_migrate_finish()
2698  sr->schedule_lock = &sched_free_cpu_lock;                            in cpu_schedule_up()
3088  sr->schedule_lock = new_lock;                                        in schedule_cpu_add()
3154  sr_new[idx]->schedule_lock = sr->schedule_lock;                      in schedule_cpu_rm()
3202  sr_new[idx]->schedule_lock = &sched_free_cpu_lock;                   in schedule_cpu_rm()
[all …]
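The sched_unit_migrate_finish() hits (1019–1032) extend the same idea to two CPUs at once: both the old and the new resource's schedule_lock pointers are sampled, both locks are taken, and both pointers are re-verified before the migration proceeds. The following is a hedged sketch of that shape only; lock_double(), unlock_double() and migrate_locked() are invented helpers, not the functions in the source.

/* Take two schedule locks in a fixed (address) order to avoid ABBA deadlock. */
static void lock_double(spinlock_t *l1, spinlock_t *l2)
{
    if ( l1 == l2 )
        spin_lock(l1);
    else if ( l1 < l2 )
    {
        spin_lock(l1);
        spin_lock(l2);
    }
    else
    {
        spin_lock(l2);
        spin_lock(l1);
    }
}

static void unlock_double(spinlock_t *l1, spinlock_t *l2)
{
    if ( l1 != l2 )
        spin_unlock(l2);
    spin_unlock(l1);
}

/* Illustrative only: hold both CPUs' schedule locks across a migration. */
static void migrate_locked(unsigned int old_cpu, unsigned int new_cpu)
{
    for ( ; ; )
    {
        spinlock_t *old_lock = get_sched_res(old_cpu)->schedule_lock;
        spinlock_t *new_lock = get_sched_res(new_cpu)->schedule_lock;

        lock_double(old_lock, new_lock);

        /* Both pointers must still be current, otherwise retry from scratch. */
        if ( (old_lock == get_sched_res(old_cpu)->schedule_lock) &&
             (new_lock == get_sched_res(new_cpu)->schedule_lock) )
        {
            /* ... perform the migration while both locks are held ... */
            unlock_double(old_lock, new_lock);
            return;
        }

        unlock_double(old_lock, new_lock);
    }
}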
1430  ASSERT(spin_is_locked(get_sched_res(cpu)->schedule_lock));        in runq_insert()
1939  ASSERT(spin_is_locked(get_sched_res(cpu)->schedule_lock));        in unit_grab_budget()
1986  ASSERT(spin_is_locked(get_sched_res(cpu)->schedule_lock));        in unit_return_budget()
2240  ASSERT(spin_is_locked(get_sched_res(cpu)->schedule_lock));        in csched2_unit_wake()
2366  ASSERT(spin_is_locked(get_sched_res(cpu)->schedule_lock));        in csched2_res_pick()
2703  ASSERT(spin_is_locked(get_sched_res(cpu)->schedule_lock));        in balance_load()
3560  ASSERT(spin_is_locked(get_sched_res(sched_cpu)->schedule_lock));  in csched2_schedule()
3999  ASSERT(get_sched_res(cpu)->schedule_lock != &rqd->lock);          in csched2_switch_sched()
 279  ASSERT(spin_is_locked(get_sched_res(cpu)->schedule_lock));   in pick_res()
 895  ASSERT(lock != get_sched_res(sched_cpu)->schedule_lock);     in null_schedule()
 254  ASSERT(spin_is_locked(get_sched_res(cpu)->schedule_lock));   in is_runq_idle()
 263  ASSERT(spin_is_locked(get_sched_res(cpu)->schedule_lock));   in inc_nr_runnable()
 271  ASSERT(spin_is_locked(get_sched_res(cpu)->schedule_lock));   in dec_nr_runnable()
732 ASSERT(get_sched_res(cpu)->schedule_lock != &prv->lock); in rt_switch_sched()
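The switch_sched ASSERTs (csched2_switch_sched() at 3999 above and rt_switch_sched() at 732 here) check that, while a CPU is being handed to a new scheduler, its schedule_lock still points at the old lock rather than the new scheduler's private one; the pointer is only repointed afterwards (compare schedule_cpu_add() at 3088). Below is a minimal sketch of that hand-over with invented names (takeover_cpu(), new_sched_lock), assuming the currently installed lock is already held on entry.

/*
 * Illustrative only: publish a new schedule_lock for a CPU.  Waiters
 * spinning on the old lock re-check the pointer (see the sched_lock()
 * pattern above), notice the change, and retry against the new lock.
 */
static void takeover_cpu(unsigned int cpu, spinlock_t *new_sched_lock)
{
    struct sched_resource *sr = get_sched_res(cpu);

    /* The CPU must not already be using the new scheduler's lock. */
    ASSERT(sr->schedule_lock != new_sched_lock);

    smp_wmb();                           /* order prior setup before the switch */
    sr->schedule_lock = new_sched_lock;  /* new acquirers now use the new lock */
}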
Completed in 25 milliseconds