Searched refs:cpus_ptr (Results 1 – 15 of 15) sorted by relevance
/linux/samples/trace_events/trace-events-sample.c
    37    current->cpus_ptr);  in simple_thread_func()

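The sample above only reads the mask for reporting. A minimal sketch of the same read-only use, printing the calling task's currently allowed CPUs (the function name and pr_info() message are illustrative, not part of the sample):

    #include <linux/cpumask.h>
    #include <linux/printk.h>
    #include <linux/sched.h>

    /* Report which CPUs the calling task is currently allowed to run on. */
    static void report_allowed_cpus(void)
    {
            /* cpus_ptr is the mask the scheduler actually honours; it can be
             * narrower than cpus_mask while migration is disabled. */
            pr_info("%s/%d allowed CPUs: %*pbl\n",
                    current->comm, current->pid,
                    cpumask_pr_args(current->cpus_ptr));
    }
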
/linux/arch/mips/kernel/mips-mt-fpaff.c
    180   cpumask_or(&allowed, &p->thread.user_cpus_allowed, p->cpus_ptr);  in mipsmt_sys_sched_getaffinity()

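This hit ORs an arch-private per-thread mask into the scheduler's effective mask before reporting an affinity to userspace. A hedged sketch of that union pattern; 'private_mask' stands in for p->thread.user_cpus_allowed:

    #include <linux/cpumask.h>
    #include <linux/sched.h>

    /* Combine a subsystem-private affinity mask with the mask the
     * scheduler enforces for the task. */
    static void effective_affinity(struct task_struct *p,
                                   const struct cpumask *private_mask,
                                   struct cpumask *out)
    {
            cpumask_or(out, private_mask, p->cpus_ptr);
    }
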
/linux/init/init_task.c
    82    .cpus_ptr = &init_task.cpus_mask,

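init_task wires cpus_ptr to its own embedded cpus_mask at build time, and every later task inherits that invariant through fork. A self-contained illustration of the same self-referential initializer, using a stand-in struct rather than the real task_struct:

    #include <linux/cpumask.h>

    /* Stand-in for the affinity fields of task_struct, kept small so the
     * example compiles on its own. */
    struct demo_task {
            const cpumask_t *cpus_ptr;
            cpumask_t        cpus_mask;
    };

    static struct demo_task demo = {
            .cpus_ptr  = &demo.cpus_mask,  /* self-reference, as in init_task */
            .cpus_mask = CPU_MASK_ALL,     /* the boot task may run anywhere  */
    };
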
/linux/kernel/sched/fair.c
    1578  !cpumask_test_cpu(cpu, env->p->cpus_ptr))  in update_numa_stats()
    1610  !cpumask_test_cpu(cpu, env->p->cpus_ptr)) {  in task_numa_assign()
    1723  if (!cpumask_test_cpu(env->src_cpu, cur->cpus_ptr))  in task_numa_compare()
    1913  if (!cpumask_test_cpu(cpu, env->p->cpus_ptr))  in task_numa_find_cpu()
    6200  if (!cpumask_test_cpu(cpu, p->cpus_ptr) ||  in select_idle_smt()
    6251  cpumask_and(cpus, sched_domain_span(sd), p->cpus_ptr);  in select_idle_cpu()
    6327  cpumask_and(cpus, sched_domain_span(sd), p->cpus_ptr);  in select_idle_capacity()
    6787  if (!cpumask_test_cpu(cpu, p->cpus_ptr))  in find_energy_efficient_cpu()
    7770  if (!cpumask_test_cpu(env->dst_cpu, p->cpus_ptr)) {  in can_migrate_task()
    7793  if (cpumask_test_cpu(cpu, p->cpus_ptr)) {  in can_migrate_task()
    [all …]

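Every fair.c hit is one of two moves: test a candidate CPU against p->cpus_ptr, or intersect a scan mask with it before iterating. A minimal sketch of that selection pattern (the function and the caller-supplied scratch mask are illustrative, not scheduler code):

    #include <linux/cpumask.h>
    #include <linux/sched.h>
    #include <linux/sched/topology.h>

    /* Pick some CPU that is both inside the domain being scanned and
     * allowed by the task's effective affinity mask. */
    static int pick_allowed_cpu(struct task_struct *p, struct sched_domain *sd,
                                struct cpumask *scratch)
    {
            int cpu;

            cpumask_and(scratch, sched_domain_span(sd), p->cpus_ptr);
            cpu = cpumask_first(scratch);

            return cpu < nr_cpu_ids ? cpu : -1;  /* -1: no allowed CPU here */
    }
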
/linux/kernel/sched/core.c
    2142  if (p->cpus_ptr != &p->cpus_mask)  in migrate_disable_switch()
    2181  if (p->cpus_ptr != &p->cpus_mask)  in migrate_enable()
    2207  if (!cpumask_test_cpu(cpu, p->cpus_ptr))  in is_cpu_allowed()
    2387  if (cpumask_test_cpu(task_cpu(p), p->cpus_ptr)) {  in migration_cpu_stop()
    2465  p->cpus_ptr = new_mask;  in set_cpus_allowed_common()
    2924  cpumask_copy(user_mask, p->cpus_ptr);  in restrict_cpus_allowed_ptr()
    3170  if (!cpumask_test_cpu(arg.dst_cpu, arg.src_task->cpus_ptr))  in migrate_swap()
    3363  for_each_cpu(dest_cpu, p->cpus_ptr) {  in select_fallback_rq()
    3421  cpu = cpumask_any(p->cpus_ptr);  in select_task_rq()
    7364  if (!cpumask_subset(span, p->cpus_ptr) ||  in __sched_setscheduler()
    [all …]

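The core.c hits show the central idiom: cpus_ptr normally points at the task's own cpus_mask, and migrate_disable() temporarily repoints it at a single-CPU mask while cpus_mask keeps the full affinity; the p->cpus_ptr != &p->cpus_mask tests detect that pinned state. A heavily simplified sketch of the pointer swap, with the locking and the real __do_set_cpus_allowed() machinery omitted:

    #include <linux/cpumask.h>
    #include <linux/sched.h>
    #include <linux/smp.h>

    /* Pin the task to the CPU it is on: the wide mask stays in cpus_mask,
     * only the pointer the scheduler consults is narrowed. */
    static void pin_to_this_cpu(struct task_struct *p)
    {
            p->cpus_ptr = cpumask_of(smp_processor_id());
    }

    /* Undo the pin by restoring the default self-reference. */
    static void unpin(struct task_struct *p)
    {
            if (p->cpus_ptr != &p->cpus_mask)
                    p->cpus_ptr = &p->cpus_mask;
    }
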
/linux/kernel/sched/sched.h
    1241  for_each_cpu_and(cpu, sched_group_span(group), p->cpus_ptr) {  in sched_group_cookie_match()

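The same intersection can be iterated directly with for_each_cpu_and(), as this hit does over a scheduling group's span. A tiny sketch of the idiom; it takes a plain cpumask because sched_group_span() lives in the scheduler's private headers:

    #include <linux/cpumask.h>
    #include <linux/sched.h>

    /* Count how many CPUs of 'span' the task is allowed to use. */
    static unsigned int span_cpus_allowed(const struct cpumask *span,
                                          struct task_struct *p)
    {
            unsigned int cpu, n = 0;

            for_each_cpu_and(cpu, span, p->cpus_ptr)
                    n++;

            return n;
    }
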
/linux/kernel/sched/deadline.c
    607   cpu = cpumask_any_and(cpu_active_mask, p->cpus_ptr);  in dl_task_offline_migration()

/linux/kernel/trace/trace_hwlat.c
    325   if (!cpumask_equal(current_mask, current->cpus_ptr))  in move_to_next_cpu()

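The hwlat tracer keeps its own copy of the affinity it last set and compares it with current->cpus_ptr to notice when something else has moved the thread. A sketch of that staleness check; 'saved_mask' is whatever copy the caller kept:

    #include <linux/cpumask.h>
    #include <linux/sched.h>
    #include <linux/types.h>

    /* True if the task's effective affinity no longer matches the mask
     * we previously requested for it. */
    static bool affinity_went_stale(const struct cpumask *saved_mask)
    {
            return !cpumask_equal(saved_mask, current->cpus_ptr);
    }
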
/linux/arch/x86/kernel/cpu/resctrl/pseudo_lock.c
    1508  if (!cpumask_subset(current->cpus_ptr, &plr->d->cpu_mask)) {  in pseudo_lock_dev_mmap()

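Here the caller may only proceed if it is already confined to the CPUs that back the pseudo-locked region, which is a subset test against current->cpus_ptr. A hedged sketch of that gate; 'region_cpus' stands in for &plr->d->cpu_mask and the error code is illustrative:

    #include <linux/cpumask.h>
    #include <linux/errno.h>
    #include <linux/sched.h>

    /* Refuse the operation unless the caller can only run on CPUs that
     * belong to the resource in question. */
    static int require_confinement(const struct cpumask *region_cpus)
    {
            if (!cpumask_subset(current->cpus_ptr, region_cpus))
                    return -EINVAL;

            return 0;
    }
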
/linux/drivers/infiniband/hw/hfi1/affinity.c
    1001  *proc_mask = current->cpus_ptr;  in hfi1_get_proc_affinity()

/linux/arch/powerpc/platforms/cell/spufs/sched.c
    131   cpumask_copy(&ctx->cpus_allowed, current->cpus_ptr);  in __spu_update_sched_info()

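spufs snapshots the caller's affinity into its own context so the value outlives the call. Note that it copies the mask rather than saving the pointer, since cpus_ptr may be repointed later; a sketch of that snapshot pattern with an illustrative context struct:

    #include <linux/cpumask.h>
    #include <linux/sched.h>

    /* Illustrative per-object context that wants to remember the
     * creating task's affinity. */
    struct demo_ctx {
            cpumask_t cpus_allowed;
    };

    /* Snapshot, don't alias: current->cpus_ptr may change later. */
    static void demo_ctx_capture_affinity(struct demo_ctx *ctx)
    {
            cpumask_copy(&ctx->cpus_allowed, current->cpus_ptr);
    }
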
/linux/kernel/fork.c
    936   if (orig->cpus_ptr == &orig->cpus_mask)  in dup_task_struct()
    937   tsk->cpus_ptr = &tsk->cpus_mask;  in dup_task_struct()

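dup_task_struct() copies the parent wholesale, so the child's cpus_ptr initially points into the parent's cpus_mask; when the parent was using its own embedded mask, the pointer is repointed at the child's copy. A sketch of just that fix-up, with everything else dup_task_struct() does left out:

    #include <linux/sched.h>

    /* After a raw structure copy, repoint the child's cpus_ptr at its own
     * cpus_mask if the parent was using the default self-reference. */
    static void fixup_child_cpus_ptr(struct task_struct *tsk,
                                     const struct task_struct *orig)
    {
            if (orig->cpus_ptr == &orig->cpus_mask)
                    tsk->cpus_ptr = &tsk->cpus_mask;
    }
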
/linux/include/linux/sched.h
    815   const cpumask_t *cpus_ptr;  member

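This is the declaration itself. An abridged, stand-in view of the affinity-related members as they relate to the other hits above (the field names are real, the wrapper struct is not):

    #include <linux/cpumask.h>

    /* Stand-in showing only the affinity-related task_struct members. */
    struct task_affinity_fields {
            int              nr_cpus_allowed;  /* weight of cpus_mask, kept in
                                                * sync by the affinity setters */
            const cpumask_t *cpus_ptr;         /* mask the scheduler enforces;
                                                * usually &cpus_mask           */
            cpumask_t        cpus_mask;        /* the task's full affinity mask */
    };
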
/linux/drivers/infiniband/hw/qib/qib_file_ops.c
    1626  const unsigned int cpu = cpumask_first(current->cpus_ptr);  in qib_assign_ctxt()

/linux/kernel/cgroup/cpuset.c
    2956  set_cpus_allowed_ptr(task, current->cpus_ptr);  in cpuset_fork()

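cpuset_fork() hands the child the parent's effective mask through set_cpus_allowed_ptr(), the exported setter, rather than touching the fields directly. A minimal sketch of that call:

    #include <linux/cpumask.h>
    #include <linux/sched.h>

    /* Give 'child' the same effective affinity the calling task has now,
     * going through the scheduler's official setter. */
    static int inherit_caller_affinity(struct task_struct *child)
    {
            return set_cpus_allowed_ptr(child, current->cpus_ptr);
    }
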
Completed in 74 milliseconds