/linux/arch/sparc/include/asm/smp_32.h
     71  sparc32_ipi_ops->cross_call(func, *cpu_online_mask, 0, 0, 0, 0);  in xc0()
     76  sparc32_ipi_ops->cross_call(func, *cpu_online_mask, arg1, 0, 0, 0);  in xc1()
     80  sparc32_ipi_ops->cross_call(func, *cpu_online_mask, arg1, arg2, 0, 0);  in xc2()
     86  sparc32_ipi_ops->cross_call(func, *cpu_online_mask,  in xc3()
     93  sparc32_ipi_ops->cross_call(func, *cpu_online_mask,  in xc4()
|
/linux/drivers/net/wireguard/queueing.h
    112  !cpumask_test_cpu(cpu, cpu_online_mask))) {  in wg_cpumask_choose_online()
    113  cpu_index = id % cpumask_weight(cpu_online_mask);  in wg_cpumask_choose_online()
    114  cpu = cpumask_first(cpu_online_mask);  in wg_cpumask_choose_online()
    116  cpu = cpumask_next(cpu, cpu_online_mask);  in wg_cpumask_choose_online()
    133  while (unlikely(!cpumask_test_cpu(cpu, cpu_online_mask)))  in wg_cpumask_next_online()
    134  cpu = cpumask_next(cpu, cpu_online_mask) % nr_cpumask_bits;  in wg_cpumask_next_online()
    135  *next = cpumask_next(cpu, cpu_online_mask) % nr_cpumask_bits;  in wg_cpumask_next_online()
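
The wg_cpumask_choose_online() hits above implement a common idiom: when a cached CPU choice has gone offline, remap a stable id to the (id mod online-count)-th online CPU. A minimal userspace sketch of that indexing, with a flat unsigned long standing in for struct cpumask (helper names here are hypothetical, not the kernel API):

#include <stdio.h>

/* Return the (id % nr_online)-th set bit of an online bitmask.
 * Assumes at least one CPU is online, as the kernel caller does. */
static int nth_online_cpu(unsigned long online, unsigned int id)
{
	int index = id % __builtin_popcountl(online);

	for (int cpu = 0; cpu < (int)(8 * sizeof(online)); cpu++) {
		if (!(online & (1UL << cpu)))
			continue;		/* skip offline CPUs */
		if (index-- == 0)
			return cpu;
	}
	return -1;				/* unreachable for a nonempty mask */
}

int main(void)
{
	unsigned long online = 0xd;		/* CPUs 0, 2, 3 online; CPU 1 offline */

	for (unsigned int id = 0; id < 6; id++)
		printf("id %u -> cpu %d\n", id, nth_online_cpu(online, id));
	return 0;
}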
|
/linux/arch/ia64/kernel/msi_ia64.c
     21  int cpu = cpumask_first_and(cpu_mask, cpu_online_mask);  in ia64_set_msi_irq_affinity()
     58  cpu_online_mask));  in arch_setup_msi_irq()
    122  int cpu = cpumask_first_and(mask, cpu_online_mask);  in dmar_msi_set_affinity()
    159  cpu_online_mask));  in msi_compose_msg()
|
/linux/arch/ia64/kernel/irq.c
    102  cpu_online_mask) >= nr_cpu_ids) {  in migrate_irqs()
    108  new_cpu = cpumask_any(cpu_online_mask);  in migrate_irqs()
    141  time_keeper_id = cpumask_first(cpu_online_mask);  in fixup_irqs()
|
/linux/arch/ia64/kernel/irq_ia64.c
    106  cpumask_and(&mask, &domain, cpu_online_mask);  in find_unassigned_vector()
    129  cpumask_and(&mask, &domain, cpu_online_mask);  in __bind_irq_vector()
    166  for_each_cpu_and(cpu, &cfg->domain, cpu_online_mask)  in __clear_irq_vector()
    308  cpumask_and(&cleanup_mask, &cfg->old_domain, cpu_online_mask);  in irq_complete_move()
|
/linux/kernel/irq/cpuhotplug.c
     40  cpumask_any_and(m, cpu_online_mask) >= nr_cpu_ids) {  in irq_needs_fixup()
    113  if (cpumask_any_and(affinity, cpu_online_mask) >= nr_cpu_ids) {  in migrate_one_irq()
    123  affinity = cpu_online_mask;  in migrate_one_irq()
|
/linux/kernel/irq/migration.c
     29  if (cpumask_any_and(desc->pending_mask, cpu_online_mask) >= nr_cpu_ids) {  in irq_fixup_move_pending()
     77  if (cpumask_any_and(desc->pending_mask, cpu_online_mask) < nr_cpu_ids) {  in irq_move_masked_irq()
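
Both kernel/irq files above lean on the same test: cpumask_any_and() returns a value >= nr_cpu_ids when the two masks do not intersect, i.e. when an IRQ's affinity or pending mask no longer contains any online CPU. With flat bitmasks the check reduces to a single AND, as in this illustrative sketch (names are hypothetical):

#include <stdbool.h>
#include <stdio.h>

/* True when no CPU in the affinity set is still online. */
static bool needs_fixup(unsigned long affinity, unsigned long online)
{
	return (affinity & online) == 0;
}

int main(void)
{
	unsigned long online = 0x3;			/* CPUs 0 and 1 online */

	printf("%d\n", needs_fixup(0xc, online));	/* 1: CPUs 2,3 are offline */
	printf("%d\n", needs_fixup(0x6, online));	/* 0: CPU 1 is still online */
	return 0;
}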
|
/linux/arch/x86/xen/smp.c
    140  for_each_cpu_and(cpu, mask, cpu_online_mask)  in __xen_send_IPI_mask()
    211  __xen_send_IPI_mask(cpu_online_mask, xen_vector);  in xen_send_IPI_all()
    232  for_each_cpu_and(cpu, mask, cpu_online_mask) {  in xen_send_IPI_mask_allbutself()
    242  xen_send_IPI_mask_allbutself(cpu_online_mask, vector);  in xen_send_IPI_allbutself()
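
xen_send_IPI_mask_allbutself() shows the usual broadcast shape: walk the intersection of the target mask and cpu_online_mask, skipping the sending CPU. A hypothetical userspace model of that loop (send_ipi() stands in for the actual hypercall; none of these names are the kernel API):

#include <stdio.h>

/* Hypothetical stand-in for the hypervisor IPI call. */
static void send_ipi(int cpu, int vector)
{
	printf("IPI vector %d -> cpu %d\n", vector, cpu);
}

/* Walk mask AND online (for_each_cpu_and()-style), skipping the sender. */
static void send_ipi_mask_allbutself(unsigned long mask, unsigned long online,
				     int self, int vector)
{
	for (int cpu = 0; cpu < 64; cpu++) {
		if (!((mask >> cpu) & 1) || !((online >> cpu) & 1))
			continue;
		if (cpu == self)
			continue;		/* never signal the sending CPU */
		send_ipi(cpu, vector);
	}
}

int main(void)
{
	unsigned long online = 0xf;		/* CPUs 0-3 online */

	send_ipi_mask_allbutself(online, online, 0, 2);	/* hits CPUs 1-3 */
	return 0;
}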
|
/linux/include/asm-generic/topology.h
     49  #define cpumask_of_node(node) ((node) == 0 ? cpu_online_mask : cpu_none_mask)
     51  #define cpumask_of_node(node) ((void)(node), cpu_online_mask)
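
These are two fallbacks for builds without real NUMA topology: the first maps node 0 to every online CPU and any other node to none; the second always yields cpu_online_mask but still evaluates its argument via the comma operator, so side effects survive and no unused-variable warning fires. The same idiom in a standalone, purely illustrative form:

#include <stdio.h>

#define value_of_node(node) ((void)(node), 42)	/* evaluate node, yield 42 */

int main(void)
{
	int calls = 0;
	int v = value_of_node(calls++);		/* the side effect still runs */

	printf("v=%d calls=%d\n", v, calls);	/* prints "v=42 calls=1" */
	return 0;
}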
|
/linux/lib/cpumask.c
    214  for_each_cpu(cpu, cpu_online_mask)  in cpumask_local_spread()
    219  for_each_cpu_and(cpu, cpumask_of_node(node), cpu_online_mask)  in cpumask_local_spread()
    223  for_each_cpu(cpu, cpu_online_mask) {  in cpumask_local_spread()
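
cpumask_local_spread() spreads the i-th user (an IRQ or queue, say) across CPUs, preferring those local to a NUMA node before falling back to the rest of the online mask. A simplified sketch of that ordering with flat bitmasks (illustrative only, not the kernel implementation):

#include <stdio.h>

/* Return the i-th CPU, visiting node-local online CPUs before the
 * remaining online ones; i wraps modulo the online count. */
static int local_spread(unsigned int i, unsigned long node_mask,
			unsigned long online)
{
	unsigned long local = node_mask & online;
	unsigned long remote = online & ~node_mask;

	i %= (unsigned int)__builtin_popcountl(online);
	for (int cpu = 0; cpu < 64; cpu++)	/* node-local CPUs first */
		if (((local >> cpu) & 1) && i-- == 0)
			return cpu;
	for (int cpu = 0; cpu < 64; cpu++)	/* then the rest of the online mask */
		if (((remote >> cpu) & 1) && i-- == 0)
			return cpu;
	return -1;
}

int main(void)
{
	unsigned long online = 0xf, node0 = 0x3;	/* CPUs 0-3 online; node 0 = {0,1} */

	for (unsigned int i = 0; i < 4; i++)
		printf("i=%u -> cpu %d\n", i, local_spread(i, node0, online));
	return 0;
}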
|
/linux/arch/x86/kernel/apic/ipi.c
     57  apic->send_IPI_mask_allbutself(cpu_online_mask, vector);  in apic_send_IPI_allbutself()
     84  if (!cpumask_or_equal(mask, cpumask_of(cpu), cpu_online_mask))  in native_send_call_func_ipi()
    299  WARN_ON(mask & ~cpumask_bits(cpu_online_mask)[0]);  in default_send_IPI_mask_logical()
|
/linux/arch/x86/kernel/apic/bigsmp_32.c
     78  default_send_IPI_mask_allbutself_phys(cpu_online_mask, vector);  in bigsmp_send_IPI_allbutself()
     83  default_send_IPI_mask_sequence_phys(cpu_online_mask, vector);  in bigsmp_send_IPI_all()
|
/linux/arch/arm/mach-omap2/cpuidle44xx.c
    133  if (dev->cpu == 0 && cpumask_test_cpu(1, cpu_online_mask)) {  in omap_enter_idle_coupled()
    191  if (dev->cpu == 0 && cpumask_test_cpu(1, cpu_online_mask)) {  in omap_enter_idle_coupled()
    326  return cpuidle_register(idle_driver, cpu_online_mask);  in omap4_idle_init()
|
/linux/arch/powerpc/include/asm/cputhreads.h
     55  cpu = cpumask_next_and(-1, &tmp, cpu_online_mask);  in cpu_thread_mask_to_cores()
     70  return cpu_thread_mask_to_cores(cpu_online_mask);  in cpu_online_cores_map()
|
/linux/drivers/powercap/dtpm_cpu.c
     50  cpumask_and(&cpus, cpu_online_mask, to_cpumask(pd->cpus));  in set_pd_power_limit()
     76  for_each_cpu_and(cpu, pd_mask, cpu_online_mask) {  in scale_pd_power_uw()
    136  cpumask_and(&cpus, cpu_online_mask, to_cpumask(em->cpus));  in update_pd_power_uw()
|
/linux/include/linux/nmi.h
    146  arch_trigger_cpumask_backtrace(cpu_online_mask, false);  in trigger_all_cpu_backtrace()
    152  arch_trigger_cpumask_backtrace(cpu_online_mask, true);  in trigger_allbutself_cpu_backtrace()
|
/linux/include/linux/smp.h
     71  on_each_cpu_cond_mask(NULL, func, info, wait, cpu_online_mask);  in on_each_cpu()
    105  on_each_cpu_cond_mask(cond_func, func, info, wait, cpu_online_mask);  in on_each_cpu_cond()
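
on_each_cpu() and on_each_cpu_cond() are thin layers over on_each_cpu_cond_mask(): no condition (or a caller-supplied one) plus cpu_online_mask. A userspace model of that layering, where a plain loop takes the place of the IPI machinery (types and names are sketches, not kernel signatures):

#include <stdbool.h>
#include <stdio.h>

typedef void (*smp_call_func_t)(void *info);
typedef bool (*smp_cond_func_t)(int cpu, void *info);

/* Model: call func for every set bit whose condition (if any) holds.
 * The kernel runs func *on* each such CPU via IPIs; we just loop. */
static void on_each_cpu_cond_mask(smp_cond_func_t cond, smp_call_func_t func,
				  void *info, unsigned long mask)
{
	for (int cpu = 0; cpu < 64; cpu++)
		if (((mask >> cpu) & 1) && (!cond || cond(cpu, info)))
			func(info);
}

static void on_each_cpu(smp_call_func_t func, void *info, unsigned long online)
{
	on_each_cpu_cond_mask(NULL, func, info, online);	/* no condition */
}

static void count(void *info)
{
	(*(int *)info)++;
}

int main(void)
{
	int calls = 0;

	on_each_cpu(count, &calls, 0xb);	/* CPUs 0, 1, 3 "online" */
	printf("func ran %d times\n", calls);	/* prints 3 */
	return 0;
}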
|
/linux/kernel/time/clocksource.c
    249  cpumask_copy(&cpus_chosen, cpu_online_mask);  in clocksource_verify_choose_cpus()
    260  cpu = cpumask_next(-1, cpu_online_mask);  in clocksource_verify_choose_cpus()
    262  cpu = cpumask_next(cpu, cpu_online_mask);  in clocksource_verify_choose_cpus()
    280  cpu = cpumask_next(cpu - 1, cpu_online_mask);  in clocksource_verify_choose_cpus()
    282  cpu = cpumask_next(-1, cpu_online_mask);  in clocksource_verify_choose_cpus()
    468  next_cpu = cpumask_next(raw_smp_processor_id(), cpu_online_mask);  in clocksource_watchdog()
    470  next_cpu = cpumask_first(cpu_online_mask);  in clocksource_watchdog()
    490  add_timer_on(&watchdog_timer, cpumask_first(cpu_online_mask));  in clocksource_start_watchdog()
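
The clocksource_watchdog() hits (468/470) are the watchdog's wrap-around rotation: step to the next online CPU after the current one, and fall back to the first online CPU when cpumask_next() runs off the end. A compact illustration with a flat bitmask (helper names are hypothetical):

#include <stdio.h>

/* cpumask_next()-style scan with a cpumask_first()-style wrap. */
static int next_online_cpu(int cpu, unsigned long online)
{
	for (int next = cpu + 1; next < 64; next++)
		if ((online >> next) & 1)
			return next;
	return __builtin_ffsl(online) - 1;	/* wrapped past the end */
}

int main(void)
{
	unsigned long online = 0x16;	/* CPUs 1, 2, 4 online */
	int cpu = 1;

	for (int i = 0; i < 5; i++) {
		cpu = next_online_cpu(cpu, online);
		printf("watchdog moves to cpu %d\n", cpu);	/* 2 4 1 2 4 */
	}
	return 0;
}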
|
/linux/arch/s390/kernel/processor.c
    346  unsigned long first = cpumask_first(cpu_online_mask);  in show_cpuinfo()
    360  *pos = cpumask_next(*pos - 1, cpu_online_mask);  in c_update()
    362  *pos = cpumask_first(cpu_online_mask);  in c_update()
|
/linux/arch/powerpc/kernel/irq.c
    840  if (cpumask_equal(mask, cpu_online_mask)) {  in irq_choose_cpu()
    849  irq_rover = cpumask_next(irq_rover, cpu_online_mask);  in irq_choose_cpu()
    851  irq_rover = cpumask_first(cpu_online_mask);  in irq_choose_cpu()
    857  cpuid = cpumask_first_and(mask, cpu_online_mask);  in irq_choose_cpu()
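
irq_choose_cpu() combines two policies: if the requested affinity equals the whole online mask, distribute IRQs round-robin through a static rover; otherwise take the first online CPU inside the mask. A simplified single-word model (illustrative, not the kernel code):

#include <stdio.h>

/* Simplified policy: round-robin when the affinity mask equals the
 * online mask, else the first online CPU the mask allows. */
static int irq_choose_cpu(unsigned long mask, unsigned long online)
{
	static int rover = -1;

	if (mask == online) {			/* models cpumask_equal() */
		do {				/* advance to next online CPU, wrapping */
			rover = (rover + 1) % 64;
		} while (!((online >> rover) & 1));
		return rover;
	}
	return __builtin_ffsl(mask & online) - 1;	/* first online CPU in mask */
}

int main(void)
{
	unsigned long online = 0x7;		/* CPUs 0-2 online */

	for (int i = 0; i < 4; i++)		/* unrestricted IRQs: 0 1 2 0 */
		printf("cpu %d\n", irq_choose_cpu(online, online));
	printf("cpu %d\n", irq_choose_cpu(0x4, online));	/* restricted: 2 */
	return 0;
}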
|
/linux/arch/powerpc/kernel/rtasd.c
    435  cpu = cpumask_next(raw_smp_processor_id(), cpu_online_mask);  in rtas_event_scan()
    437  cpu = cpumask_first(cpu_online_mask);  in rtas_event_scan()
    490  schedule_delayed_work_on(cpumask_first(cpu_online_mask),  in start_event_scan()
|
/linux/arch/riscv/kernel/smp.c
    236  cpumask_copy(&mask, cpu_online_mask);  in smp_send_stop()
    251  cpumask_pr_args(cpu_online_mask));  in smp_send_stop()
|
/linux/drivers/cpufreq/speedstep-ich.c
    263  policy_cpu = cpumask_any_and(policy->cpus, cpu_online_mask);  in speedstep_target()
    298  policy_cpu = cpumask_any_and(policy->cpus, cpu_online_mask);  in speedstep_cpu_init()
|
/linux/crypto/pcrypt.c
    180  cpumask_weight(cpu_online_mask);  in pcrypt_aead_init_tfm()
    182  ctx->cb_cpu = cpumask_first(cpu_online_mask);  in pcrypt_aead_init_tfm()
    184  ctx->cb_cpu = cpumask_next(ctx->cb_cpu, cpu_online_mask);  in pcrypt_aead_init_tfm()
|
/linux/arch/powerpc/platforms/powernv/subcore.c
    303  cpu_online_mask);  in cpu_update_split_mode()
    367  cpu_online_mask);  in set_subcores_per_core()
|