
Searched for refs:ncpus (results 1 – 25 of 51), sorted by relevance

/linux/kernel/irq/
affinity.c
103 unsigned ncpus; member
112 return ln->ncpus - rn->ncpus; in ncpus_cmp_func()
143 unsigned ncpus; in alloc_nodes_vectors() local
148 if (!ncpus) in alloc_nodes_vectors()
150 remaining_ncpus += ncpus; in alloc_nodes_vectors()
151 node_vectors[n].ncpus = ncpus; in alloc_nodes_vectors()
228 unsigned nvectors, ncpus; in alloc_nodes_vectors() local
235 ncpus = node_vectors[n].ncpus; in alloc_nodes_vectors()
291 unsigned int ncpus, v; in __irq_build_affinity_masks() local
300 if (!ncpus) in __irq_build_affinity_masks()
[all …]
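
The affinity.c hits come from the code that spreads interrupt vectors across NUMA nodes in proportion to each node's CPU count. Below is a simplified userspace sketch of such a proportional split, not the kernel's exact alloc_nodes_vectors() logic; all names in it are illustrative.

#include <stdio.h>

/* Split nvectors across nodes in proportion to each node's ncpus.
 * Rounds up, so nodes with CPUs never get zero vectors; the sum can
 * exceed nvectors, which the real kernel code avoids by tracking a
 * remainder. Simplified illustration only. */
static void spread_vectors(const unsigned ncpus[], unsigned vecs[],
                           unsigned nnodes, unsigned nvectors)
{
    unsigned total = 0, n;

    for (n = 0; n < nnodes; n++)
        total += ncpus[n];

    for (n = 0; n < nnodes; n++)
        vecs[n] = total ? (nvectors * ncpus[n] + total - 1) / total : 0;
}

int main(void)
{
    unsigned ncpus[] = { 8, 4, 4 };
    unsigned vecs[3];

    spread_vectors(ncpus, vecs, 3, 16);
    for (unsigned n = 0; n < 3; n++)
        printf("node %u: %u vectors\n", n, vecs[n]);
    return 0;
}
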
/linux/tools/testing/selftests/rcutorture/bin/
cpus2use.sh
17 ncpus=`grep '^processor' /proc/cpuinfo | wc -l`
21 awk -v ncpus=$ncpus '{ print ncpus * ($7 + $NF) / 100 }'`
24 idlecpus=$ncpus
26 awk -v ncpus=$ncpus -v idlecpus=$idlecpus < /dev/null '
kvm.sh
336 awk < $T/cfgcpu.sort > $T/cfgcpu.pack -v ncpus=$cpus '
454 if (ja[1] == -1 && ncpus == 0)
457 njitter = ncpus;
472 if (cpusr[jn] > ncpus && ncpus != 0)
540 nc = ncpus;
546 if (ncpus == 0) {
555 nc = ncpus;
562 if (ncpus != 0)
570 -v ncpus=$cpus \
kvm-build.sh
45 ncpus="`getconf _NPROCESSORS_ONLN`"
46 make -j$((2 * ncpus)) $TORTURE_KMAKE_ARG > $resdir/Make.out 2>&1
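
The rcutorture scripts above obtain ncpus either by counting processor lines in /proc/cpuinfo or via getconf _NPROCESSORS_ONLN, then derive job counts from it (make -j$((2 * ncpus))). A minimal C sketch of the same query through sysconf(), the libc call behind getconf, assuming nothing beyond POSIX:

#include <stdio.h>
#include <unistd.h>

int main(void)
{
    /* CPUs online right now; _SC_NPROCESSORS_CONF reports configured CPUs. */
    long ncpus = sysconf(_SC_NPROCESSORS_ONLN);

    if (ncpus < 1)
        ncpus = 1;              /* fall back to a single job on error */
    printf("ncpus=%ld, suggested make jobs=%ld\n", ncpus, 2 * ncpus);
    return 0;
}
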
/linux/tools/perf/util/
counts.c
9 struct perf_counts *perf_counts__new(int ncpus, int nthreads) in perf_counts__new() argument
16 values = xyarray__new(ncpus, nthreads, sizeof(struct perf_counts_values)); in perf_counts__new()
24 values = xyarray__new(ncpus, nthreads, sizeof(bool)); in perf_counts__new()
58 int evsel__alloc_counts(struct evsel *evsel, int ncpus, int nthreads) in evsel__alloc_counts() argument
60 evsel->counts = perf_counts__new(ncpus, nthreads); in evsel__alloc_counts()
stat.c
155 static int evsel__alloc_prev_raw_counts(struct evsel *evsel, int ncpus, int nthreads) in evsel__alloc_prev_raw_counts() argument
159 counts = perf_counts__new(ncpus, nthreads); in evsel__alloc_prev_raw_counts()
180 int ncpus = evsel__nr_cpus(evsel); in evsel__alloc_stats() local
184 evsel__alloc_counts(evsel, ncpus, nthreads) < 0 || in evsel__alloc_stats()
185 (alloc_raw && evsel__alloc_prev_raw_counts(evsel, ncpus, nthreads) < 0)) in evsel__alloc_stats()
238 int ncpus = evsel__nr_cpus(evsel); in evsel__copy_prev_raw_counts() local
242 for (int cpu = 0; cpu < ncpus; cpu++) { in evsel__copy_prev_raw_counts()
413 int ncpus = evsel__nr_cpus(counter); in process_counter_maps() local
420 for (cpu = 0; cpu < ncpus; cpu++) { in process_counter_maps()
counts.h
38 struct perf_counts *perf_counts__new(int ncpus, int nthreads);
43 int evsel__alloc_counts(struct evsel *evsel, int ncpus, int nthreads);
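
The perf sources above size counter storage as an ncpus × nthreads grid (xyarray__new()), one perf_counts_values slot per (cpu, thread) pair. A rough standalone sketch of that layout follows; the struct and function names are hypothetical and none of perf's real types are used.

#include <stdlib.h>

/* Stand-in for perf's per-(cpu, thread) counter slot. */
struct counts_values { unsigned long long val, ena, run; };

struct counts_table {
    int ncpus, nthreads;
    struct counts_values *values;   /* ncpus * nthreads entries, one row per CPU */
};

static struct counts_table *counts_table__new(int ncpus, int nthreads)
{
    struct counts_table *t = calloc(1, sizeof(*t));

    if (!t)
        return NULL;
    t->values = calloc((size_t)ncpus * nthreads, sizeof(*t->values));
    if (!t->values) {
        free(t);
        return NULL;
    }
    t->ncpus = ncpus;
    t->nthreads = nthreads;
    return t;
}

static struct counts_values *counts_table__entry(struct counts_table *t,
                                                 int cpu, int thread)
{
    return &t->values[(size_t)cpu * t->nthreads + thread];
}
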
/linux/arch/x86/include/asm/trace/
hyperv.h
16 __field(unsigned int, ncpus)
21 TP_fast_assign(__entry->ncpus = cpumask_weight(cpus);
27 __entry->ncpus, __entry->mm,
64 __field(unsigned int, ncpus)
67 TP_fast_assign(__entry->ncpus = cpumask_weight(cpus);
71 __entry->ncpus, __entry->vector)
/linux/arch/powerpc/platforms/powermac/
smp.c
271 int i, ncpus; in smp_psurge_probe() local
297 ncpus = 4; in smp_psurge_probe()
309 ncpus = 2; in smp_psurge_probe()
323 if (ncpus > NR_CPUS) in smp_psurge_probe()
324 ncpus = NR_CPUS; in smp_psurge_probe()
325 for (i = 1; i < ncpus ; ++i) in smp_psurge_probe()
750 for (i = 1; i < ncpus; ++i) in smp_core99_setup()
763 int ncpus = 0; in smp_core99_probe() local
769 ++ncpus; in smp_core99_probe()
774 if (ncpus <= 1) in smp_core99_probe()
[all …]
/linux/arch/powerpc/kexec/
crash.c
108 volatile unsigned int ncpus = num_online_cpus() - 1;/* Excluding the panic cpu */ in crash_kexec_prepare_cpus() local
115 ncpus = num_present_cpus() - 1; in crash_kexec_prepare_cpus()
127 while ((atomic_read(&cpus_in_crash) < ncpus) && (--msecs > 0)) in crash_kexec_prepare_cpus()
132 if (atomic_read(&cpus_in_crash) >= ncpus) { in crash_kexec_prepare_cpus()
138 ncpus - atomic_read(&cpus_in_crash)); in crash_kexec_prepare_cpus()
169 while (atomic_read(&cpus_in_crash) < ncpus) in crash_kexec_prepare_cpus()
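
crash_kexec_prepare_cpus() above spins until cpus_in_crash reaches ncpus or a millisecond budget expires. A hedged userspace rendering of that wait-with-timeout pattern, using C11 atomics and usleep() in place of the kernel's atomic_t and mdelay(); the function name is illustrative.

#include <stdatomic.h>
#include <stdbool.h>
#include <unistd.h>

/* Wait until *arrived reaches ncpus, polling once per millisecond,
 * for at most timeout_msecs. Returns true if every CPU checked in. */
static bool wait_for_cpus(atomic_int *arrived, int ncpus, int timeout_msecs)
{
    int msecs = timeout_msecs;

    while (atomic_load(arrived) < ncpus && --msecs > 0)
        usleep(1000);           /* the kernel side busy-waits with mdelay(1) */

    return atomic_load(arrived) >= ncpus;
}
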
/linux/tools/testing/selftests/powerpc/
utils.c
91 int ncpus, cpu = -1; in pick_online_cpu() local
95 ncpus = get_nprocs_conf(); in pick_online_cpu()
96 size = CPU_ALLOC_SIZE(ncpus); in pick_online_cpu()
97 mask = CPU_ALLOC(ncpus); in pick_online_cpu()
111 for (cpu = 8; cpu < ncpus; cpu += 8) in pick_online_cpu()
116 for (cpu = ncpus - 1; cpu >= 0; cpu--) in pick_online_cpu()
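
pick_online_cpu() above sizes a dynamic cpu_set_t from get_nprocs_conf() and scans the affinity mask for a usable CPU. A minimal sketch using the same glibc interfaces (CPU_ALLOC()/CPU_ALLOC_SIZE(), sched_getaffinity()), without the powerpc-specific stride-by-8 preference; pick_any_cpu() is an illustrative name.

#define _GNU_SOURCE
#include <sched.h>
#include <sys/sysinfo.h>

/* Return the highest CPU present in the calling task's affinity mask,
 * or -1 on error. */
static int pick_any_cpu(void)
{
    int ncpus = get_nprocs_conf();
    size_t size = CPU_ALLOC_SIZE(ncpus);
    cpu_set_t *mask = CPU_ALLOC(ncpus);
    int cpu, ret = -1;

    if (!mask)
        return -1;
    CPU_ZERO_S(size, mask);
    if (sched_getaffinity(0, size, mask) == 0) {
        for (cpu = ncpus - 1; cpu >= 0; cpu--) {
            if (CPU_ISSET_S(cpu, size, mask)) {
                ret = cpu;
                break;
            }
        }
    }
    CPU_FREE(mask);
    return ret;
}
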
/linux/tools/lib/perf/
evsel.c
49 int perf_evsel__alloc_fd(struct perf_evsel *evsel, int ncpus, int nthreads) in perf_evsel__alloc_fd() argument
51 evsel->fd = xyarray__new(ncpus, nthreads, sizeof(int)); in perf_evsel__alloc_fd()
55 for (cpu = 0; cpu < ncpus; cpu++) { in perf_evsel__alloc_fd()
68 static int perf_evsel__alloc_mmap(struct perf_evsel *evsel, int ncpus, int nthreads) in perf_evsel__alloc_mmap() argument
70 evsel->mmap = xyarray__new(ncpus, nthreads, sizeof(struct perf_mmap)); in perf_evsel__alloc_mmap()
405 int perf_evsel__alloc_id(struct perf_evsel *evsel, int ncpus, int nthreads) in perf_evsel__alloc_id() argument
407 if (ncpus == 0 || nthreads == 0) in perf_evsel__alloc_id()
413 evsel->sample_id = xyarray__new(ncpus, nthreads, sizeof(struct perf_sample_id)); in perf_evsel__alloc_id()
417 evsel->id = zalloc(ncpus * nthreads * sizeof(u64)); in perf_evsel__alloc_id()
/linux/drivers/clk/mvebu/
clk-cpu.c
171 int ncpus = 0; in of_cpu_clk_setup() local
185 ncpus++; in of_cpu_clk_setup()
187 cpuclk = kcalloc(ncpus, sizeof(*cpuclk), GFP_KERNEL); in of_cpu_clk_setup()
191 clks = kcalloc(ncpus, sizeof(*clks), GFP_KERNEL); in of_cpu_clk_setup()
236 while(ncpus--) in of_cpu_clk_setup()
237 kfree(cpuclk[ncpus].clk_name); in of_cpu_clk_setup()
/linux/arch/sparc/kernel/
setup_32.c
391 int i, ncpus, err; in topology_init() local
397 ncpus = 0; in topology_init()
398 while (!cpu_find_by_instance(ncpus, NULL, NULL)) in topology_init()
399 ncpus++; in topology_init()
400 ncpus_probed = ncpus; in topology_init()
ds.c
502 tag->num_records = ncpus; in dr_cpu_init_response()
511 BUG_ON(i != ncpus); in dr_cpu_init_response()
524 for (i = 0; i < ncpus; i++) { in dr_cpu_mark()
537 int resp_len, ncpus, cpu; in dr_cpu_configure() local
540 ncpus = cpumask_weight(mask); in dr_cpu_configure()
541 resp_len = dr_cpu_size_response(ncpus); in dr_cpu_configure()
547 resp_len, ncpus, mask, in dr_cpu_configure()
596 int resp_len, ncpus, cpu; in dr_cpu_unconfigure() local
599 ncpus = cpumask_weight(mask); in dr_cpu_unconfigure()
606 resp_len, ncpus, mask, in dr_cpu_unconfigure()
[all …]
sun4m_smp.c
177 register int ncpus = SUN4M_NCPUS; in sun4m_cross_call() local
196 for (i = 0; i < ncpus; i++) { in sun4m_cross_call()
217 } while (++i < ncpus); in sun4m_cross_call()
225 } while (++i < ncpus); in sun4m_cross_call()
/linux/arch/mips/kernel/
crash.c
59 unsigned int ncpus; in crash_kexec_prepare_cpus() local
64 ncpus = num_online_cpus() - 1;/* Excluding the panic cpu */ in crash_kexec_prepare_cpus()
75 while ((cpumask_weight(&cpus_in_crash) < ncpus) && (--msecs > 0)) { in crash_kexec_prepare_cpus()
/linux/drivers/xen/
mcelog.c
58 static uint32_t ncpus; variable
240 for (i = 0; i < ncpus; i++) in convert_log()
243 if (unlikely(i == ncpus)) { in convert_log()
378 ncpus = mc_op.u.mc_physcpuinfo.ncpus; in bind_virq_for_mce()
379 g_physinfo = kcalloc(ncpus, sizeof(struct mcinfo_logical_cpu), in bind_virq_for_mce()
/linux/arch/xtensa/kernel/
smp.c
89 unsigned int ncpus = get_core_count(); in smp_init_cpus() local
92 pr_info("%s: Core Count = %d\n", __func__, ncpus); in smp_init_cpus()
95 if (ncpus > NR_CPUS) { in smp_init_cpus()
96 ncpus = NR_CPUS; in smp_init_cpus()
97 pr_info("%s: limiting core count by %d\n", __func__, ncpus); in smp_init_cpus()
100 for (i = 0; i < ncpus; ++i) in smp_init_cpus()
/linux/tools/lib/perf/include/internal/
evsel.h
58 int perf_evsel__alloc_fd(struct perf_evsel *evsel, int ncpus, int nthreads);
64 int perf_evsel__alloc_id(struct perf_evsel *evsel, int ncpus, int nthreads);
/linux/kernel/locking/
test-ww_mutex.c
368 static int test_cycle(unsigned int ncpus) in test_cycle() argument
373 for (n = 2; n <= ncpus + 1; n++) { in test_cycle()
623 int ncpus = num_online_cpus(); in test_ww_mutex_init() local
650 ret = test_cycle(ncpus); in test_ww_mutex_init()
654 ret = stress(16, 2*ncpus, STRESS_INORDER); in test_ww_mutex_init()
658 ret = stress(16, 2*ncpus, STRESS_REORDER); in test_ww_mutex_init()
662 ret = stress(4095, hweight32(STRESS_ALL)*ncpus, STRESS_ALL); in test_ww_mutex_init()
/linux/tools/testing/selftests/powerpc/benchmarks/
context_switch.c
108 int pid, ncpus; in start_process_on() local
121 ncpus = get_nprocs(); in start_process_on()
122 size = CPU_ALLOC_SIZE(ncpus); in start_process_on()
123 cpuset = CPU_ALLOC(ncpus); in start_process_on()
/linux/arch/x86/kernel/
kvmclock.c
203 unsigned long ncpus; in kvmclock_init_mem() local
211 ncpus = num_possible_cpus() - HVC_BOOT_ARRAY_SIZE; in kvmclock_init_mem()
212 order = get_order(ncpus * sizeof(*hvclock_mem)); in kvmclock_init_mem()
/linux/drivers/misc/sgi-gru/
grukservices.c
145 int ctxnum, ncpus; in gru_load_kernel_context() local
163 ncpus = uv_blade_nr_possible_cpus(blade_id); in gru_load_kernel_context()
165 GRU_NUM_KERNEL_CBR * ncpus + bs->bs_async_cbrs); in gru_load_kernel_context()
167 GRU_NUM_KERNEL_DSR_BYTES * ncpus + in gru_load_kernel_context()
363 int ncpus; in gru_lock_async_resource() local
366 ncpus = uv_blade_nr_possible_cpus(blade_id); in gru_lock_async_resource()
368 *cb = bs->kernel_cb + ncpus * GRU_HANDLE_STRIDE; in gru_lock_async_resource()
370 *dsr = bs->kernel_dsr + ncpus * GRU_NUM_KERNEL_DSR_BYTES; in gru_lock_async_resource()
/linux/arch/x86/platform/uv/
uv_time.c
52 int ncpus; member
158 head->ncpus = uv_blade_nr_possible_cpus(bid); in uv_rtc_allocate_timers()
177 for (c = 0; c < head->ncpus; c++) { in uv_rtc_find_next_timer()
