
Searched refs: per_cpu_ptr (Results 1 – 25 of 319) sorted by relevance
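per_cpu_ptr(ptr, cpu) takes a per-CPU base pointer (from alloc_percpu(), or the address of a DEFINE_PER_CPU variable) and returns the address of the copy belonging to cpu. The most common read-side idiom in the hits below walks every possible CPU and folds the copies together, as xfs_stats.c and vmstat.c do. A minimal sketch of that pattern, using hypothetical names (struct my_stats, my_stats_sum):

#include <linux/types.h>
#include <linux/percpu.h>
#include <linux/cpumask.h>

/* Hypothetical per-CPU counter; stats = alloc_percpu(struct my_stats); */
struct my_stats {
	u64 events;
};

static struct my_stats __percpu *stats;

static u64 my_stats_sum(void)
{
	u64 sum = 0;
	int cpu;

	/* Fold every CPU's copy into one total. The result is approximate
	 * while writers are still updating, the usual trade-off for stats.
	 */
	for_each_possible_cpu(cpu)
		sum += per_cpu_ptr(stats, cpu)->events;
	return sum;
}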

/linux/fs/xfs/
xfs_stats.h 165 per_cpu_ptr(xfsstats.xs_stats, current_cpu())->s.v++; \
166 per_cpu_ptr(mp->m_stats.xs_stats, current_cpu())->s.v++; \
171 per_cpu_ptr(xfsstats.xs_stats, current_cpu())->s.v--; \
172 per_cpu_ptr(mp->m_stats.xs_stats, current_cpu())->s.v--; \
177 per_cpu_ptr(xfsstats.xs_stats, current_cpu())->s.v += (inc); \
178 per_cpu_ptr(mp->m_stats.xs_stats, current_cpu())->s.v += (inc); \
183 per_cpu_ptr(xfsstats.xs_stats, current_cpu())->a[off]++; \
184 per_cpu_ptr(mp->m_stats.xs_stats, current_cpu())->a[off]++; \
189 per_cpu_ptr(xfsstats.xs_stats, current_cpu())->a[off]; \
190 per_cpu_ptr(mp->m_stats.xs_stats, current_cpu())->a[off]; \
[all …]
xfs_stats.c 15 val += *(((__u32 *)per_cpu_ptr(stats, cpu) + idx)); in counter_val()
71 xs_xstrat_bytes += per_cpu_ptr(stats, i)->s.xs_xstrat_bytes; in xfs_stats_format()
72 xs_write_bytes += per_cpu_ptr(stats, i)->s.xs_write_bytes; in xfs_stats_format()
73 xs_read_bytes += per_cpu_ptr(stats, i)->s.xs_read_bytes; in xfs_stats_format()
74 defer_relog += per_cpu_ptr(stats, i)->s.defer_relog; in xfs_stats_format()
100 vn_active = per_cpu_ptr(stats, c)->s.vn_active; in xfs_stats_clearall()
101 memset(per_cpu_ptr(stats, c), 0, sizeof(*stats)); in xfs_stats_clearall()
102 per_cpu_ptr(stats, c)->s.vn_active = vn_active; in xfs_stats_clearall()
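xfs_stats_clearall() above also shows the reset idiom: zero each CPU's copy while carrying over any field that must survive the reset (vn_active here). A hedged sketch of the same pattern, with the hypothetical struct my_stats extended by a persistent field:

#include <linux/percpu.h>
#include <linux/cpumask.h>
#include <linux/string.h>

struct my_stats {
	u64 events;
	u64 active;	/* must survive a stats reset */
};

static struct my_stats __percpu *stats;

static void my_stats_clear(void)
{
	int cpu;

	for_each_possible_cpu(cpu) {
		struct my_stats *s = per_cpu_ptr(stats, cpu);
		u64 active = s->active;	/* save the persistent field */

		memset(s, 0, sizeof(*s));
		s->active = active;	/* restore it after zeroing */
	}
}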
/linux/arch/x86/events/amd/
uncore.c 75 return *per_cpu_ptr(amd_uncore_nb, event->cpu); in event_to_amd_uncore()
77 return *per_cpu_ptr(amd_uncore_llc, event->cpu); in event_to_amd_uncore()
388 *per_cpu_ptr(amd_uncore_nb, cpu) = uncore_nb; in amd_uncore_cpu_up_prepare()
402 *per_cpu_ptr(amd_uncore_llc, cpu) = uncore_llc; in amd_uncore_cpu_up_prepare()
409 *per_cpu_ptr(amd_uncore_nb, cpu) = NULL; in amd_uncore_cpu_up_prepare()
422 that = *per_cpu_ptr(uncores, cpu); in amd_uncore_find_online_sibling()
447 uncore = *per_cpu_ptr(amd_uncore_nb, cpu); in amd_uncore_cpu_starting()
452 *per_cpu_ptr(amd_uncore_nb, cpu) = uncore; in amd_uncore_cpu_starting()
456 uncore = *per_cpu_ptr(amd_uncore_llc, cpu); in amd_uncore_cpu_starting()
460 *per_cpu_ptr(amd_uncore_llc, cpu) = uncore; in amd_uncore_cpu_starting()
[all …]
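The uncore.c hits show a second idiom: the per-CPU slot holds a pointer, so reads and writes dereference the slot itself with an extra '*'. A hedged sketch of this pointer-slot pattern, with hypothetical names (struct box, boxes_prepare_cpu); boxes would come from alloc_percpu(struct box *):

#include <linux/percpu.h>
#include <linux/slab.h>

struct box {
	int id;
};

static struct box * __percpu *boxes;	/* per-CPU pointer slots */

/* Hotplug prepare step: allocate an object and install it in the incoming
 * CPU's slot, analogous to amd_uncore_cpu_up_prepare() above.
 */
static int boxes_prepare_cpu(unsigned int cpu)
{
	struct box *b = kzalloc(sizeof(*b), GFP_KERNEL);

	if (!b)
		return -ENOMEM;
	b->id = cpu;
	*per_cpu_ptr(boxes, cpu) = b;	/* write the slot, not the object */
	return 0;
}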
/linux/drivers/infiniband/ulp/rtrs/
rtrs-clt-stats.c 27 s = per_cpu_ptr(stats->pcpu_stats, con->cpu); in rtrs_clt_update_wc_stats()
51 s = per_cpu_ptr(stats->pcpu_stats, cpu); in rtrs_clt_stats_migration_from_cnt_to_str()
70 s = per_cpu_ptr(stats->pcpu_stats, cpu); in rtrs_clt_stats_migration_to_cnt_to_str()
94 r = &per_cpu_ptr(stats->pcpu_stats, cpu)->rdma; in rtrs_clt_stats_rdma_to_str()
123 s = per_cpu_ptr(stats->pcpu_stats, cpu); in rtrs_clt_reset_rdma_stats()
139 s = per_cpu_ptr(stats->pcpu_stats, cpu); in rtrs_clt_reset_cpu_migr_stats()
/linux/kernel/sched/
topology.c 888 sibling = *per_cpu_ptr(sdd->sd, i); in build_balance_mask()
1167 sg = *per_cpu_ptr(sdd->sg, cpu); in get_group()
1474 *per_cpu_ptr(sdd->sd, cpu) = NULL; in claim_allocations()
1480 *per_cpu_ptr(sdd->sg, cpu) = NULL; in claim_allocations()
2051 *per_cpu_ptr(sdd->sd, j) = sd; in __sdt_alloc()
2058 *per_cpu_ptr(sdd->sds, j) = sds; in __sdt_alloc()
2067 *per_cpu_ptr(sdd->sg, j) = sg; in __sdt_alloc()
2078 *per_cpu_ptr(sdd->sgc, j) = sgc; in __sdt_alloc()
2097 sd = *per_cpu_ptr(sdd->sd, j); in __sdt_free()
2223 *per_cpu_ptr(d.sd, i) = sd; in build_sched_domains()
[all …]
/linux/kernel/irq/
matrix.c 138 cm = per_cpu_ptr(m->maps, cpu); in matrix_find_best_cpu()
159 cm = per_cpu_ptr(m->maps, cpu); in matrix_find_best_cpu_managed()
215 struct cpumap *cm = per_cpu_ptr(m->maps, cpu); in irq_matrix_reserve_managed()
256 struct cpumap *cm = per_cpu_ptr(m->maps, cpu); in irq_matrix_remove_managed()
299 cm = per_cpu_ptr(m->maps, cpu); in irq_matrix_alloc_managed()
394 cm = per_cpu_ptr(m->maps, cpu); in irq_matrix_alloc()
421 struct cpumap *cm = per_cpu_ptr(m->maps, cpu); in irq_matrix_free()
504 struct cpumap *cm = per_cpu_ptr(m->maps, cpu); in irq_matrix_debug_show()
/linux/kernel/bpf/
percpu_freelist.c 15 struct pcpu_freelist_head *head = per_cpu_ptr(s->freelist, cpu); in pcpu_freelist_init()
65 head = per_cpu_ptr(s->freelist, cpu); in ___pcpu_freelist_push_nmi()
112 head = per_cpu_ptr(s->freelist, cpu); in pcpu_freelist_populate()
132 head = per_cpu_ptr(s->freelist, cpu); in ___pcpu_freelist_pop()
166 head = per_cpu_ptr(s->freelist, cpu); in ___pcpu_freelist_pop_nmi()
bpf_lru_list.c 408 l = per_cpu_ptr(lru->percpu_lru, cpu); in bpf_percpu_lru_pop_free()
441 loc_l = per_cpu_ptr(clru->local_list, cpu); in bpf_common_lru_pop_free()
470 steal_loc_l = per_cpu_ptr(clru->local_list, steal); in bpf_common_lru_pop_free()
515 loc_l = per_cpu_ptr(lru->common_lru.local_list, node->cpu); in bpf_common_lru_push_free()
542 l = per_cpu_ptr(lru->percpu_lru, node->cpu); in bpf_percpu_lru_push_free()
592 l = per_cpu_ptr(lru->percpu_lru, cpu); in bpf_percpu_lru_populate()
659 l = per_cpu_ptr(lru->percpu_lru, cpu); in bpf_lru_init()
673 loc_l = per_cpu_ptr(clru->local_list, cpu); in bpf_lru_init()
/linux/kernel/
relay.c 207 *per_cpu_ptr(chan->buf, buf->cpu) = NULL; in relay_destroy_buf()
337 if ((buf = *per_cpu_ptr(chan->buf, i))) in relay_reset()
385 return *per_cpu_ptr(chan->buf, 0); in relay_open_buf()
409 *per_cpu_ptr(chan->buf, 0) = buf; in relay_open_buf()
451 *per_cpu_ptr(chan->buf, cpu) = buf; in relay_prepare_cpu()
523 *per_cpu_ptr(chan->buf, i) = buf; in relay_open()
532 if ((buf = *per_cpu_ptr(chan->buf, i))) in relay_open()
597 buf = *per_cpu_ptr(chan->buf, 0); in relay_late_setup_files()
616 buf = *per_cpu_ptr(chan->buf, i); in relay_late_setup_files()
738 buf = *per_cpu_ptr(chan->buf, cpu); in relay_subbufs_consumed()
[all …]
smpboot.c 172 struct task_struct *tsk = *per_cpu_ptr(ht->store, cpu); in __smpboot_create_thread()
197 *per_cpu_ptr(ht->store, cpu) = tsk; in __smpboot_create_thread()
230 struct task_struct *tsk = *per_cpu_ptr(ht->store, cpu); in smpboot_unpark_thread()
249 struct task_struct *tsk = *per_cpu_ptr(ht->store, cpu); in smpboot_park_thread()
272 struct task_struct *tsk = *per_cpu_ptr(ht->store, cpu); in smpboot_destroy_threads()
277 *per_cpu_ptr(ht->store, cpu) = NULL; in smpboot_destroy_threads()
cpu.c 170 struct cpuhp_cpu_state *st = per_cpu_ptr(&cpuhp_state, cpu); in cpuhp_invoke_callback()
560 struct cpuhp_cpu_state *st = per_cpu_ptr(&cpuhp_state, cpu); in bringup_wait_for_ap()
719 struct cpuhp_cpu_state *st = per_cpu_ptr(&cpuhp_state, cpu); in cpuhp_create()
817 struct cpuhp_cpu_state *st = per_cpu_ptr(&cpuhp_state, cpu); in cpuhp_invoke_ap_callback()
866 struct cpuhp_cpu_state *st = per_cpu_ptr(&cpuhp_state, cpu); in cpuhp_kick_ap_work()
1028 struct cpuhp_cpu_state *st = per_cpu_ptr(&cpuhp_state, cpu); in takedown_cpu()
1123 struct cpuhp_cpu_state *st = per_cpu_ptr(&cpuhp_state, cpu); in _cpu_down()
1285 struct cpuhp_cpu_state *st = per_cpu_ptr(&cpuhp_state, cpu); in notify_cpu_starting()
1325 struct cpuhp_cpu_state *st = per_cpu_ptr(&cpuhp_state, cpu); in _cpu_up()
1937 struct cpuhp_cpu_state *st = per_cpu_ptr(&cpuhp_state, cpu); in cpuhp_rollback_install()
[all …]
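cpu.c and several entries below pass &cpuhp_state, a statically declared per-CPU variable, rather than alloc_percpu() storage: taking the address of a DEFINE_PER_CPU variable yields a per-CPU base pointer that per_cpu_ptr() can offset for any CPU. A minimal sketch with a hypothetical variable my_state:

#include <linux/percpu.h>

struct my_state {
	int stage;
};

static DEFINE_PER_CPU(struct my_state, my_state);

/* Read another CPU's copy of a static per-CPU variable, analogous to
 * the per_cpu_ptr(&cpuhp_state, cpu) lookups above.
 */
static int my_state_stage(unsigned int cpu)
{
	struct my_state *st = per_cpu_ptr(&my_state, cpu);

	return st->stage;
}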
/linux/kernel/trace/
trace_functions_graph.c 173 data = per_cpu_ptr(tr->array_buffer.data, cpu); in trace_graph_entry()
253 data = per_cpu_ptr(tr->array_buffer.data, cpu); in trace_graph_return()
391 last_pid = &(per_cpu_ptr(data->cpu_data, cpu)->last_pid); in verif_pid()
644 cpu_data = per_cpu_ptr(data->cpu_data, cpu); in print_graph_entry_leaf()
688 cpu_data = per_cpu_ptr(data->cpu_data, cpu); in print_graph_entry_nested()
918 cpu_data = per_cpu_ptr(data->cpu_data, cpu); in print_graph_return()
980 depth = per_cpu_ptr(data->cpu_data, iter->cpu)->depth; in print_graph_comment()
1046 if (data && per_cpu_ptr(data->cpu_data, cpu)->ignore) { in print_graph_function_flags()
1047 per_cpu_ptr(data->cpu_data, cpu)->ignore = 0; in print_graph_function_flags()
1060 per_cpu_ptr(data->cpu_data, iter->cpu)->ignore = 1; in print_graph_function_flags()
[all …]
trace_sched_wakeup.c 85 *data = per_cpu_ptr(tr->array_buffer.data, cpu); in func_prolog_preempt_disable()
457 disabled = atomic_inc_return(&per_cpu_ptr(wakeup_trace->array_buffer.data, cpu)->disabled); in probe_wakeup_sched_switch()
471 data = per_cpu_ptr(wakeup_trace->array_buffer.data, wakeup_cpu); in probe_wakeup_sched_switch()
494 atomic_dec(&per_cpu_ptr(wakeup_trace->array_buffer.data, cpu)->disabled); in probe_wakeup_sched_switch()
549 disabled = atomic_inc_return(&per_cpu_ptr(wakeup_trace->array_buffer.data, cpu)->disabled); in probe_wakeup()
581 data = per_cpu_ptr(wakeup_trace->array_buffer.data, wakeup_cpu); in probe_wakeup()
596 atomic_dec(&per_cpu_ptr(wakeup_trace->array_buffer.data, cpu)->disabled); in probe_wakeup()
/linux/tools/testing/selftests/rcutorture/formal/srcu-cbmc/src/
percpu.h 28 #define per_cpu_ptr(ptr, cpu) \
69 THIS_CPU_ADD_HELPER(per_cpu_ptr(&(pcp), thread_cpu_id), \
76 THIS_CPU_ADD_HELPER(per_cpu_ptr(&(pcp), this_cpu_add_impl_cpu), \
/linux/fs/squashfs/
decompressor_multi_percpu.c 40 stream = per_cpu_ptr(percpu, cpu); in squashfs_decompressor_create()
54 stream = per_cpu_ptr(percpu, cpu); in squashfs_decompressor_create()
71 stream = per_cpu_ptr(percpu, cpu); in squashfs_decompressor_destroy()
/linux/drivers/powercap/
idle_inject.c 92 iit = per_cpu_ptr(&idle_inject_thread, cpu); in idle_inject_wakeup()
137 iit = per_cpu_ptr(&idle_inject_thread, cpu); in idle_inject_fn()
254 iit = per_cpu_ptr(&idle_inject_thread, cpu); in idle_inject_stop()
284 per_cpu_ptr(&idle_inject_thread, cpu); in idle_inject_should_run()
/linux/drivers/hv/
hv.c 129 hv_cpu = per_cpu_ptr(hv_context.cpu_context, cpu); in hv_synic_alloc()
141 hv_cpu = per_cpu_ptr(hv_context.cpu_context, cpu); in hv_synic_alloc()
189 = per_cpu_ptr(hv_context.cpu_context, cpu); in hv_synic_free()
209 = per_cpu_ptr(hv_context.cpu_context, cpu); in hv_synic_enable_regs()
294 = per_cpu_ptr(hv_context.cpu_context, cpu); in hv_synic_disable_regs()
/linux/include/linux/
part_stat.h 29 (per_cpu_ptr((part)->bd_stats, (cpu))->field)
39 res += per_cpu_ptr((part)->bd_stats, _cpu)->field; \
48 memset(per_cpu_ptr(part->bd_stats, i), value, in part_stat_set_all()
/linux/drivers/clocksource/
timer-mp-csky.c 78 struct timer_of *to = per_cpu_ptr(&csky_to, cpu); in csky_mptimer_starting_cpu()
145 to = per_cpu_ptr(&csky_to, cpu); in csky_mptimer_init()
168 to = per_cpu_ptr(&csky_to, cpu_rollback); in csky_mptimer_init()
timer-tegra.c 134 struct timer_of *to = per_cpu_ptr(&tegra_to, cpu); in tegra_timer_setup()
159 struct timer_of *to = per_cpu_ptr(&tegra_to, cpu); in tegra_timer_stop()
302 struct timer_of *cpu_to = per_cpu_ptr(&tegra_to, cpu); in tegra_init_timer()
359 cpu_to = per_cpu_ptr(&tegra_to, cpu); in tegra_init_timer()
/linux/tools/testing/radix-tree/linux/
percpu.h 10 #define per_cpu_ptr(ptr, cpu) ({ (void)(cpu); (ptr); })
11 #define per_cpu(var, cpu) (*per_cpu_ptr(&(var), cpu))
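Both test harnesses above stub the macro out for single-threaded userspace builds: (void)(cpu) consumes the argument without an unused-value warning, and every "CPU" aliases the same storage, so code written against the kernel API compiles unchanged. A small illustration, assuming the radix-tree stub above:

#define per_cpu_ptr(ptr, cpu) ({ (void)(cpu); (ptr); })

static long hits;	/* the single copy shared by every "CPU" */

static void record_hit(int cpu)
{
	/* In the kernel this would touch cpu's copy; under the stub it
	 * always increments 'hits'.
	 */
	(*per_cpu_ptr(&hits, cpu))++;
}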
/linux/arch/x86/kernel/
kgdb.c 208 bp = *per_cpu_ptr(breakinfo[breakno].pev, cpu); in kgdb_correct_hw_break()
237 pevent = per_cpu_ptr(breakinfo[breakno].pev, cpu); in hw_break_reserve_slot()
249 pevent = per_cpu_ptr(breakinfo[breakno].pev, cpu); in hw_break_reserve_slot()
264 pevent = per_cpu_ptr(breakinfo[breakno].pev, cpu); in hw_break_release_slot()
304 bp = *per_cpu_ptr(breakinfo[i].pev, cpu); in kgdb_remove_all_hw_break()
397 bp = *per_cpu_ptr(breakinfo[i].pev, cpu); in kgdb_disable_hw_debug()
666 pevent = per_cpu_ptr(breakinfo[i].pev, cpu); in kgdb_arch_late()
irq_64.c 37 char *stack = (char *)per_cpu_ptr(&irq_stack_backing_store, cpu); in map_irq_stack()
63 void *va = per_cpu_ptr(&irq_stack_backing_store, cpu); in map_irq_stack()
/linux/kernel/rcu/
tree_exp.h 244 rdp = per_cpu_ptr(&rcu_data, cpu); in rcu_report_exp_cpu_mult()
282 struct rcu_data *rdp = per_cpu_ptr(&rcu_data, raw_smp_processor_id()); in exp_funnel_lock()
353 struct rcu_data *rdp = per_cpu_ptr(&rcu_data, cpu); in sync_rcu_exp_select_node_cpus()
381 struct rcu_data *rdp = per_cpu_ptr(&rcu_data, cpu); in sync_rcu_exp_select_node_cpus()
505 rdp = per_cpu_ptr(&rcu_data, cpu); in synchronize_rcu_expedited_wait()
536 rdp = per_cpu_ptr(&rcu_data, cpu); in synchronize_rcu_expedited_wait()
757 rdp = per_cpu_ptr(&rcu_data, cpu); in sync_sched_exp_online_cleanup()
/linux/mm/
vmstat.c 45 per_cpu_ptr(zone->per_cpu_zonestats, cpu)->vm_numa_event[item] in zero_zone_numa_counters()
178 pzstats = per_cpu_ptr(zone->per_cpu_zonestats, cpu); in fold_vm_zone_numa_events()
295 per_cpu_ptr(zone->per_cpu_zonestats, cpu)->stat_threshold in refresh_zone_stat_thresholds()
300 per_cpu_ptr(pgdat->per_cpu_nodestats, cpu)->stat_threshold in refresh_zone_stat_thresholds()
332 per_cpu_ptr(zone->per_cpu_zonestats, cpu)->stat_threshold in set_pgdat_percpu_threshold()
915 pzstats = per_cpu_ptr(zone->per_cpu_zonestats, cpu); in cpu_vm_stats_fold()
943 p = per_cpu_ptr(pgdat->per_cpu_nodestats, cpu); in cpu_vm_stats_fold()
1715 pcp = per_cpu_ptr(zone->per_cpu_pageset, i); in zoneinfo_show_print()
1726 pzstats = per_cpu_ptr(zone->per_cpu_zonestats, i); in zoneinfo_show_print()
1956 n = per_cpu_ptr(zone->zone_pgdat->per_cpu_nodestats, cpu); in need_update()
[all …]
