Home
last modified time | relevance | path

Searched refs:__this_cpu_read (Results 1 – 25 of 115) sorted by relevance

12345

/linux/arch/mips/kernel/
A Dmips-r2-to-r6-emul.c 2243 (unsigned long)__this_cpu_read(mipsr2emustats.movs), in mipsr2_emul_show()
2246 (unsigned long)__this_cpu_read(mipsr2emustats.hilo), in mipsr2_emul_show()
2249 (unsigned long)__this_cpu_read(mipsr2emustats.muls), in mipsr2_emul_show()
2252 (unsigned long)__this_cpu_read(mipsr2emustats.divs), in mipsr2_emul_show()
2255 (unsigned long)__this_cpu_read(mipsr2emustats.dsps), in mipsr2_emul_show()
2258 (unsigned long)__this_cpu_read(mipsr2emustats.bops), in mipsr2_emul_show()
2261 (unsigned long)__this_cpu_read(mipsr2emustats.traps), in mipsr2_emul_show()
2264 (unsigned long)__this_cpu_read(mipsr2emustats.fpus), in mipsr2_emul_show()
2267 (unsigned long)__this_cpu_read(mipsr2emustats.loads), in mipsr2_emul_show()
2270 (unsigned long)__this_cpu_read(mipsr2emustats.stores), in mipsr2_emul_show()
[all …]
/linux/arch/sparc/kernel/
A Dnmi.c 102 if (__this_cpu_read(nmi_touch)) { in perfctr_irq()
106 if (!touched && __this_cpu_read(last_irq_sum) == sum) { in perfctr_irq()
108 if (__this_cpu_read(alert_counter) == 30 * nmi_hz) in perfctr_irq()
115 if (__this_cpu_read(wd_enabled)) { in perfctr_irq()
155 if (!__this_cpu_read(wd_enabled)) in stop_nmi_watchdog()
212 if (__this_cpu_read(wd_enabled)) in start_nmi_watchdog()
226 if (!__this_cpu_read(wd_enabled)) in nmi_adjust_hz_one()
/linux/kernel/
A Dcontext_tracking.c 70 if ( __this_cpu_read(context_tracking.state) != state) { in __context_tracking_enter()
71 if (__this_cpu_read(context_tracking.active)) { in __context_tracking_enter()
151 if (__this_cpu_read(context_tracking.state) == state) { in __context_tracking_exit()
152 if (__this_cpu_read(context_tracking.active)) { in __context_tracking_exit()
A Dsoftirq.c 77 struct task_struct *tsk = __this_cpu_read(ksoftirqd); in wakeup_softirqd()
91 struct task_struct *tsk = __this_cpu_read(ksoftirqd); in ksoftirqd_running()
155 return __this_cpu_read(softirq_ctrl.cnt) != 0; in local_bh_blocked()
229 curcnt = __this_cpu_read(softirq_ctrl.cnt); in __local_bh_enable_ip()
425 if (!force_irqthreads() || !__this_cpu_read(ksoftirqd)) { in invoke_softirq()
571 __this_cpu_read(ksoftirqd) == current) in __do_softirq()
937 *__this_cpu_read(tasklet_vec.tail) = per_cpu(tasklet_vec, cpu).head; in takeover_tasklets()
945 *__this_cpu_read(tasklet_hi_vec.tail) = per_cpu(tasklet_hi_vec, cpu).head; in takeover_tasklets()
A Dwatchdog_hld.c 80 delta = now - __this_cpu_read(last_timestamp); in watchdog_check_timestamp()
117 if (__this_cpu_read(watchdog_nmi_touch) == true) { in watchdog_overflow_callback()
135 if (__this_cpu_read(hard_watchdog_warn) == true) in watchdog_overflow_callback()
A Dwatchdog.c 320 unsigned long hrint = __this_cpu_read(hrtimer_interrupts); in is_hardlockup()
322 if (__this_cpu_read(hrtimer_interrupts_saved) == hrint) in is_hardlockup()
398 if (unlikely(__this_cpu_read(softlockup_touch_sync))) { in watchdog_timer_fn()
412 touch_ts = __this_cpu_read(watchdog_touch_ts); in watchdog_timer_fn()
/linux/arch/x86/kernel/cpu/mce/
A Dintel.c 132 if (__this_cpu_read(cmci_storm_state) == CMCI_STORM_NONE) in mce_intel_cmci_poll()
179 (__this_cpu_read(cmci_storm_state) == CMCI_STORM_ACTIVE)) { in cmci_intel_adjust_timer()
184 switch (__this_cpu_read(cmci_storm_state)) { in cmci_intel_adjust_timer()
218 unsigned int cnt = __this_cpu_read(cmci_storm_cnt); in cmci_storm_detect()
219 unsigned long ts = __this_cpu_read(cmci_time_stamp); in cmci_storm_detect()
223 if (__this_cpu_read(cmci_storm_state) != CMCI_STORM_NONE) in cmci_storm_detect()
/linux/kernel/time/
A Dtick-oneshot.c 25 struct clock_event_device *dev = __this_cpu_read(tick_cpu_device.evtdev); in tick_program_event()
52 struct clock_event_device *dev = __this_cpu_read(tick_cpu_device.evtdev); in tick_resume_oneshot()
112 ret = __this_cpu_read(tick_cpu_device.mode) == TICKDEV_MODE_ONESHOT; in tick_oneshot_mode_active()
/linux/include/asm-generic/
A Dirq_regs.h 21 return __this_cpu_read(__irq_regs); in get_irq_regs()
28 old_regs = __this_cpu_read(__irq_regs); in set_irq_regs()
/linux/lib/
A Dpercpu_test.c 11 WARN(__this_cpu_read(pcp) != (expected), \
13 __this_cpu_read(pcp), __this_cpu_read(pcp), \
A Dcpumask.c 252 prev = __this_cpu_read(distribute_cpu_mask_prev); in cpumask_any_and_distribute()
270 prev = __this_cpu_read(distribute_cpu_mask_prev); in cpumask_any_distribute()
A Dpercpu_counter.c 87 count = __this_cpu_read(*fbc->counters) + amount; in percpu_counter_add_batch()
113 count = __this_cpu_read(*fbc->counters); in percpu_counter_sync()
/linux/arch/x86/kernel/
A Dhw_breakpoint.c 485 set_debugreg(__this_cpu_read(cpu_debugreg[0]), 0); in hw_breakpoint_restore()
486 set_debugreg(__this_cpu_read(cpu_debugreg[1]), 1); in hw_breakpoint_restore()
487 set_debugreg(__this_cpu_read(cpu_debugreg[2]), 2); in hw_breakpoint_restore()
488 set_debugreg(__this_cpu_read(cpu_debugreg[3]), 3); in hw_breakpoint_restore()
490 set_debugreg(__this_cpu_read(cpu_dr7), 7); in hw_breakpoint_restore()
A Dirq.c 248 desc = __this_cpu_read(vector_irq[vector]); in DEFINE_IDTENTRY_IRQ()
359 if (IS_ERR_OR_NULL(__this_cpu_read(vector_irq[vector]))) in fixup_irqs()
364 desc = __this_cpu_read(vector_irq[vector]); in fixup_irqs()
375 if (__this_cpu_read(vector_irq[vector]) != VECTOR_RETRIGGERED) in fixup_irqs()
A Dirq_32.c 80 irqstk = __this_cpu_read(hardirq_stack_ptr); in execute_on_irq_stack()
141 irqstk = __this_cpu_read(softirq_stack_ptr); in do_softirq_own_stack()
A Dkvm.c 234 if (__this_cpu_read(apf_reason.enabled)) { in kvm_read_and_reset_apf_flags()
235 flags = __this_cpu_read(apf_reason.flags); in kvm_read_and_reset_apf_flags()
285 if (__this_cpu_read(apf_reason.enabled)) { in DEFINE_IDTENTRY_SYSVEC()
286 token = __this_cpu_read(apf_reason.token); in DEFINE_IDTENTRY_SYSVEC()
373 if (!__this_cpu_read(apf_reason.enabled)) in kvm_pv_disable_apf()
/linux/include/linux/
A Dcontext_tracking_state.h 41 return context_tracking_enabled() && __this_cpu_read(context_tracking.active); in context_tracking_enabled_this_cpu()
46 return __this_cpu_read(context_tracking.state) == CONTEXT_USER; in context_tracking_in_user()
/linux/drivers/cpuidle/
A Dcpuidle-psci.c 48 return __this_cpu_read(domain_state); in psci_get_domain_state()
113 struct device *pd_dev = __this_cpu_read(psci_cpuidle_data.dev); in psci_idle_cpuhp_up()
123 struct device *pd_dev = __this_cpu_read(psci_cpuidle_data.dev); in psci_idle_cpuhp_down()
152 u32 *state = __this_cpu_read(psci_cpuidle_data.psci_states); in psci_enter_idle_state()
/linux/drivers/xen/events/
A Devents_2l.c 123 struct vcpu_info *vcpu_info = __this_cpu_read(xen_vcpu); in evtchn_2l_unmask()
173 struct vcpu_info *vcpu_info = __this_cpu_read(xen_vcpu); in evtchn_2l_handle_events()
192 start_word_idx = __this_cpu_read(current_word_idx); in evtchn_2l_handle_events()
193 start_bit_idx = __this_cpu_read(current_bit_idx); in evtchn_2l_handle_events()
/linux/kernel/rcu/
A Dtree_plugin.h 285 if (__this_cpu_read(rcu_data.cpu_no_qs.s)) { in rcu_qs()
287 __this_cpu_read(rcu_data.gp_seq), in rcu_qs()
583 return (__this_cpu_read(rcu_data.exp_deferred_qs) || in rcu_preempt_need_deferred_qs()
732 __this_cpu_read(rcu_data.core_needs_qs) && in rcu_flavor_sched_clock_irq()
733 __this_cpu_read(rcu_data.cpu_no_qs.b.norm) && in rcu_flavor_sched_clock_irq()
843 if (!__this_cpu_read(rcu_data.cpu_no_qs.s)) in rcu_qs()
846 __this_cpu_read(rcu_data.gp_seq), TPS("cpuqs")); in rcu_qs()
848 if (!__this_cpu_read(rcu_data.cpu_no_qs.b.exp)) in rcu_qs()
1140 return __this_cpu_read(rcu_data.rcu_cpu_kthread_task) == current; in rcu_is_callbacks_kthread()
/linux/arch/x86/include/asm/
A Dirq_stack.h 117 call_on_stack(__this_cpu_read(hardirq_stack_ptr), \
136 if (user_mode(regs) || __this_cpu_read(hardirq_stack_inuse)) { \
A Dcpu_entry_area.h 152 CEA_ESTACK_TOP(__this_cpu_read(cea_exception_stacks), name)
155 CEA_ESTACK_BOT(__this_cpu_read(cea_exception_stacks), name)
/linux/mm/
A Dvmstat.c 360 x = delta + __this_cpu_read(*p); in __mod_zone_page_state()
362 t = __this_cpu_read(pcp->stat_threshold); in __mod_zone_page_state()
398 x = delta + __this_cpu_read(*p); in __mod_node_page_state()
400 t = __this_cpu_read(pcp->stat_threshold); in __mod_node_page_state()
447 t = __this_cpu_read(pcp->stat_threshold); in __inc_zone_state()
472 t = __this_cpu_read(pcp->stat_threshold); in __inc_node_state()
507 t = __this_cpu_read(pcp->stat_threshold); in __dec_zone_state()
532 t = __this_cpu_read(pcp->stat_threshold); in __dec_node_state()
858 if (!__this_cpu_read(pcp->expire) || in refresh_cpu_vm_stats()
859 !__this_cpu_read(pcp->count)) in refresh_cpu_vm_stats()
[all …]
/linux/drivers/irqchip/
A Dirq-xtensa-mx.c 82 mask = __this_cpu_read(cached_irq_mask) & ~mask; in xtensa_mx_irq_mask()
100 mask |= __this_cpu_read(cached_irq_mask); in xtensa_mx_irq_unmask()
/linux/arch/arm64/include/asm/
A Darch_timer.h 27 __wa = __this_cpu_read(timer_unstable_counter_workaround); \
34 __wa = __this_cpu_read(timer_unstable_counter_workaround); \

Completed in 34 milliseconds

12345