Searched refs: READ_ONCE (Results 1 – 25 of 1125, sorted by relevance)
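
Every hit below leans on the same guarantee: READ_ONCE() forces the compiler to emit exactly one untorn load of a location that another CPU (or an interrupt handler) may be writing concurrently, and forbids it from caching, refetching, tearing, or fusing the access. As a minimal sketch (the kernel's real definition lives in include/asm-generic/rwonce.h and additionally handles sizes the compiler cannot load atomically), the core of the macro is a volatile cast; the examples interspersed below assume these stand-ins:

    /* Simplified userspace stand-ins for the kernel's READ_ONCE() and
     * WRITE_ONCE(); not the kernel's exact definitions. */
    #define READ_ONCE(x)      (*(const volatile __typeof__(x) *)&(x))
    #define WRITE_ONCE(x, v)  (*(volatile __typeof__(x) *)&(x) = (v))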

/linux/kernel/rcu/
tree_stall.h
166 if (!READ_ONCE(rcu_kick_kthreads)) in rcu_stall_kick_kthreads()
507 gpk && !READ_ONCE(gpk->on_rq)) { in rcu_check_gp_kthread_expired_fqs_timer()
685 gs1 = READ_ONCE(rcu_state.gp_seq); in check_cpu_stall()
689 gps = READ_ONCE(rcu_state.gp_start); in check_cpu_stall()
691 gs2 = READ_ONCE(rcu_state.gp_seq); in check_cpu_stall()
773 if (READ_ONCE(rnp->gp_tasks)) in rcu_check_boost_fail()
833 if (ULONG_CMP_GE(READ_ONCE(rcu_state.gp_seq), READ_ONCE(rnp->gp_seq_needed)) && in show_rcu_gp_kthreads()
834 !data_race(READ_ONCE(rnp->qsmask)) && !data_race(READ_ONCE(rnp->boost_tasks)) && in show_rcu_gp_kthreads()
835 !data_race(READ_ONCE(rnp->exp_tasks)) && !data_race(READ_ONCE(rnp->gp_tasks))) in show_rcu_gp_kthreads()
841 data_race(READ_ONCE(rnp->qsmask)), in show_rcu_gp_kthreads()
[all …]
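
check_cpu_stall() (the hits at lines 685–691 above) samples rcu_state.gp_seq before and after reading gp_start, and abandons the stall check if the two snapshots differ, since that means a grace-period transition raced with the reads. A sketch of that double-sample idiom, using the stand-in macros above (the kernel also places smp_rmb() between the reads; names are simplified):

    static unsigned long gp_seq;    /* grace-period counter */
    static unsigned long gp_start;  /* when the current GP began */

    /* Fill *start only if no grace-period transition raced with us;
     * otherwise tell the caller to bail out and try again later. */
    static int snapshot_gp_start(unsigned long *start)
    {
            unsigned long gs1, gs2;

            gs1 = READ_ONCE(gp_seq);
            *start = READ_ONCE(gp_start);
            gs2 = READ_ONCE(gp_seq);
            return gs1 == gs2 ? 0 : -1;
    }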
tasks.h
166 if (needwake && READ_ONCE(rtp->kthread_ptr)) in call_rcu_tasks_generic()
213 READ_ONCE(rtp->cbs_head)); in rcu_tasks_kthread()
350 rtst = READ_ONCE(rcu_task_stall_timeout); in rcu_tasks_wait_gp()
444 t->rcu_tasks_nvcsw = READ_ONCE(t->nvcsw); in rcu_tasks_pertask()
469 if (!READ_ONCE(t->rcu_tasks_holdout) || in check_holdout_task()
471 !READ_ONCE(t->on_rq) || in check_holdout_task()
1089 ".I"[READ_ONCE(t->trc_ipi_to_cpu) >= 0], in show_stalled_task_trace()
1092 READ_ONCE(t->trc_reader_nesting), in show_stalled_task_trace()
1120 !READ_ONCE(t->trc_reader_checked)) in check_all_holdout_tasks_trace()
1124 if (READ_ONCE(t->trc_reader_checked)) in check_all_holdout_tasks_trace()
[all …]
sync.c
78 WARN_ON_ONCE(READ_ONCE(rsp->gp_state) == GP_IDLE); in rcu_sync_func()
79 WARN_ON_ONCE(READ_ONCE(rsp->gp_state) == GP_PASSED); in rcu_sync_func()
155 wait_event(rsp->gp_wait, READ_ONCE(rsp->gp_state) >= GP_PASSED); in rcu_sync_enter()
170 WARN_ON_ONCE(READ_ONCE(rsp->gp_state) == GP_IDLE); in rcu_sync_exit()
171 WARN_ON_ONCE(READ_ONCE(rsp->gp_count) == 0); in rcu_sync_exit()
193 WARN_ON_ONCE(READ_ONCE(rsp->gp_count)); in rcu_sync_dtor()
194 WARN_ON_ONCE(READ_ONCE(rsp->gp_state) == GP_PASSED); in rcu_sync_dtor()
srcutiny.c
99 int newval = READ_ONCE(ssp->srcu_lock_nesting[idx]) - 1; in __srcu_read_unlock()
102 if (!newval && READ_ONCE(ssp->srcu_gp_waiting)) in __srcu_read_unlock()
120 if (ssp->srcu_gp_running || USHORT_CMP_GE(ssp->srcu_idx, READ_ONCE(ssp->srcu_idx_max))) in srcu_drive_gp()
133 swait_event_exclusive(ssp->srcu_wq, !READ_ONCE(ssp->srcu_lock_nesting[idx])); in srcu_drive_gp()
153 if (USHORT_CMP_LT(ssp->srcu_idx, READ_ONCE(ssp->srcu_idx_max))) in srcu_drive_gp()
163 if (USHORT_CMP_GE(READ_ONCE(ssp->srcu_idx_max), cookie)) in srcu_gp_start_if_needed()
166 if (!READ_ONCE(ssp->srcu_gp_running)) { in srcu_gp_start_if_needed()
216 ret = (READ_ONCE(ssp->srcu_idx) + 3) & ~0x1; in get_state_synchronize_srcu()
243 bool ret = USHORT_CMP_GE(READ_ONCE(ssp->srcu_idx), cookie); in poll_state_synchronize_srcu()
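
srcutiny's polling interface (lines 216 and 243 above) hands out a cookie derived from the srcu_idx counter and later checks whether the counter has advanced past it; READ_ONCE() lets both ends sample the counter locklessly. A generic generation-cookie sketch of the idiom, with a wraparound-safe comparison standing in for the kernel's USHORT_CMP_GE() (the cookie arithmetic here is simplified, not srcutiny's exact encoding):

    static unsigned short gp_seq;   /* advanced as grace periods complete */

    /* Wraparound-safe "a >= b" for a 16-bit counter, in the spirit
     * of the kernel's USHORT_CMP_GE(). */
    static int seq_ge(unsigned short a, unsigned short b)
    {
            return (unsigned short)(a - b) < 0x8000;
    }

    static unsigned short get_state(void)
    {
            /* Cookie naming a grace period that has yet to complete. */
            return READ_ONCE(gp_seq) + 2;
    }

    static int poll_state(unsigned short cookie)
    {
            /* True once the counter has reached the cookie. */
            return seq_ge(READ_ONCE(gp_seq), cookie);
    }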
rcu_segcblist.h
15 return READ_ONCE(rclp->len); in rcu_cblist_n_cbs()
43 return !READ_ONCE(rsclp->head); in rcu_segcblist_empty()
52 return READ_ONCE(rsclp->len); in rcu_segcblist_n_cbs()
71 return READ_ONCE(rsclp->flags) & flags; in rcu_segcblist_test_flags()
110 return !READ_ONCE(*READ_ONCE(rsclp->tails[seg])); in rcu_segcblist_restempty()
tree_plugin.h
367 return READ_ONCE(rnp->gp_tasks) != NULL; in rcu_preempt_blocked_readers_cgp()
584 READ_ONCE(t->rcu_read_unlock_special.s)) && in rcu_preempt_need_deferred_qs()
787 READ_ONCE(rnp->exp_tasks)); in dump_blkd_tasks()
1012 if (READ_ONCE(rnp->exp_tasks) == NULL && in rcu_boost()
1013 READ_ONCE(rnp->boost_tasks) == NULL) in rcu_boost()
1062 return READ_ONCE(rnp->exp_tasks) != NULL || in rcu_boost()
1063 READ_ONCE(rnp->boost_tasks) != NULL; in rcu_boost()
1079 rcu_wait(READ_ONCE(rnp->boost_tasks) || in rcu_boost_kthread()
1080 READ_ONCE(rnp->exp_tasks)); in rcu_boost_kthread()
1341 unlikely(READ_ONCE(rdp->gpwrap))) && in rcu_try_advance_all_cbs()
[all …]
/linux/Documentation/translations/ko_KR/
memory-barriers.txt
259 Q = READ_ONCE(P); D = READ_ONCE(*Q);
690 q = READ_ONCE(a);
702 q = READ_ONCE(a);
712 q = READ_ONCE(a);
735 q = READ_ONCE(a);
749 q = READ_ONCE(a);
766 q = READ_ONCE(a);
778 q = READ_ONCE(a);
794 q = READ_ONCE(a);
806 q = READ_ONCE(a);
[all …]
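
The hit at line 259 is memory-barriers.txt's address-dependency example: the second load goes through the pointer returned by the first, so both must be READ_ONCE() to keep the compiler (and, historically, Alpha) from breaking the dependency. A sketch of the pattern, with the writer side assumed and using the stand-in macros above:

    static int a = 1;
    static int *P = &a;      /* pointer some writer republishes */

    static int reader(void)
    {
            int *Q, D;

            Q = READ_ONCE(P);   /* one untorn load of the pointer */
            D = READ_ONCE(*Q);  /* dependent load through it; READ_ONCE()
                                 * also preserves address-dependency
                                 * ordering on Alpha */
            return D;
    }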
/linux/include/net/
busy_poll.h
41 return READ_ONCE(sk->sk_ll_usec) && !signal_pending(current); in sk_can_busy_loop()
76 unsigned long bp_usec = READ_ONCE(sysctl_net_busy_poll); in busy_loop_timeout()
92 unsigned long bp_usec = READ_ONCE(sk->sk_ll_usec); in sk_busy_loop_timeout()
107 unsigned int napi_id = READ_ONCE(sk->sk_napi_id); in sk_busy_loop()
111 READ_ONCE(sk->sk_prefer_busy_poll), in sk_busy_loop()
112 READ_ONCE(sk->sk_busy_poll_budget) ?: BUSY_POLL_BUDGET); in sk_busy_loop()
133 if (unlikely(READ_ONCE(sk->sk_napi_id) != skb->napi_id)) in sk_mark_napi_id()
155 if (!READ_ONCE(sk->sk_napi_id)) in __sk_mark_napi_id_once()
/linux/arch/arm64/mm/
mmu.c
199 pmd = READ_ONCE(*pmdp); in alloc_init_cont_pte()
277 pud = READ_ONCE(*pudp); in alloc_init_cont_pmd()
328 p4d = READ_ONCE(*p4dp); in alloc_init_pud()
772 pud = READ_ONCE(*pudp); in kern_addr_valid()
780 pmd = READ_ONCE(*pmdp); in kern_addr_valid()
788 pte = READ_ONCE(*ptep); in kern_addr_valid()
839 pte = READ_ONCE(*ptep); in unmap_hotplug_pte_range()
862 pmd = READ_ONCE(*pmdp); in unmap_hotplug_pmd_range()
1225 p4d = READ_ONCE(*p4dp); in early_fixmap_init()
1392 pmd = READ_ONCE(*pmdp); in pmd_free_pte_page()
[all …]
pageattr.c
30 pte_t pte = READ_ONCE(*ptep); in change_page_range()
218 if (pgd_none(READ_ONCE(*pgdp))) in kernel_page_present()
222 if (p4d_none(READ_ONCE(*p4dp))) in kernel_page_present()
226 pud = READ_ONCE(*pudp); in kernel_page_present()
233 pmd = READ_ONCE(*pmdp); in kernel_page_present()
240 return pte_valid(READ_ONCE(*ptep)); in kernel_page_present()
/linux/arch/arm64/include/asm/
preempt.h
12 return READ_ONCE(current_thread_info()->preempt.count); in preempt_count()
46 u32 pc = READ_ONCE(current_thread_info()->preempt.count); in __preempt_count_add()
53 u32 pc = READ_ONCE(current_thread_info()->preempt.count); in __preempt_count_sub()
61 u64 pc = READ_ONCE(ti->preempt_count); in __preempt_count_dec_and_test()
73 return !pc || !READ_ONCE(ti->preempt_count); in __preempt_count_dec_and_test()
78 u64 pc = READ_ONCE(current_thread_info()->preempt_count); in should_resched()
/linux/drivers/net/ethernet/mellanox/mlx4/
en_port.c
165 packets += READ_ONCE(ring->packets); in mlx4_en_fold_software_stats()
166 bytes += READ_ONCE(ring->bytes); in mlx4_en_fold_software_stats()
176 packets += READ_ONCE(ring->packets); in mlx4_en_fold_software_stats()
177 bytes += READ_ONCE(ring->bytes); in mlx4_en_fold_software_stats()
254 sw_rx_dropped += READ_ONCE(ring->dropped); in mlx4_en_DUMP_ETH_STATS()
255 priv->port_stats.rx_chksum_good += READ_ONCE(ring->csum_ok); in mlx4_en_DUMP_ETH_STATS()
256 priv->port_stats.rx_chksum_none += READ_ONCE(ring->csum_none); in mlx4_en_DUMP_ETH_STATS()
259 priv->xdp_stats.rx_xdp_drop += READ_ONCE(ring->xdp_drop); in mlx4_en_DUMP_ETH_STATS()
262 priv->xdp_stats.rx_xdp_tx += READ_ONCE(ring->xdp_tx); in mlx4_en_DUMP_ETH_STATS()
263 priv->xdp_stats.rx_xdp_tx_full += READ_ONCE(ring->xdp_tx_full); in mlx4_en_DUMP_ETH_STATS()
[all …]
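
mlx4_en_fold_software_stats() sums per-ring counters that the datapath keeps updating while the fold runs; READ_ONCE() makes each racy read a single untorn load, and slightly stale totals are acceptable for statistics. A stripped-down sketch of the folding idiom (the struct here is invented for illustration):

    struct ring_stats {
            unsigned long packets;  /* updated only by the ring's CPU */
            unsigned long bytes;
    };

    /* Fold per-ring counters into totals without locking the rings;
     * each counter is loaded exactly once, untorn, but the totals may
     * lag the datapath by a few packets. */
    static void fold_stats(struct ring_stats *rings, int n,
                           unsigned long *packets, unsigned long *bytes)
    {
            int i;

            *packets = 0;
            *bytes = 0;
            for (i = 0; i < n; i++) {
                    *packets += READ_ONCE(rings[i].packets);
                    *bytes   += READ_ONCE(rings[i].bytes);
            }
    }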
/linux/tools/lib/perf/
mmap.c
312 seq = READ_ONCE(pc->lock); in perf_mmap__read_self()
315 count->ena = READ_ONCE(pc->time_enabled); in perf_mmap__read_self()
316 count->run = READ_ONCE(pc->time_running); in perf_mmap__read_self()
320 time_mult = READ_ONCE(pc->time_mult); in perf_mmap__read_self()
321 time_shift = READ_ONCE(pc->time_shift); in perf_mmap__read_self()
322 time_offset = READ_ONCE(pc->time_offset); in perf_mmap__read_self()
326 time_mask = READ_ONCE(pc->time_mask); in perf_mmap__read_self()
330 idx = READ_ONCE(pc->index); in perf_mmap__read_self()
331 cnt = READ_ONCE(pc->offset); in perf_mmap__read_self()
334 u16 width = READ_ONCE(pc->pmc_width); in perf_mmap__read_self()
[all …]
/linux/arch/s390/lib/
spinlock.c
131 old = READ_ONCE(lp->lock); in arch_spin_lock_queued()
168 while (READ_ONCE(node->prev) != NULL) { in arch_spin_lock_queued()
182 old = READ_ONCE(lp->lock); in arch_spin_lock_queued()
202 while ((next = READ_ONCE(node->next)) == NULL) in arch_spin_lock_queued()
218 owner = arch_spin_yield_target(READ_ONCE(lp->lock), NULL); in arch_spin_lock_classic()
258 owner = READ_ONCE(lp->lock); in arch_spin_trylock_retry()
272 while (READ_ONCE(rw->cnts) & 0x10000) in arch_read_lock_wait()
284 while (READ_ONCE(rw->cnts) & 0x10000) in arch_read_lock_wait()
301 old = READ_ONCE(rw->cnts); in arch_write_lock_wait()
317 cpu = READ_ONCE(lp->lock) & _Q_LOCK_CPU_MASK; in arch_spin_relax()
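
The queued-spinlock slow path above spins on fields another CPU will change (node->prev, node->next, lp->lock). Without READ_ONCE() the compiler could legally hoist the load out of the loop and spin forever on a register copy. A minimal spin-wait sketch of the idiom (the MCS-style node is simplified):

    struct mcs_node {
            struct mcs_node *prev;
            struct mcs_node *next;
    };

    static void wait_for_predecessor(struct mcs_node *node)
    {
            /* Each iteration must reload node->prev from memory;
             * READ_ONCE() keeps the compiler from caching it in a
             * register, which would make this loop never terminate. */
            while (READ_ONCE(node->prev) != NULL)
                    ;       /* the kernel would also cpu_relax() here */
    }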
/linux/drivers/powercap/
idle_inject.c
114 duration_us = READ_ONCE(ii_dev->run_duration_us); in idle_inject_timer_fn()
115 duration_us += READ_ONCE(ii_dev->idle_duration_us); in idle_inject_timer_fn()
144 play_idle_precise(READ_ONCE(ii_dev->idle_duration_us) * NSEC_PER_USEC, in idle_inject_fn()
145 READ_ONCE(ii_dev->latency_us) * NSEC_PER_USEC); in idle_inject_fn()
172 *run_duration_us = READ_ONCE(ii_dev->run_duration_us); in idle_inject_get_duration()
173 *idle_duration_us = READ_ONCE(ii_dev->idle_duration_us); in idle_inject_get_duration()
198 unsigned int idle_duration_us = READ_ONCE(ii_dev->idle_duration_us); in idle_inject_start()
199 unsigned int run_duration_us = READ_ONCE(ii_dev->run_duration_us); in idle_inject_start()
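
idle_inject reads its tunables with READ_ONCE() because another context can update them while the timer callback runs; each field is then sampled once, untorn, though the pair as a whole may mix old and new values for one period, which the driver tolerates. A sketch of that paired-tunable idiom (the struct is trimmed to the two fields):

    struct idle_inject_device {
            unsigned int idle_duration_us;
            unsigned int run_duration_us;
    };

    /* Writer side: a sysfs/config path updates the tunables. */
    static void set_duration(struct idle_inject_device *ii_dev,
                             unsigned int run_us, unsigned int idle_us)
    {
            WRITE_ONCE(ii_dev->run_duration_us, run_us);
            WRITE_ONCE(ii_dev->idle_duration_us, idle_us);
    }

    /* Reader side: the timer callback computes the period from one
     * untorn sample of each field. */
    static unsigned int period_us(struct idle_inject_device *ii_dev)
    {
            return READ_ONCE(ii_dev->run_duration_us) +
                   READ_ONCE(ii_dev->idle_duration_us);
    }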
/linux/tools/memory-model/Documentation/
control-dependencies.txt
15 q = READ_ONCE(a);
17 p = READ_ONCE(b);
25 q = READ_ONCE(a);
28 p = READ_ONCE(b);
35 q = READ_ONCE(a);
62 q = READ_ONCE(a);
76 q = READ_ONCE(a);
94 q = READ_ONCE(a);
106 q = READ_ONCE(a);
123 q = READ_ONCE(a);
[all …]
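
Every hit in control-dependencies.txt repeats the same skeleton: a READ_ONCE() whose result steers a branch guarding a WRITE_ONCE(). The control dependency orders the load before the store, but only as long as the compiler cannot collapse the branch. A sketch of the base pattern the document builds its variations on:

    static int a, b;

    static void control_dep(void)
    {
            int q;

            q = READ_ONCE(a);
            if (q)
                    /* Ordered after the load by the control dependency.
                     * WRITE_ONCE() is needed so the compiler cannot
                     * prove the branch irrelevant and drop it. */
                    WRITE_ONCE(b, 1);
    }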
/linux/arch/s390/kernel/
idle.c
81 idle_count = READ_ONCE(idle->idle_count); in show_idle_count()
82 if (READ_ONCE(idle->clock_idle_enter)) in show_idle_count()
98 idle_time = READ_ONCE(idle->idle_time); in show_idle_time()
99 idle_enter = READ_ONCE(idle->clock_idle_enter); in show_idle_time()
100 idle_exit = READ_ONCE(idle->clock_idle_exit); in show_idle_time()
124 idle_enter = READ_ONCE(idle->clock_idle_enter); in arch_cpu_idle_time()
125 idle_exit = READ_ONCE(idle->clock_idle_exit); in arch_cpu_idle_time()
/linux/kernel/cgroup/
misc.c
148 if (!(valid_type(type) && cg && READ_ONCE(misc_res_capacity[type]))) in misc_cg_try_charge()
158 if (new_usage > READ_ONCE(res->max) || in misc_cg_try_charge()
159 new_usage > READ_ONCE(misc_res_capacity[type])) { in misc_cg_try_charge()
215 if (READ_ONCE(misc_res_capacity[i])) { in misc_cg_max_show()
216 max = READ_ONCE(cg->res[i].max); in misc_cg_max_show()
281 if (READ_ONCE(misc_res_capacity[type])) in misc_cg_max_write()
305 if (READ_ONCE(misc_res_capacity[i]) || usage) in misc_cg_current_show()
328 cap = READ_ONCE(misc_res_capacity[i]); in misc_cg_capacity_show()
343 if (READ_ONCE(misc_res_capacity[i]) || events) in misc_events_show()
/linux/include/linux/
srcutiny.h
63 idx = ((READ_ONCE(ssp->srcu_idx) + 1) & 0x2) >> 1; in __srcu_read_lock()
64 WRITE_ONCE(ssp->srcu_lock_nesting[idx], READ_ONCE(ssp->srcu_lock_nesting[idx]) + 1); in __srcu_read_lock()
84 idx = ((data_race(READ_ONCE(ssp->srcu_idx)) + 1) & 0x2) >> 1; in srcu_torture_stats_print()
87 data_race(READ_ONCE(ssp->srcu_lock_nesting[!idx])), in srcu_torture_stats_print()
88 data_race(READ_ONCE(ssp->srcu_lock_nesting[idx]))); in srcu_torture_stats_print()
/linux/net/mptcp/
pm.c
21 u8 add_addr = READ_ONCE(msk->pm.addr_signal); in mptcp_pm_announce_addr()
46 u8 rm_addr = READ_ONCE(msk->pm.addr_signal); in mptcp_pm_remove_addr()
93 subflows_max, READ_ONCE(pm->accept_subflow)); in mptcp_pm_allow_new_subflow()
96 if (!READ_ONCE(pm->accept_subflow)) in mptcp_pm_allow_new_subflow()
100 if (READ_ONCE(pm->accept_subflow)) { in mptcp_pm_allow_new_subflow()
139 if (READ_ONCE(pm->work_pending) && in mptcp_pm_fully_established()
164 if (!READ_ONCE(pm->work_pending)) in mptcp_pm_subflow_established()
169 if (READ_ONCE(pm->work_pending)) in mptcp_pm_subflow_established()
186 READ_ONCE(pm->accept_addr)); in mptcp_pm_add_addr_received()
192 if (!READ_ONCE(pm->accept_addr)) { in mptcp_pm_add_addr_received()
[all …]
/linux/arch/s390/include/asm/
preempt.h
17 return READ_ONCE(S390_lowcore.preempt_count) & ~PREEMPT_NEED_RESCHED; in preempt_count()
25 old = READ_ONCE(S390_lowcore.preempt_count); in preempt_count_set()
44 return !(READ_ONCE(S390_lowcore.preempt_count) & PREEMPT_NEED_RESCHED); in test_preempt_need_resched()
67 return unlikely(READ_ONCE(S390_lowcore.preempt_count) == in should_resched()
77 return READ_ONCE(S390_lowcore.preempt_count); in preempt_count()
/linux/kernel/locking/
qspinlock_paravirt.h
121 return !READ_ONCE(lock->locked) && in trylock_clear_pending()
246 if (READ_ONCE(he->lock) == lock) { in pv_unhash()
247 node = READ_ONCE(he->node); in pv_unhash()
272 return READ_ONCE(prev->state) != vcpu_running; in pv_wait_early()
302 if (READ_ONCE(node->locked)) in pv_wait_node()
322 if (!READ_ONCE(node->locked)) { in pv_wait_node()
343 !READ_ONCE(node->locked)); in pv_wait_node()
414 if (READ_ONCE(pn->state) == vcpu_hashed) in pv_wait_head_or_lock()
/linux/tools/memory-model/litmus-tests/
SB+rfionceonce-poonceonces.litmus
17 r1 = READ_ONCE(*x);
18 r2 = READ_ONCE(*y);
27 r3 = READ_ONCE(*y);
28 r4 = READ_ONCE(*x);
ISA2+poonceonces.litmus
9 * of the smp_load_acquire() invocations are replaced by READ_ONCE()?
24 r0 = READ_ONCE(*y);
33 r0 = READ_ONCE(*z);
34 r1 = READ_ONCE(*x);
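
Both litmus tests pair READ_ONCE() loads across threads; SB+rfionceonce-poonceonces is the store-buffering shape, where each thread stores to one variable, reads it back, then reads the other thread's variable. Litmus tests are normally run under the herd7 simulator, but a plain pthreads approximation of the two threads looks like this (a single run cannot, of course, explore every interleaving):

    #include <pthread.h>
    #include <stdio.h>

    #define READ_ONCE(x)      (*(const volatile __typeof__(x) *)&(x))
    #define WRITE_ONCE(x, v)  (*(volatile __typeof__(x) *)&(x) = (v))

    static int x, y;
    static int r1, r2, r3, r4;

    static void *p0(void *arg)
    {
            (void)arg;
            WRITE_ONCE(x, 1);
            r1 = READ_ONCE(x);
            r2 = READ_ONCE(y);
            return NULL;
    }

    static void *p1(void *arg)
    {
            (void)arg;
            WRITE_ONCE(y, 1);
            r3 = READ_ONCE(y);
            r4 = READ_ONCE(x);
            return NULL;
    }

    int main(void)
    {
            pthread_t t0, t1;

            pthread_create(&t0, NULL, p0, NULL);
            pthread_create(&t1, NULL, p1, NULL);
            pthread_join(t0, NULL);
            pthread_join(t1, NULL);
            /* The litmus test asks whether r2 and r4 can both be 0. */
            printf("r1=%d r2=%d r3=%d r4=%d\n", r1, r2, r3, r4);
            return 0;
    }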
/linux/include/clocksource/
hyperv_timer.h
60 sequence = READ_ONCE(tsc_pg->tsc_sequence); in hv_read_tsc_page_tsc()
69 scale = READ_ONCE(tsc_pg->tsc_scale); in hv_read_tsc_page_tsc()
70 offset = READ_ONCE(tsc_pg->tsc_offset); in hv_read_tsc_page_tsc()
79 } while (READ_ONCE(tsc_pg->tsc_sequence) != sequence); in hv_read_tsc_page_tsc()
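
hv_read_tsc_page_tsc() is a sequence-counter read loop: snapshot tsc_sequence, read the parameters with READ_ONCE(), and retry if the sequence changed underneath. A generic sketch of that retry loop (fields simplified; the kernel version also issues memory barriers that this sketch only notes in comments):

    struct tsc_page {
            unsigned int sequence;      /* bumped on every update */
            unsigned long long scale;
            long long offset;
    };

    static void read_tsc_params(struct tsc_page *pg,
                                unsigned long long *scale,
                                long long *offset)
    {
            unsigned int seq;

            do {
                    seq = READ_ONCE(pg->sequence);
                    /* real code: read barrier here */
                    *scale  = READ_ONCE(pg->scale);
                    *offset = READ_ONCE(pg->offset);
                    /* real code: read barrier before the re-check */
            } while (READ_ONCE(pg->sequence) != seq);
    }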
