// SPDX-License-Identifier: GPL-2.0

// Generated by scripts/atomic/gen-atomic-fallback.sh
// DO NOT MODIFY THIS FILE DIRECTLY

#ifndef _LINUX_ATOMIC_FALLBACK_H
#define _LINUX_ATOMIC_FALLBACK_H

#include <linux/compiler.h>

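/*
 * Fallback pattern used throughout this file: if an architecture defines
 * only the fully ordered op (e.g. arch_xchg), the _acquire/_release/_relaxed
 * variants are aliased to it; if it defines the _relaxed variant, the
 * stronger orderings are built from it via __atomic_op_acquire(),
 * __atomic_op_release() and __atomic_op_fence(), or with explicit
 * __atomic_acquire_fence()/__atomic_release_fence() barriers.
 */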
#ifndef arch_xchg_relaxed
#define arch_xchg_acquire arch_xchg
#define arch_xchg_release arch_xchg
#define arch_xchg_relaxed arch_xchg
#else /* arch_xchg_relaxed */

#ifndef arch_xchg_acquire
#define arch_xchg_acquire(...) \
	__atomic_op_acquire(arch_xchg, __VA_ARGS__)
#endif

#ifndef arch_xchg_release
#define arch_xchg_release(...) \
	__atomic_op_release(arch_xchg, __VA_ARGS__)
#endif

#ifndef arch_xchg
#define arch_xchg(...) \
	__atomic_op_fence(arch_xchg, __VA_ARGS__)
#endif

#endif /* arch_xchg_relaxed */

#ifndef arch_cmpxchg_relaxed
#define arch_cmpxchg_acquire arch_cmpxchg
#define arch_cmpxchg_release arch_cmpxchg
#define arch_cmpxchg_relaxed arch_cmpxchg
#else /* arch_cmpxchg_relaxed */

#ifndef arch_cmpxchg_acquire
#define arch_cmpxchg_acquire(...) \
	__atomic_op_acquire(arch_cmpxchg, __VA_ARGS__)
#endif

#ifndef arch_cmpxchg_release
#define arch_cmpxchg_release(...) \
	__atomic_op_release(arch_cmpxchg, __VA_ARGS__)
#endif

#ifndef arch_cmpxchg
#define arch_cmpxchg(...) \
	__atomic_op_fence(arch_cmpxchg, __VA_ARGS__)
#endif

#endif /* arch_cmpxchg_relaxed */

#ifndef arch_cmpxchg64_relaxed
#define arch_cmpxchg64_acquire arch_cmpxchg64
#define arch_cmpxchg64_release arch_cmpxchg64
#define arch_cmpxchg64_relaxed arch_cmpxchg64
#else /* arch_cmpxchg64_relaxed */

#ifndef arch_cmpxchg64_acquire
#define arch_cmpxchg64_acquire(...) \
	__atomic_op_acquire(arch_cmpxchg64, __VA_ARGS__)
#endif

#ifndef arch_cmpxchg64_release
#define arch_cmpxchg64_release(...) \
	__atomic_op_release(arch_cmpxchg64, __VA_ARGS__)
#endif

#ifndef arch_cmpxchg64
#define arch_cmpxchg64(...) \
	__atomic_op_fence(arch_cmpxchg64, __VA_ARGS__)
#endif

#endif /* arch_cmpxchg64_relaxed */

#ifndef arch_try_cmpxchg_relaxed
#ifdef arch_try_cmpxchg
#define arch_try_cmpxchg_acquire arch_try_cmpxchg
#define arch_try_cmpxchg_release arch_try_cmpxchg
#define arch_try_cmpxchg_relaxed arch_try_cmpxchg
#endif /* arch_try_cmpxchg */

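/*
 * When the architecture has no native try_cmpxchg, emulate it with
 * cmpxchg: compare against *_oldp and, on failure, write the value that
 * was actually observed back through _oldp so the caller can retry with
 * an up-to-date expected value. The expression evaluates to true on
 * success and false on failure.
 */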
#ifndef arch_try_cmpxchg
#define arch_try_cmpxchg(_ptr, _oldp, _new) \
({ \
	typeof(*(_ptr)) *___op = (_oldp), ___o = *___op, ___r; \
	___r = arch_cmpxchg((_ptr), ___o, (_new)); \
	if (unlikely(___r != ___o)) \
		*___op = ___r; \
	likely(___r == ___o); \
})
#endif /* arch_try_cmpxchg */

#ifndef arch_try_cmpxchg_acquire
#define arch_try_cmpxchg_acquire(_ptr, _oldp, _new) \
({ \
	typeof(*(_ptr)) *___op = (_oldp), ___o = *___op, ___r; \
	___r = arch_cmpxchg_acquire((_ptr), ___o, (_new)); \
	if (unlikely(___r != ___o)) \
		*___op = ___r; \
	likely(___r == ___o); \
})
#endif /* arch_try_cmpxchg_acquire */

#ifndef arch_try_cmpxchg_release
#define arch_try_cmpxchg_release(_ptr, _oldp, _new) \
({ \
	typeof(*(_ptr)) *___op = (_oldp), ___o = *___op, ___r; \
	___r = arch_cmpxchg_release((_ptr), ___o, (_new)); \
	if (unlikely(___r != ___o)) \
		*___op = ___r; \
	likely(___r == ___o); \
})
#endif /* arch_try_cmpxchg_release */

#ifndef arch_try_cmpxchg_relaxed
#define arch_try_cmpxchg_relaxed(_ptr, _oldp, _new) \
({ \
	typeof(*(_ptr)) *___op = (_oldp), ___o = *___op, ___r; \
	___r = arch_cmpxchg_relaxed((_ptr), ___o, (_new)); \
	if (unlikely(___r != ___o)) \
		*___op = ___r; \
	likely(___r == ___o); \
})
#endif /* arch_try_cmpxchg_relaxed */

#else /* arch_try_cmpxchg_relaxed */

#ifndef arch_try_cmpxchg_acquire
#define arch_try_cmpxchg_acquire(...) \
	__atomic_op_acquire(arch_try_cmpxchg, __VA_ARGS__)
#endif

#ifndef arch_try_cmpxchg_release
#define arch_try_cmpxchg_release(...) \
	__atomic_op_release(arch_try_cmpxchg, __VA_ARGS__)
#endif

#ifndef arch_try_cmpxchg
#define arch_try_cmpxchg(...) \
	__atomic_op_fence(arch_try_cmpxchg, __VA_ARGS__)
#endif

#endif /* arch_try_cmpxchg_relaxed */

#ifndef arch_atomic_read_acquire
static __always_inline int
arch_atomic_read_acquire(const atomic_t *v)
{
	return smp_load_acquire(&(v)->counter);
}
#define arch_atomic_read_acquire arch_atomic_read_acquire
#endif

#ifndef arch_atomic_set_release
static __always_inline void
arch_atomic_set_release(atomic_t *v, int i)
{
	smp_store_release(&(v)->counter, i);
}
#define arch_atomic_set_release arch_atomic_set_release
#endif

#ifndef arch_atomic_add_return_relaxed
#define arch_atomic_add_return_acquire arch_atomic_add_return
#define arch_atomic_add_return_release arch_atomic_add_return
#define arch_atomic_add_return_relaxed arch_atomic_add_return
#else /* arch_atomic_add_return_relaxed */

#ifndef arch_atomic_add_return_acquire
static __always_inline int
arch_atomic_add_return_acquire(int i, atomic_t *v)
{
	int ret = arch_atomic_add_return_relaxed(i, v);
	__atomic_acquire_fence();
	return ret;
}
#define arch_atomic_add_return_acquire arch_atomic_add_return_acquire
#endif

#ifndef arch_atomic_add_return_release
static __always_inline int
arch_atomic_add_return_release(int i, atomic_t *v)
{
	__atomic_release_fence();
	return arch_atomic_add_return_relaxed(i, v);
}
#define arch_atomic_add_return_release arch_atomic_add_return_release
#endif

#ifndef arch_atomic_add_return
static __always_inline int
arch_atomic_add_return(int i, atomic_t *v)
{
	int ret;
	__atomic_pre_full_fence();
	ret = arch_atomic_add_return_relaxed(i, v);
	__atomic_post_full_fence();
	return ret;
}
#define arch_atomic_add_return arch_atomic_add_return
#endif

#endif /* arch_atomic_add_return_relaxed */

#ifndef arch_atomic_fetch_add_relaxed
#define arch_atomic_fetch_add_acquire arch_atomic_fetch_add
#define arch_atomic_fetch_add_release arch_atomic_fetch_add
#define arch_atomic_fetch_add_relaxed arch_atomic_fetch_add
#else /* arch_atomic_fetch_add_relaxed */

#ifndef arch_atomic_fetch_add_acquire
static __always_inline int
arch_atomic_fetch_add_acquire(int i, atomic_t *v)
{
	int ret = arch_atomic_fetch_add_relaxed(i, v);
	__atomic_acquire_fence();
	return ret;
}
#define arch_atomic_fetch_add_acquire arch_atomic_fetch_add_acquire
#endif

#ifndef arch_atomic_fetch_add_release
static __always_inline int
arch_atomic_fetch_add_release(int i, atomic_t *v)
{
	__atomic_release_fence();
	return arch_atomic_fetch_add_relaxed(i, v);
}
#define arch_atomic_fetch_add_release arch_atomic_fetch_add_release
#endif

#ifndef arch_atomic_fetch_add
static __always_inline int
arch_atomic_fetch_add(int i, atomic_t *v)
{
	int ret;
	__atomic_pre_full_fence();
	ret = arch_atomic_fetch_add_relaxed(i, v);
	__atomic_post_full_fence();
	return ret;
}
#define arch_atomic_fetch_add arch_atomic_fetch_add
#endif

#endif /* arch_atomic_fetch_add_relaxed */

#ifndef arch_atomic_sub_return_relaxed
#define arch_atomic_sub_return_acquire arch_atomic_sub_return
#define arch_atomic_sub_return_release arch_atomic_sub_return
#define arch_atomic_sub_return_relaxed arch_atomic_sub_return
#else /* arch_atomic_sub_return_relaxed */

#ifndef arch_atomic_sub_return_acquire
static __always_inline int
arch_atomic_sub_return_acquire(int i, atomic_t *v)
{
	int ret = arch_atomic_sub_return_relaxed(i, v);
	__atomic_acquire_fence();
	return ret;
}
#define arch_atomic_sub_return_acquire arch_atomic_sub_return_acquire
#endif

#ifndef arch_atomic_sub_return_release
static __always_inline int
arch_atomic_sub_return_release(int i, atomic_t *v)
{
	__atomic_release_fence();
	return arch_atomic_sub_return_relaxed(i, v);
}
#define arch_atomic_sub_return_release arch_atomic_sub_return_release
#endif

#ifndef arch_atomic_sub_return
static __always_inline int
arch_atomic_sub_return(int i, atomic_t *v)
{
	int ret;
	__atomic_pre_full_fence();
	ret = arch_atomic_sub_return_relaxed(i, v);
	__atomic_post_full_fence();
	return ret;
}
#define arch_atomic_sub_return arch_atomic_sub_return
#endif

#endif /* arch_atomic_sub_return_relaxed */

#ifndef arch_atomic_fetch_sub_relaxed
#define arch_atomic_fetch_sub_acquire arch_atomic_fetch_sub
#define arch_atomic_fetch_sub_release arch_atomic_fetch_sub
#define arch_atomic_fetch_sub_relaxed arch_atomic_fetch_sub
#else /* arch_atomic_fetch_sub_relaxed */

#ifndef arch_atomic_fetch_sub_acquire
static __always_inline int
arch_atomic_fetch_sub_acquire(int i, atomic_t *v)
{
	int ret = arch_atomic_fetch_sub_relaxed(i, v);
	__atomic_acquire_fence();
	return ret;
}
#define arch_atomic_fetch_sub_acquire arch_atomic_fetch_sub_acquire
#endif

#ifndef arch_atomic_fetch_sub_release
static __always_inline int
arch_atomic_fetch_sub_release(int i, atomic_t *v)
{
	__atomic_release_fence();
	return arch_atomic_fetch_sub_relaxed(i, v);
}
#define arch_atomic_fetch_sub_release arch_atomic_fetch_sub_release
#endif

#ifndef arch_atomic_fetch_sub
static __always_inline int
arch_atomic_fetch_sub(int i, atomic_t *v)
{
	int ret;
	__atomic_pre_full_fence();
	ret = arch_atomic_fetch_sub_relaxed(i, v);
	__atomic_post_full_fence();
	return ret;
}
#define arch_atomic_fetch_sub arch_atomic_fetch_sub
#endif

#endif /* arch_atomic_fetch_sub_relaxed */

#ifndef arch_atomic_inc
static __always_inline void
arch_atomic_inc(atomic_t *v)
{
	arch_atomic_add(1, v);
}
#define arch_atomic_inc arch_atomic_inc
#endif

#ifndef arch_atomic_inc_return_relaxed
#ifdef arch_atomic_inc_return
#define arch_atomic_inc_return_acquire arch_atomic_inc_return
#define arch_atomic_inc_return_release arch_atomic_inc_return
#define arch_atomic_inc_return_relaxed arch_atomic_inc_return
#endif /* arch_atomic_inc_return */

#ifndef arch_atomic_inc_return
static __always_inline int
arch_atomic_inc_return(atomic_t *v)
{
	return arch_atomic_add_return(1, v);
}
#define arch_atomic_inc_return arch_atomic_inc_return
#endif

#ifndef arch_atomic_inc_return_acquire
static __always_inline int
arch_atomic_inc_return_acquire(atomic_t *v)
{
	return arch_atomic_add_return_acquire(1, v);
}
#define arch_atomic_inc_return_acquire arch_atomic_inc_return_acquire
#endif

#ifndef arch_atomic_inc_return_release
static __always_inline int
arch_atomic_inc_return_release(atomic_t *v)
{
	return arch_atomic_add_return_release(1, v);
}
#define arch_atomic_inc_return_release arch_atomic_inc_return_release
#endif

#ifndef arch_atomic_inc_return_relaxed
static __always_inline int
arch_atomic_inc_return_relaxed(atomic_t *v)
{
	return arch_atomic_add_return_relaxed(1, v);
}
#define arch_atomic_inc_return_relaxed arch_atomic_inc_return_relaxed
#endif

#else /* arch_atomic_inc_return_relaxed */

#ifndef arch_atomic_inc_return_acquire
static __always_inline int
arch_atomic_inc_return_acquire(atomic_t *v)
{
	int ret = arch_atomic_inc_return_relaxed(v);
	__atomic_acquire_fence();
	return ret;
}
#define arch_atomic_inc_return_acquire arch_atomic_inc_return_acquire
#endif

#ifndef arch_atomic_inc_return_release
static __always_inline int
arch_atomic_inc_return_release(atomic_t *v)
{
	__atomic_release_fence();
	return arch_atomic_inc_return_relaxed(v);
}
#define arch_atomic_inc_return_release arch_atomic_inc_return_release
#endif

#ifndef arch_atomic_inc_return
static __always_inline int
arch_atomic_inc_return(atomic_t *v)
{
	int ret;
	__atomic_pre_full_fence();
	ret = arch_atomic_inc_return_relaxed(v);
	__atomic_post_full_fence();
	return ret;
}
#define arch_atomic_inc_return arch_atomic_inc_return
#endif

#endif /* arch_atomic_inc_return_relaxed */

#ifndef arch_atomic_fetch_inc_relaxed
#ifdef arch_atomic_fetch_inc
#define arch_atomic_fetch_inc_acquire arch_atomic_fetch_inc
#define arch_atomic_fetch_inc_release arch_atomic_fetch_inc
#define arch_atomic_fetch_inc_relaxed arch_atomic_fetch_inc
#endif /* arch_atomic_fetch_inc */

#ifndef arch_atomic_fetch_inc
static __always_inline int
arch_atomic_fetch_inc(atomic_t *v)
{
	return arch_atomic_fetch_add(1, v);
}
#define arch_atomic_fetch_inc arch_atomic_fetch_inc
#endif

#ifndef arch_atomic_fetch_inc_acquire
static __always_inline int
arch_atomic_fetch_inc_acquire(atomic_t *v)
{
	return arch_atomic_fetch_add_acquire(1, v);
}
#define arch_atomic_fetch_inc_acquire arch_atomic_fetch_inc_acquire
#endif

#ifndef arch_atomic_fetch_inc_release
static __always_inline int
arch_atomic_fetch_inc_release(atomic_t *v)
{
	return arch_atomic_fetch_add_release(1, v);
}
#define arch_atomic_fetch_inc_release arch_atomic_fetch_inc_release
#endif

#ifndef arch_atomic_fetch_inc_relaxed
static __always_inline int
arch_atomic_fetch_inc_relaxed(atomic_t *v)
{
	return arch_atomic_fetch_add_relaxed(1, v);
}
#define arch_atomic_fetch_inc_relaxed arch_atomic_fetch_inc_relaxed
#endif

#else /* arch_atomic_fetch_inc_relaxed */

#ifndef arch_atomic_fetch_inc_acquire
static __always_inline int
arch_atomic_fetch_inc_acquire(atomic_t *v)
{
	int ret = arch_atomic_fetch_inc_relaxed(v);
	__atomic_acquire_fence();
	return ret;
}
#define arch_atomic_fetch_inc_acquire arch_atomic_fetch_inc_acquire
#endif

#ifndef arch_atomic_fetch_inc_release
static __always_inline int
arch_atomic_fetch_inc_release(atomic_t *v)
{
	__atomic_release_fence();
	return arch_atomic_fetch_inc_relaxed(v);
}
#define arch_atomic_fetch_inc_release arch_atomic_fetch_inc_release
#endif

#ifndef arch_atomic_fetch_inc
static __always_inline int
arch_atomic_fetch_inc(atomic_t *v)
{
	int ret;
	__atomic_pre_full_fence();
	ret = arch_atomic_fetch_inc_relaxed(v);
	__atomic_post_full_fence();
	return ret;
}
#define arch_atomic_fetch_inc arch_atomic_fetch_inc
#endif

#endif /* arch_atomic_fetch_inc_relaxed */

#ifndef arch_atomic_dec
static __always_inline void
arch_atomic_dec(atomic_t *v)
{
	arch_atomic_sub(1, v);
}
#define arch_atomic_dec arch_atomic_dec
#endif

#ifndef arch_atomic_dec_return_relaxed
#ifdef arch_atomic_dec_return
#define arch_atomic_dec_return_acquire arch_atomic_dec_return
#define arch_atomic_dec_return_release arch_atomic_dec_return
#define arch_atomic_dec_return_relaxed arch_atomic_dec_return
#endif /* arch_atomic_dec_return */

#ifndef arch_atomic_dec_return
static __always_inline int
arch_atomic_dec_return(atomic_t *v)
{
	return arch_atomic_sub_return(1, v);
}
#define arch_atomic_dec_return arch_atomic_dec_return
#endif

#ifndef arch_atomic_dec_return_acquire
static __always_inline int
arch_atomic_dec_return_acquire(atomic_t *v)
{
	return arch_atomic_sub_return_acquire(1, v);
}
#define arch_atomic_dec_return_acquire arch_atomic_dec_return_acquire
#endif

#ifndef arch_atomic_dec_return_release
static __always_inline int
arch_atomic_dec_return_release(atomic_t *v)
{
	return arch_atomic_sub_return_release(1, v);
}
#define arch_atomic_dec_return_release arch_atomic_dec_return_release
#endif

#ifndef arch_atomic_dec_return_relaxed
static __always_inline int
arch_atomic_dec_return_relaxed(atomic_t *v)
{
	return arch_atomic_sub_return_relaxed(1, v);
}
#define arch_atomic_dec_return_relaxed arch_atomic_dec_return_relaxed
#endif

#else /* arch_atomic_dec_return_relaxed */

#ifndef arch_atomic_dec_return_acquire
static __always_inline int
arch_atomic_dec_return_acquire(atomic_t *v)
{
	int ret = arch_atomic_dec_return_relaxed(v);
	__atomic_acquire_fence();
	return ret;
}
#define arch_atomic_dec_return_acquire arch_atomic_dec_return_acquire
#endif

#ifndef arch_atomic_dec_return_release
static __always_inline int
arch_atomic_dec_return_release(atomic_t *v)
{
	__atomic_release_fence();
	return arch_atomic_dec_return_relaxed(v);
}
#define arch_atomic_dec_return_release arch_atomic_dec_return_release
#endif

#ifndef arch_atomic_dec_return
static __always_inline int
arch_atomic_dec_return(atomic_t *v)
{
	int ret;
	__atomic_pre_full_fence();
	ret = arch_atomic_dec_return_relaxed(v);
	__atomic_post_full_fence();
	return ret;
}
#define arch_atomic_dec_return arch_atomic_dec_return
#endif

#endif /* arch_atomic_dec_return_relaxed */

#ifndef arch_atomic_fetch_dec_relaxed
#ifdef arch_atomic_fetch_dec
#define arch_atomic_fetch_dec_acquire arch_atomic_fetch_dec
#define arch_atomic_fetch_dec_release arch_atomic_fetch_dec
#define arch_atomic_fetch_dec_relaxed arch_atomic_fetch_dec
#endif /* arch_atomic_fetch_dec */

#ifndef arch_atomic_fetch_dec
static __always_inline int
arch_atomic_fetch_dec(atomic_t *v)
{
	return arch_atomic_fetch_sub(1, v);
}
#define arch_atomic_fetch_dec arch_atomic_fetch_dec
#endif

#ifndef arch_atomic_fetch_dec_acquire
static __always_inline int
arch_atomic_fetch_dec_acquire(atomic_t *v)
{
	return arch_atomic_fetch_sub_acquire(1, v);
}
#define arch_atomic_fetch_dec_acquire arch_atomic_fetch_dec_acquire
#endif

#ifndef arch_atomic_fetch_dec_release
static __always_inline int
arch_atomic_fetch_dec_release(atomic_t *v)
{
	return arch_atomic_fetch_sub_release(1, v);
}
#define arch_atomic_fetch_dec_release arch_atomic_fetch_dec_release
#endif

#ifndef arch_atomic_fetch_dec_relaxed
static __always_inline int
arch_atomic_fetch_dec_relaxed(atomic_t *v)
{
	return arch_atomic_fetch_sub_relaxed(1, v);
}
#define arch_atomic_fetch_dec_relaxed arch_atomic_fetch_dec_relaxed
#endif

#else /* arch_atomic_fetch_dec_relaxed */

#ifndef arch_atomic_fetch_dec_acquire
static __always_inline int
arch_atomic_fetch_dec_acquire(atomic_t *v)
{
	int ret = arch_atomic_fetch_dec_relaxed(v);
	__atomic_acquire_fence();
	return ret;
}
#define arch_atomic_fetch_dec_acquire arch_atomic_fetch_dec_acquire
#endif

#ifndef arch_atomic_fetch_dec_release
static __always_inline int
arch_atomic_fetch_dec_release(atomic_t *v)
{
	__atomic_release_fence();
	return arch_atomic_fetch_dec_relaxed(v);
}
#define arch_atomic_fetch_dec_release arch_atomic_fetch_dec_release
#endif

#ifndef arch_atomic_fetch_dec
static __always_inline int
arch_atomic_fetch_dec(atomic_t *v)
{
	int ret;
	__atomic_pre_full_fence();
	ret = arch_atomic_fetch_dec_relaxed(v);
	__atomic_post_full_fence();
	return ret;
}
#define arch_atomic_fetch_dec arch_atomic_fetch_dec
#endif

#endif /* arch_atomic_fetch_dec_relaxed */

#ifndef arch_atomic_fetch_and_relaxed
#define arch_atomic_fetch_and_acquire arch_atomic_fetch_and
#define arch_atomic_fetch_and_release arch_atomic_fetch_and
#define arch_atomic_fetch_and_relaxed arch_atomic_fetch_and
#else /* arch_atomic_fetch_and_relaxed */

#ifndef arch_atomic_fetch_and_acquire
static __always_inline int
arch_atomic_fetch_and_acquire(int i, atomic_t *v)
{
	int ret = arch_atomic_fetch_and_relaxed(i, v);
	__atomic_acquire_fence();
	return ret;
}
#define arch_atomic_fetch_and_acquire arch_atomic_fetch_and_acquire
#endif

#ifndef arch_atomic_fetch_and_release
static __always_inline int
arch_atomic_fetch_and_release(int i, atomic_t *v)
{
	__atomic_release_fence();
	return arch_atomic_fetch_and_relaxed(i, v);
}
#define arch_atomic_fetch_and_release arch_atomic_fetch_and_release
#endif

#ifndef arch_atomic_fetch_and
static __always_inline int
arch_atomic_fetch_and(int i, atomic_t *v)
{
	int ret;
	__atomic_pre_full_fence();
	ret = arch_atomic_fetch_and_relaxed(i, v);
	__atomic_post_full_fence();
	return ret;
}
#define arch_atomic_fetch_and arch_atomic_fetch_and
#endif

#endif /* arch_atomic_fetch_and_relaxed */

#ifndef arch_atomic_andnot
static __always_inline void
arch_atomic_andnot(int i, atomic_t *v)
{
	arch_atomic_and(~i, v);
}
#define arch_atomic_andnot arch_atomic_andnot
#endif

#ifndef arch_atomic_fetch_andnot_relaxed
#ifdef arch_atomic_fetch_andnot
#define arch_atomic_fetch_andnot_acquire arch_atomic_fetch_andnot
#define arch_atomic_fetch_andnot_release arch_atomic_fetch_andnot
#define arch_atomic_fetch_andnot_relaxed arch_atomic_fetch_andnot
#endif /* arch_atomic_fetch_andnot */

#ifndef arch_atomic_fetch_andnot
static __always_inline int
arch_atomic_fetch_andnot(int i, atomic_t *v)
{
	return arch_atomic_fetch_and(~i, v);
}
#define arch_atomic_fetch_andnot arch_atomic_fetch_andnot
#endif

#ifndef arch_atomic_fetch_andnot_acquire
static __always_inline int
arch_atomic_fetch_andnot_acquire(int i, atomic_t *v)
{
	return arch_atomic_fetch_and_acquire(~i, v);
}
#define arch_atomic_fetch_andnot_acquire arch_atomic_fetch_andnot_acquire
#endif

#ifndef arch_atomic_fetch_andnot_release
static __always_inline int
arch_atomic_fetch_andnot_release(int i, atomic_t *v)
{
	return arch_atomic_fetch_and_release(~i, v);
}
#define arch_atomic_fetch_andnot_release arch_atomic_fetch_andnot_release
#endif

#ifndef arch_atomic_fetch_andnot_relaxed
static __always_inline int
arch_atomic_fetch_andnot_relaxed(int i, atomic_t *v)
{
	return arch_atomic_fetch_and_relaxed(~i, v);
}
#define arch_atomic_fetch_andnot_relaxed arch_atomic_fetch_andnot_relaxed
#endif

#else /* arch_atomic_fetch_andnot_relaxed */

#ifndef arch_atomic_fetch_andnot_acquire
static __always_inline int
arch_atomic_fetch_andnot_acquire(int i, atomic_t *v)
{
	int ret = arch_atomic_fetch_andnot_relaxed(i, v);
	__atomic_acquire_fence();
	return ret;
}
#define arch_atomic_fetch_andnot_acquire arch_atomic_fetch_andnot_acquire
#endif

#ifndef arch_atomic_fetch_andnot_release
static __always_inline int
arch_atomic_fetch_andnot_release(int i, atomic_t *v)
{
	__atomic_release_fence();
	return arch_atomic_fetch_andnot_relaxed(i, v);
}
#define arch_atomic_fetch_andnot_release arch_atomic_fetch_andnot_release
#endif

#ifndef arch_atomic_fetch_andnot
static __always_inline int
arch_atomic_fetch_andnot(int i, atomic_t *v)
{
	int ret;
	__atomic_pre_full_fence();
	ret = arch_atomic_fetch_andnot_relaxed(i, v);
	__atomic_post_full_fence();
	return ret;
}
#define arch_atomic_fetch_andnot arch_atomic_fetch_andnot
#endif

#endif /* arch_atomic_fetch_andnot_relaxed */

#ifndef arch_atomic_fetch_or_relaxed
#define arch_atomic_fetch_or_acquire arch_atomic_fetch_or
#define arch_atomic_fetch_or_release arch_atomic_fetch_or
#define arch_atomic_fetch_or_relaxed arch_atomic_fetch_or
#else /* arch_atomic_fetch_or_relaxed */

#ifndef arch_atomic_fetch_or_acquire
static __always_inline int
arch_atomic_fetch_or_acquire(int i, atomic_t *v)
{
	int ret = arch_atomic_fetch_or_relaxed(i, v);
	__atomic_acquire_fence();
	return ret;
}
#define arch_atomic_fetch_or_acquire arch_atomic_fetch_or_acquire
#endif

#ifndef arch_atomic_fetch_or_release
static __always_inline int
arch_atomic_fetch_or_release(int i, atomic_t *v)
{
	__atomic_release_fence();
	return arch_atomic_fetch_or_relaxed(i, v);
}
#define arch_atomic_fetch_or_release arch_atomic_fetch_or_release
#endif

#ifndef arch_atomic_fetch_or
static __always_inline int
arch_atomic_fetch_or(int i, atomic_t *v)
{
	int ret;
	__atomic_pre_full_fence();
	ret = arch_atomic_fetch_or_relaxed(i, v);
	__atomic_post_full_fence();
	return ret;
}
#define arch_atomic_fetch_or arch_atomic_fetch_or
#endif

#endif /* arch_atomic_fetch_or_relaxed */

#ifndef arch_atomic_fetch_xor_relaxed
#define arch_atomic_fetch_xor_acquire arch_atomic_fetch_xor
#define arch_atomic_fetch_xor_release arch_atomic_fetch_xor
#define arch_atomic_fetch_xor_relaxed arch_atomic_fetch_xor
#else /* arch_atomic_fetch_xor_relaxed */

#ifndef arch_atomic_fetch_xor_acquire
static __always_inline int
arch_atomic_fetch_xor_acquire(int i, atomic_t *v)
{
	int ret = arch_atomic_fetch_xor_relaxed(i, v);
	__atomic_acquire_fence();
	return ret;
}
#define arch_atomic_fetch_xor_acquire arch_atomic_fetch_xor_acquire
#endif

#ifndef arch_atomic_fetch_xor_release
static __always_inline int
arch_atomic_fetch_xor_release(int i, atomic_t *v)
{
	__atomic_release_fence();
	return arch_atomic_fetch_xor_relaxed(i, v);
}
#define arch_atomic_fetch_xor_release arch_atomic_fetch_xor_release
#endif

#ifndef arch_atomic_fetch_xor
static __always_inline int
arch_atomic_fetch_xor(int i, atomic_t *v)
{
	int ret;
	__atomic_pre_full_fence();
	ret = arch_atomic_fetch_xor_relaxed(i, v);
	__atomic_post_full_fence();
	return ret;
}
#define arch_atomic_fetch_xor arch_atomic_fetch_xor
#endif

#endif /* arch_atomic_fetch_xor_relaxed */

#ifndef arch_atomic_xchg_relaxed
#define arch_atomic_xchg_acquire arch_atomic_xchg
#define arch_atomic_xchg_release arch_atomic_xchg
#define arch_atomic_xchg_relaxed arch_atomic_xchg
#else /* arch_atomic_xchg_relaxed */

#ifndef arch_atomic_xchg_acquire
static __always_inline int
arch_atomic_xchg_acquire(atomic_t *v, int i)
{
	int ret = arch_atomic_xchg_relaxed(v, i);
	__atomic_acquire_fence();
	return ret;
}
#define arch_atomic_xchg_acquire arch_atomic_xchg_acquire
#endif

#ifndef arch_atomic_xchg_release
static __always_inline int
arch_atomic_xchg_release(atomic_t *v, int i)
{
	__atomic_release_fence();
	return arch_atomic_xchg_relaxed(v, i);
}
#define arch_atomic_xchg_release arch_atomic_xchg_release
#endif

#ifndef arch_atomic_xchg
static __always_inline int
arch_atomic_xchg(atomic_t *v, int i)
{
	int ret;
	__atomic_pre_full_fence();
	ret = arch_atomic_xchg_relaxed(v, i);
	__atomic_post_full_fence();
	return ret;
}
#define arch_atomic_xchg arch_atomic_xchg
#endif

#endif /* arch_atomic_xchg_relaxed */

#ifndef arch_atomic_cmpxchg_relaxed
#define arch_atomic_cmpxchg_acquire arch_atomic_cmpxchg
#define arch_atomic_cmpxchg_release arch_atomic_cmpxchg
#define arch_atomic_cmpxchg_relaxed arch_atomic_cmpxchg
#else /* arch_atomic_cmpxchg_relaxed */

#ifndef arch_atomic_cmpxchg_acquire
static __always_inline int
arch_atomic_cmpxchg_acquire(atomic_t *v, int old, int new)
{
	int ret = arch_atomic_cmpxchg_relaxed(v, old, new);
	__atomic_acquire_fence();
	return ret;
}
#define arch_atomic_cmpxchg_acquire arch_atomic_cmpxchg_acquire
#endif

#ifndef arch_atomic_cmpxchg_release
static __always_inline int
arch_atomic_cmpxchg_release(atomic_t *v, int old, int new)
{
	__atomic_release_fence();
	return arch_atomic_cmpxchg_relaxed(v, old, new);
}
#define arch_atomic_cmpxchg_release arch_atomic_cmpxchg_release
#endif

#ifndef arch_atomic_cmpxchg
static __always_inline int
arch_atomic_cmpxchg(atomic_t *v, int old, int new)
{
	int ret;
	__atomic_pre_full_fence();
	ret = arch_atomic_cmpxchg_relaxed(v, old, new);
	__atomic_post_full_fence();
	return ret;
}
#define arch_atomic_cmpxchg arch_atomic_cmpxchg
#endif

#endif /* arch_atomic_cmpxchg_relaxed */

#ifndef arch_atomic_try_cmpxchg_relaxed
#ifdef arch_atomic_try_cmpxchg
#define arch_atomic_try_cmpxchg_acquire arch_atomic_try_cmpxchg
#define arch_atomic_try_cmpxchg_release arch_atomic_try_cmpxchg
#define arch_atomic_try_cmpxchg_relaxed arch_atomic_try_cmpxchg
#endif /* arch_atomic_try_cmpxchg */

#ifndef arch_atomic_try_cmpxchg
static __always_inline bool
arch_atomic_try_cmpxchg(atomic_t *v, int *old, int new)
{
	int r, o = *old;
	r = arch_atomic_cmpxchg(v, o, new);
	if (unlikely(r != o))
		*old = r;
	return likely(r == o);
}
#define arch_atomic_try_cmpxchg arch_atomic_try_cmpxchg
#endif

#ifndef arch_atomic_try_cmpxchg_acquire
static __always_inline bool
arch_atomic_try_cmpxchg_acquire(atomic_t *v, int *old, int new)
{
	int r, o = *old;
	r = arch_atomic_cmpxchg_acquire(v, o, new);
	if (unlikely(r != o))
		*old = r;
	return likely(r == o);
}
#define arch_atomic_try_cmpxchg_acquire arch_atomic_try_cmpxchg_acquire
#endif

#ifndef arch_atomic_try_cmpxchg_release
static __always_inline bool
arch_atomic_try_cmpxchg_release(atomic_t *v, int *old, int new)
{
	int r, o = *old;
	r = arch_atomic_cmpxchg_release(v, o, new);
	if (unlikely(r != o))
		*old = r;
	return likely(r == o);
}
#define arch_atomic_try_cmpxchg_release arch_atomic_try_cmpxchg_release
#endif

#ifndef arch_atomic_try_cmpxchg_relaxed
static __always_inline bool
arch_atomic_try_cmpxchg_relaxed(atomic_t *v, int *old, int new)
{
	int r, o = *old;
	r = arch_atomic_cmpxchg_relaxed(v, o, new);
	if (unlikely(r != o))
		*old = r;
	return likely(r == o);
}
#define arch_atomic_try_cmpxchg_relaxed arch_atomic_try_cmpxchg_relaxed
#endif

#else /* arch_atomic_try_cmpxchg_relaxed */

#ifndef arch_atomic_try_cmpxchg_acquire
static __always_inline bool
arch_atomic_try_cmpxchg_acquire(atomic_t *v, int *old, int new)
{
	bool ret = arch_atomic_try_cmpxchg_relaxed(v, old, new);
	__atomic_acquire_fence();
	return ret;
}
#define arch_atomic_try_cmpxchg_acquire arch_atomic_try_cmpxchg_acquire
#endif

#ifndef arch_atomic_try_cmpxchg_release
static __always_inline bool
arch_atomic_try_cmpxchg_release(atomic_t *v, int *old, int new)
{
	__atomic_release_fence();
	return arch_atomic_try_cmpxchg_relaxed(v, old, new);
}
#define arch_atomic_try_cmpxchg_release arch_atomic_try_cmpxchg_release
#endif

#ifndef arch_atomic_try_cmpxchg
static __always_inline bool
arch_atomic_try_cmpxchg(atomic_t *v, int *old, int new)
{
	bool ret;
	__atomic_pre_full_fence();
	ret = arch_atomic_try_cmpxchg_relaxed(v, old, new);
	__atomic_post_full_fence();
	return ret;
}
#define arch_atomic_try_cmpxchg arch_atomic_try_cmpxchg
#endif

#endif /* arch_atomic_try_cmpxchg_relaxed */

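/*
 * The conditional and test operations below are only generated when the
 * architecture does not provide its own optimized version; they are built
 * from the ops defined above.
 */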
#ifndef arch_atomic_sub_and_test
/**
 * arch_atomic_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically subtracts @i from @v and returns
 * true if the result is zero, or false for all
 * other cases.
 */
static __always_inline bool
arch_atomic_sub_and_test(int i, atomic_t *v)
{
	return arch_atomic_sub_return(i, v) == 0;
}
#define arch_atomic_sub_and_test arch_atomic_sub_and_test
#endif

#ifndef arch_atomic_dec_and_test
/**
 * arch_atomic_dec_and_test - decrement and test
 * @v: pointer of type atomic_t
 *
 * Atomically decrements @v by 1 and
 * returns true if the result is 0, or false for all other
 * cases.
 */
static __always_inline bool
arch_atomic_dec_and_test(atomic_t *v)
{
	return arch_atomic_dec_return(v) == 0;
}
#define arch_atomic_dec_and_test arch_atomic_dec_and_test
#endif

#ifndef arch_atomic_inc_and_test
/**
 * arch_atomic_inc_and_test - increment and test
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
static __always_inline bool
arch_atomic_inc_and_test(atomic_t *v)
{
	return arch_atomic_inc_return(v) == 0;
}
#define arch_atomic_inc_and_test arch_atomic_inc_and_test
#endif

#ifndef arch_atomic_add_negative
/**
 * arch_atomic_add_negative - add and test if negative
 * @i: integer value to add
 * @v: pointer of type atomic_t
 *
 * Atomically adds @i to @v and returns true
 * if the result is negative, or false when
 * result is greater than or equal to zero.
 */
static __always_inline bool
arch_atomic_add_negative(int i, atomic_t *v)
{
	return arch_atomic_add_return(i, v) < 0;
}
#define arch_atomic_add_negative arch_atomic_add_negative
#endif

#ifndef arch_atomic_fetch_add_unless
/**
 * arch_atomic_fetch_add_unless - add unless the number is already a given value
 * @v: pointer of type atomic_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, so long as @v was not already @u.
 * Returns original value of @v
 */
static __always_inline int
arch_atomic_fetch_add_unless(atomic_t *v, int a, int u)
{
	int c = arch_atomic_read(v);

	do {
		if (unlikely(c == u))
			break;
	} while (!arch_atomic_try_cmpxchg(v, &c, c + a));

	return c;
}
#define arch_atomic_fetch_add_unless arch_atomic_fetch_add_unless
#endif

#ifndef arch_atomic_add_unless
/**
 * arch_atomic_add_unless - add unless the number is already a given value
 * @v: pointer of type atomic_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, if @v was not already @u.
 * Returns true if the addition was done.
 */
static __always_inline bool
arch_atomic_add_unless(atomic_t *v, int a, int u)
{
	return arch_atomic_fetch_add_unless(v, a, u) != u;
}
#define arch_atomic_add_unless arch_atomic_add_unless
#endif

#ifndef arch_atomic_inc_not_zero
/**
 * arch_atomic_inc_not_zero - increment unless the number is zero
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1, if @v is non-zero.
 * Returns true if the increment was done.
 */
static __always_inline bool
arch_atomic_inc_not_zero(atomic_t *v)
{
	return arch_atomic_add_unless(v, 1, 0);
}
#define arch_atomic_inc_not_zero arch_atomic_inc_not_zero
#endif

#ifndef arch_atomic_inc_unless_negative
static __always_inline bool
arch_atomic_inc_unless_negative(atomic_t *v)
{
	int c = arch_atomic_read(v);

	do {
		if (unlikely(c < 0))
			return false;
	} while (!arch_atomic_try_cmpxchg(v, &c, c + 1));

	return true;
}
#define arch_atomic_inc_unless_negative arch_atomic_inc_unless_negative
#endif

#ifndef arch_atomic_dec_unless_positive
static __always_inline bool
arch_atomic_dec_unless_positive(atomic_t *v)
{
	int c = arch_atomic_read(v);

	do {
		if (unlikely(c > 0))
			return false;
	} while (!arch_atomic_try_cmpxchg(v, &c, c - 1));

	return true;
}
#define arch_atomic_dec_unless_positive arch_atomic_dec_unless_positive
#endif

#ifndef arch_atomic_dec_if_positive
static __always_inline int
arch_atomic_dec_if_positive(atomic_t *v)
{
	int dec, c = arch_atomic_read(v);

	do {
		dec = c - 1;
		if (unlikely(dec < 0))
			break;
	} while (!arch_atomic_try_cmpxchg(v, &c, dec));

	return dec;
}
#define arch_atomic_dec_if_positive arch_atomic_dec_if_positive
#endif

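/*
 * Architectures without native 64-bit atomics pull in the generic
 * atomic64 implementation here; the 64-bit fallbacks below then follow
 * the same pattern as the 32-bit ones above, operating on atomic64_t
 * and s64 instead of atomic_t and int.
 */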
#ifdef CONFIG_GENERIC_ATOMIC64
#include <asm-generic/atomic64.h>
#endif

#ifndef arch_atomic64_read_acquire
static __always_inline s64
arch_atomic64_read_acquire(const atomic64_t *v)
{
	return smp_load_acquire(&(v)->counter);
}
#define arch_atomic64_read_acquire arch_atomic64_read_acquire
#endif

#ifndef arch_atomic64_set_release
static __always_inline void
arch_atomic64_set_release(atomic64_t *v, s64 i)
{
	smp_store_release(&(v)->counter, i);
}
#define arch_atomic64_set_release arch_atomic64_set_release
#endif

#ifndef arch_atomic64_add_return_relaxed
#define arch_atomic64_add_return_acquire arch_atomic64_add_return
#define arch_atomic64_add_return_release arch_atomic64_add_return
#define arch_atomic64_add_return_relaxed arch_atomic64_add_return
#else /* arch_atomic64_add_return_relaxed */

#ifndef arch_atomic64_add_return_acquire
static __always_inline s64
arch_atomic64_add_return_acquire(s64 i, atomic64_t *v)
{
	s64 ret = arch_atomic64_add_return_relaxed(i, v);
	__atomic_acquire_fence();
	return ret;
}
#define arch_atomic64_add_return_acquire arch_atomic64_add_return_acquire
#endif

#ifndef arch_atomic64_add_return_release
static __always_inline s64
arch_atomic64_add_return_release(s64 i, atomic64_t *v)
{
	__atomic_release_fence();
	return arch_atomic64_add_return_relaxed(i, v);
}
#define arch_atomic64_add_return_release arch_atomic64_add_return_release
#endif

#ifndef arch_atomic64_add_return
static __always_inline s64
arch_atomic64_add_return(s64 i, atomic64_t *v)
{
	s64 ret;
	__atomic_pre_full_fence();
	ret = arch_atomic64_add_return_relaxed(i, v);
	__atomic_post_full_fence();
	return ret;
}
#define arch_atomic64_add_return arch_atomic64_add_return
#endif

#endif /* arch_atomic64_add_return_relaxed */

#ifndef arch_atomic64_fetch_add_relaxed
#define arch_atomic64_fetch_add_acquire arch_atomic64_fetch_add
#define arch_atomic64_fetch_add_release arch_atomic64_fetch_add
#define arch_atomic64_fetch_add_relaxed arch_atomic64_fetch_add
#else /* arch_atomic64_fetch_add_relaxed */

#ifndef arch_atomic64_fetch_add_acquire
static __always_inline s64
arch_atomic64_fetch_add_acquire(s64 i, atomic64_t *v)
{
	s64 ret = arch_atomic64_fetch_add_relaxed(i, v);
	__atomic_acquire_fence();
	return ret;
}
#define arch_atomic64_fetch_add_acquire arch_atomic64_fetch_add_acquire
#endif

#ifndef arch_atomic64_fetch_add_release
static __always_inline s64
arch_atomic64_fetch_add_release(s64 i, atomic64_t *v)
{
	__atomic_release_fence();
	return arch_atomic64_fetch_add_relaxed(i, v);
}
#define arch_atomic64_fetch_add_release arch_atomic64_fetch_add_release
#endif

#ifndef arch_atomic64_fetch_add
static __always_inline s64
arch_atomic64_fetch_add(s64 i, atomic64_t *v)
{
	s64 ret;
	__atomic_pre_full_fence();
	ret = arch_atomic64_fetch_add_relaxed(i, v);
	__atomic_post_full_fence();
	return ret;
}
#define arch_atomic64_fetch_add arch_atomic64_fetch_add
#endif

#endif /* arch_atomic64_fetch_add_relaxed */

#ifndef arch_atomic64_sub_return_relaxed
#define arch_atomic64_sub_return_acquire arch_atomic64_sub_return
#define arch_atomic64_sub_return_release arch_atomic64_sub_return
#define arch_atomic64_sub_return_relaxed arch_atomic64_sub_return
#else /* arch_atomic64_sub_return_relaxed */

#ifndef arch_atomic64_sub_return_acquire
static __always_inline s64
arch_atomic64_sub_return_acquire(s64 i, atomic64_t *v)
{
	s64 ret = arch_atomic64_sub_return_relaxed(i, v);
	__atomic_acquire_fence();
	return ret;
}
#define arch_atomic64_sub_return_acquire arch_atomic64_sub_return_acquire
#endif

#ifndef arch_atomic64_sub_return_release
static __always_inline s64
arch_atomic64_sub_return_release(s64 i, atomic64_t *v)
{
	__atomic_release_fence();
	return arch_atomic64_sub_return_relaxed(i, v);
}
#define arch_atomic64_sub_return_release arch_atomic64_sub_return_release
#endif

#ifndef arch_atomic64_sub_return
static __always_inline s64
arch_atomic64_sub_return(s64 i, atomic64_t *v)
{
	s64 ret;
	__atomic_pre_full_fence();
	ret = arch_atomic64_sub_return_relaxed(i, v);
	__atomic_post_full_fence();
	return ret;
}
#define arch_atomic64_sub_return arch_atomic64_sub_return
#endif

#endif /* arch_atomic64_sub_return_relaxed */

#ifndef arch_atomic64_fetch_sub_relaxed
#define arch_atomic64_fetch_sub_acquire arch_atomic64_fetch_sub
#define arch_atomic64_fetch_sub_release arch_atomic64_fetch_sub
#define arch_atomic64_fetch_sub_relaxed arch_atomic64_fetch_sub
#else /* arch_atomic64_fetch_sub_relaxed */

#ifndef arch_atomic64_fetch_sub_acquire
static __always_inline s64
arch_atomic64_fetch_sub_acquire(s64 i, atomic64_t *v)
{
	s64 ret = arch_atomic64_fetch_sub_relaxed(i, v);
	__atomic_acquire_fence();
	return ret;
}
#define arch_atomic64_fetch_sub_acquire arch_atomic64_fetch_sub_acquire
#endif

#ifndef arch_atomic64_fetch_sub_release
static __always_inline s64
arch_atomic64_fetch_sub_release(s64 i, atomic64_t *v)
{
	__atomic_release_fence();
	return arch_atomic64_fetch_sub_relaxed(i, v);
}
#define arch_atomic64_fetch_sub_release arch_atomic64_fetch_sub_release
#endif

#ifndef arch_atomic64_fetch_sub
static __always_inline s64
arch_atomic64_fetch_sub(s64 i, atomic64_t *v)
{
	s64 ret;
	__atomic_pre_full_fence();
	ret = arch_atomic64_fetch_sub_relaxed(i, v);
	__atomic_post_full_fence();
	return ret;
}
#define arch_atomic64_fetch_sub arch_atomic64_fetch_sub
#endif

#endif /* arch_atomic64_fetch_sub_relaxed */

#ifndef arch_atomic64_inc
static __always_inline void
arch_atomic64_inc(atomic64_t *v)
{
	arch_atomic64_add(1, v);
}
#define arch_atomic64_inc arch_atomic64_inc
#endif

#ifndef arch_atomic64_inc_return_relaxed
#ifdef arch_atomic64_inc_return
#define arch_atomic64_inc_return_acquire arch_atomic64_inc_return
#define arch_atomic64_inc_return_release arch_atomic64_inc_return
#define arch_atomic64_inc_return_relaxed arch_atomic64_inc_return
#endif /* arch_atomic64_inc_return */

#ifndef arch_atomic64_inc_return
static __always_inline s64
arch_atomic64_inc_return(atomic64_t *v)
{
	return arch_atomic64_add_return(1, v);
}
#define arch_atomic64_inc_return arch_atomic64_inc_return
#endif

#ifndef arch_atomic64_inc_return_acquire
static __always_inline s64
arch_atomic64_inc_return_acquire(atomic64_t *v)
{
	return arch_atomic64_add_return_acquire(1, v);
}
#define arch_atomic64_inc_return_acquire arch_atomic64_inc_return_acquire
#endif

#ifndef arch_atomic64_inc_return_release
static __always_inline s64
arch_atomic64_inc_return_release(atomic64_t *v)
{
	return arch_atomic64_add_return_release(1, v);
}
#define arch_atomic64_inc_return_release arch_atomic64_inc_return_release
#endif

#ifndef arch_atomic64_inc_return_relaxed
static __always_inline s64
arch_atomic64_inc_return_relaxed(atomic64_t *v)
{
	return arch_atomic64_add_return_relaxed(1, v);
}
#define arch_atomic64_inc_return_relaxed arch_atomic64_inc_return_relaxed
#endif

#else /* arch_atomic64_inc_return_relaxed */

#ifndef arch_atomic64_inc_return_acquire
static __always_inline s64
arch_atomic64_inc_return_acquire(atomic64_t *v)
{
	s64 ret = arch_atomic64_inc_return_relaxed(v);
	__atomic_acquire_fence();
	return ret;
}
#define arch_atomic64_inc_return_acquire arch_atomic64_inc_return_acquire
#endif

#ifndef arch_atomic64_inc_return_release
static __always_inline s64
arch_atomic64_inc_return_release(atomic64_t *v)
{
	__atomic_release_fence();
	return arch_atomic64_inc_return_relaxed(v);
}
#define arch_atomic64_inc_return_release arch_atomic64_inc_return_release
#endif

#ifndef arch_atomic64_inc_return
static __always_inline s64
arch_atomic64_inc_return(atomic64_t *v)
{
	s64 ret;
	__atomic_pre_full_fence();
	ret = arch_atomic64_inc_return_relaxed(v);
	__atomic_post_full_fence();
	return ret;
}
#define arch_atomic64_inc_return arch_atomic64_inc_return
#endif

#endif /* arch_atomic64_inc_return_relaxed */

#ifndef arch_atomic64_fetch_inc_relaxed
#ifdef arch_atomic64_fetch_inc
#define arch_atomic64_fetch_inc_acquire arch_atomic64_fetch_inc
#define arch_atomic64_fetch_inc_release arch_atomic64_fetch_inc
#define arch_atomic64_fetch_inc_relaxed arch_atomic64_fetch_inc
#endif /* arch_atomic64_fetch_inc */

#ifndef arch_atomic64_fetch_inc
static __always_inline s64
arch_atomic64_fetch_inc(atomic64_t *v)
{
	return arch_atomic64_fetch_add(1, v);
}
#define arch_atomic64_fetch_inc arch_atomic64_fetch_inc
#endif

#ifndef arch_atomic64_fetch_inc_acquire
static __always_inline s64
arch_atomic64_fetch_inc_acquire(atomic64_t *v)
{
	return arch_atomic64_fetch_add_acquire(1, v);
}
#define arch_atomic64_fetch_inc_acquire arch_atomic64_fetch_inc_acquire
#endif

#ifndef arch_atomic64_fetch_inc_release
static __always_inline s64
arch_atomic64_fetch_inc_release(atomic64_t *v)
{
	return arch_atomic64_fetch_add_release(1, v);
}
#define arch_atomic64_fetch_inc_release arch_atomic64_fetch_inc_release
#endif

#ifndef arch_atomic64_fetch_inc_relaxed
static __always_inline s64
arch_atomic64_fetch_inc_relaxed(atomic64_t *v)
{
	return arch_atomic64_fetch_add_relaxed(1, v);
}
#define arch_atomic64_fetch_inc_relaxed arch_atomic64_fetch_inc_relaxed
#endif

#else /* arch_atomic64_fetch_inc_relaxed */

#ifndef arch_atomic64_fetch_inc_acquire
static __always_inline s64
arch_atomic64_fetch_inc_acquire(atomic64_t *v)
{
	s64 ret = arch_atomic64_fetch_inc_relaxed(v);
	__atomic_acquire_fence();
	return ret;
}
#define arch_atomic64_fetch_inc_acquire arch_atomic64_fetch_inc_acquire
#endif

#ifndef arch_atomic64_fetch_inc_release
static __always_inline s64
arch_atomic64_fetch_inc_release(atomic64_t *v)
{
	__atomic_release_fence();
	return arch_atomic64_fetch_inc_relaxed(v);
}
#define arch_atomic64_fetch_inc_release arch_atomic64_fetch_inc_release
#endif

#ifndef arch_atomic64_fetch_inc
static __always_inline s64
arch_atomic64_fetch_inc(atomic64_t *v)
{
	s64 ret;
	__atomic_pre_full_fence();
	ret = arch_atomic64_fetch_inc_relaxed(v);
	__atomic_post_full_fence();
	return ret;
}
#define arch_atomic64_fetch_inc arch_atomic64_fetch_inc
#endif

#endif /* arch_atomic64_fetch_inc_relaxed */
1613 
1614 #ifndef arch_atomic64_dec
1615 static __always_inline void
arch_atomic64_dec(atomic64_t * v)1616 arch_atomic64_dec(atomic64_t *v)
1617 {
1618 	arch_atomic64_sub(1, v);
1619 }
1620 #define arch_atomic64_dec arch_atomic64_dec
1621 #endif
1622 
1623 #ifndef arch_atomic64_dec_return_relaxed
1624 #ifdef arch_atomic64_dec_return
1625 #define arch_atomic64_dec_return_acquire arch_atomic64_dec_return
1626 #define arch_atomic64_dec_return_release arch_atomic64_dec_return
1627 #define arch_atomic64_dec_return_relaxed arch_atomic64_dec_return
1628 #endif /* arch_atomic64_dec_return */
1629 
1630 #ifndef arch_atomic64_dec_return
1631 static __always_inline s64
arch_atomic64_dec_return(atomic64_t * v)1632 arch_atomic64_dec_return(atomic64_t *v)
1633 {
1634 	return arch_atomic64_sub_return(1, v);
1635 }
1636 #define arch_atomic64_dec_return arch_atomic64_dec_return
1637 #endif
1638 
1639 #ifndef arch_atomic64_dec_return_acquire
1640 static __always_inline s64
arch_atomic64_dec_return_acquire(atomic64_t * v)1641 arch_atomic64_dec_return_acquire(atomic64_t *v)
1642 {
1643 	return arch_atomic64_sub_return_acquire(1, v);
1644 }
1645 #define arch_atomic64_dec_return_acquire arch_atomic64_dec_return_acquire
1646 #endif
1647 
1648 #ifndef arch_atomic64_dec_return_release
1649 static __always_inline s64
arch_atomic64_dec_return_release(atomic64_t * v)1650 arch_atomic64_dec_return_release(atomic64_t *v)
1651 {
1652 	return arch_atomic64_sub_return_release(1, v);
1653 }
1654 #define arch_atomic64_dec_return_release arch_atomic64_dec_return_release
1655 #endif
1656 
1657 #ifndef arch_atomic64_dec_return_relaxed
1658 static __always_inline s64
arch_atomic64_dec_return_relaxed(atomic64_t * v)1659 arch_atomic64_dec_return_relaxed(atomic64_t *v)
1660 {
1661 	return arch_atomic64_sub_return_relaxed(1, v);
1662 }
1663 #define arch_atomic64_dec_return_relaxed arch_atomic64_dec_return_relaxed
1664 #endif
1665 
1666 #else /* arch_atomic64_dec_return_relaxed */
1667 
1668 #ifndef arch_atomic64_dec_return_acquire
1669 static __always_inline s64
arch_atomic64_dec_return_acquire(atomic64_t * v)1670 arch_atomic64_dec_return_acquire(atomic64_t *v)
1671 {
1672 	s64 ret = arch_atomic64_dec_return_relaxed(v);
1673 	__atomic_acquire_fence();
1674 	return ret;
1675 }
1676 #define arch_atomic64_dec_return_acquire arch_atomic64_dec_return_acquire
1677 #endif
1678 
1679 #ifndef arch_atomic64_dec_return_release
1680 static __always_inline s64
1681 arch_atomic64_dec_return_release(atomic64_t *v)
1682 {
1683 	__atomic_release_fence();
1684 	return arch_atomic64_dec_return_relaxed(v);
1685 }
1686 #define arch_atomic64_dec_return_release arch_atomic64_dec_return_release
1687 #endif
1688 
1689 #ifndef arch_atomic64_dec_return
1690 static __always_inline s64
1691 arch_atomic64_dec_return(atomic64_t *v)
1692 {
1693 	s64 ret;
1694 	__atomic_pre_full_fence();
1695 	ret = arch_atomic64_dec_return_relaxed(v);
1696 	__atomic_post_full_fence();
1697 	return ret;
1698 }
1699 #define arch_atomic64_dec_return arch_atomic64_dec_return
1700 #endif
1701 
1702 #endif /* arch_atomic64_dec_return_relaxed */
1703 
1704 #ifndef arch_atomic64_fetch_dec_relaxed
1705 #ifdef arch_atomic64_fetch_dec
1706 #define arch_atomic64_fetch_dec_acquire arch_atomic64_fetch_dec
1707 #define arch_atomic64_fetch_dec_release arch_atomic64_fetch_dec
1708 #define arch_atomic64_fetch_dec_relaxed arch_atomic64_fetch_dec
1709 #endif /* arch_atomic64_fetch_dec */
1710 
1711 #ifndef arch_atomic64_fetch_dec
1712 static __always_inline s64
1713 arch_atomic64_fetch_dec(atomic64_t *v)
1714 {
1715 	return arch_atomic64_fetch_sub(1, v);
1716 }
1717 #define arch_atomic64_fetch_dec arch_atomic64_fetch_dec
1718 #endif
1719 
1720 #ifndef arch_atomic64_fetch_dec_acquire
1721 static __always_inline s64
1722 arch_atomic64_fetch_dec_acquire(atomic64_t *v)
1723 {
1724 	return arch_atomic64_fetch_sub_acquire(1, v);
1725 }
1726 #define arch_atomic64_fetch_dec_acquire arch_atomic64_fetch_dec_acquire
1727 #endif
1728 
1729 #ifndef arch_atomic64_fetch_dec_release
1730 static __always_inline s64
1731 arch_atomic64_fetch_dec_release(atomic64_t *v)
1732 {
1733 	return arch_atomic64_fetch_sub_release(1, v);
1734 }
1735 #define arch_atomic64_fetch_dec_release arch_atomic64_fetch_dec_release
1736 #endif
1737 
1738 #ifndef arch_atomic64_fetch_dec_relaxed
1739 static __always_inline s64
1740 arch_atomic64_fetch_dec_relaxed(atomic64_t *v)
1741 {
1742 	return arch_atomic64_fetch_sub_relaxed(1, v);
1743 }
1744 #define arch_atomic64_fetch_dec_relaxed arch_atomic64_fetch_dec_relaxed
1745 #endif
1746 
1747 #else /* arch_atomic64_fetch_dec_relaxed */
1748 
1749 #ifndef arch_atomic64_fetch_dec_acquire
1750 static __always_inline s64
1751 arch_atomic64_fetch_dec_acquire(atomic64_t *v)
1752 {
1753 	s64 ret = arch_atomic64_fetch_dec_relaxed(v);
1754 	__atomic_acquire_fence();
1755 	return ret;
1756 }
1757 #define arch_atomic64_fetch_dec_acquire arch_atomic64_fetch_dec_acquire
1758 #endif
1759 
1760 #ifndef arch_atomic64_fetch_dec_release
1761 static __always_inline s64
1762 arch_atomic64_fetch_dec_release(atomic64_t *v)
1763 {
1764 	__atomic_release_fence();
1765 	return arch_atomic64_fetch_dec_relaxed(v);
1766 }
1767 #define arch_atomic64_fetch_dec_release arch_atomic64_fetch_dec_release
1768 #endif
1769 
1770 #ifndef arch_atomic64_fetch_dec
1771 static __always_inline s64
1772 arch_atomic64_fetch_dec(atomic64_t *v)
1773 {
1774 	s64 ret;
1775 	__atomic_pre_full_fence();
1776 	ret = arch_atomic64_fetch_dec_relaxed(v);
1777 	__atomic_post_full_fence();
1778 	return ret;
1779 }
1780 #define arch_atomic64_fetch_dec arch_atomic64_fetch_dec
1781 #endif
1782 
1783 #endif /* arch_atomic64_fetch_dec_relaxed */
1784 
1785 #ifndef arch_atomic64_fetch_and_relaxed
1786 #define arch_atomic64_fetch_and_acquire arch_atomic64_fetch_and
1787 #define arch_atomic64_fetch_and_release arch_atomic64_fetch_and
1788 #define arch_atomic64_fetch_and_relaxed arch_atomic64_fetch_and
1789 #else /* arch_atomic64_fetch_and_relaxed */
1790 
1791 #ifndef arch_atomic64_fetch_and_acquire
1792 static __always_inline s64
1793 arch_atomic64_fetch_and_acquire(s64 i, atomic64_t *v)
1794 {
1795 	s64 ret = arch_atomic64_fetch_and_relaxed(i, v);
1796 	__atomic_acquire_fence();
1797 	return ret;
1798 }
1799 #define arch_atomic64_fetch_and_acquire arch_atomic64_fetch_and_acquire
1800 #endif
1801 
1802 #ifndef arch_atomic64_fetch_and_release
1803 static __always_inline s64
1804 arch_atomic64_fetch_and_release(s64 i, atomic64_t *v)
1805 {
1806 	__atomic_release_fence();
1807 	return arch_atomic64_fetch_and_relaxed(i, v);
1808 }
1809 #define arch_atomic64_fetch_and_release arch_atomic64_fetch_and_release
1810 #endif
1811 
1812 #ifndef arch_atomic64_fetch_and
1813 static __always_inline s64
1814 arch_atomic64_fetch_and(s64 i, atomic64_t *v)
1815 {
1816 	s64 ret;
1817 	__atomic_pre_full_fence();
1818 	ret = arch_atomic64_fetch_and_relaxed(i, v);
1819 	__atomic_post_full_fence();
1820 	return ret;
1821 }
1822 #define arch_atomic64_fetch_and arch_atomic64_fetch_and
1823 #endif
1824 
1825 #endif /* arch_atomic64_fetch_and_relaxed */
1826 
1827 #ifndef arch_atomic64_andnot
1828 static __always_inline void
1829 arch_atomic64_andnot(s64 i, atomic64_t *v)
1830 {
1831 	arch_atomic64_and(~i, v);
1832 }
1833 #define arch_atomic64_andnot arch_atomic64_andnot
1834 #endif
1835 
1836 #ifndef arch_atomic64_fetch_andnot_relaxed
1837 #ifdef arch_atomic64_fetch_andnot
1838 #define arch_atomic64_fetch_andnot_acquire arch_atomic64_fetch_andnot
1839 #define arch_atomic64_fetch_andnot_release arch_atomic64_fetch_andnot
1840 #define arch_atomic64_fetch_andnot_relaxed arch_atomic64_fetch_andnot
1841 #endif /* arch_atomic64_fetch_andnot */
1842 
1843 #ifndef arch_atomic64_fetch_andnot
1844 static __always_inline s64
1845 arch_atomic64_fetch_andnot(s64 i, atomic64_t *v)
1846 {
1847 	return arch_atomic64_fetch_and(~i, v);
1848 }
1849 #define arch_atomic64_fetch_andnot arch_atomic64_fetch_andnot
1850 #endif
1851 
1852 #ifndef arch_atomic64_fetch_andnot_acquire
1853 static __always_inline s64
1854 arch_atomic64_fetch_andnot_acquire(s64 i, atomic64_t *v)
1855 {
1856 	return arch_atomic64_fetch_and_acquire(~i, v);
1857 }
1858 #define arch_atomic64_fetch_andnot_acquire arch_atomic64_fetch_andnot_acquire
1859 #endif
1860 
1861 #ifndef arch_atomic64_fetch_andnot_release
1862 static __always_inline s64
1863 arch_atomic64_fetch_andnot_release(s64 i, atomic64_t *v)
1864 {
1865 	return arch_atomic64_fetch_and_release(~i, v);
1866 }
1867 #define arch_atomic64_fetch_andnot_release arch_atomic64_fetch_andnot_release
1868 #endif
1869 
1870 #ifndef arch_atomic64_fetch_andnot_relaxed
1871 static __always_inline s64
1872 arch_atomic64_fetch_andnot_relaxed(s64 i, atomic64_t *v)
1873 {
1874 	return arch_atomic64_fetch_and_relaxed(~i, v);
1875 }
1876 #define arch_atomic64_fetch_andnot_relaxed arch_atomic64_fetch_andnot_relaxed
1877 #endif
1878 
1879 #else /* arch_atomic64_fetch_andnot_relaxed */
1880 
1881 #ifndef arch_atomic64_fetch_andnot_acquire
1882 static __always_inline s64
1883 arch_atomic64_fetch_andnot_acquire(s64 i, atomic64_t *v)
1884 {
1885 	s64 ret = arch_atomic64_fetch_andnot_relaxed(i, v);
1886 	__atomic_acquire_fence();
1887 	return ret;
1888 }
1889 #define arch_atomic64_fetch_andnot_acquire arch_atomic64_fetch_andnot_acquire
1890 #endif
1891 
1892 #ifndef arch_atomic64_fetch_andnot_release
1893 static __always_inline s64
1894 arch_atomic64_fetch_andnot_release(s64 i, atomic64_t *v)
1895 {
1896 	__atomic_release_fence();
1897 	return arch_atomic64_fetch_andnot_relaxed(i, v);
1898 }
1899 #define arch_atomic64_fetch_andnot_release arch_atomic64_fetch_andnot_release
1900 #endif
1901 
1902 #ifndef arch_atomic64_fetch_andnot
1903 static __always_inline s64
1904 arch_atomic64_fetch_andnot(s64 i, atomic64_t *v)
1905 {
1906 	s64 ret;
1907 	__atomic_pre_full_fence();
1908 	ret = arch_atomic64_fetch_andnot_relaxed(i, v);
1909 	__atomic_post_full_fence();
1910 	return ret;
1911 }
1912 #define arch_atomic64_fetch_andnot arch_atomic64_fetch_andnot
1913 #endif
1914 
1915 #endif /* arch_atomic64_fetch_andnot_relaxed */
1916 
1917 #ifndef arch_atomic64_fetch_or_relaxed
1918 #define arch_atomic64_fetch_or_acquire arch_atomic64_fetch_or
1919 #define arch_atomic64_fetch_or_release arch_atomic64_fetch_or
1920 #define arch_atomic64_fetch_or_relaxed arch_atomic64_fetch_or
1921 #else /* arch_atomic64_fetch_or_relaxed */
1922 
1923 #ifndef arch_atomic64_fetch_or_acquire
1924 static __always_inline s64
1925 arch_atomic64_fetch_or_acquire(s64 i, atomic64_t *v)
1926 {
1927 	s64 ret = arch_atomic64_fetch_or_relaxed(i, v);
1928 	__atomic_acquire_fence();
1929 	return ret;
1930 }
1931 #define arch_atomic64_fetch_or_acquire arch_atomic64_fetch_or_acquire
1932 #endif
1933 
1934 #ifndef arch_atomic64_fetch_or_release
1935 static __always_inline s64
1936 arch_atomic64_fetch_or_release(s64 i, atomic64_t *v)
1937 {
1938 	__atomic_release_fence();
1939 	return arch_atomic64_fetch_or_relaxed(i, v);
1940 }
1941 #define arch_atomic64_fetch_or_release arch_atomic64_fetch_or_release
1942 #endif
1943 
1944 #ifndef arch_atomic64_fetch_or
1945 static __always_inline s64
1946 arch_atomic64_fetch_or(s64 i, atomic64_t *v)
1947 {
1948 	s64 ret;
1949 	__atomic_pre_full_fence();
1950 	ret = arch_atomic64_fetch_or_relaxed(i, v);
1951 	__atomic_post_full_fence();
1952 	return ret;
1953 }
1954 #define arch_atomic64_fetch_or arch_atomic64_fetch_or
1955 #endif
1956 
1957 #endif /* arch_atomic64_fetch_or_relaxed */
1958 
1959 #ifndef arch_atomic64_fetch_xor_relaxed
1960 #define arch_atomic64_fetch_xor_acquire arch_atomic64_fetch_xor
1961 #define arch_atomic64_fetch_xor_release arch_atomic64_fetch_xor
1962 #define arch_atomic64_fetch_xor_relaxed arch_atomic64_fetch_xor
1963 #else /* arch_atomic64_fetch_xor_relaxed */
1964 
1965 #ifndef arch_atomic64_fetch_xor_acquire
1966 static __always_inline s64
1967 arch_atomic64_fetch_xor_acquire(s64 i, atomic64_t *v)
1968 {
1969 	s64 ret = arch_atomic64_fetch_xor_relaxed(i, v);
1970 	__atomic_acquire_fence();
1971 	return ret;
1972 }
1973 #define arch_atomic64_fetch_xor_acquire arch_atomic64_fetch_xor_acquire
1974 #endif
1975 
1976 #ifndef arch_atomic64_fetch_xor_release
1977 static __always_inline s64
1978 arch_atomic64_fetch_xor_release(s64 i, atomic64_t *v)
1979 {
1980 	__atomic_release_fence();
1981 	return arch_atomic64_fetch_xor_relaxed(i, v);
1982 }
1983 #define arch_atomic64_fetch_xor_release arch_atomic64_fetch_xor_release
1984 #endif
1985 
1986 #ifndef arch_atomic64_fetch_xor
1987 static __always_inline s64
1988 arch_atomic64_fetch_xor(s64 i, atomic64_t *v)
1989 {
1990 	s64 ret;
1991 	__atomic_pre_full_fence();
1992 	ret = arch_atomic64_fetch_xor_relaxed(i, v);
1993 	__atomic_post_full_fence();
1994 	return ret;
1995 }
1996 #define arch_atomic64_fetch_xor arch_atomic64_fetch_xor
1997 #endif
1998 
1999 #endif /* arch_atomic64_fetch_xor_relaxed */
2000 
2001 #ifndef arch_atomic64_xchg_relaxed
2002 #define arch_atomic64_xchg_acquire arch_atomic64_xchg
2003 #define arch_atomic64_xchg_release arch_atomic64_xchg
2004 #define arch_atomic64_xchg_relaxed arch_atomic64_xchg
2005 #else /* arch_atomic64_xchg_relaxed */
2006 
2007 #ifndef arch_atomic64_xchg_acquire
2008 static __always_inline s64
2009 arch_atomic64_xchg_acquire(atomic64_t *v, s64 i)
2010 {
2011 	s64 ret = arch_atomic64_xchg_relaxed(v, i);
2012 	__atomic_acquire_fence();
2013 	return ret;
2014 }
2015 #define arch_atomic64_xchg_acquire arch_atomic64_xchg_acquire
2016 #endif
2017 
2018 #ifndef arch_atomic64_xchg_release
2019 static __always_inline s64
2020 arch_atomic64_xchg_release(atomic64_t *v, s64 i)
2021 {
2022 	__atomic_release_fence();
2023 	return arch_atomic64_xchg_relaxed(v, i);
2024 }
2025 #define arch_atomic64_xchg_release arch_atomic64_xchg_release
2026 #endif
2027 
2028 #ifndef arch_atomic64_xchg
2029 static __always_inline s64
2030 arch_atomic64_xchg(atomic64_t *v, s64 i)
2031 {
2032 	s64 ret;
2033 	__atomic_pre_full_fence();
2034 	ret = arch_atomic64_xchg_relaxed(v, i);
2035 	__atomic_post_full_fence();
2036 	return ret;
2037 }
2038 #define arch_atomic64_xchg arch_atomic64_xchg
2039 #endif
2040 
2041 #endif /* arch_atomic64_xchg_relaxed */
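
/*
 * Illustrative sketch (not part of the generated API): xchg is commonly
 * used to atomically grab-and-reset an accumulated value.  The name
 * example_drain_counter is hypothetical.
 *
 *	static __always_inline s64
 *	example_drain_counter(atomic64_t *counter)
 *	{
 *		// returns the accumulated total and leaves the counter at 0,
 *		// fully ordered on both sides of the exchange
 *		return arch_atomic64_xchg(counter, 0);
 *	}
 */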
2042 
2043 #ifndef arch_atomic64_cmpxchg_relaxed
2044 #define arch_atomic64_cmpxchg_acquire arch_atomic64_cmpxchg
2045 #define arch_atomic64_cmpxchg_release arch_atomic64_cmpxchg
2046 #define arch_atomic64_cmpxchg_relaxed arch_atomic64_cmpxchg
2047 #else /* arch_atomic64_cmpxchg_relaxed */
2048 
2049 #ifndef arch_atomic64_cmpxchg_acquire
2050 static __always_inline s64
2051 arch_atomic64_cmpxchg_acquire(atomic64_t *v, s64 old, s64 new)
2052 {
2053 	s64 ret = arch_atomic64_cmpxchg_relaxed(v, old, new);
2054 	__atomic_acquire_fence();
2055 	return ret;
2056 }
2057 #define arch_atomic64_cmpxchg_acquire arch_atomic64_cmpxchg_acquire
2058 #endif
2059 
2060 #ifndef arch_atomic64_cmpxchg_release
2061 static __always_inline s64
2062 arch_atomic64_cmpxchg_release(atomic64_t *v, s64 old, s64 new)
2063 {
2064 	__atomic_release_fence();
2065 	return arch_atomic64_cmpxchg_relaxed(v, old, new);
2066 }
2067 #define arch_atomic64_cmpxchg_release arch_atomic64_cmpxchg_release
2068 #endif
2069 
2070 #ifndef arch_atomic64_cmpxchg
2071 static __always_inline s64
2072 arch_atomic64_cmpxchg(atomic64_t *v, s64 old, s64 new)
2073 {
2074 	s64 ret;
2075 	__atomic_pre_full_fence();
2076 	ret = arch_atomic64_cmpxchg_relaxed(v, old, new);
2077 	__atomic_post_full_fence();
2078 	return ret;
2079 }
2080 #define arch_atomic64_cmpxchg arch_atomic64_cmpxchg
2081 #endif
2082 
2083 #endif /* arch_atomic64_cmpxchg_relaxed */
2084 
2085 #ifndef arch_atomic64_try_cmpxchg_relaxed
2086 #ifdef arch_atomic64_try_cmpxchg
2087 #define arch_atomic64_try_cmpxchg_acquire arch_atomic64_try_cmpxchg
2088 #define arch_atomic64_try_cmpxchg_release arch_atomic64_try_cmpxchg
2089 #define arch_atomic64_try_cmpxchg_relaxed arch_atomic64_try_cmpxchg
2090 #endif /* arch_atomic64_try_cmpxchg */
2091 
2092 #ifndef arch_atomic64_try_cmpxchg
2093 static __always_inline bool
2094 arch_atomic64_try_cmpxchg(atomic64_t *v, s64 *old, s64 new)
2095 {
2096 	s64 r, o = *old;
2097 	r = arch_atomic64_cmpxchg(v, o, new);
2098 	if (unlikely(r != o))
2099 		*old = r;
2100 	return likely(r == o);
2101 }
2102 #define arch_atomic64_try_cmpxchg arch_atomic64_try_cmpxchg
2103 #endif
2104 
2105 #ifndef arch_atomic64_try_cmpxchg_acquire
2106 static __always_inline bool
2107 arch_atomic64_try_cmpxchg_acquire(atomic64_t *v, s64 *old, s64 new)
2108 {
2109 	s64 r, o = *old;
2110 	r = arch_atomic64_cmpxchg_acquire(v, o, new);
2111 	if (unlikely(r != o))
2112 		*old = r;
2113 	return likely(r == o);
2114 }
2115 #define arch_atomic64_try_cmpxchg_acquire arch_atomic64_try_cmpxchg_acquire
2116 #endif
2117 
2118 #ifndef arch_atomic64_try_cmpxchg_release
2119 static __always_inline bool
2120 arch_atomic64_try_cmpxchg_release(atomic64_t *v, s64 *old, s64 new)
2121 {
2122 	s64 r, o = *old;
2123 	r = arch_atomic64_cmpxchg_release(v, o, new);
2124 	if (unlikely(r != o))
2125 		*old = r;
2126 	return likely(r == o);
2127 }
2128 #define arch_atomic64_try_cmpxchg_release arch_atomic64_try_cmpxchg_release
2129 #endif
2130 
2131 #ifndef arch_atomic64_try_cmpxchg_relaxed
2132 static __always_inline bool
2133 arch_atomic64_try_cmpxchg_relaxed(atomic64_t *v, s64 *old, s64 new)
2134 {
2135 	s64 r, o = *old;
2136 	r = arch_atomic64_cmpxchg_relaxed(v, o, new);
2137 	if (unlikely(r != o))
2138 		*old = r;
2139 	return likely(r == o);
2140 }
2141 #define arch_atomic64_try_cmpxchg_relaxed arch_atomic64_try_cmpxchg_relaxed
2142 #endif
2143 
2144 #else /* arch_atomic64_try_cmpxchg_relaxed */
2145 
2146 #ifndef arch_atomic64_try_cmpxchg_acquire
2147 static __always_inline bool
2148 arch_atomic64_try_cmpxchg_acquire(atomic64_t *v, s64 *old, s64 new)
2149 {
2150 	bool ret = arch_atomic64_try_cmpxchg_relaxed(v, old, new);
2151 	__atomic_acquire_fence();
2152 	return ret;
2153 }
2154 #define arch_atomic64_try_cmpxchg_acquire arch_atomic64_try_cmpxchg_acquire
2155 #endif
2156 
2157 #ifndef arch_atomic64_try_cmpxchg_release
2158 static __always_inline bool
2159 arch_atomic64_try_cmpxchg_release(atomic64_t *v, s64 *old, s64 new)
2160 {
2161 	__atomic_release_fence();
2162 	return arch_atomic64_try_cmpxchg_relaxed(v, old, new);
2163 }
2164 #define arch_atomic64_try_cmpxchg_release arch_atomic64_try_cmpxchg_release
2165 #endif
2166 
2167 #ifndef arch_atomic64_try_cmpxchg
2168 static __always_inline bool
2169 arch_atomic64_try_cmpxchg(atomic64_t *v, s64 *old, s64 new)
2170 {
2171 	bool ret;
2172 	__atomic_pre_full_fence();
2173 	ret = arch_atomic64_try_cmpxchg_relaxed(v, old, new);
2174 	__atomic_post_full_fence();
2175 	return ret;
2176 }
2177 #define arch_atomic64_try_cmpxchg arch_atomic64_try_cmpxchg
2178 #endif
2179 
2180 #endif /* arch_atomic64_try_cmpxchg_relaxed */
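
/*
 * Illustrative sketch (not part of the generated API) of the canonical
 * try_cmpxchg() update loop; the fallbacks below (fetch_add_unless,
 * inc_unless_negative, dec_if_positive, ...) all follow this shape.  On
 * failure *old is refreshed with the current value, so the loop does not
 * need to re-read @v.  The name example_set_max is hypothetical.
 *
 *	static __always_inline void
 *	example_set_max(atomic64_t *v, s64 new)
 *	{
 *		s64 old = arch_atomic64_read(v);
 *
 *		do {
 *			if (old >= new)
 *				return;		// current value already large enough
 *		} while (!arch_atomic64_try_cmpxchg(v, &old, new));
 *	}
 */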
2181 
2182 #ifndef arch_atomic64_sub_and_test
2183 /**
2184  * arch_atomic64_sub_and_test - subtract value from variable and test result
2185  * @i: integer value to subtract
2186  * @v: pointer of type atomic64_t
2187  *
2188  * Atomically subtracts @i from @v and returns
2189  * true if the result is zero, or false for all
2190  * other cases.
2191  */
2192 static __always_inline bool
2193 arch_atomic64_sub_and_test(s64 i, atomic64_t *v)
2194 {
2195 	return arch_atomic64_sub_return(i, v) == 0;
2196 }
2197 #define arch_atomic64_sub_and_test arch_atomic64_sub_and_test
2198 #endif
2199 
2200 #ifndef arch_atomic64_dec_and_test
2201 /**
2202  * arch_atomic64_dec_and_test - decrement and test
2203  * @v: pointer of type atomic64_t
2204  *
2205  * Atomically decrements @v by 1 and
2206  * returns true if the result is 0, or false for all other
2207  * cases.
2208  */
2209 static __always_inline bool
2210 arch_atomic64_dec_and_test(atomic64_t *v)
2211 {
2212 	return arch_atomic64_dec_return(v) == 0;
2213 }
2214 #define arch_atomic64_dec_and_test arch_atomic64_dec_and_test
2215 #endif
2216 
2217 #ifndef arch_atomic64_inc_and_test
2218 /**
2219  * arch_atomic64_inc_and_test - increment and test
2220  * @v: pointer of type atomic64_t
2221  *
2222  * Atomically increments @v by 1
2223  * and returns true if the result is zero, or false for all
2224  * other cases.
2225  */
2226 static __always_inline bool
2227 arch_atomic64_inc_and_test(atomic64_t *v)
2228 {
2229 	return arch_atomic64_inc_return(v) == 0;
2230 }
2231 #define arch_atomic64_inc_and_test arch_atomic64_inc_and_test
2232 #endif
2233 
2234 #ifndef arch_atomic64_add_negative
2235 /**
2236  * arch_atomic64_add_negative - add and test if negative
2237  * @i: integer value to add
2238  * @v: pointer of type atomic64_t
2239  *
2240  * Atomically adds @i to @v and returns true
2241  * if the result is negative, or false when
2242  * the result is greater than or equal to zero.
2243  */
2244 static __always_inline bool
2245 arch_atomic64_add_negative(s64 i, atomic64_t *v)
2246 {
2247 	return arch_atomic64_add_return(i, v) < 0;
2248 }
2249 #define arch_atomic64_add_negative arch_atomic64_add_negative
2250 #endif
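
/*
 * Illustrative sketch (not part of the generated API): add_negative()
 * suits counters where going below zero encodes a special state, e.g. a
 * bias that blocks further users.  The name example_sub_is_blocked is
 * hypothetical.
 *
 *	static __always_inline bool
 *	example_sub_is_blocked(atomic64_t *count, s64 nr)
 *	{
 *		// true if subtracting nr took the counter below zero
 *		return arch_atomic64_add_negative(-nr, count);
 *	}
 */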
2251 
2252 #ifndef arch_atomic64_fetch_add_unless
2253 /**
2254  * arch_atomic64_fetch_add_unless - add unless the number is already a given value
2255  * @v: pointer of type atomic64_t
2256  * @a: the amount to add to v...
2257  * @u: ...unless v is equal to u.
2258  *
2259  * Atomically adds @a to @v, so long as @v was not already @u.
2260  * Returns the original value of @v.
2261  */
2262 static __always_inline s64
2263 arch_atomic64_fetch_add_unless(atomic64_t *v, s64 a, s64 u)
2264 {
2265 	s64 c = arch_atomic64_read(v);
2266 
2267 	do {
2268 		if (unlikely(c == u))
2269 			break;
2270 	} while (!arch_atomic64_try_cmpxchg(v, &c, c + a));
2271 
2272 	return c;
2273 }
2274 #define arch_atomic64_fetch_add_unless arch_atomic64_fetch_add_unless
2275 #endif
2276 
2277 #ifndef arch_atomic64_add_unless
2278 /**
2279  * arch_atomic64_add_unless - add unless the number is already a given value
2280  * @v: pointer of type atomic64_t
2281  * @a: the amount to add to v...
2282  * @u: ...unless v is equal to u.
2283  *
2284  * Atomically adds @a to @v, if @v was not already @u.
2285  * Returns true if the addition was done.
2286  */
2287 static __always_inline bool
2288 arch_atomic64_add_unless(atomic64_t *v, s64 a, s64 u)
2289 {
2290 	return arch_atomic64_fetch_add_unless(v, a, u) != u;
2291 }
2292 #define arch_atomic64_add_unless arch_atomic64_add_unless
2293 #endif
2294 
2295 #ifndef arch_atomic64_inc_not_zero
2296 /**
2297  * arch_atomic64_inc_not_zero - increment unless the number is zero
2298  * @v: pointer of type atomic64_t
2299  *
2300  * Atomically increments @v by 1, if @v is non-zero.
2301  * Returns true if the increment was done.
2302  */
2303 static __always_inline bool
2304 arch_atomic64_inc_not_zero(atomic64_t *v)
2305 {
2306 	return arch_atomic64_add_unless(v, 1, 0);
2307 }
2308 #define arch_atomic64_inc_not_zero arch_atomic64_inc_not_zero
2309 #endif
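
/*
 * Illustrative sketch (not part of the generated API): the "tryget"
 * pattern, where a reference may only be taken while the object is still
 * live (count non-zero).  The name example_tryget is hypothetical.
 *
 *	static __always_inline bool
 *	example_tryget(atomic64_t *refcount)
 *	{
 *		// fails once the last reference has been dropped
 *		return arch_atomic64_inc_not_zero(refcount);
 *	}
 */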
2310 
2311 #ifndef arch_atomic64_inc_unless_negative
2312 static __always_inline bool
2313 arch_atomic64_inc_unless_negative(atomic64_t *v)
2314 {
2315 	s64 c = arch_atomic64_read(v);
2316 
2317 	do {
2318 		if (unlikely(c < 0))
2319 			return false;
2320 	} while (!arch_atomic64_try_cmpxchg(v, &c, c + 1));
2321 
2322 	return true;
2323 }
2324 #define arch_atomic64_inc_unless_negative arch_atomic64_inc_unless_negative
2325 #endif
2326 
2327 #ifndef arch_atomic64_dec_unless_positive
2328 static __always_inline bool
2329 arch_atomic64_dec_unless_positive(atomic64_t *v)
2330 {
2331 	s64 c = arch_atomic64_read(v);
2332 
2333 	do {
2334 		if (unlikely(c > 0))
2335 			return false;
2336 	} while (!arch_atomic64_try_cmpxchg(v, &c, c - 1));
2337 
2338 	return true;
2339 }
2340 #define arch_atomic64_dec_unless_positive arch_atomic64_dec_unless_positive
2341 #endif
2342 
2343 #ifndef arch_atomic64_dec_if_positive
2344 static __always_inline s64
2345 arch_atomic64_dec_if_positive(atomic64_t *v)
2346 {
2347 	s64 dec, c = arch_atomic64_read(v);
2348 
2349 	do {
2350 		dec = c - 1;
2351 		if (unlikely(dec < 0))
2352 			break;
2353 	} while (!arch_atomic64_try_cmpxchg(v, &c, dec));
2354 
2355 	return dec;
2356 }
2357 #define arch_atomic64_dec_if_positive arch_atomic64_dec_if_positive
2358 #endif
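
/*
 * Illustrative sketch (not part of the generated API): dec_if_positive()
 * behaves like a "trydown" on a count of available resources; the return
 * value is the decremented count, and a negative return means nothing was
 * taken.  The name example_try_take_slot is hypothetical.
 *
 *	static __always_inline bool
 *	example_try_take_slot(atomic64_t *free_slots)
 *	{
 *		// only decrements when the result stays >= 0
 *		return arch_atomic64_dec_if_positive(free_slots) >= 0;
 *	}
 */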
2359 
2360 #endif /* _LINUX_ATOMIC_FALLBACK_H */
2361 // cca554917d7ea73d5e3e7397dd70c484cad9b2c4
2362