1 // SPDX-License-Identifier: GPL-2.0
2
3 // Generated by scripts/atomic/gen-atomic-fallback.sh
4 // DO NOT MODIFY THIS FILE DIRECTLY
5
6 #ifndef _LINUX_ATOMIC_FALLBACK_H
7 #define _LINUX_ATOMIC_FALLBACK_H
8
9 #include <linux/compiler.h>
10
11 #ifndef arch_xchg_relaxed
12 #define arch_xchg_acquire arch_xchg
13 #define arch_xchg_release arch_xchg
14 #define arch_xchg_relaxed arch_xchg
15 #else /* arch_xchg_relaxed */
16
17 #ifndef arch_xchg_acquire
18 #define arch_xchg_acquire(...) \
19 __atomic_op_acquire(arch_xchg, __VA_ARGS__)
20 #endif
21
22 #ifndef arch_xchg_release
23 #define arch_xchg_release(...) \
24 __atomic_op_release(arch_xchg, __VA_ARGS__)
25 #endif
26
27 #ifndef arch_xchg
28 #define arch_xchg(...) \
29 __atomic_op_fence(arch_xchg, __VA_ARGS__)
30 #endif
31
32 #endif /* arch_xchg_relaxed */
33
34 #ifndef arch_cmpxchg_relaxed
35 #define arch_cmpxchg_acquire arch_cmpxchg
36 #define arch_cmpxchg_release arch_cmpxchg
37 #define arch_cmpxchg_relaxed arch_cmpxchg
38 #else /* arch_cmpxchg_relaxed */
39
40 #ifndef arch_cmpxchg_acquire
41 #define arch_cmpxchg_acquire(...) \
42 __atomic_op_acquire(arch_cmpxchg, __VA_ARGS__)
43 #endif
44
45 #ifndef arch_cmpxchg_release
46 #define arch_cmpxchg_release(...) \
47 __atomic_op_release(arch_cmpxchg, __VA_ARGS__)
48 #endif
49
50 #ifndef arch_cmpxchg
51 #define arch_cmpxchg(...) \
52 __atomic_op_fence(arch_cmpxchg, __VA_ARGS__)
53 #endif
54
55 #endif /* arch_cmpxchg_relaxed */
56
57 #ifndef arch_cmpxchg64_relaxed
58 #define arch_cmpxchg64_acquire arch_cmpxchg64
59 #define arch_cmpxchg64_release arch_cmpxchg64
60 #define arch_cmpxchg64_relaxed arch_cmpxchg64
61 #else /* arch_cmpxchg64_relaxed */
62
63 #ifndef arch_cmpxchg64_acquire
64 #define arch_cmpxchg64_acquire(...) \
65 __atomic_op_acquire(arch_cmpxchg64, __VA_ARGS__)
66 #endif
67
68 #ifndef arch_cmpxchg64_release
69 #define arch_cmpxchg64_release(...) \
70 __atomic_op_release(arch_cmpxchg64, __VA_ARGS__)
71 #endif
72
73 #ifndef arch_cmpxchg64
74 #define arch_cmpxchg64(...) \
75 __atomic_op_fence(arch_cmpxchg64, __VA_ARGS__)
76 #endif
77
78 #endif /* arch_cmpxchg64_relaxed */
79
80 #ifndef arch_try_cmpxchg_relaxed
81 #ifdef arch_try_cmpxchg
82 #define arch_try_cmpxchg_acquire arch_try_cmpxchg
83 #define arch_try_cmpxchg_release arch_try_cmpxchg
84 #define arch_try_cmpxchg_relaxed arch_try_cmpxchg
85 #endif /* arch_try_cmpxchg */
86
87 #ifndef arch_try_cmpxchg
88 #define arch_try_cmpxchg(_ptr, _oldp, _new) \
89 ({ \
90 typeof(*(_ptr)) *___op = (_oldp), ___o = *___op, ___r; \
91 ___r = arch_cmpxchg((_ptr), ___o, (_new)); \
92 if (unlikely(___r != ___o)) \
93 *___op = ___r; \
94 likely(___r == ___o); \
95 })
96 #endif /* arch_try_cmpxchg */
97
98 #ifndef arch_try_cmpxchg_acquire
99 #define arch_try_cmpxchg_acquire(_ptr, _oldp, _new) \
100 ({ \
101 typeof(*(_ptr)) *___op = (_oldp), ___o = *___op, ___r; \
102 ___r = arch_cmpxchg_acquire((_ptr), ___o, (_new)); \
103 if (unlikely(___r != ___o)) \
104 *___op = ___r; \
105 likely(___r == ___o); \
106 })
107 #endif /* arch_try_cmpxchg_acquire */
108
109 #ifndef arch_try_cmpxchg_release
110 #define arch_try_cmpxchg_release(_ptr, _oldp, _new) \
111 ({ \
112 typeof(*(_ptr)) *___op = (_oldp), ___o = *___op, ___r; \
113 ___r = arch_cmpxchg_release((_ptr), ___o, (_new)); \
114 if (unlikely(___r != ___o)) \
115 *___op = ___r; \
116 likely(___r == ___o); \
117 })
118 #endif /* arch_try_cmpxchg_release */
119
120 #ifndef arch_try_cmpxchg_relaxed
121 #define arch_try_cmpxchg_relaxed(_ptr, _oldp, _new) \
122 ({ \
123 typeof(*(_ptr)) *___op = (_oldp), ___o = *___op, ___r; \
124 ___r = arch_cmpxchg_relaxed((_ptr), ___o, (_new)); \
125 if (unlikely(___r != ___o)) \
126 *___op = ___r; \
127 likely(___r == ___o); \
128 })
129 #endif /* arch_try_cmpxchg_relaxed */
130
131 #else /* arch_try_cmpxchg_relaxed */
132
133 #ifndef arch_try_cmpxchg_acquire
134 #define arch_try_cmpxchg_acquire(...) \
135 __atomic_op_acquire(arch_try_cmpxchg, __VA_ARGS__)
136 #endif
137
138 #ifndef arch_try_cmpxchg_release
139 #define arch_try_cmpxchg_release(...) \
140 __atomic_op_release(arch_try_cmpxchg, __VA_ARGS__)
141 #endif
142
143 #ifndef arch_try_cmpxchg
144 #define arch_try_cmpxchg(...) \
145 __atomic_op_fence(arch_try_cmpxchg, __VA_ARGS__)
146 #endif
147
148 #endif /* arch_try_cmpxchg_relaxed */
149
150 #ifndef arch_atomic_read_acquire
151 static __always_inline int
arch_atomic_read_acquire(const atomic_t * v)152 arch_atomic_read_acquire(const atomic_t *v)
153 {
154 int ret;
155
156 if (__native_word(atomic_t)) {
157 ret = smp_load_acquire(&(v)->counter);
158 } else {
159 ret = arch_atomic_read(v);
160 __atomic_acquire_fence();
161 }
162
163 return ret;
164 }
165 #define arch_atomic_read_acquire arch_atomic_read_acquire
166 #endif
167
168 #ifndef arch_atomic_set_release
169 static __always_inline void
arch_atomic_set_release(atomic_t * v,int i)170 arch_atomic_set_release(atomic_t *v, int i)
171 {
172 if (__native_word(atomic_t)) {
173 smp_store_release(&(v)->counter, i);
174 } else {
175 __atomic_release_fence();
176 arch_atomic_set(v, i);
177 }
178 }
179 #define arch_atomic_set_release arch_atomic_set_release
180 #endif
181
182 #ifndef arch_atomic_add_return_relaxed
183 #define arch_atomic_add_return_acquire arch_atomic_add_return
184 #define arch_atomic_add_return_release arch_atomic_add_return
185 #define arch_atomic_add_return_relaxed arch_atomic_add_return
186 #else /* arch_atomic_add_return_relaxed */
187
188 #ifndef arch_atomic_add_return_acquire
189 static __always_inline int
arch_atomic_add_return_acquire(int i,atomic_t * v)190 arch_atomic_add_return_acquire(int i, atomic_t *v)
191 {
192 int ret = arch_atomic_add_return_relaxed(i, v);
193 __atomic_acquire_fence();
194 return ret;
195 }
196 #define arch_atomic_add_return_acquire arch_atomic_add_return_acquire
197 #endif
198
199 #ifndef arch_atomic_add_return_release
200 static __always_inline int
arch_atomic_add_return_release(int i,atomic_t * v)201 arch_atomic_add_return_release(int i, atomic_t *v)
202 {
203 __atomic_release_fence();
204 return arch_atomic_add_return_relaxed(i, v);
205 }
206 #define arch_atomic_add_return_release arch_atomic_add_return_release
207 #endif
208
209 #ifndef arch_atomic_add_return
210 static __always_inline int
arch_atomic_add_return(int i,atomic_t * v)211 arch_atomic_add_return(int i, atomic_t *v)
212 {
213 int ret;
214 __atomic_pre_full_fence();
215 ret = arch_atomic_add_return_relaxed(i, v);
216 __atomic_post_full_fence();
217 return ret;
218 }
219 #define arch_atomic_add_return arch_atomic_add_return
220 #endif
221
222 #endif /* arch_atomic_add_return_relaxed */
223
224 #ifndef arch_atomic_fetch_add_relaxed
225 #define arch_atomic_fetch_add_acquire arch_atomic_fetch_add
226 #define arch_atomic_fetch_add_release arch_atomic_fetch_add
227 #define arch_atomic_fetch_add_relaxed arch_atomic_fetch_add
228 #else /* arch_atomic_fetch_add_relaxed */
229
230 #ifndef arch_atomic_fetch_add_acquire
231 static __always_inline int
arch_atomic_fetch_add_acquire(int i,atomic_t * v)232 arch_atomic_fetch_add_acquire(int i, atomic_t *v)
233 {
234 int ret = arch_atomic_fetch_add_relaxed(i, v);
235 __atomic_acquire_fence();
236 return ret;
237 }
238 #define arch_atomic_fetch_add_acquire arch_atomic_fetch_add_acquire
239 #endif
240
241 #ifndef arch_atomic_fetch_add_release
242 static __always_inline int
arch_atomic_fetch_add_release(int i,atomic_t * v)243 arch_atomic_fetch_add_release(int i, atomic_t *v)
244 {
245 __atomic_release_fence();
246 return arch_atomic_fetch_add_relaxed(i, v);
247 }
248 #define arch_atomic_fetch_add_release arch_atomic_fetch_add_release
249 #endif
250
251 #ifndef arch_atomic_fetch_add
252 static __always_inline int
arch_atomic_fetch_add(int i,atomic_t * v)253 arch_atomic_fetch_add(int i, atomic_t *v)
254 {
255 int ret;
256 __atomic_pre_full_fence();
257 ret = arch_atomic_fetch_add_relaxed(i, v);
258 __atomic_post_full_fence();
259 return ret;
260 }
261 #define arch_atomic_fetch_add arch_atomic_fetch_add
262 #endif
263
264 #endif /* arch_atomic_fetch_add_relaxed */
265
266 #ifndef arch_atomic_sub_return_relaxed
267 #define arch_atomic_sub_return_acquire arch_atomic_sub_return
268 #define arch_atomic_sub_return_release arch_atomic_sub_return
269 #define arch_atomic_sub_return_relaxed arch_atomic_sub_return
270 #else /* arch_atomic_sub_return_relaxed */
271
272 #ifndef arch_atomic_sub_return_acquire
273 static __always_inline int
arch_atomic_sub_return_acquire(int i,atomic_t * v)274 arch_atomic_sub_return_acquire(int i, atomic_t *v)
275 {
276 int ret = arch_atomic_sub_return_relaxed(i, v);
277 __atomic_acquire_fence();
278 return ret;
279 }
280 #define arch_atomic_sub_return_acquire arch_atomic_sub_return_acquire
281 #endif
282
283 #ifndef arch_atomic_sub_return_release
284 static __always_inline int
arch_atomic_sub_return_release(int i,atomic_t * v)285 arch_atomic_sub_return_release(int i, atomic_t *v)
286 {
287 __atomic_release_fence();
288 return arch_atomic_sub_return_relaxed(i, v);
289 }
290 #define arch_atomic_sub_return_release arch_atomic_sub_return_release
291 #endif
292
293 #ifndef arch_atomic_sub_return
294 static __always_inline int
arch_atomic_sub_return(int i,atomic_t * v)295 arch_atomic_sub_return(int i, atomic_t *v)
296 {
297 int ret;
298 __atomic_pre_full_fence();
299 ret = arch_atomic_sub_return_relaxed(i, v);
300 __atomic_post_full_fence();
301 return ret;
302 }
303 #define arch_atomic_sub_return arch_atomic_sub_return
304 #endif
305
306 #endif /* arch_atomic_sub_return_relaxed */
307
308 #ifndef arch_atomic_fetch_sub_relaxed
309 #define arch_atomic_fetch_sub_acquire arch_atomic_fetch_sub
310 #define arch_atomic_fetch_sub_release arch_atomic_fetch_sub
311 #define arch_atomic_fetch_sub_relaxed arch_atomic_fetch_sub
312 #else /* arch_atomic_fetch_sub_relaxed */
313
314 #ifndef arch_atomic_fetch_sub_acquire
315 static __always_inline int
arch_atomic_fetch_sub_acquire(int i,atomic_t * v)316 arch_atomic_fetch_sub_acquire(int i, atomic_t *v)
317 {
318 int ret = arch_atomic_fetch_sub_relaxed(i, v);
319 __atomic_acquire_fence();
320 return ret;
321 }
322 #define arch_atomic_fetch_sub_acquire arch_atomic_fetch_sub_acquire
323 #endif
324
325 #ifndef arch_atomic_fetch_sub_release
326 static __always_inline int
arch_atomic_fetch_sub_release(int i,atomic_t * v)327 arch_atomic_fetch_sub_release(int i, atomic_t *v)
328 {
329 __atomic_release_fence();
330 return arch_atomic_fetch_sub_relaxed(i, v);
331 }
332 #define arch_atomic_fetch_sub_release arch_atomic_fetch_sub_release
333 #endif
334
335 #ifndef arch_atomic_fetch_sub
336 static __always_inline int
arch_atomic_fetch_sub(int i,atomic_t * v)337 arch_atomic_fetch_sub(int i, atomic_t *v)
338 {
339 int ret;
340 __atomic_pre_full_fence();
341 ret = arch_atomic_fetch_sub_relaxed(i, v);
342 __atomic_post_full_fence();
343 return ret;
344 }
345 #define arch_atomic_fetch_sub arch_atomic_fetch_sub
346 #endif
347
348 #endif /* arch_atomic_fetch_sub_relaxed */
349
350 #ifndef arch_atomic_inc
351 static __always_inline void
arch_atomic_inc(atomic_t * v)352 arch_atomic_inc(atomic_t *v)
353 {
354 arch_atomic_add(1, v);
355 }
356 #define arch_atomic_inc arch_atomic_inc
357 #endif
358
359 #ifndef arch_atomic_inc_return_relaxed
360 #ifdef arch_atomic_inc_return
361 #define arch_atomic_inc_return_acquire arch_atomic_inc_return
362 #define arch_atomic_inc_return_release arch_atomic_inc_return
363 #define arch_atomic_inc_return_relaxed arch_atomic_inc_return
364 #endif /* arch_atomic_inc_return */
365
366 #ifndef arch_atomic_inc_return
367 static __always_inline int
arch_atomic_inc_return(atomic_t * v)368 arch_atomic_inc_return(atomic_t *v)
369 {
370 return arch_atomic_add_return(1, v);
371 }
372 #define arch_atomic_inc_return arch_atomic_inc_return
373 #endif
374
375 #ifndef arch_atomic_inc_return_acquire
376 static __always_inline int
arch_atomic_inc_return_acquire(atomic_t * v)377 arch_atomic_inc_return_acquire(atomic_t *v)
378 {
379 return arch_atomic_add_return_acquire(1, v);
380 }
381 #define arch_atomic_inc_return_acquire arch_atomic_inc_return_acquire
382 #endif
383
384 #ifndef arch_atomic_inc_return_release
385 static __always_inline int
arch_atomic_inc_return_release(atomic_t * v)386 arch_atomic_inc_return_release(atomic_t *v)
387 {
388 return arch_atomic_add_return_release(1, v);
389 }
390 #define arch_atomic_inc_return_release arch_atomic_inc_return_release
391 #endif
392
393 #ifndef arch_atomic_inc_return_relaxed
394 static __always_inline int
arch_atomic_inc_return_relaxed(atomic_t * v)395 arch_atomic_inc_return_relaxed(atomic_t *v)
396 {
397 return arch_atomic_add_return_relaxed(1, v);
398 }
399 #define arch_atomic_inc_return_relaxed arch_atomic_inc_return_relaxed
400 #endif
401
402 #else /* arch_atomic_inc_return_relaxed */
403
404 #ifndef arch_atomic_inc_return_acquire
405 static __always_inline int
arch_atomic_inc_return_acquire(atomic_t * v)406 arch_atomic_inc_return_acquire(atomic_t *v)
407 {
408 int ret = arch_atomic_inc_return_relaxed(v);
409 __atomic_acquire_fence();
410 return ret;
411 }
412 #define arch_atomic_inc_return_acquire arch_atomic_inc_return_acquire
413 #endif
414
415 #ifndef arch_atomic_inc_return_release
416 static __always_inline int
arch_atomic_inc_return_release(atomic_t * v)417 arch_atomic_inc_return_release(atomic_t *v)
418 {
419 __atomic_release_fence();
420 return arch_atomic_inc_return_relaxed(v);
421 }
422 #define arch_atomic_inc_return_release arch_atomic_inc_return_release
423 #endif
424
425 #ifndef arch_atomic_inc_return
426 static __always_inline int
arch_atomic_inc_return(atomic_t * v)427 arch_atomic_inc_return(atomic_t *v)
428 {
429 int ret;
430 __atomic_pre_full_fence();
431 ret = arch_atomic_inc_return_relaxed(v);
432 __atomic_post_full_fence();
433 return ret;
434 }
435 #define arch_atomic_inc_return arch_atomic_inc_return
436 #endif
437
438 #endif /* arch_atomic_inc_return_relaxed */
439
440 #ifndef arch_atomic_fetch_inc_relaxed
441 #ifdef arch_atomic_fetch_inc
442 #define arch_atomic_fetch_inc_acquire arch_atomic_fetch_inc
443 #define arch_atomic_fetch_inc_release arch_atomic_fetch_inc
444 #define arch_atomic_fetch_inc_relaxed arch_atomic_fetch_inc
445 #endif /* arch_atomic_fetch_inc */
446
447 #ifndef arch_atomic_fetch_inc
448 static __always_inline int
arch_atomic_fetch_inc(atomic_t * v)449 arch_atomic_fetch_inc(atomic_t *v)
450 {
451 return arch_atomic_fetch_add(1, v);
452 }
453 #define arch_atomic_fetch_inc arch_atomic_fetch_inc
454 #endif
455
456 #ifndef arch_atomic_fetch_inc_acquire
457 static __always_inline int
arch_atomic_fetch_inc_acquire(atomic_t * v)458 arch_atomic_fetch_inc_acquire(atomic_t *v)
459 {
460 return arch_atomic_fetch_add_acquire(1, v);
461 }
462 #define arch_atomic_fetch_inc_acquire arch_atomic_fetch_inc_acquire
463 #endif
464
465 #ifndef arch_atomic_fetch_inc_release
466 static __always_inline int
arch_atomic_fetch_inc_release(atomic_t * v)467 arch_atomic_fetch_inc_release(atomic_t *v)
468 {
469 return arch_atomic_fetch_add_release(1, v);
470 }
471 #define arch_atomic_fetch_inc_release arch_atomic_fetch_inc_release
472 #endif
473
474 #ifndef arch_atomic_fetch_inc_relaxed
475 static __always_inline int
arch_atomic_fetch_inc_relaxed(atomic_t * v)476 arch_atomic_fetch_inc_relaxed(atomic_t *v)
477 {
478 return arch_atomic_fetch_add_relaxed(1, v);
479 }
480 #define arch_atomic_fetch_inc_relaxed arch_atomic_fetch_inc_relaxed
481 #endif
482
483 #else /* arch_atomic_fetch_inc_relaxed */
484
485 #ifndef arch_atomic_fetch_inc_acquire
486 static __always_inline int
arch_atomic_fetch_inc_acquire(atomic_t * v)487 arch_atomic_fetch_inc_acquire(atomic_t *v)
488 {
489 int ret = arch_atomic_fetch_inc_relaxed(v);
490 __atomic_acquire_fence();
491 return ret;
492 }
493 #define arch_atomic_fetch_inc_acquire arch_atomic_fetch_inc_acquire
494 #endif
495
496 #ifndef arch_atomic_fetch_inc_release
497 static __always_inline int
arch_atomic_fetch_inc_release(atomic_t * v)498 arch_atomic_fetch_inc_release(atomic_t *v)
499 {
500 __atomic_release_fence();
501 return arch_atomic_fetch_inc_relaxed(v);
502 }
503 #define arch_atomic_fetch_inc_release arch_atomic_fetch_inc_release
504 #endif
505
506 #ifndef arch_atomic_fetch_inc
507 static __always_inline int
arch_atomic_fetch_inc(atomic_t * v)508 arch_atomic_fetch_inc(atomic_t *v)
509 {
510 int ret;
511 __atomic_pre_full_fence();
512 ret = arch_atomic_fetch_inc_relaxed(v);
513 __atomic_post_full_fence();
514 return ret;
515 }
516 #define arch_atomic_fetch_inc arch_atomic_fetch_inc
517 #endif
518
519 #endif /* arch_atomic_fetch_inc_relaxed */
520
521 #ifndef arch_atomic_dec
522 static __always_inline void
arch_atomic_dec(atomic_t * v)523 arch_atomic_dec(atomic_t *v)
524 {
525 arch_atomic_sub(1, v);
526 }
527 #define arch_atomic_dec arch_atomic_dec
528 #endif
529
530 #ifndef arch_atomic_dec_return_relaxed
531 #ifdef arch_atomic_dec_return
532 #define arch_atomic_dec_return_acquire arch_atomic_dec_return
533 #define arch_atomic_dec_return_release arch_atomic_dec_return
534 #define arch_atomic_dec_return_relaxed arch_atomic_dec_return
535 #endif /* arch_atomic_dec_return */
536
537 #ifndef arch_atomic_dec_return
538 static __always_inline int
arch_atomic_dec_return(atomic_t * v)539 arch_atomic_dec_return(atomic_t *v)
540 {
541 return arch_atomic_sub_return(1, v);
542 }
543 #define arch_atomic_dec_return arch_atomic_dec_return
544 #endif
545
546 #ifndef arch_atomic_dec_return_acquire
547 static __always_inline int
arch_atomic_dec_return_acquire(atomic_t * v)548 arch_atomic_dec_return_acquire(atomic_t *v)
549 {
550 return arch_atomic_sub_return_acquire(1, v);
551 }
552 #define arch_atomic_dec_return_acquire arch_atomic_dec_return_acquire
553 #endif
554
555 #ifndef arch_atomic_dec_return_release
556 static __always_inline int
arch_atomic_dec_return_release(atomic_t * v)557 arch_atomic_dec_return_release(atomic_t *v)
558 {
559 return arch_atomic_sub_return_release(1, v);
560 }
561 #define arch_atomic_dec_return_release arch_atomic_dec_return_release
562 #endif
563
564 #ifndef arch_atomic_dec_return_relaxed
565 static __always_inline int
arch_atomic_dec_return_relaxed(atomic_t * v)566 arch_atomic_dec_return_relaxed(atomic_t *v)
567 {
568 return arch_atomic_sub_return_relaxed(1, v);
569 }
570 #define arch_atomic_dec_return_relaxed arch_atomic_dec_return_relaxed
571 #endif
572
573 #else /* arch_atomic_dec_return_relaxed */
574
575 #ifndef arch_atomic_dec_return_acquire
576 static __always_inline int
arch_atomic_dec_return_acquire(atomic_t * v)577 arch_atomic_dec_return_acquire(atomic_t *v)
578 {
579 int ret = arch_atomic_dec_return_relaxed(v);
580 __atomic_acquire_fence();
581 return ret;
582 }
583 #define arch_atomic_dec_return_acquire arch_atomic_dec_return_acquire
584 #endif
585
586 #ifndef arch_atomic_dec_return_release
587 static __always_inline int
arch_atomic_dec_return_release(atomic_t * v)588 arch_atomic_dec_return_release(atomic_t *v)
589 {
590 __atomic_release_fence();
591 return arch_atomic_dec_return_relaxed(v);
592 }
593 #define arch_atomic_dec_return_release arch_atomic_dec_return_release
594 #endif
595
596 #ifndef arch_atomic_dec_return
597 static __always_inline int
arch_atomic_dec_return(atomic_t * v)598 arch_atomic_dec_return(atomic_t *v)
599 {
600 int ret;
601 __atomic_pre_full_fence();
602 ret = arch_atomic_dec_return_relaxed(v);
603 __atomic_post_full_fence();
604 return ret;
605 }
606 #define arch_atomic_dec_return arch_atomic_dec_return
607 #endif
608
609 #endif /* arch_atomic_dec_return_relaxed */
610
611 #ifndef arch_atomic_fetch_dec_relaxed
612 #ifdef arch_atomic_fetch_dec
613 #define arch_atomic_fetch_dec_acquire arch_atomic_fetch_dec
614 #define arch_atomic_fetch_dec_release arch_atomic_fetch_dec
615 #define arch_atomic_fetch_dec_relaxed arch_atomic_fetch_dec
616 #endif /* arch_atomic_fetch_dec */
617
618 #ifndef arch_atomic_fetch_dec
619 static __always_inline int
arch_atomic_fetch_dec(atomic_t * v)620 arch_atomic_fetch_dec(atomic_t *v)
621 {
622 return arch_atomic_fetch_sub(1, v);
623 }
624 #define arch_atomic_fetch_dec arch_atomic_fetch_dec
625 #endif
626
627 #ifndef arch_atomic_fetch_dec_acquire
628 static __always_inline int
arch_atomic_fetch_dec_acquire(atomic_t * v)629 arch_atomic_fetch_dec_acquire(atomic_t *v)
630 {
631 return arch_atomic_fetch_sub_acquire(1, v);
632 }
633 #define arch_atomic_fetch_dec_acquire arch_atomic_fetch_dec_acquire
634 #endif
635
636 #ifndef arch_atomic_fetch_dec_release
637 static __always_inline int
arch_atomic_fetch_dec_release(atomic_t * v)638 arch_atomic_fetch_dec_release(atomic_t *v)
639 {
640 return arch_atomic_fetch_sub_release(1, v);
641 }
642 #define arch_atomic_fetch_dec_release arch_atomic_fetch_dec_release
643 #endif
644
645 #ifndef arch_atomic_fetch_dec_relaxed
646 static __always_inline int
arch_atomic_fetch_dec_relaxed(atomic_t * v)647 arch_atomic_fetch_dec_relaxed(atomic_t *v)
648 {
649 return arch_atomic_fetch_sub_relaxed(1, v);
650 }
651 #define arch_atomic_fetch_dec_relaxed arch_atomic_fetch_dec_relaxed
652 #endif
653
654 #else /* arch_atomic_fetch_dec_relaxed */
655
656 #ifndef arch_atomic_fetch_dec_acquire
657 static __always_inline int
arch_atomic_fetch_dec_acquire(atomic_t * v)658 arch_atomic_fetch_dec_acquire(atomic_t *v)
659 {
660 int ret = arch_atomic_fetch_dec_relaxed(v);
661 __atomic_acquire_fence();
662 return ret;
663 }
664 #define arch_atomic_fetch_dec_acquire arch_atomic_fetch_dec_acquire
665 #endif
666
667 #ifndef arch_atomic_fetch_dec_release
668 static __always_inline int
arch_atomic_fetch_dec_release(atomic_t * v)669 arch_atomic_fetch_dec_release(atomic_t *v)
670 {
671 __atomic_release_fence();
672 return arch_atomic_fetch_dec_relaxed(v);
673 }
674 #define arch_atomic_fetch_dec_release arch_atomic_fetch_dec_release
675 #endif
676
677 #ifndef arch_atomic_fetch_dec
678 static __always_inline int
arch_atomic_fetch_dec(atomic_t * v)679 arch_atomic_fetch_dec(atomic_t *v)
680 {
681 int ret;
682 __atomic_pre_full_fence();
683 ret = arch_atomic_fetch_dec_relaxed(v);
684 __atomic_post_full_fence();
685 return ret;
686 }
687 #define arch_atomic_fetch_dec arch_atomic_fetch_dec
688 #endif
689
690 #endif /* arch_atomic_fetch_dec_relaxed */
691
692 #ifndef arch_atomic_fetch_and_relaxed
693 #define arch_atomic_fetch_and_acquire arch_atomic_fetch_and
694 #define arch_atomic_fetch_and_release arch_atomic_fetch_and
695 #define arch_atomic_fetch_and_relaxed arch_atomic_fetch_and
696 #else /* arch_atomic_fetch_and_relaxed */
697
698 #ifndef arch_atomic_fetch_and_acquire
699 static __always_inline int
arch_atomic_fetch_and_acquire(int i,atomic_t * v)700 arch_atomic_fetch_and_acquire(int i, atomic_t *v)
701 {
702 int ret = arch_atomic_fetch_and_relaxed(i, v);
703 __atomic_acquire_fence();
704 return ret;
705 }
706 #define arch_atomic_fetch_and_acquire arch_atomic_fetch_and_acquire
707 #endif
708
709 #ifndef arch_atomic_fetch_and_release
710 static __always_inline int
arch_atomic_fetch_and_release(int i,atomic_t * v)711 arch_atomic_fetch_and_release(int i, atomic_t *v)
712 {
713 __atomic_release_fence();
714 return arch_atomic_fetch_and_relaxed(i, v);
715 }
716 #define arch_atomic_fetch_and_release arch_atomic_fetch_and_release
717 #endif
718
719 #ifndef arch_atomic_fetch_and
720 static __always_inline int
arch_atomic_fetch_and(int i,atomic_t * v)721 arch_atomic_fetch_and(int i, atomic_t *v)
722 {
723 int ret;
724 __atomic_pre_full_fence();
725 ret = arch_atomic_fetch_and_relaxed(i, v);
726 __atomic_post_full_fence();
727 return ret;
728 }
729 #define arch_atomic_fetch_and arch_atomic_fetch_and
730 #endif
731
732 #endif /* arch_atomic_fetch_and_relaxed */
733
734 #ifndef arch_atomic_andnot
735 static __always_inline void
arch_atomic_andnot(int i,atomic_t * v)736 arch_atomic_andnot(int i, atomic_t *v)
737 {
738 arch_atomic_and(~i, v);
739 }
740 #define arch_atomic_andnot arch_atomic_andnot
741 #endif
742
743 #ifndef arch_atomic_fetch_andnot_relaxed
744 #ifdef arch_atomic_fetch_andnot
745 #define arch_atomic_fetch_andnot_acquire arch_atomic_fetch_andnot
746 #define arch_atomic_fetch_andnot_release arch_atomic_fetch_andnot
747 #define arch_atomic_fetch_andnot_relaxed arch_atomic_fetch_andnot
748 #endif /* arch_atomic_fetch_andnot */
749
750 #ifndef arch_atomic_fetch_andnot
751 static __always_inline int
arch_atomic_fetch_andnot(int i,atomic_t * v)752 arch_atomic_fetch_andnot(int i, atomic_t *v)
753 {
754 return arch_atomic_fetch_and(~i, v);
755 }
756 #define arch_atomic_fetch_andnot arch_atomic_fetch_andnot
757 #endif
758
759 #ifndef arch_atomic_fetch_andnot_acquire
760 static __always_inline int
arch_atomic_fetch_andnot_acquire(int i,atomic_t * v)761 arch_atomic_fetch_andnot_acquire(int i, atomic_t *v)
762 {
763 return arch_atomic_fetch_and_acquire(~i, v);
764 }
765 #define arch_atomic_fetch_andnot_acquire arch_atomic_fetch_andnot_acquire
766 #endif
767
768 #ifndef arch_atomic_fetch_andnot_release
769 static __always_inline int
arch_atomic_fetch_andnot_release(int i,atomic_t * v)770 arch_atomic_fetch_andnot_release(int i, atomic_t *v)
771 {
772 return arch_atomic_fetch_and_release(~i, v);
773 }
774 #define arch_atomic_fetch_andnot_release arch_atomic_fetch_andnot_release
775 #endif
776
777 #ifndef arch_atomic_fetch_andnot_relaxed
778 static __always_inline int
arch_atomic_fetch_andnot_relaxed(int i,atomic_t * v)779 arch_atomic_fetch_andnot_relaxed(int i, atomic_t *v)
780 {
781 return arch_atomic_fetch_and_relaxed(~i, v);
782 }
783 #define arch_atomic_fetch_andnot_relaxed arch_atomic_fetch_andnot_relaxed
784 #endif
785
786 #else /* arch_atomic_fetch_andnot_relaxed */
787
788 #ifndef arch_atomic_fetch_andnot_acquire
789 static __always_inline int
arch_atomic_fetch_andnot_acquire(int i,atomic_t * v)790 arch_atomic_fetch_andnot_acquire(int i, atomic_t *v)
791 {
792 int ret = arch_atomic_fetch_andnot_relaxed(i, v);
793 __atomic_acquire_fence();
794 return ret;
795 }
796 #define arch_atomic_fetch_andnot_acquire arch_atomic_fetch_andnot_acquire
797 #endif
798
799 #ifndef arch_atomic_fetch_andnot_release
800 static __always_inline int
arch_atomic_fetch_andnot_release(int i,atomic_t * v)801 arch_atomic_fetch_andnot_release(int i, atomic_t *v)
802 {
803 __atomic_release_fence();
804 return arch_atomic_fetch_andnot_relaxed(i, v);
805 }
806 #define arch_atomic_fetch_andnot_release arch_atomic_fetch_andnot_release
807 #endif
808
809 #ifndef arch_atomic_fetch_andnot
810 static __always_inline int
arch_atomic_fetch_andnot(int i,atomic_t * v)811 arch_atomic_fetch_andnot(int i, atomic_t *v)
812 {
813 int ret;
814 __atomic_pre_full_fence();
815 ret = arch_atomic_fetch_andnot_relaxed(i, v);
816 __atomic_post_full_fence();
817 return ret;
818 }
819 #define arch_atomic_fetch_andnot arch_atomic_fetch_andnot
820 #endif
821
822 #endif /* arch_atomic_fetch_andnot_relaxed */
823
824 #ifndef arch_atomic_fetch_or_relaxed
825 #define arch_atomic_fetch_or_acquire arch_atomic_fetch_or
826 #define arch_atomic_fetch_or_release arch_atomic_fetch_or
827 #define arch_atomic_fetch_or_relaxed arch_atomic_fetch_or
828 #else /* arch_atomic_fetch_or_relaxed */
829
830 #ifndef arch_atomic_fetch_or_acquire
831 static __always_inline int
arch_atomic_fetch_or_acquire(int i,atomic_t * v)832 arch_atomic_fetch_or_acquire(int i, atomic_t *v)
833 {
834 int ret = arch_atomic_fetch_or_relaxed(i, v);
835 __atomic_acquire_fence();
836 return ret;
837 }
838 #define arch_atomic_fetch_or_acquire arch_atomic_fetch_or_acquire
839 #endif
840
841 #ifndef arch_atomic_fetch_or_release
842 static __always_inline int
arch_atomic_fetch_or_release(int i,atomic_t * v)843 arch_atomic_fetch_or_release(int i, atomic_t *v)
844 {
845 __atomic_release_fence();
846 return arch_atomic_fetch_or_relaxed(i, v);
847 }
848 #define arch_atomic_fetch_or_release arch_atomic_fetch_or_release
849 #endif
850
851 #ifndef arch_atomic_fetch_or
852 static __always_inline int
arch_atomic_fetch_or(int i,atomic_t * v)853 arch_atomic_fetch_or(int i, atomic_t *v)
854 {
855 int ret;
856 __atomic_pre_full_fence();
857 ret = arch_atomic_fetch_or_relaxed(i, v);
858 __atomic_post_full_fence();
859 return ret;
860 }
861 #define arch_atomic_fetch_or arch_atomic_fetch_or
862 #endif
863
864 #endif /* arch_atomic_fetch_or_relaxed */
865
866 #ifndef arch_atomic_fetch_xor_relaxed
867 #define arch_atomic_fetch_xor_acquire arch_atomic_fetch_xor
868 #define arch_atomic_fetch_xor_release arch_atomic_fetch_xor
869 #define arch_atomic_fetch_xor_relaxed arch_atomic_fetch_xor
870 #else /* arch_atomic_fetch_xor_relaxed */
871
872 #ifndef arch_atomic_fetch_xor_acquire
873 static __always_inline int
arch_atomic_fetch_xor_acquire(int i,atomic_t * v)874 arch_atomic_fetch_xor_acquire(int i, atomic_t *v)
875 {
876 int ret = arch_atomic_fetch_xor_relaxed(i, v);
877 __atomic_acquire_fence();
878 return ret;
879 }
880 #define arch_atomic_fetch_xor_acquire arch_atomic_fetch_xor_acquire
881 #endif
882
883 #ifndef arch_atomic_fetch_xor_release
884 static __always_inline int
arch_atomic_fetch_xor_release(int i,atomic_t * v)885 arch_atomic_fetch_xor_release(int i, atomic_t *v)
886 {
887 __atomic_release_fence();
888 return arch_atomic_fetch_xor_relaxed(i, v);
889 }
890 #define arch_atomic_fetch_xor_release arch_atomic_fetch_xor_release
891 #endif
892
893 #ifndef arch_atomic_fetch_xor
894 static __always_inline int
arch_atomic_fetch_xor(int i,atomic_t * v)895 arch_atomic_fetch_xor(int i, atomic_t *v)
896 {
897 int ret;
898 __atomic_pre_full_fence();
899 ret = arch_atomic_fetch_xor_relaxed(i, v);
900 __atomic_post_full_fence();
901 return ret;
902 }
903 #define arch_atomic_fetch_xor arch_atomic_fetch_xor
904 #endif
905
906 #endif /* arch_atomic_fetch_xor_relaxed */
907
908 #ifndef arch_atomic_xchg_relaxed
909 #define arch_atomic_xchg_acquire arch_atomic_xchg
910 #define arch_atomic_xchg_release arch_atomic_xchg
911 #define arch_atomic_xchg_relaxed arch_atomic_xchg
912 #else /* arch_atomic_xchg_relaxed */
913
914 #ifndef arch_atomic_xchg_acquire
915 static __always_inline int
arch_atomic_xchg_acquire(atomic_t * v,int i)916 arch_atomic_xchg_acquire(atomic_t *v, int i)
917 {
918 int ret = arch_atomic_xchg_relaxed(v, i);
919 __atomic_acquire_fence();
920 return ret;
921 }
922 #define arch_atomic_xchg_acquire arch_atomic_xchg_acquire
923 #endif
924
925 #ifndef arch_atomic_xchg_release
926 static __always_inline int
arch_atomic_xchg_release(atomic_t * v,int i)927 arch_atomic_xchg_release(atomic_t *v, int i)
928 {
929 __atomic_release_fence();
930 return arch_atomic_xchg_relaxed(v, i);
931 }
932 #define arch_atomic_xchg_release arch_atomic_xchg_release
933 #endif
934
935 #ifndef arch_atomic_xchg
936 static __always_inline int
arch_atomic_xchg(atomic_t * v,int i)937 arch_atomic_xchg(atomic_t *v, int i)
938 {
939 int ret;
940 __atomic_pre_full_fence();
941 ret = arch_atomic_xchg_relaxed(v, i);
942 __atomic_post_full_fence();
943 return ret;
944 }
945 #define arch_atomic_xchg arch_atomic_xchg
946 #endif
947
948 #endif /* arch_atomic_xchg_relaxed */
949
950 #ifndef arch_atomic_cmpxchg_relaxed
951 #define arch_atomic_cmpxchg_acquire arch_atomic_cmpxchg
952 #define arch_atomic_cmpxchg_release arch_atomic_cmpxchg
953 #define arch_atomic_cmpxchg_relaxed arch_atomic_cmpxchg
954 #else /* arch_atomic_cmpxchg_relaxed */
955
956 #ifndef arch_atomic_cmpxchg_acquire
957 static __always_inline int
arch_atomic_cmpxchg_acquire(atomic_t * v,int old,int new)958 arch_atomic_cmpxchg_acquire(atomic_t *v, int old, int new)
959 {
960 int ret = arch_atomic_cmpxchg_relaxed(v, old, new);
961 __atomic_acquire_fence();
962 return ret;
963 }
964 #define arch_atomic_cmpxchg_acquire arch_atomic_cmpxchg_acquire
965 #endif
966
967 #ifndef arch_atomic_cmpxchg_release
968 static __always_inline int
arch_atomic_cmpxchg_release(atomic_t * v,int old,int new)969 arch_atomic_cmpxchg_release(atomic_t *v, int old, int new)
970 {
971 __atomic_release_fence();
972 return arch_atomic_cmpxchg_relaxed(v, old, new);
973 }
974 #define arch_atomic_cmpxchg_release arch_atomic_cmpxchg_release
975 #endif
976
977 #ifndef arch_atomic_cmpxchg
978 static __always_inline int
arch_atomic_cmpxchg(atomic_t * v,int old,int new)979 arch_atomic_cmpxchg(atomic_t *v, int old, int new)
980 {
981 int ret;
982 __atomic_pre_full_fence();
983 ret = arch_atomic_cmpxchg_relaxed(v, old, new);
984 __atomic_post_full_fence();
985 return ret;
986 }
987 #define arch_atomic_cmpxchg arch_atomic_cmpxchg
988 #endif
989
990 #endif /* arch_atomic_cmpxchg_relaxed */
991
992 #ifndef arch_atomic_try_cmpxchg_relaxed
993 #ifdef arch_atomic_try_cmpxchg
994 #define arch_atomic_try_cmpxchg_acquire arch_atomic_try_cmpxchg
995 #define arch_atomic_try_cmpxchg_release arch_atomic_try_cmpxchg
996 #define arch_atomic_try_cmpxchg_relaxed arch_atomic_try_cmpxchg
997 #endif /* arch_atomic_try_cmpxchg */
998
999 #ifndef arch_atomic_try_cmpxchg
1000 static __always_inline bool
arch_atomic_try_cmpxchg(atomic_t * v,int * old,int new)1001 arch_atomic_try_cmpxchg(atomic_t *v, int *old, int new)
1002 {
1003 int r, o = *old;
1004 r = arch_atomic_cmpxchg(v, o, new);
1005 if (unlikely(r != o))
1006 *old = r;
1007 return likely(r == o);
1008 }
1009 #define arch_atomic_try_cmpxchg arch_atomic_try_cmpxchg
1010 #endif
1011
1012 #ifndef arch_atomic_try_cmpxchg_acquire
1013 static __always_inline bool
arch_atomic_try_cmpxchg_acquire(atomic_t * v,int * old,int new)1014 arch_atomic_try_cmpxchg_acquire(atomic_t *v, int *old, int new)
1015 {
1016 int r, o = *old;
1017 r = arch_atomic_cmpxchg_acquire(v, o, new);
1018 if (unlikely(r != o))
1019 *old = r;
1020 return likely(r == o);
1021 }
1022 #define arch_atomic_try_cmpxchg_acquire arch_atomic_try_cmpxchg_acquire
1023 #endif
1024
1025 #ifndef arch_atomic_try_cmpxchg_release
1026 static __always_inline bool
arch_atomic_try_cmpxchg_release(atomic_t * v,int * old,int new)1027 arch_atomic_try_cmpxchg_release(atomic_t *v, int *old, int new)
1028 {
1029 int r, o = *old;
1030 r = arch_atomic_cmpxchg_release(v, o, new);
1031 if (unlikely(r != o))
1032 *old = r;
1033 return likely(r == o);
1034 }
1035 #define arch_atomic_try_cmpxchg_release arch_atomic_try_cmpxchg_release
1036 #endif
1037
1038 #ifndef arch_atomic_try_cmpxchg_relaxed
1039 static __always_inline bool
arch_atomic_try_cmpxchg_relaxed(atomic_t * v,int * old,int new)1040 arch_atomic_try_cmpxchg_relaxed(atomic_t *v, int *old, int new)
1041 {
1042 int r, o = *old;
1043 r = arch_atomic_cmpxchg_relaxed(v, o, new);
1044 if (unlikely(r != o))
1045 *old = r;
1046 return likely(r == o);
1047 }
1048 #define arch_atomic_try_cmpxchg_relaxed arch_atomic_try_cmpxchg_relaxed
1049 #endif
1050
1051 #else /* arch_atomic_try_cmpxchg_relaxed */
1052
1053 #ifndef arch_atomic_try_cmpxchg_acquire
1054 static __always_inline bool
arch_atomic_try_cmpxchg_acquire(atomic_t * v,int * old,int new)1055 arch_atomic_try_cmpxchg_acquire(atomic_t *v, int *old, int new)
1056 {
1057 bool ret = arch_atomic_try_cmpxchg_relaxed(v, old, new);
1058 __atomic_acquire_fence();
1059 return ret;
1060 }
1061 #define arch_atomic_try_cmpxchg_acquire arch_atomic_try_cmpxchg_acquire
1062 #endif
1063
1064 #ifndef arch_atomic_try_cmpxchg_release
1065 static __always_inline bool
arch_atomic_try_cmpxchg_release(atomic_t * v,int * old,int new)1066 arch_atomic_try_cmpxchg_release(atomic_t *v, int *old, int new)
1067 {
1068 __atomic_release_fence();
1069 return arch_atomic_try_cmpxchg_relaxed(v, old, new);
1070 }
1071 #define arch_atomic_try_cmpxchg_release arch_atomic_try_cmpxchg_release
1072 #endif
1073
1074 #ifndef arch_atomic_try_cmpxchg
1075 static __always_inline bool
arch_atomic_try_cmpxchg(atomic_t * v,int * old,int new)1076 arch_atomic_try_cmpxchg(atomic_t *v, int *old, int new)
1077 {
1078 bool ret;
1079 __atomic_pre_full_fence();
1080 ret = arch_atomic_try_cmpxchg_relaxed(v, old, new);
1081 __atomic_post_full_fence();
1082 return ret;
1083 }
1084 #define arch_atomic_try_cmpxchg arch_atomic_try_cmpxchg
1085 #endif
1086
1087 #endif /* arch_atomic_try_cmpxchg_relaxed */
1088
1089 #ifndef arch_atomic_sub_and_test
1090 /**
1091 * arch_atomic_sub_and_test - subtract value from variable and test result
1092 * @i: integer value to subtract
1093 * @v: pointer of type atomic_t
1094 *
1095 * Atomically subtracts @i from @v and returns
1096 * true if the result is zero, or false for all
1097 * other cases.
1098 */
1099 static __always_inline bool
arch_atomic_sub_and_test(int i,atomic_t * v)1100 arch_atomic_sub_and_test(int i, atomic_t *v)
1101 {
1102 return arch_atomic_sub_return(i, v) == 0;
1103 }
1104 #define arch_atomic_sub_and_test arch_atomic_sub_and_test
1105 #endif
1106
1107 #ifndef arch_atomic_dec_and_test
1108 /**
1109 * arch_atomic_dec_and_test - decrement and test
1110 * @v: pointer of type atomic_t
1111 *
1112 * Atomically decrements @v by 1 and
1113 * returns true if the result is 0, or false for all other
1114 * cases.
1115 */
1116 static __always_inline bool
arch_atomic_dec_and_test(atomic_t * v)1117 arch_atomic_dec_and_test(atomic_t *v)
1118 {
1119 return arch_atomic_dec_return(v) == 0;
1120 }
1121 #define arch_atomic_dec_and_test arch_atomic_dec_and_test
1122 #endif
1123
1124 #ifndef arch_atomic_inc_and_test
1125 /**
1126 * arch_atomic_inc_and_test - increment and test
1127 * @v: pointer of type atomic_t
1128 *
1129 * Atomically increments @v by 1
1130 * and returns true if the result is zero, or false for all
1131 * other cases.
1132 */
1133 static __always_inline bool
arch_atomic_inc_and_test(atomic_t * v)1134 arch_atomic_inc_and_test(atomic_t *v)
1135 {
1136 return arch_atomic_inc_return(v) == 0;
1137 }
1138 #define arch_atomic_inc_and_test arch_atomic_inc_and_test
1139 #endif
1140
1141 #ifndef arch_atomic_add_negative
1142 /**
1143 * arch_atomic_add_negative - add and test if negative
1144 * @i: integer value to add
1145 * @v: pointer of type atomic_t
1146 *
1147 * Atomically adds @i to @v and returns true
1148 * if the result is negative, or false when
1149 * result is greater than or equal to zero.
1150 */
1151 static __always_inline bool
arch_atomic_add_negative(int i,atomic_t * v)1152 arch_atomic_add_negative(int i, atomic_t *v)
1153 {
1154 return arch_atomic_add_return(i, v) < 0;
1155 }
1156 #define arch_atomic_add_negative arch_atomic_add_negative
1157 #endif
1158
1159 #ifndef arch_atomic_fetch_add_unless
1160 /**
1161 * arch_atomic_fetch_add_unless - add unless the number is already a given value
1162 * @v: pointer of type atomic_t
1163 * @a: the amount to add to v...
1164 * @u: ...unless v is equal to u.
1165 *
1166 * Atomically adds @a to @v, so long as @v was not already @u.
1167 * Returns original value of @v
1168 */
1169 static __always_inline int
arch_atomic_fetch_add_unless(atomic_t * v,int a,int u)1170 arch_atomic_fetch_add_unless(atomic_t *v, int a, int u)
1171 {
1172 int c = arch_atomic_read(v);
1173
1174 do {
1175 if (unlikely(c == u))
1176 break;
1177 } while (!arch_atomic_try_cmpxchg(v, &c, c + a));
1178
1179 return c;
1180 }
1181 #define arch_atomic_fetch_add_unless arch_atomic_fetch_add_unless
1182 #endif
1183
1184 #ifndef arch_atomic_add_unless
1185 /**
1186 * arch_atomic_add_unless - add unless the number is already a given value
1187 * @v: pointer of type atomic_t
1188 * @a: the amount to add to v...
1189 * @u: ...unless v is equal to u.
1190 *
1191 * Atomically adds @a to @v, if @v was not already @u.
1192 * Returns true if the addition was done.
1193 */
1194 static __always_inline bool
arch_atomic_add_unless(atomic_t * v,int a,int u)1195 arch_atomic_add_unless(atomic_t *v, int a, int u)
1196 {
1197 return arch_atomic_fetch_add_unless(v, a, u) != u;
1198 }
1199 #define arch_atomic_add_unless arch_atomic_add_unless
1200 #endif
1201
1202 #ifndef arch_atomic_inc_not_zero
1203 /**
1204 * arch_atomic_inc_not_zero - increment unless the number is zero
1205 * @v: pointer of type atomic_t
1206 *
1207 * Atomically increments @v by 1, if @v is non-zero.
1208 * Returns true if the increment was done.
1209 */
1210 static __always_inline bool
arch_atomic_inc_not_zero(atomic_t * v)1211 arch_atomic_inc_not_zero(atomic_t *v)
1212 {
1213 return arch_atomic_add_unless(v, 1, 0);
1214 }
1215 #define arch_atomic_inc_not_zero arch_atomic_inc_not_zero
1216 #endif
1217
1218 #ifndef arch_atomic_inc_unless_negative
1219 static __always_inline bool
arch_atomic_inc_unless_negative(atomic_t * v)1220 arch_atomic_inc_unless_negative(atomic_t *v)
1221 {
1222 int c = arch_atomic_read(v);
1223
1224 do {
1225 if (unlikely(c < 0))
1226 return false;
1227 } while (!arch_atomic_try_cmpxchg(v, &c, c + 1));
1228
1229 return true;
1230 }
1231 #define arch_atomic_inc_unless_negative arch_atomic_inc_unless_negative
1232 #endif
1233
1234 #ifndef arch_atomic_dec_unless_positive
1235 static __always_inline bool
arch_atomic_dec_unless_positive(atomic_t * v)1236 arch_atomic_dec_unless_positive(atomic_t *v)
1237 {
1238 int c = arch_atomic_read(v);
1239
1240 do {
1241 if (unlikely(c > 0))
1242 return false;
1243 } while (!arch_atomic_try_cmpxchg(v, &c, c - 1));
1244
1245 return true;
1246 }
1247 #define arch_atomic_dec_unless_positive arch_atomic_dec_unless_positive
1248 #endif
1249
1250 #ifndef arch_atomic_dec_if_positive
1251 static __always_inline int
arch_atomic_dec_if_positive(atomic_t * v)1252 arch_atomic_dec_if_positive(atomic_t *v)
1253 {
1254 int dec, c = arch_atomic_read(v);
1255
1256 do {
1257 dec = c - 1;
1258 if (unlikely(dec < 0))
1259 break;
1260 } while (!arch_atomic_try_cmpxchg(v, &c, dec));
1261
1262 return dec;
1263 }
1264 #define arch_atomic_dec_if_positive arch_atomic_dec_if_positive
1265 #endif
1266
1267 #ifdef CONFIG_GENERIC_ATOMIC64
1268 #include <asm-generic/atomic64.h>
1269 #endif
1270
1271 #ifndef arch_atomic64_read_acquire
1272 static __always_inline s64
arch_atomic64_read_acquire(const atomic64_t * v)1273 arch_atomic64_read_acquire(const atomic64_t *v)
1274 {
1275 s64 ret;
1276
1277 if (__native_word(atomic64_t)) {
1278 ret = smp_load_acquire(&(v)->counter);
1279 } else {
1280 ret = arch_atomic64_read(v);
1281 __atomic_acquire_fence();
1282 }
1283
1284 return ret;
1285 }
1286 #define arch_atomic64_read_acquire arch_atomic64_read_acquire
1287 #endif
1288
1289 #ifndef arch_atomic64_set_release
1290 static __always_inline void
arch_atomic64_set_release(atomic64_t * v,s64 i)1291 arch_atomic64_set_release(atomic64_t *v, s64 i)
1292 {
1293 if (__native_word(atomic64_t)) {
1294 smp_store_release(&(v)->counter, i);
1295 } else {
1296 __atomic_release_fence();
1297 arch_atomic64_set(v, i);
1298 }
1299 }
1300 #define arch_atomic64_set_release arch_atomic64_set_release
1301 #endif
1302
1303 #ifndef arch_atomic64_add_return_relaxed
1304 #define arch_atomic64_add_return_acquire arch_atomic64_add_return
1305 #define arch_atomic64_add_return_release arch_atomic64_add_return
1306 #define arch_atomic64_add_return_relaxed arch_atomic64_add_return
1307 #else /* arch_atomic64_add_return_relaxed */
1308
1309 #ifndef arch_atomic64_add_return_acquire
1310 static __always_inline s64
arch_atomic64_add_return_acquire(s64 i,atomic64_t * v)1311 arch_atomic64_add_return_acquire(s64 i, atomic64_t *v)
1312 {
1313 s64 ret = arch_atomic64_add_return_relaxed(i, v);
1314 __atomic_acquire_fence();
1315 return ret;
1316 }
1317 #define arch_atomic64_add_return_acquire arch_atomic64_add_return_acquire
1318 #endif
1319
1320 #ifndef arch_atomic64_add_return_release
1321 static __always_inline s64
arch_atomic64_add_return_release(s64 i,atomic64_t * v)1322 arch_atomic64_add_return_release(s64 i, atomic64_t *v)
1323 {
1324 __atomic_release_fence();
1325 return arch_atomic64_add_return_relaxed(i, v);
1326 }
1327 #define arch_atomic64_add_return_release arch_atomic64_add_return_release
1328 #endif
1329
1330 #ifndef arch_atomic64_add_return
1331 static __always_inline s64
arch_atomic64_add_return(s64 i,atomic64_t * v)1332 arch_atomic64_add_return(s64 i, atomic64_t *v)
1333 {
1334 s64 ret;
1335 __atomic_pre_full_fence();
1336 ret = arch_atomic64_add_return_relaxed(i, v);
1337 __atomic_post_full_fence();
1338 return ret;
1339 }
1340 #define arch_atomic64_add_return arch_atomic64_add_return
1341 #endif
1342
1343 #endif /* arch_atomic64_add_return_relaxed */
1344
1345 #ifndef arch_atomic64_fetch_add_relaxed
1346 #define arch_atomic64_fetch_add_acquire arch_atomic64_fetch_add
1347 #define arch_atomic64_fetch_add_release arch_atomic64_fetch_add
1348 #define arch_atomic64_fetch_add_relaxed arch_atomic64_fetch_add
1349 #else /* arch_atomic64_fetch_add_relaxed */
1350
1351 #ifndef arch_atomic64_fetch_add_acquire
1352 static __always_inline s64
arch_atomic64_fetch_add_acquire(s64 i,atomic64_t * v)1353 arch_atomic64_fetch_add_acquire(s64 i, atomic64_t *v)
1354 {
1355 s64 ret = arch_atomic64_fetch_add_relaxed(i, v);
1356 __atomic_acquire_fence();
1357 return ret;
1358 }
1359 #define arch_atomic64_fetch_add_acquire arch_atomic64_fetch_add_acquire
1360 #endif
1361
1362 #ifndef arch_atomic64_fetch_add_release
1363 static __always_inline s64
arch_atomic64_fetch_add_release(s64 i,atomic64_t * v)1364 arch_atomic64_fetch_add_release(s64 i, atomic64_t *v)
1365 {
1366 __atomic_release_fence();
1367 return arch_atomic64_fetch_add_relaxed(i, v);
1368 }
1369 #define arch_atomic64_fetch_add_release arch_atomic64_fetch_add_release
1370 #endif
1371
1372 #ifndef arch_atomic64_fetch_add
1373 static __always_inline s64
arch_atomic64_fetch_add(s64 i,atomic64_t * v)1374 arch_atomic64_fetch_add(s64 i, atomic64_t *v)
1375 {
1376 s64 ret;
1377 __atomic_pre_full_fence();
1378 ret = arch_atomic64_fetch_add_relaxed(i, v);
1379 __atomic_post_full_fence();
1380 return ret;
1381 }
1382 #define arch_atomic64_fetch_add arch_atomic64_fetch_add
1383 #endif
1384
1385 #endif /* arch_atomic64_fetch_add_relaxed */
1386
1387 #ifndef arch_atomic64_sub_return_relaxed
1388 #define arch_atomic64_sub_return_acquire arch_atomic64_sub_return
1389 #define arch_atomic64_sub_return_release arch_atomic64_sub_return
1390 #define arch_atomic64_sub_return_relaxed arch_atomic64_sub_return
1391 #else /* arch_atomic64_sub_return_relaxed */
1392
1393 #ifndef arch_atomic64_sub_return_acquire
1394 static __always_inline s64
arch_atomic64_sub_return_acquire(s64 i,atomic64_t * v)1395 arch_atomic64_sub_return_acquire(s64 i, atomic64_t *v)
1396 {
1397 s64 ret = arch_atomic64_sub_return_relaxed(i, v);
1398 __atomic_acquire_fence();
1399 return ret;
1400 }
1401 #define arch_atomic64_sub_return_acquire arch_atomic64_sub_return_acquire
1402 #endif
1403
1404 #ifndef arch_atomic64_sub_return_release
1405 static __always_inline s64
arch_atomic64_sub_return_release(s64 i,atomic64_t * v)1406 arch_atomic64_sub_return_release(s64 i, atomic64_t *v)
1407 {
1408 __atomic_release_fence();
1409 return arch_atomic64_sub_return_relaxed(i, v);
1410 }
1411 #define arch_atomic64_sub_return_release arch_atomic64_sub_return_release
1412 #endif
1413
1414 #ifndef arch_atomic64_sub_return
1415 static __always_inline s64
arch_atomic64_sub_return(s64 i,atomic64_t * v)1416 arch_atomic64_sub_return(s64 i, atomic64_t *v)
1417 {
1418 s64 ret;
1419 __atomic_pre_full_fence();
1420 ret = arch_atomic64_sub_return_relaxed(i, v);
1421 __atomic_post_full_fence();
1422 return ret;
1423 }
1424 #define arch_atomic64_sub_return arch_atomic64_sub_return
1425 #endif
1426
1427 #endif /* arch_atomic64_sub_return_relaxed */
1428
1429 #ifndef arch_atomic64_fetch_sub_relaxed
1430 #define arch_atomic64_fetch_sub_acquire arch_atomic64_fetch_sub
1431 #define arch_atomic64_fetch_sub_release arch_atomic64_fetch_sub
1432 #define arch_atomic64_fetch_sub_relaxed arch_atomic64_fetch_sub
1433 #else /* arch_atomic64_fetch_sub_relaxed */
1434
1435 #ifndef arch_atomic64_fetch_sub_acquire
1436 static __always_inline s64
arch_atomic64_fetch_sub_acquire(s64 i,atomic64_t * v)1437 arch_atomic64_fetch_sub_acquire(s64 i, atomic64_t *v)
1438 {
1439 s64 ret = arch_atomic64_fetch_sub_relaxed(i, v);
1440 __atomic_acquire_fence();
1441 return ret;
1442 }
1443 #define arch_atomic64_fetch_sub_acquire arch_atomic64_fetch_sub_acquire
1444 #endif
1445
1446 #ifndef arch_atomic64_fetch_sub_release
1447 static __always_inline s64
arch_atomic64_fetch_sub_release(s64 i,atomic64_t * v)1448 arch_atomic64_fetch_sub_release(s64 i, atomic64_t *v)
1449 {
1450 __atomic_release_fence();
1451 return arch_atomic64_fetch_sub_relaxed(i, v);
1452 }
1453 #define arch_atomic64_fetch_sub_release arch_atomic64_fetch_sub_release
1454 #endif
1455
1456 #ifndef arch_atomic64_fetch_sub
1457 static __always_inline s64
arch_atomic64_fetch_sub(s64 i,atomic64_t * v)1458 arch_atomic64_fetch_sub(s64 i, atomic64_t *v)
1459 {
1460 s64 ret;
1461 __atomic_pre_full_fence();
1462 ret = arch_atomic64_fetch_sub_relaxed(i, v);
1463 __atomic_post_full_fence();
1464 return ret;
1465 }
1466 #define arch_atomic64_fetch_sub arch_atomic64_fetch_sub
1467 #endif
1468
1469 #endif /* arch_atomic64_fetch_sub_relaxed */
1470
1471 #ifndef arch_atomic64_inc
1472 static __always_inline void
arch_atomic64_inc(atomic64_t * v)1473 arch_atomic64_inc(atomic64_t *v)
1474 {
1475 arch_atomic64_add(1, v);
1476 }
1477 #define arch_atomic64_inc arch_atomic64_inc
1478 #endif
1479
1480 #ifndef arch_atomic64_inc_return_relaxed
1481 #ifdef arch_atomic64_inc_return
1482 #define arch_atomic64_inc_return_acquire arch_atomic64_inc_return
1483 #define arch_atomic64_inc_return_release arch_atomic64_inc_return
1484 #define arch_atomic64_inc_return_relaxed arch_atomic64_inc_return
1485 #endif /* arch_atomic64_inc_return */
1486
1487 #ifndef arch_atomic64_inc_return
1488 static __always_inline s64
arch_atomic64_inc_return(atomic64_t * v)1489 arch_atomic64_inc_return(atomic64_t *v)
1490 {
1491 return arch_atomic64_add_return(1, v);
1492 }
1493 #define arch_atomic64_inc_return arch_atomic64_inc_return
1494 #endif
1495
1496 #ifndef arch_atomic64_inc_return_acquire
1497 static __always_inline s64
arch_atomic64_inc_return_acquire(atomic64_t * v)1498 arch_atomic64_inc_return_acquire(atomic64_t *v)
1499 {
1500 return arch_atomic64_add_return_acquire(1, v);
1501 }
1502 #define arch_atomic64_inc_return_acquire arch_atomic64_inc_return_acquire
1503 #endif
1504
1505 #ifndef arch_atomic64_inc_return_release
1506 static __always_inline s64
arch_atomic64_inc_return_release(atomic64_t * v)1507 arch_atomic64_inc_return_release(atomic64_t *v)
1508 {
1509 return arch_atomic64_add_return_release(1, v);
1510 }
1511 #define arch_atomic64_inc_return_release arch_atomic64_inc_return_release
1512 #endif
1513
1514 #ifndef arch_atomic64_inc_return_relaxed
1515 static __always_inline s64
arch_atomic64_inc_return_relaxed(atomic64_t * v)1516 arch_atomic64_inc_return_relaxed(atomic64_t *v)
1517 {
1518 return arch_atomic64_add_return_relaxed(1, v);
1519 }
1520 #define arch_atomic64_inc_return_relaxed arch_atomic64_inc_return_relaxed
1521 #endif
1522
1523 #else /* arch_atomic64_inc_return_relaxed */
1524
1525 #ifndef arch_atomic64_inc_return_acquire
1526 static __always_inline s64
arch_atomic64_inc_return_acquire(atomic64_t * v)1527 arch_atomic64_inc_return_acquire(atomic64_t *v)
1528 {
1529 s64 ret = arch_atomic64_inc_return_relaxed(v);
1530 __atomic_acquire_fence();
1531 return ret;
1532 }
1533 #define arch_atomic64_inc_return_acquire arch_atomic64_inc_return_acquire
1534 #endif
1535
1536 #ifndef arch_atomic64_inc_return_release
1537 static __always_inline s64
arch_atomic64_inc_return_release(atomic64_t * v)1538 arch_atomic64_inc_return_release(atomic64_t *v)
1539 {
1540 __atomic_release_fence();
1541 return arch_atomic64_inc_return_relaxed(v);
1542 }
1543 #define arch_atomic64_inc_return_release arch_atomic64_inc_return_release
1544 #endif
1545
1546 #ifndef arch_atomic64_inc_return
1547 static __always_inline s64
arch_atomic64_inc_return(atomic64_t * v)1548 arch_atomic64_inc_return(atomic64_t *v)
1549 {
1550 s64 ret;
1551 __atomic_pre_full_fence();
1552 ret = arch_atomic64_inc_return_relaxed(v);
1553 __atomic_post_full_fence();
1554 return ret;
1555 }
1556 #define arch_atomic64_inc_return arch_atomic64_inc_return
1557 #endif
1558
1559 #endif /* arch_atomic64_inc_return_relaxed */
1560
1561 #ifndef arch_atomic64_fetch_inc_relaxed
1562 #ifdef arch_atomic64_fetch_inc
1563 #define arch_atomic64_fetch_inc_acquire arch_atomic64_fetch_inc
1564 #define arch_atomic64_fetch_inc_release arch_atomic64_fetch_inc
1565 #define arch_atomic64_fetch_inc_relaxed arch_atomic64_fetch_inc
1566 #endif /* arch_atomic64_fetch_inc */
1567
1568 #ifndef arch_atomic64_fetch_inc
1569 static __always_inline s64
arch_atomic64_fetch_inc(atomic64_t * v)1570 arch_atomic64_fetch_inc(atomic64_t *v)
1571 {
1572 return arch_atomic64_fetch_add(1, v);
1573 }
1574 #define arch_atomic64_fetch_inc arch_atomic64_fetch_inc
1575 #endif
1576
1577 #ifndef arch_atomic64_fetch_inc_acquire
1578 static __always_inline s64
arch_atomic64_fetch_inc_acquire(atomic64_t * v)1579 arch_atomic64_fetch_inc_acquire(atomic64_t *v)
1580 {
1581 return arch_atomic64_fetch_add_acquire(1, v);
1582 }
1583 #define arch_atomic64_fetch_inc_acquire arch_atomic64_fetch_inc_acquire
1584 #endif
1585
1586 #ifndef arch_atomic64_fetch_inc_release
1587 static __always_inline s64
arch_atomic64_fetch_inc_release(atomic64_t * v)1588 arch_atomic64_fetch_inc_release(atomic64_t *v)
1589 {
1590 return arch_atomic64_fetch_add_release(1, v);
1591 }
1592 #define arch_atomic64_fetch_inc_release arch_atomic64_fetch_inc_release
1593 #endif
1594
1595 #ifndef arch_atomic64_fetch_inc_relaxed
1596 static __always_inline s64
arch_atomic64_fetch_inc_relaxed(atomic64_t * v)1597 arch_atomic64_fetch_inc_relaxed(atomic64_t *v)
1598 {
1599 return arch_atomic64_fetch_add_relaxed(1, v);
1600 }
1601 #define arch_atomic64_fetch_inc_relaxed arch_atomic64_fetch_inc_relaxed
1602 #endif
1603
1604 #else /* arch_atomic64_fetch_inc_relaxed */
1605
1606 #ifndef arch_atomic64_fetch_inc_acquire
1607 static __always_inline s64
arch_atomic64_fetch_inc_acquire(atomic64_t * v)1608 arch_atomic64_fetch_inc_acquire(atomic64_t *v)
1609 {
1610 s64 ret = arch_atomic64_fetch_inc_relaxed(v);
1611 __atomic_acquire_fence();
1612 return ret;
1613 }
1614 #define arch_atomic64_fetch_inc_acquire arch_atomic64_fetch_inc_acquire
1615 #endif
1616
1617 #ifndef arch_atomic64_fetch_inc_release
1618 static __always_inline s64
arch_atomic64_fetch_inc_release(atomic64_t * v)1619 arch_atomic64_fetch_inc_release(atomic64_t *v)
1620 {
1621 __atomic_release_fence();
1622 return arch_atomic64_fetch_inc_relaxed(v);
1623 }
1624 #define arch_atomic64_fetch_inc_release arch_atomic64_fetch_inc_release
1625 #endif
1626
1627 #ifndef arch_atomic64_fetch_inc
1628 static __always_inline s64
arch_atomic64_fetch_inc(atomic64_t * v)1629 arch_atomic64_fetch_inc(atomic64_t *v)
1630 {
1631 s64 ret;
1632 __atomic_pre_full_fence();
1633 ret = arch_atomic64_fetch_inc_relaxed(v);
1634 __atomic_post_full_fence();
1635 return ret;
1636 }
1637 #define arch_atomic64_fetch_inc arch_atomic64_fetch_inc
1638 #endif
1639
1640 #endif /* arch_atomic64_fetch_inc_relaxed */
1641
1642 #ifndef arch_atomic64_dec
1643 static __always_inline void
arch_atomic64_dec(atomic64_t * v)1644 arch_atomic64_dec(atomic64_t *v)
1645 {
1646 arch_atomic64_sub(1, v);
1647 }
1648 #define arch_atomic64_dec arch_atomic64_dec
1649 #endif
1650
1651 #ifndef arch_atomic64_dec_return_relaxed
1652 #ifdef arch_atomic64_dec_return
1653 #define arch_atomic64_dec_return_acquire arch_atomic64_dec_return
1654 #define arch_atomic64_dec_return_release arch_atomic64_dec_return
1655 #define arch_atomic64_dec_return_relaxed arch_atomic64_dec_return
1656 #endif /* arch_atomic64_dec_return */
1657
1658 #ifndef arch_atomic64_dec_return
1659 static __always_inline s64
arch_atomic64_dec_return(atomic64_t * v)1660 arch_atomic64_dec_return(atomic64_t *v)
1661 {
1662 return arch_atomic64_sub_return(1, v);
1663 }
1664 #define arch_atomic64_dec_return arch_atomic64_dec_return
1665 #endif
1666
1667 #ifndef arch_atomic64_dec_return_acquire
1668 static __always_inline s64
arch_atomic64_dec_return_acquire(atomic64_t * v)1669 arch_atomic64_dec_return_acquire(atomic64_t *v)
1670 {
1671 return arch_atomic64_sub_return_acquire(1, v);
1672 }
1673 #define arch_atomic64_dec_return_acquire arch_atomic64_dec_return_acquire
1674 #endif
1675
1676 #ifndef arch_atomic64_dec_return_release
1677 static __always_inline s64
arch_atomic64_dec_return_release(atomic64_t * v)1678 arch_atomic64_dec_return_release(atomic64_t *v)
1679 {
1680 return arch_atomic64_sub_return_release(1, v);
1681 }
1682 #define arch_atomic64_dec_return_release arch_atomic64_dec_return_release
1683 #endif
1684
1685 #ifndef arch_atomic64_dec_return_relaxed
1686 static __always_inline s64
arch_atomic64_dec_return_relaxed(atomic64_t * v)1687 arch_atomic64_dec_return_relaxed(atomic64_t *v)
1688 {
1689 return arch_atomic64_sub_return_relaxed(1, v);
1690 }
1691 #define arch_atomic64_dec_return_relaxed arch_atomic64_dec_return_relaxed
1692 #endif
1693
1694 #else /* arch_atomic64_dec_return_relaxed */
1695
1696 #ifndef arch_atomic64_dec_return_acquire
1697 static __always_inline s64
arch_atomic64_dec_return_acquire(atomic64_t * v)1698 arch_atomic64_dec_return_acquire(atomic64_t *v)
1699 {
1700 s64 ret = arch_atomic64_dec_return_relaxed(v);
1701 __atomic_acquire_fence();
1702 return ret;
1703 }
1704 #define arch_atomic64_dec_return_acquire arch_atomic64_dec_return_acquire
1705 #endif
1706
1707 #ifndef arch_atomic64_dec_return_release
1708 static __always_inline s64
arch_atomic64_dec_return_release(atomic64_t * v)1709 arch_atomic64_dec_return_release(atomic64_t *v)
1710 {
1711 __atomic_release_fence();
1712 return arch_atomic64_dec_return_relaxed(v);
1713 }
1714 #define arch_atomic64_dec_return_release arch_atomic64_dec_return_release
1715 #endif
1716
1717 #ifndef arch_atomic64_dec_return
1718 static __always_inline s64
arch_atomic64_dec_return(atomic64_t * v)1719 arch_atomic64_dec_return(atomic64_t *v)
1720 {
1721 s64 ret;
1722 __atomic_pre_full_fence();
1723 ret = arch_atomic64_dec_return_relaxed(v);
1724 __atomic_post_full_fence();
1725 return ret;
1726 }
1727 #define arch_atomic64_dec_return arch_atomic64_dec_return
1728 #endif
1729
1730 #endif /* arch_atomic64_dec_return_relaxed */
1731
1732 #ifndef arch_atomic64_fetch_dec_relaxed
1733 #ifdef arch_atomic64_fetch_dec
1734 #define arch_atomic64_fetch_dec_acquire arch_atomic64_fetch_dec
1735 #define arch_atomic64_fetch_dec_release arch_atomic64_fetch_dec
1736 #define arch_atomic64_fetch_dec_relaxed arch_atomic64_fetch_dec
1737 #endif /* arch_atomic64_fetch_dec */
1738
1739 #ifndef arch_atomic64_fetch_dec
1740 static __always_inline s64
arch_atomic64_fetch_dec(atomic64_t * v)1741 arch_atomic64_fetch_dec(atomic64_t *v)
1742 {
1743 return arch_atomic64_fetch_sub(1, v);
1744 }
1745 #define arch_atomic64_fetch_dec arch_atomic64_fetch_dec
1746 #endif
1747
1748 #ifndef arch_atomic64_fetch_dec_acquire
1749 static __always_inline s64
arch_atomic64_fetch_dec_acquire(atomic64_t * v)1750 arch_atomic64_fetch_dec_acquire(atomic64_t *v)
1751 {
1752 return arch_atomic64_fetch_sub_acquire(1, v);
1753 }
1754 #define arch_atomic64_fetch_dec_acquire arch_atomic64_fetch_dec_acquire
1755 #endif
1756
1757 #ifndef arch_atomic64_fetch_dec_release
1758 static __always_inline s64
arch_atomic64_fetch_dec_release(atomic64_t * v)1759 arch_atomic64_fetch_dec_release(atomic64_t *v)
1760 {
1761 return arch_atomic64_fetch_sub_release(1, v);
1762 }
1763 #define arch_atomic64_fetch_dec_release arch_atomic64_fetch_dec_release
1764 #endif
1765
1766 #ifndef arch_atomic64_fetch_dec_relaxed
1767 static __always_inline s64
arch_atomic64_fetch_dec_relaxed(atomic64_t * v)1768 arch_atomic64_fetch_dec_relaxed(atomic64_t *v)
1769 {
1770 return arch_atomic64_fetch_sub_relaxed(1, v);
1771 }
1772 #define arch_atomic64_fetch_dec_relaxed arch_atomic64_fetch_dec_relaxed
1773 #endif
1774
1775 #else /* arch_atomic64_fetch_dec_relaxed */
1776
1777 #ifndef arch_atomic64_fetch_dec_acquire
1778 static __always_inline s64
arch_atomic64_fetch_dec_acquire(atomic64_t * v)1779 arch_atomic64_fetch_dec_acquire(atomic64_t *v)
1780 {
1781 s64 ret = arch_atomic64_fetch_dec_relaxed(v);
1782 __atomic_acquire_fence();
1783 return ret;
1784 }
1785 #define arch_atomic64_fetch_dec_acquire arch_atomic64_fetch_dec_acquire
1786 #endif
1787
1788 #ifndef arch_atomic64_fetch_dec_release
1789 static __always_inline s64
arch_atomic64_fetch_dec_release(atomic64_t * v)1790 arch_atomic64_fetch_dec_release(atomic64_t *v)
1791 {
1792 __atomic_release_fence();
1793 return arch_atomic64_fetch_dec_relaxed(v);
1794 }
1795 #define arch_atomic64_fetch_dec_release arch_atomic64_fetch_dec_release
1796 #endif
1797
1798 #ifndef arch_atomic64_fetch_dec
1799 static __always_inline s64
arch_atomic64_fetch_dec(atomic64_t * v)1800 arch_atomic64_fetch_dec(atomic64_t *v)
1801 {
1802 s64 ret;
1803 __atomic_pre_full_fence();
1804 ret = arch_atomic64_fetch_dec_relaxed(v);
1805 __atomic_post_full_fence();
1806 return ret;
1807 }
1808 #define arch_atomic64_fetch_dec arch_atomic64_fetch_dec
1809 #endif
1810
1811 #endif /* arch_atomic64_fetch_dec_relaxed */
1812
1813 #ifndef arch_atomic64_fetch_and_relaxed
1814 #define arch_atomic64_fetch_and_acquire arch_atomic64_fetch_and
1815 #define arch_atomic64_fetch_and_release arch_atomic64_fetch_and
1816 #define arch_atomic64_fetch_and_relaxed arch_atomic64_fetch_and
1817 #else /* arch_atomic64_fetch_and_relaxed */
1818
1819 #ifndef arch_atomic64_fetch_and_acquire
1820 static __always_inline s64
arch_atomic64_fetch_and_acquire(s64 i,atomic64_t * v)1821 arch_atomic64_fetch_and_acquire(s64 i, atomic64_t *v)
1822 {
1823 s64 ret = arch_atomic64_fetch_and_relaxed(i, v);
1824 __atomic_acquire_fence();
1825 return ret;
1826 }
1827 #define arch_atomic64_fetch_and_acquire arch_atomic64_fetch_and_acquire
1828 #endif
1829
1830 #ifndef arch_atomic64_fetch_and_release
1831 static __always_inline s64
arch_atomic64_fetch_and_release(s64 i,atomic64_t * v)1832 arch_atomic64_fetch_and_release(s64 i, atomic64_t *v)
1833 {
1834 __atomic_release_fence();
1835 return arch_atomic64_fetch_and_relaxed(i, v);
1836 }
1837 #define arch_atomic64_fetch_and_release arch_atomic64_fetch_and_release
1838 #endif
1839
1840 #ifndef arch_atomic64_fetch_and
1841 static __always_inline s64
arch_atomic64_fetch_and(s64 i,atomic64_t * v)1842 arch_atomic64_fetch_and(s64 i, atomic64_t *v)
1843 {
1844 s64 ret;
1845 __atomic_pre_full_fence();
1846 ret = arch_atomic64_fetch_and_relaxed(i, v);
1847 __atomic_post_full_fence();
1848 return ret;
1849 }
1850 #define arch_atomic64_fetch_and arch_atomic64_fetch_and
1851 #endif
1852
1853 #endif /* arch_atomic64_fetch_and_relaxed */
1854
1855 #ifndef arch_atomic64_andnot
1856 static __always_inline void
arch_atomic64_andnot(s64 i,atomic64_t * v)1857 arch_atomic64_andnot(s64 i, atomic64_t *v)
1858 {
1859 arch_atomic64_and(~i, v);
1860 }
1861 #define arch_atomic64_andnot arch_atomic64_andnot
1862 #endif
1863
1864 #ifndef arch_atomic64_fetch_andnot_relaxed
1865 #ifdef arch_atomic64_fetch_andnot
1866 #define arch_atomic64_fetch_andnot_acquire arch_atomic64_fetch_andnot
1867 #define arch_atomic64_fetch_andnot_release arch_atomic64_fetch_andnot
1868 #define arch_atomic64_fetch_andnot_relaxed arch_atomic64_fetch_andnot
1869 #endif /* arch_atomic64_fetch_andnot */
1870
1871 #ifndef arch_atomic64_fetch_andnot
1872 static __always_inline s64
arch_atomic64_fetch_andnot(s64 i,atomic64_t * v)1873 arch_atomic64_fetch_andnot(s64 i, atomic64_t *v)
1874 {
1875 return arch_atomic64_fetch_and(~i, v);
1876 }
1877 #define arch_atomic64_fetch_andnot arch_atomic64_fetch_andnot
1878 #endif
1879
1880 #ifndef arch_atomic64_fetch_andnot_acquire
1881 static __always_inline s64
arch_atomic64_fetch_andnot_acquire(s64 i,atomic64_t * v)1882 arch_atomic64_fetch_andnot_acquire(s64 i, atomic64_t *v)
1883 {
1884 return arch_atomic64_fetch_and_acquire(~i, v);
1885 }
1886 #define arch_atomic64_fetch_andnot_acquire arch_atomic64_fetch_andnot_acquire
1887 #endif
1888
1889 #ifndef arch_atomic64_fetch_andnot_release
1890 static __always_inline s64
arch_atomic64_fetch_andnot_release(s64 i,atomic64_t * v)1891 arch_atomic64_fetch_andnot_release(s64 i, atomic64_t *v)
1892 {
1893 return arch_atomic64_fetch_and_release(~i, v);
1894 }
1895 #define arch_atomic64_fetch_andnot_release arch_atomic64_fetch_andnot_release
1896 #endif
1897
1898 #ifndef arch_atomic64_fetch_andnot_relaxed
1899 static __always_inline s64
arch_atomic64_fetch_andnot_relaxed(s64 i,atomic64_t * v)1900 arch_atomic64_fetch_andnot_relaxed(s64 i, atomic64_t *v)
1901 {
1902 return arch_atomic64_fetch_and_relaxed(~i, v);
1903 }
1904 #define arch_atomic64_fetch_andnot_relaxed arch_atomic64_fetch_andnot_relaxed
1905 #endif
1906
1907 #else /* arch_atomic64_fetch_andnot_relaxed */
1908
1909 #ifndef arch_atomic64_fetch_andnot_acquire
1910 static __always_inline s64
arch_atomic64_fetch_andnot_acquire(s64 i,atomic64_t * v)1911 arch_atomic64_fetch_andnot_acquire(s64 i, atomic64_t *v)
1912 {
1913 s64 ret = arch_atomic64_fetch_andnot_relaxed(i, v);
1914 __atomic_acquire_fence();
1915 return ret;
1916 }
1917 #define arch_atomic64_fetch_andnot_acquire arch_atomic64_fetch_andnot_acquire
1918 #endif
1919
1920 #ifndef arch_atomic64_fetch_andnot_release
1921 static __always_inline s64
arch_atomic64_fetch_andnot_release(s64 i,atomic64_t * v)1922 arch_atomic64_fetch_andnot_release(s64 i, atomic64_t *v)
1923 {
1924 __atomic_release_fence();
1925 return arch_atomic64_fetch_andnot_relaxed(i, v);
1926 }
1927 #define arch_atomic64_fetch_andnot_release arch_atomic64_fetch_andnot_release
1928 #endif
1929
1930 #ifndef arch_atomic64_fetch_andnot
1931 static __always_inline s64
arch_atomic64_fetch_andnot(s64 i,atomic64_t * v)1932 arch_atomic64_fetch_andnot(s64 i, atomic64_t *v)
1933 {
1934 s64 ret;
1935 __atomic_pre_full_fence();
1936 ret = arch_atomic64_fetch_andnot_relaxed(i, v);
1937 __atomic_post_full_fence();
1938 return ret;
1939 }
1940 #define arch_atomic64_fetch_andnot arch_atomic64_fetch_andnot
1941 #endif
1942
1943 #endif /* arch_atomic64_fetch_andnot_relaxed */
1944
1945 #ifndef arch_atomic64_fetch_or_relaxed
1946 #define arch_atomic64_fetch_or_acquire arch_atomic64_fetch_or
1947 #define arch_atomic64_fetch_or_release arch_atomic64_fetch_or
1948 #define arch_atomic64_fetch_or_relaxed arch_atomic64_fetch_or
1949 #else /* arch_atomic64_fetch_or_relaxed */
1950
1951 #ifndef arch_atomic64_fetch_or_acquire
1952 static __always_inline s64
arch_atomic64_fetch_or_acquire(s64 i,atomic64_t * v)1953 arch_atomic64_fetch_or_acquire(s64 i, atomic64_t *v)
1954 {
1955 s64 ret = arch_atomic64_fetch_or_relaxed(i, v);
1956 __atomic_acquire_fence();
1957 return ret;
1958 }
1959 #define arch_atomic64_fetch_or_acquire arch_atomic64_fetch_or_acquire
1960 #endif
1961
1962 #ifndef arch_atomic64_fetch_or_release
1963 static __always_inline s64
arch_atomic64_fetch_or_release(s64 i,atomic64_t * v)1964 arch_atomic64_fetch_or_release(s64 i, atomic64_t *v)
1965 {
1966 __atomic_release_fence();
1967 return arch_atomic64_fetch_or_relaxed(i, v);
1968 }
1969 #define arch_atomic64_fetch_or_release arch_atomic64_fetch_or_release
1970 #endif
1971
1972 #ifndef arch_atomic64_fetch_or
1973 static __always_inline s64
arch_atomic64_fetch_or(s64 i,atomic64_t * v)1974 arch_atomic64_fetch_or(s64 i, atomic64_t *v)
1975 {
1976 s64 ret;
1977 __atomic_pre_full_fence();
1978 ret = arch_atomic64_fetch_or_relaxed(i, v);
1979 __atomic_post_full_fence();
1980 return ret;
1981 }
1982 #define arch_atomic64_fetch_or arch_atomic64_fetch_or
1983 #endif
1984
1985 #endif /* arch_atomic64_fetch_or_relaxed */
1986
1987 #ifndef arch_atomic64_fetch_xor_relaxed
1988 #define arch_atomic64_fetch_xor_acquire arch_atomic64_fetch_xor
1989 #define arch_atomic64_fetch_xor_release arch_atomic64_fetch_xor
1990 #define arch_atomic64_fetch_xor_relaxed arch_atomic64_fetch_xor
1991 #else /* arch_atomic64_fetch_xor_relaxed */
1992
1993 #ifndef arch_atomic64_fetch_xor_acquire
1994 static __always_inline s64
arch_atomic64_fetch_xor_acquire(s64 i,atomic64_t * v)1995 arch_atomic64_fetch_xor_acquire(s64 i, atomic64_t *v)
1996 {
1997 s64 ret = arch_atomic64_fetch_xor_relaxed(i, v);
1998 __atomic_acquire_fence();
1999 return ret;
2000 }
2001 #define arch_atomic64_fetch_xor_acquire arch_atomic64_fetch_xor_acquire
2002 #endif
2003
2004 #ifndef arch_atomic64_fetch_xor_release
2005 static __always_inline s64
arch_atomic64_fetch_xor_release(s64 i,atomic64_t * v)2006 arch_atomic64_fetch_xor_release(s64 i, atomic64_t *v)
2007 {
2008 __atomic_release_fence();
2009 return arch_atomic64_fetch_xor_relaxed(i, v);
2010 }
2011 #define arch_atomic64_fetch_xor_release arch_atomic64_fetch_xor_release
2012 #endif
2013
2014 #ifndef arch_atomic64_fetch_xor
2015 static __always_inline s64
arch_atomic64_fetch_xor(s64 i,atomic64_t * v)2016 arch_atomic64_fetch_xor(s64 i, atomic64_t *v)
2017 {
2018 s64 ret;
2019 __atomic_pre_full_fence();
2020 ret = arch_atomic64_fetch_xor_relaxed(i, v);
2021 __atomic_post_full_fence();
2022 return ret;
2023 }
2024 #define arch_atomic64_fetch_xor arch_atomic64_fetch_xor
2025 #endif
2026
2027 #endif /* arch_atomic64_fetch_xor_relaxed */
2028
2029 #ifndef arch_atomic64_xchg_relaxed
2030 #define arch_atomic64_xchg_acquire arch_atomic64_xchg
2031 #define arch_atomic64_xchg_release arch_atomic64_xchg
2032 #define arch_atomic64_xchg_relaxed arch_atomic64_xchg
2033 #else /* arch_atomic64_xchg_relaxed */
2034
2035 #ifndef arch_atomic64_xchg_acquire
2036 static __always_inline s64
arch_atomic64_xchg_acquire(atomic64_t * v,s64 i)2037 arch_atomic64_xchg_acquire(atomic64_t *v, s64 i)
2038 {
2039 s64 ret = arch_atomic64_xchg_relaxed(v, i);
2040 __atomic_acquire_fence();
2041 return ret;
2042 }
2043 #define arch_atomic64_xchg_acquire arch_atomic64_xchg_acquire
2044 #endif
2045
2046 #ifndef arch_atomic64_xchg_release
2047 static __always_inline s64
arch_atomic64_xchg_release(atomic64_t * v,s64 i)2048 arch_atomic64_xchg_release(atomic64_t *v, s64 i)
2049 {
2050 __atomic_release_fence();
2051 return arch_atomic64_xchg_relaxed(v, i);
2052 }
2053 #define arch_atomic64_xchg_release arch_atomic64_xchg_release
2054 #endif
2055
2056 #ifndef arch_atomic64_xchg
2057 static __always_inline s64
arch_atomic64_xchg(atomic64_t * v,s64 i)2058 arch_atomic64_xchg(atomic64_t *v, s64 i)
2059 {
2060 s64 ret;
2061 __atomic_pre_full_fence();
2062 ret = arch_atomic64_xchg_relaxed(v, i);
2063 __atomic_post_full_fence();
2064 return ret;
2065 }
2066 #define arch_atomic64_xchg arch_atomic64_xchg
2067 #endif
2068
2069 #endif /* arch_atomic64_xchg_relaxed */
2070
2071 #ifndef arch_atomic64_cmpxchg_relaxed
2072 #define arch_atomic64_cmpxchg_acquire arch_atomic64_cmpxchg
2073 #define arch_atomic64_cmpxchg_release arch_atomic64_cmpxchg
2074 #define arch_atomic64_cmpxchg_relaxed arch_atomic64_cmpxchg
2075 #else /* arch_atomic64_cmpxchg_relaxed */
2076
2077 #ifndef arch_atomic64_cmpxchg_acquire
2078 static __always_inline s64
arch_atomic64_cmpxchg_acquire(atomic64_t * v,s64 old,s64 new)2079 arch_atomic64_cmpxchg_acquire(atomic64_t *v, s64 old, s64 new)
2080 {
2081 s64 ret = arch_atomic64_cmpxchg_relaxed(v, old, new);
2082 __atomic_acquire_fence();
2083 return ret;
2084 }
2085 #define arch_atomic64_cmpxchg_acquire arch_atomic64_cmpxchg_acquire
2086 #endif
2087
2088 #ifndef arch_atomic64_cmpxchg_release
2089 static __always_inline s64
arch_atomic64_cmpxchg_release(atomic64_t * v,s64 old,s64 new)2090 arch_atomic64_cmpxchg_release(atomic64_t *v, s64 old, s64 new)
2091 {
2092 __atomic_release_fence();
2093 return arch_atomic64_cmpxchg_relaxed(v, old, new);
2094 }
2095 #define arch_atomic64_cmpxchg_release arch_atomic64_cmpxchg_release
2096 #endif
2097
2098 #ifndef arch_atomic64_cmpxchg
2099 static __always_inline s64
arch_atomic64_cmpxchg(atomic64_t * v,s64 old,s64 new)2100 arch_atomic64_cmpxchg(atomic64_t *v, s64 old, s64 new)
2101 {
2102 s64 ret;
2103 __atomic_pre_full_fence();
2104 ret = arch_atomic64_cmpxchg_relaxed(v, old, new);
2105 __atomic_post_full_fence();
2106 return ret;
2107 }
2108 #define arch_atomic64_cmpxchg arch_atomic64_cmpxchg
2109 #endif
2110
2111 #endif /* arch_atomic64_cmpxchg_relaxed */
2112
2113 #ifndef arch_atomic64_try_cmpxchg_relaxed
2114 #ifdef arch_atomic64_try_cmpxchg
2115 #define arch_atomic64_try_cmpxchg_acquire arch_atomic64_try_cmpxchg
2116 #define arch_atomic64_try_cmpxchg_release arch_atomic64_try_cmpxchg
2117 #define arch_atomic64_try_cmpxchg_relaxed arch_atomic64_try_cmpxchg
2118 #endif /* arch_atomic64_try_cmpxchg */
2119
2120 #ifndef arch_atomic64_try_cmpxchg
2121 static __always_inline bool
arch_atomic64_try_cmpxchg(atomic64_t * v,s64 * old,s64 new)2122 arch_atomic64_try_cmpxchg(atomic64_t *v, s64 *old, s64 new)
2123 {
2124 s64 r, o = *old;
2125 r = arch_atomic64_cmpxchg(v, o, new);
2126 if (unlikely(r != o))
2127 *old = r;
2128 return likely(r == o);
2129 }
2130 #define arch_atomic64_try_cmpxchg arch_atomic64_try_cmpxchg
2131 #endif
2132
2133 #ifndef arch_atomic64_try_cmpxchg_acquire
2134 static __always_inline bool
arch_atomic64_try_cmpxchg_acquire(atomic64_t * v,s64 * old,s64 new)2135 arch_atomic64_try_cmpxchg_acquire(atomic64_t *v, s64 *old, s64 new)
2136 {
2137 s64 r, o = *old;
2138 r = arch_atomic64_cmpxchg_acquire(v, o, new);
2139 if (unlikely(r != o))
2140 *old = r;
2141 return likely(r == o);
2142 }
2143 #define arch_atomic64_try_cmpxchg_acquire arch_atomic64_try_cmpxchg_acquire
2144 #endif
2145
2146 #ifndef arch_atomic64_try_cmpxchg_release
2147 static __always_inline bool
arch_atomic64_try_cmpxchg_release(atomic64_t * v,s64 * old,s64 new)2148 arch_atomic64_try_cmpxchg_release(atomic64_t *v, s64 *old, s64 new)
2149 {
2150 s64 r, o = *old;
2151 r = arch_atomic64_cmpxchg_release(v, o, new);
2152 if (unlikely(r != o))
2153 *old = r;
2154 return likely(r == o);
2155 }
2156 #define arch_atomic64_try_cmpxchg_release arch_atomic64_try_cmpxchg_release
2157 #endif
2158
2159 #ifndef arch_atomic64_try_cmpxchg_relaxed
2160 static __always_inline bool
arch_atomic64_try_cmpxchg_relaxed(atomic64_t * v,s64 * old,s64 new)2161 arch_atomic64_try_cmpxchg_relaxed(atomic64_t *v, s64 *old, s64 new)
2162 {
2163 s64 r, o = *old;
2164 r = arch_atomic64_cmpxchg_relaxed(v, o, new);
2165 if (unlikely(r != o))
2166 *old = r;
2167 return likely(r == o);
2168 }
2169 #define arch_atomic64_try_cmpxchg_relaxed arch_atomic64_try_cmpxchg_relaxed
2170 #endif
2171
2172 #else /* arch_atomic64_try_cmpxchg_relaxed */
2173
2174 #ifndef arch_atomic64_try_cmpxchg_acquire
2175 static __always_inline bool
arch_atomic64_try_cmpxchg_acquire(atomic64_t * v,s64 * old,s64 new)2176 arch_atomic64_try_cmpxchg_acquire(atomic64_t *v, s64 *old, s64 new)
2177 {
2178 bool ret = arch_atomic64_try_cmpxchg_relaxed(v, old, new);
2179 __atomic_acquire_fence();
2180 return ret;
2181 }
2182 #define arch_atomic64_try_cmpxchg_acquire arch_atomic64_try_cmpxchg_acquire
2183 #endif
2184
2185 #ifndef arch_atomic64_try_cmpxchg_release
2186 static __always_inline bool
arch_atomic64_try_cmpxchg_release(atomic64_t * v,s64 * old,s64 new)2187 arch_atomic64_try_cmpxchg_release(atomic64_t *v, s64 *old, s64 new)
2188 {
2189 __atomic_release_fence();
2190 return arch_atomic64_try_cmpxchg_relaxed(v, old, new);
2191 }
2192 #define arch_atomic64_try_cmpxchg_release arch_atomic64_try_cmpxchg_release
2193 #endif
2194
2195 #ifndef arch_atomic64_try_cmpxchg
2196 static __always_inline bool
arch_atomic64_try_cmpxchg(atomic64_t * v,s64 * old,s64 new)2197 arch_atomic64_try_cmpxchg(atomic64_t *v, s64 *old, s64 new)
2198 {
2199 bool ret;
2200 __atomic_pre_full_fence();
2201 ret = arch_atomic64_try_cmpxchg_relaxed(v, old, new);
2202 __atomic_post_full_fence();
2203 return ret;
2204 }
2205 #define arch_atomic64_try_cmpxchg arch_atomic64_try_cmpxchg
2206 #endif
2207
2208 #endif /* arch_atomic64_try_cmpxchg_relaxed */
2209
2210 #ifndef arch_atomic64_sub_and_test
2211 /**
2212 * arch_atomic64_sub_and_test - subtract value from variable and test result
2213 * @i: integer value to subtract
2214 * @v: pointer of type atomic64_t
2215 *
2216 * Atomically subtracts @i from @v and returns
2217 * true if the result is zero, or false for all
2218 * other cases.
2219 */
2220 static __always_inline bool
arch_atomic64_sub_and_test(s64 i,atomic64_t * v)2221 arch_atomic64_sub_and_test(s64 i, atomic64_t *v)
2222 {
2223 return arch_atomic64_sub_return(i, v) == 0;
2224 }
2225 #define arch_atomic64_sub_and_test arch_atomic64_sub_and_test
2226 #endif
2227
2228 #ifndef arch_atomic64_dec_and_test
2229 /**
2230 * arch_atomic64_dec_and_test - decrement and test
2231 * @v: pointer of type atomic64_t
2232 *
2233 * Atomically decrements @v by 1 and
2234 * returns true if the result is 0, or false for all other
2235 * cases.
2236 */
2237 static __always_inline bool
arch_atomic64_dec_and_test(atomic64_t * v)2238 arch_atomic64_dec_and_test(atomic64_t *v)
2239 {
2240 return arch_atomic64_dec_return(v) == 0;
2241 }
2242 #define arch_atomic64_dec_and_test arch_atomic64_dec_and_test
2243 #endif
2244
2245 #ifndef arch_atomic64_inc_and_test
2246 /**
2247 * arch_atomic64_inc_and_test - increment and test
2248 * @v: pointer of type atomic64_t
2249 *
2250 * Atomically increments @v by 1
2251 * and returns true if the result is zero, or false for all
2252 * other cases.
2253 */
2254 static __always_inline bool
arch_atomic64_inc_and_test(atomic64_t * v)2255 arch_atomic64_inc_and_test(atomic64_t *v)
2256 {
2257 return arch_atomic64_inc_return(v) == 0;
2258 }
2259 #define arch_atomic64_inc_and_test arch_atomic64_inc_and_test
2260 #endif
2261
2262 #ifndef arch_atomic64_add_negative
2263 /**
2264 * arch_atomic64_add_negative - add and test if negative
2265 * @i: integer value to add
2266 * @v: pointer of type atomic64_t
2267 *
2268 * Atomically adds @i to @v and returns true
2269 * if the result is negative, or false when
2270 * result is greater than or equal to zero.
2271 */
2272 static __always_inline bool
arch_atomic64_add_negative(s64 i,atomic64_t * v)2273 arch_atomic64_add_negative(s64 i, atomic64_t *v)
2274 {
2275 return arch_atomic64_add_return(i, v) < 0;
2276 }
2277 #define arch_atomic64_add_negative arch_atomic64_add_negative
2278 #endif
2279
2280 #ifndef arch_atomic64_fetch_add_unless
2281 /**
2282 * arch_atomic64_fetch_add_unless - add unless the number is already a given value
2283 * @v: pointer of type atomic64_t
2284 * @a: the amount to add to v...
2285 * @u: ...unless v is equal to u.
2286 *
2287 * Atomically adds @a to @v, so long as @v was not already @u.
2288 * Returns original value of @v
2289 */
2290 static __always_inline s64
arch_atomic64_fetch_add_unless(atomic64_t * v,s64 a,s64 u)2291 arch_atomic64_fetch_add_unless(atomic64_t *v, s64 a, s64 u)
2292 {
2293 s64 c = arch_atomic64_read(v);
2294
2295 do {
2296 if (unlikely(c == u))
2297 break;
2298 } while (!arch_atomic64_try_cmpxchg(v, &c, c + a));
2299
2300 return c;
2301 }
2302 #define arch_atomic64_fetch_add_unless arch_atomic64_fetch_add_unless
2303 #endif
2304
2305 #ifndef arch_atomic64_add_unless
2306 /**
2307 * arch_atomic64_add_unless - add unless the number is already a given value
2308 * @v: pointer of type atomic64_t
2309 * @a: the amount to add to v...
2310 * @u: ...unless v is equal to u.
2311 *
2312 * Atomically adds @a to @v, if @v was not already @u.
2313 * Returns true if the addition was done.
2314 */
2315 static __always_inline bool
arch_atomic64_add_unless(atomic64_t * v,s64 a,s64 u)2316 arch_atomic64_add_unless(atomic64_t *v, s64 a, s64 u)
2317 {
2318 return arch_atomic64_fetch_add_unless(v, a, u) != u;
2319 }
2320 #define arch_atomic64_add_unless arch_atomic64_add_unless
2321 #endif
2322
2323 #ifndef arch_atomic64_inc_not_zero
2324 /**
2325 * arch_atomic64_inc_not_zero - increment unless the number is zero
2326 * @v: pointer of type atomic64_t
2327 *
2328 * Atomically increments @v by 1, if @v is non-zero.
2329 * Returns true if the increment was done.
2330 */
2331 static __always_inline bool
arch_atomic64_inc_not_zero(atomic64_t * v)2332 arch_atomic64_inc_not_zero(atomic64_t *v)
2333 {
2334 return arch_atomic64_add_unless(v, 1, 0);
2335 }
2336 #define arch_atomic64_inc_not_zero arch_atomic64_inc_not_zero
2337 #endif
2338
2339 #ifndef arch_atomic64_inc_unless_negative
2340 static __always_inline bool
arch_atomic64_inc_unless_negative(atomic64_t * v)2341 arch_atomic64_inc_unless_negative(atomic64_t *v)
2342 {
2343 s64 c = arch_atomic64_read(v);
2344
2345 do {
2346 if (unlikely(c < 0))
2347 return false;
2348 } while (!arch_atomic64_try_cmpxchg(v, &c, c + 1));
2349
2350 return true;
2351 }
2352 #define arch_atomic64_inc_unless_negative arch_atomic64_inc_unless_negative
2353 #endif
2354
2355 #ifndef arch_atomic64_dec_unless_positive
2356 static __always_inline bool
arch_atomic64_dec_unless_positive(atomic64_t * v)2357 arch_atomic64_dec_unless_positive(atomic64_t *v)
2358 {
2359 s64 c = arch_atomic64_read(v);
2360
2361 do {
2362 if (unlikely(c > 0))
2363 return false;
2364 } while (!arch_atomic64_try_cmpxchg(v, &c, c - 1));
2365
2366 return true;
2367 }
2368 #define arch_atomic64_dec_unless_positive arch_atomic64_dec_unless_positive
2369 #endif
2370
2371 #ifndef arch_atomic64_dec_if_positive
2372 static __always_inline s64
arch_atomic64_dec_if_positive(atomic64_t * v)2373 arch_atomic64_dec_if_positive(atomic64_t *v)
2374 {
2375 s64 dec, c = arch_atomic64_read(v);
2376
2377 do {
2378 dec = c - 1;
2379 if (unlikely(dec < 0))
2380 break;
2381 } while (!arch_atomic64_try_cmpxchg(v, &c, dec));
2382
2383 return dec;
2384 }
2385 #define arch_atomic64_dec_if_positive arch_atomic64_dec_if_positive
2386 #endif
2387
2388 #endif /* _LINUX_ATOMIC_FALLBACK_H */
2389 // 8e2cc06bc0d2c0967d2f8424762bd48555ee40ae
2390