1 // SPDX-License-Identifier: GPL-2.0
2 
3 // Generated by scripts/atomic/gen-atomic-fallback.sh
4 // DO NOT MODIFY THIS FILE DIRECTLY
5 
6 #ifndef _LINUX_ATOMIC_FALLBACK_H
7 #define _LINUX_ATOMIC_FALLBACK_H
8 
9 #include <linux/compiler.h>
10 
11 #ifndef xchg_relaxed
12 #define xchg_relaxed		xchg
13 #define xchg_acquire		xchg
14 #define xchg_release		xchg
15 #else /* xchg_relaxed */
16 
17 #ifndef xchg_acquire
18 #define xchg_acquire(...) \
19 	__atomic_op_acquire(xchg, __VA_ARGS__)
20 #endif
21 
22 #ifndef xchg_release
23 #define xchg_release(...) \
24 	__atomic_op_release(xchg, __VA_ARGS__)
25 #endif
26 
27 #ifndef xchg
28 #define xchg(...) \
29 	__atomic_op_fence(xchg, __VA_ARGS__)
30 #endif
31 
32 #endif /* xchg_relaxed */
33 
34 #ifndef cmpxchg_relaxed
35 #define cmpxchg_relaxed		cmpxchg
36 #define cmpxchg_acquire		cmpxchg
37 #define cmpxchg_release		cmpxchg
38 #else /* cmpxchg_relaxed */
39 
40 #ifndef cmpxchg_acquire
41 #define cmpxchg_acquire(...) \
42 	__atomic_op_acquire(cmpxchg, __VA_ARGS__)
43 #endif
44 
45 #ifndef cmpxchg_release
46 #define cmpxchg_release(...) \
47 	__atomic_op_release(cmpxchg, __VA_ARGS__)
48 #endif
49 
50 #ifndef cmpxchg
51 #define cmpxchg(...) \
52 	__atomic_op_fence(cmpxchg, __VA_ARGS__)
53 #endif
54 
55 #endif /* cmpxchg_relaxed */
56 
57 #ifndef cmpxchg64_relaxed
58 #define cmpxchg64_relaxed		cmpxchg64
59 #define cmpxchg64_acquire		cmpxchg64
60 #define cmpxchg64_release		cmpxchg64
61 #else /* cmpxchg64_relaxed */
62 
63 #ifndef cmpxchg64_acquire
64 #define cmpxchg64_acquire(...) \
65 	__atomic_op_acquire(cmpxchg64, __VA_ARGS__)
66 #endif
67 
68 #ifndef cmpxchg64_release
69 #define cmpxchg64_release(...) \
70 	__atomic_op_release(cmpxchg64, __VA_ARGS__)
71 #endif
72 
73 #ifndef cmpxchg64
74 #define cmpxchg64(...) \
75 	__atomic_op_fence(cmpxchg64, __VA_ARGS__)
76 #endif
77 
78 #endif /* cmpxchg64_relaxed */
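/*
 * Editorial sketch (not produced by the generator): the __atomic_op_acquire(),
 * __atomic_op_release() and __atomic_op_fence() wrappers used above are
 * assumed to bracket the _relaxed primitive with the ordering fences, in the
 * same way the inline fallbacks later in this file do, roughly:
 *
 *	#define __atomic_op_acquire(op, args...)			\
 *	({								\
 *		typeof(op##_relaxed(args)) __ret = op##_relaxed(args);	\
 *		__atomic_acquire_fence();				\
 *		__ret;							\
 *	})
 */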
79 
80 #define arch_atomic_read atomic_read
81 #define arch_atomic_read_acquire atomic_read_acquire
82 
83 #ifndef atomic_read_acquire
84 static __always_inline int
85 atomic_read_acquire(const atomic_t *v)
86 {
87 	return smp_load_acquire(&(v)->counter);
88 }
89 #define atomic_read_acquire atomic_read_acquire
90 #endif
91 
92 #define arch_atomic_set atomic_set
93 #define arch_atomic_set_release atomic_set_release
94 
95 #ifndef atomic_set_release
96 static __always_inline void
97 atomic_set_release(atomic_t *v, int i)
98 {
99 	smp_store_release(&(v)->counter, i);
100 }
101 #define atomic_set_release atomic_set_release
102 #endif
103 
104 #define arch_atomic_add atomic_add
105 
106 #define arch_atomic_add_return atomic_add_return
107 #define arch_atomic_add_return_acquire atomic_add_return_acquire
108 #define arch_atomic_add_return_release atomic_add_return_release
109 #define arch_atomic_add_return_relaxed atomic_add_return_relaxed
110 
111 #ifndef atomic_add_return_relaxed
112 #define atomic_add_return_acquire atomic_add_return
113 #define atomic_add_return_release atomic_add_return
114 #define atomic_add_return_relaxed atomic_add_return
115 #else /* atomic_add_return_relaxed */
116 
117 #ifndef atomic_add_return_acquire
118 static __always_inline int
119 atomic_add_return_acquire(int i, atomic_t *v)
120 {
121 	int ret = atomic_add_return_relaxed(i, v);
122 	__atomic_acquire_fence();
123 	return ret;
124 }
125 #define atomic_add_return_acquire atomic_add_return_acquire
126 #endif
127 
128 #ifndef atomic_add_return_release
129 static __always_inline int
130 atomic_add_return_release(int i, atomic_t *v)
131 {
132 	__atomic_release_fence();
133 	return atomic_add_return_relaxed(i, v);
134 }
135 #define atomic_add_return_release atomic_add_return_release
136 #endif
137 
138 #ifndef atomic_add_return
139 static __always_inline int
140 atomic_add_return(int i, atomic_t *v)
141 {
142 	int ret;
143 	__atomic_pre_full_fence();
144 	ret = atomic_add_return_relaxed(i, v);
145 	__atomic_post_full_fence();
146 	return ret;
147 }
148 #define atomic_add_return atomic_add_return
149 #endif
150 
151 #endif /* atomic_add_return_relaxed */
152 
153 #define arch_atomic_fetch_add atomic_fetch_add
154 #define arch_atomic_fetch_add_acquire atomic_fetch_add_acquire
155 #define arch_atomic_fetch_add_release atomic_fetch_add_release
156 #define arch_atomic_fetch_add_relaxed atomic_fetch_add_relaxed
157 
158 #ifndef atomic_fetch_add_relaxed
159 #define atomic_fetch_add_acquire atomic_fetch_add
160 #define atomic_fetch_add_release atomic_fetch_add
161 #define atomic_fetch_add_relaxed atomic_fetch_add
162 #else /* atomic_fetch_add_relaxed */
163 
164 #ifndef atomic_fetch_add_acquire
165 static __always_inline int
166 atomic_fetch_add_acquire(int i, atomic_t *v)
167 {
168 	int ret = atomic_fetch_add_relaxed(i, v);
169 	__atomic_acquire_fence();
170 	return ret;
171 }
172 #define atomic_fetch_add_acquire atomic_fetch_add_acquire
173 #endif
174 
175 #ifndef atomic_fetch_add_release
176 static __always_inline int
177 atomic_fetch_add_release(int i, atomic_t *v)
178 {
179 	__atomic_release_fence();
180 	return atomic_fetch_add_relaxed(i, v);
181 }
182 #define atomic_fetch_add_release atomic_fetch_add_release
183 #endif
184 
185 #ifndef atomic_fetch_add
186 static __always_inline int
187 atomic_fetch_add(int i, atomic_t *v)
188 {
189 	int ret;
190 	__atomic_pre_full_fence();
191 	ret = atomic_fetch_add_relaxed(i, v);
192 	__atomic_post_full_fence();
193 	return ret;
194 }
195 #define atomic_fetch_add atomic_fetch_add
196 #endif
197 
198 #endif /* atomic_fetch_add_relaxed */
199 
200 #define arch_atomic_sub atomic_sub
201 
202 #define arch_atomic_sub_return atomic_sub_return
203 #define arch_atomic_sub_return_acquire atomic_sub_return_acquire
204 #define arch_atomic_sub_return_release atomic_sub_return_release
205 #define arch_atomic_sub_return_relaxed atomic_sub_return_relaxed
206 
207 #ifndef atomic_sub_return_relaxed
208 #define atomic_sub_return_acquire atomic_sub_return
209 #define atomic_sub_return_release atomic_sub_return
210 #define atomic_sub_return_relaxed atomic_sub_return
211 #else /* atomic_sub_return_relaxed */
212 
213 #ifndef atomic_sub_return_acquire
214 static __always_inline int
215 atomic_sub_return_acquire(int i, atomic_t *v)
216 {
217 	int ret = atomic_sub_return_relaxed(i, v);
218 	__atomic_acquire_fence();
219 	return ret;
220 }
221 #define atomic_sub_return_acquire atomic_sub_return_acquire
222 #endif
223 
224 #ifndef atomic_sub_return_release
225 static __always_inline int
226 atomic_sub_return_release(int i, atomic_t *v)
227 {
228 	__atomic_release_fence();
229 	return atomic_sub_return_relaxed(i, v);
230 }
231 #define atomic_sub_return_release atomic_sub_return_release
232 #endif
233 
234 #ifndef atomic_sub_return
235 static __always_inline int
236 atomic_sub_return(int i, atomic_t *v)
237 {
238 	int ret;
239 	__atomic_pre_full_fence();
240 	ret = atomic_sub_return_relaxed(i, v);
241 	__atomic_post_full_fence();
242 	return ret;
243 }
244 #define atomic_sub_return atomic_sub_return
245 #endif
246 
247 #endif /* atomic_sub_return_relaxed */
248 
249 #define arch_atomic_fetch_sub atomic_fetch_sub
250 #define arch_atomic_fetch_sub_acquire atomic_fetch_sub_acquire
251 #define arch_atomic_fetch_sub_release atomic_fetch_sub_release
252 #define arch_atomic_fetch_sub_relaxed atomic_fetch_sub_relaxed
253 
254 #ifndef atomic_fetch_sub_relaxed
255 #define atomic_fetch_sub_acquire atomic_fetch_sub
256 #define atomic_fetch_sub_release atomic_fetch_sub
257 #define atomic_fetch_sub_relaxed atomic_fetch_sub
258 #else /* atomic_fetch_sub_relaxed */
259 
260 #ifndef atomic_fetch_sub_acquire
261 static __always_inline int
262 atomic_fetch_sub_acquire(int i, atomic_t *v)
263 {
264 	int ret = atomic_fetch_sub_relaxed(i, v);
265 	__atomic_acquire_fence();
266 	return ret;
267 }
268 #define atomic_fetch_sub_acquire atomic_fetch_sub_acquire
269 #endif
270 
271 #ifndef atomic_fetch_sub_release
272 static __always_inline int
273 atomic_fetch_sub_release(int i, atomic_t *v)
274 {
275 	__atomic_release_fence();
276 	return atomic_fetch_sub_relaxed(i, v);
277 }
278 #define atomic_fetch_sub_release atomic_fetch_sub_release
279 #endif
280 
281 #ifndef atomic_fetch_sub
282 static __always_inline int
283 atomic_fetch_sub(int i, atomic_t *v)
284 {
285 	int ret;
286 	__atomic_pre_full_fence();
287 	ret = atomic_fetch_sub_relaxed(i, v);
288 	__atomic_post_full_fence();
289 	return ret;
290 }
291 #define atomic_fetch_sub atomic_fetch_sub
292 #endif
293 
294 #endif /* atomic_fetch_sub_relaxed */
295 
296 #define arch_atomic_inc atomic_inc
297 
298 #ifndef atomic_inc
299 static __always_inline void
300 atomic_inc(atomic_t *v)
301 {
302 	atomic_add(1, v);
303 }
304 #define atomic_inc atomic_inc
305 #endif
306 
307 #define arch_atomic_inc_return atomic_inc_return
308 #define arch_atomic_inc_return_acquire atomic_inc_return_acquire
309 #define arch_atomic_inc_return_release atomic_inc_return_release
310 #define arch_atomic_inc_return_relaxed atomic_inc_return_relaxed
311 
312 #ifndef atomic_inc_return_relaxed
313 #ifdef atomic_inc_return
314 #define atomic_inc_return_acquire atomic_inc_return
315 #define atomic_inc_return_release atomic_inc_return
316 #define atomic_inc_return_relaxed atomic_inc_return
317 #endif /* atomic_inc_return */
318 
319 #ifndef atomic_inc_return
320 static __always_inline int
321 atomic_inc_return(atomic_t *v)
322 {
323 	return atomic_add_return(1, v);
324 }
325 #define atomic_inc_return atomic_inc_return
326 #endif
327 
328 #ifndef atomic_inc_return_acquire
329 static __always_inline int
330 atomic_inc_return_acquire(atomic_t *v)
331 {
332 	return atomic_add_return_acquire(1, v);
333 }
334 #define atomic_inc_return_acquire atomic_inc_return_acquire
335 #endif
336 
337 #ifndef atomic_inc_return_release
338 static __always_inline int
339 atomic_inc_return_release(atomic_t *v)
340 {
341 	return atomic_add_return_release(1, v);
342 }
343 #define atomic_inc_return_release atomic_inc_return_release
344 #endif
345 
346 #ifndef atomic_inc_return_relaxed
347 static __always_inline int
348 atomic_inc_return_relaxed(atomic_t *v)
349 {
350 	return atomic_add_return_relaxed(1, v);
351 }
352 #define atomic_inc_return_relaxed atomic_inc_return_relaxed
353 #endif
354 
355 #else /* atomic_inc_return_relaxed */
356 
357 #ifndef atomic_inc_return_acquire
358 static __always_inline int
359 atomic_inc_return_acquire(atomic_t *v)
360 {
361 	int ret = atomic_inc_return_relaxed(v);
362 	__atomic_acquire_fence();
363 	return ret;
364 }
365 #define atomic_inc_return_acquire atomic_inc_return_acquire
366 #endif
367 
368 #ifndef atomic_inc_return_release
369 static __always_inline int
370 atomic_inc_return_release(atomic_t *v)
371 {
372 	__atomic_release_fence();
373 	return atomic_inc_return_relaxed(v);
374 }
375 #define atomic_inc_return_release atomic_inc_return_release
376 #endif
377 
378 #ifndef atomic_inc_return
379 static __always_inline int
380 atomic_inc_return(atomic_t *v)
381 {
382 	int ret;
383 	__atomic_pre_full_fence();
384 	ret = atomic_inc_return_relaxed(v);
385 	__atomic_post_full_fence();
386 	return ret;
387 }
388 #define atomic_inc_return atomic_inc_return
389 #endif
390 
391 #endif /* atomic_inc_return_relaxed */
392 
393 #define arch_atomic_fetch_inc atomic_fetch_inc
394 #define arch_atomic_fetch_inc_acquire atomic_fetch_inc_acquire
395 #define arch_atomic_fetch_inc_release atomic_fetch_inc_release
396 #define arch_atomic_fetch_inc_relaxed atomic_fetch_inc_relaxed
397 
398 #ifndef atomic_fetch_inc_relaxed
399 #ifdef atomic_fetch_inc
400 #define atomic_fetch_inc_acquire atomic_fetch_inc
401 #define atomic_fetch_inc_release atomic_fetch_inc
402 #define atomic_fetch_inc_relaxed atomic_fetch_inc
403 #endif /* atomic_fetch_inc */
404 
405 #ifndef atomic_fetch_inc
406 static __always_inline int
407 atomic_fetch_inc(atomic_t *v)
408 {
409 	return atomic_fetch_add(1, v);
410 }
411 #define atomic_fetch_inc atomic_fetch_inc
412 #endif
413 
414 #ifndef atomic_fetch_inc_acquire
415 static __always_inline int
416 atomic_fetch_inc_acquire(atomic_t *v)
417 {
418 	return atomic_fetch_add_acquire(1, v);
419 }
420 #define atomic_fetch_inc_acquire atomic_fetch_inc_acquire
421 #endif
422 
423 #ifndef atomic_fetch_inc_release
424 static __always_inline int
425 atomic_fetch_inc_release(atomic_t *v)
426 {
427 	return atomic_fetch_add_release(1, v);
428 }
429 #define atomic_fetch_inc_release atomic_fetch_inc_release
430 #endif
431 
432 #ifndef atomic_fetch_inc_relaxed
433 static __always_inline int
434 atomic_fetch_inc_relaxed(atomic_t *v)
435 {
436 	return atomic_fetch_add_relaxed(1, v);
437 }
438 #define atomic_fetch_inc_relaxed atomic_fetch_inc_relaxed
439 #endif
440 
441 #else /* atomic_fetch_inc_relaxed */
442 
443 #ifndef atomic_fetch_inc_acquire
444 static __always_inline int
445 atomic_fetch_inc_acquire(atomic_t *v)
446 {
447 	int ret = atomic_fetch_inc_relaxed(v);
448 	__atomic_acquire_fence();
449 	return ret;
450 }
451 #define atomic_fetch_inc_acquire atomic_fetch_inc_acquire
452 #endif
453 
454 #ifndef atomic_fetch_inc_release
455 static __always_inline int
456 atomic_fetch_inc_release(atomic_t *v)
457 {
458 	__atomic_release_fence();
459 	return atomic_fetch_inc_relaxed(v);
460 }
461 #define atomic_fetch_inc_release atomic_fetch_inc_release
462 #endif
463 
464 #ifndef atomic_fetch_inc
465 static __always_inline int
466 atomic_fetch_inc(atomic_t *v)
467 {
468 	int ret;
469 	__atomic_pre_full_fence();
470 	ret = atomic_fetch_inc_relaxed(v);
471 	__atomic_post_full_fence();
472 	return ret;
473 }
474 #define atomic_fetch_inc atomic_fetch_inc
475 #endif
476 
477 #endif /* atomic_fetch_inc_relaxed */
478 
479 #define arch_atomic_dec atomic_dec
480 
481 #ifndef atomic_dec
482 static __always_inline void
483 atomic_dec(atomic_t *v)
484 {
485 	atomic_sub(1, v);
486 }
487 #define atomic_dec atomic_dec
488 #endif
489 
490 #define arch_atomic_dec_return atomic_dec_return
491 #define arch_atomic_dec_return_acquire atomic_dec_return_acquire
492 #define arch_atomic_dec_return_release atomic_dec_return_release
493 #define arch_atomic_dec_return_relaxed atomic_dec_return_relaxed
494 
495 #ifndef atomic_dec_return_relaxed
496 #ifdef atomic_dec_return
497 #define atomic_dec_return_acquire atomic_dec_return
498 #define atomic_dec_return_release atomic_dec_return
499 #define atomic_dec_return_relaxed atomic_dec_return
500 #endif /* atomic_dec_return */
501 
502 #ifndef atomic_dec_return
503 static __always_inline int
504 atomic_dec_return(atomic_t *v)
505 {
506 	return atomic_sub_return(1, v);
507 }
508 #define atomic_dec_return atomic_dec_return
509 #endif
510 
511 #ifndef atomic_dec_return_acquire
512 static __always_inline int
513 atomic_dec_return_acquire(atomic_t *v)
514 {
515 	return atomic_sub_return_acquire(1, v);
516 }
517 #define atomic_dec_return_acquire atomic_dec_return_acquire
518 #endif
519 
520 #ifndef atomic_dec_return_release
521 static __always_inline int
522 atomic_dec_return_release(atomic_t *v)
523 {
524 	return atomic_sub_return_release(1, v);
525 }
526 #define atomic_dec_return_release atomic_dec_return_release
527 #endif
528 
529 #ifndef atomic_dec_return_relaxed
530 static __always_inline int
531 atomic_dec_return_relaxed(atomic_t *v)
532 {
533 	return atomic_sub_return_relaxed(1, v);
534 }
535 #define atomic_dec_return_relaxed atomic_dec_return_relaxed
536 #endif
537 
538 #else /* atomic_dec_return_relaxed */
539 
540 #ifndef atomic_dec_return_acquire
541 static __always_inline int
542 atomic_dec_return_acquire(atomic_t *v)
543 {
544 	int ret = atomic_dec_return_relaxed(v);
545 	__atomic_acquire_fence();
546 	return ret;
547 }
548 #define atomic_dec_return_acquire atomic_dec_return_acquire
549 #endif
550 
551 #ifndef atomic_dec_return_release
552 static __always_inline int
553 atomic_dec_return_release(atomic_t *v)
554 {
555 	__atomic_release_fence();
556 	return atomic_dec_return_relaxed(v);
557 }
558 #define atomic_dec_return_release atomic_dec_return_release
559 #endif
560 
561 #ifndef atomic_dec_return
562 static __always_inline int
563 atomic_dec_return(atomic_t *v)
564 {
565 	int ret;
566 	__atomic_pre_full_fence();
567 	ret = atomic_dec_return_relaxed(v);
568 	__atomic_post_full_fence();
569 	return ret;
570 }
571 #define atomic_dec_return atomic_dec_return
572 #endif
573 
574 #endif /* atomic_dec_return_relaxed */
575 
576 #define arch_atomic_fetch_dec atomic_fetch_dec
577 #define arch_atomic_fetch_dec_acquire atomic_fetch_dec_acquire
578 #define arch_atomic_fetch_dec_release atomic_fetch_dec_release
579 #define arch_atomic_fetch_dec_relaxed atomic_fetch_dec_relaxed
580 
581 #ifndef atomic_fetch_dec_relaxed
582 #ifdef atomic_fetch_dec
583 #define atomic_fetch_dec_acquire atomic_fetch_dec
584 #define atomic_fetch_dec_release atomic_fetch_dec
585 #define atomic_fetch_dec_relaxed atomic_fetch_dec
586 #endif /* atomic_fetch_dec */
587 
588 #ifndef atomic_fetch_dec
589 static __always_inline int
590 atomic_fetch_dec(atomic_t *v)
591 {
592 	return atomic_fetch_sub(1, v);
593 }
594 #define atomic_fetch_dec atomic_fetch_dec
595 #endif
596 
597 #ifndef atomic_fetch_dec_acquire
598 static __always_inline int
599 atomic_fetch_dec_acquire(atomic_t *v)
600 {
601 	return atomic_fetch_sub_acquire(1, v);
602 }
603 #define atomic_fetch_dec_acquire atomic_fetch_dec_acquire
604 #endif
605 
606 #ifndef atomic_fetch_dec_release
607 static __always_inline int
608 atomic_fetch_dec_release(atomic_t *v)
609 {
610 	return atomic_fetch_sub_release(1, v);
611 }
612 #define atomic_fetch_dec_release atomic_fetch_dec_release
613 #endif
614 
615 #ifndef atomic_fetch_dec_relaxed
616 static __always_inline int
617 atomic_fetch_dec_relaxed(atomic_t *v)
618 {
619 	return atomic_fetch_sub_relaxed(1, v);
620 }
621 #define atomic_fetch_dec_relaxed atomic_fetch_dec_relaxed
622 #endif
623 
624 #else /* atomic_fetch_dec_relaxed */
625 
626 #ifndef atomic_fetch_dec_acquire
627 static __always_inline int
628 atomic_fetch_dec_acquire(atomic_t *v)
629 {
630 	int ret = atomic_fetch_dec_relaxed(v);
631 	__atomic_acquire_fence();
632 	return ret;
633 }
634 #define atomic_fetch_dec_acquire atomic_fetch_dec_acquire
635 #endif
636 
637 #ifndef atomic_fetch_dec_release
638 static __always_inline int
639 atomic_fetch_dec_release(atomic_t *v)
640 {
641 	__atomic_release_fence();
642 	return atomic_fetch_dec_relaxed(v);
643 }
644 #define atomic_fetch_dec_release atomic_fetch_dec_release
645 #endif
646 
647 #ifndef atomic_fetch_dec
648 static __always_inline int
649 atomic_fetch_dec(atomic_t *v)
650 {
651 	int ret;
652 	__atomic_pre_full_fence();
653 	ret = atomic_fetch_dec_relaxed(v);
654 	__atomic_post_full_fence();
655 	return ret;
656 }
657 #define atomic_fetch_dec atomic_fetch_dec
658 #endif
659 
660 #endif /* atomic_fetch_dec_relaxed */
661 
662 #define arch_atomic_and atomic_and
663 
664 #define arch_atomic_fetch_and atomic_fetch_and
665 #define arch_atomic_fetch_and_acquire atomic_fetch_and_acquire
666 #define arch_atomic_fetch_and_release atomic_fetch_and_release
667 #define arch_atomic_fetch_and_relaxed atomic_fetch_and_relaxed
668 
669 #ifndef atomic_fetch_and_relaxed
670 #define atomic_fetch_and_acquire atomic_fetch_and
671 #define atomic_fetch_and_release atomic_fetch_and
672 #define atomic_fetch_and_relaxed atomic_fetch_and
673 #else /* atomic_fetch_and_relaxed */
674 
675 #ifndef atomic_fetch_and_acquire
676 static __always_inline int
677 atomic_fetch_and_acquire(int i, atomic_t *v)
678 {
679 	int ret = atomic_fetch_and_relaxed(i, v);
680 	__atomic_acquire_fence();
681 	return ret;
682 }
683 #define atomic_fetch_and_acquire atomic_fetch_and_acquire
684 #endif
685 
686 #ifndef atomic_fetch_and_release
687 static __always_inline int
688 atomic_fetch_and_release(int i, atomic_t *v)
689 {
690 	__atomic_release_fence();
691 	return atomic_fetch_and_relaxed(i, v);
692 }
693 #define atomic_fetch_and_release atomic_fetch_and_release
694 #endif
695 
696 #ifndef atomic_fetch_and
697 static __always_inline int
698 atomic_fetch_and(int i, atomic_t *v)
699 {
700 	int ret;
701 	__atomic_pre_full_fence();
702 	ret = atomic_fetch_and_relaxed(i, v);
703 	__atomic_post_full_fence();
704 	return ret;
705 }
706 #define atomic_fetch_and atomic_fetch_and
707 #endif
708 
709 #endif /* atomic_fetch_and_relaxed */
710 
711 #define arch_atomic_andnot atomic_andnot
712 
713 #ifndef atomic_andnot
714 static __always_inline void
715 atomic_andnot(int i, atomic_t *v)
716 {
717 	atomic_and(~i, v);
718 }
719 #define atomic_andnot atomic_andnot
720 #endif
721 
722 #define arch_atomic_fetch_andnot atomic_fetch_andnot
723 #define arch_atomic_fetch_andnot_acquire atomic_fetch_andnot_acquire
724 #define arch_atomic_fetch_andnot_release atomic_fetch_andnot_release
725 #define arch_atomic_fetch_andnot_relaxed atomic_fetch_andnot_relaxed
726 
727 #ifndef atomic_fetch_andnot_relaxed
728 #ifdef atomic_fetch_andnot
729 #define atomic_fetch_andnot_acquire atomic_fetch_andnot
730 #define atomic_fetch_andnot_release atomic_fetch_andnot
731 #define atomic_fetch_andnot_relaxed atomic_fetch_andnot
732 #endif /* atomic_fetch_andnot */
733 
734 #ifndef atomic_fetch_andnot
735 static __always_inline int
736 atomic_fetch_andnot(int i, atomic_t *v)
737 {
738 	return atomic_fetch_and(~i, v);
739 }
740 #define atomic_fetch_andnot atomic_fetch_andnot
741 #endif
742 
743 #ifndef atomic_fetch_andnot_acquire
744 static __always_inline int
745 atomic_fetch_andnot_acquire(int i, atomic_t *v)
746 {
747 	return atomic_fetch_and_acquire(~i, v);
748 }
749 #define atomic_fetch_andnot_acquire atomic_fetch_andnot_acquire
750 #endif
751 
752 #ifndef atomic_fetch_andnot_release
753 static __always_inline int
754 atomic_fetch_andnot_release(int i, atomic_t *v)
755 {
756 	return atomic_fetch_and_release(~i, v);
757 }
758 #define atomic_fetch_andnot_release atomic_fetch_andnot_release
759 #endif
760 
761 #ifndef atomic_fetch_andnot_relaxed
762 static __always_inline int
763 atomic_fetch_andnot_relaxed(int i, atomic_t *v)
764 {
765 	return atomic_fetch_and_relaxed(~i, v);
766 }
767 #define atomic_fetch_andnot_relaxed atomic_fetch_andnot_relaxed
768 #endif
769 
770 #else /* atomic_fetch_andnot_relaxed */
771 
772 #ifndef atomic_fetch_andnot_acquire
773 static __always_inline int
774 atomic_fetch_andnot_acquire(int i, atomic_t *v)
775 {
776 	int ret = atomic_fetch_andnot_relaxed(i, v);
777 	__atomic_acquire_fence();
778 	return ret;
779 }
780 #define atomic_fetch_andnot_acquire atomic_fetch_andnot_acquire
781 #endif
782 
783 #ifndef atomic_fetch_andnot_release
784 static __always_inline int
785 atomic_fetch_andnot_release(int i, atomic_t *v)
786 {
787 	__atomic_release_fence();
788 	return atomic_fetch_andnot_relaxed(i, v);
789 }
790 #define atomic_fetch_andnot_release atomic_fetch_andnot_release
791 #endif
792 
793 #ifndef atomic_fetch_andnot
794 static __always_inline int
795 atomic_fetch_andnot(int i, atomic_t *v)
796 {
797 	int ret;
798 	__atomic_pre_full_fence();
799 	ret = atomic_fetch_andnot_relaxed(i, v);
800 	__atomic_post_full_fence();
801 	return ret;
802 }
803 #define atomic_fetch_andnot atomic_fetch_andnot
804 #endif
805 
806 #endif /* atomic_fetch_andnot_relaxed */
807 
808 #define arch_atomic_or atomic_or
809 
810 #define arch_atomic_fetch_or atomic_fetch_or
811 #define arch_atomic_fetch_or_acquire atomic_fetch_or_acquire
812 #define arch_atomic_fetch_or_release atomic_fetch_or_release
813 #define arch_atomic_fetch_or_relaxed atomic_fetch_or_relaxed
814 
815 #ifndef atomic_fetch_or_relaxed
816 #define atomic_fetch_or_acquire atomic_fetch_or
817 #define atomic_fetch_or_release atomic_fetch_or
818 #define atomic_fetch_or_relaxed atomic_fetch_or
819 #else /* atomic_fetch_or_relaxed */
820 
821 #ifndef atomic_fetch_or_acquire
822 static __always_inline int
823 atomic_fetch_or_acquire(int i, atomic_t *v)
824 {
825 	int ret = atomic_fetch_or_relaxed(i, v);
826 	__atomic_acquire_fence();
827 	return ret;
828 }
829 #define atomic_fetch_or_acquire atomic_fetch_or_acquire
830 #endif
831 
832 #ifndef atomic_fetch_or_release
833 static __always_inline int
834 atomic_fetch_or_release(int i, atomic_t *v)
835 {
836 	__atomic_release_fence();
837 	return atomic_fetch_or_relaxed(i, v);
838 }
839 #define atomic_fetch_or_release atomic_fetch_or_release
840 #endif
841 
842 #ifndef atomic_fetch_or
843 static __always_inline int
844 atomic_fetch_or(int i, atomic_t *v)
845 {
846 	int ret;
847 	__atomic_pre_full_fence();
848 	ret = atomic_fetch_or_relaxed(i, v);
849 	__atomic_post_full_fence();
850 	return ret;
851 }
852 #define atomic_fetch_or atomic_fetch_or
853 #endif
854 
855 #endif /* atomic_fetch_or_relaxed */
856 
857 #define arch_atomic_xor atomic_xor
858 
859 #define arch_atomic_fetch_xor atomic_fetch_xor
860 #define arch_atomic_fetch_xor_acquire atomic_fetch_xor_acquire
861 #define arch_atomic_fetch_xor_release atomic_fetch_xor_release
862 #define arch_atomic_fetch_xor_relaxed atomic_fetch_xor_relaxed
863 
864 #ifndef atomic_fetch_xor_relaxed
865 #define atomic_fetch_xor_acquire atomic_fetch_xor
866 #define atomic_fetch_xor_release atomic_fetch_xor
867 #define atomic_fetch_xor_relaxed atomic_fetch_xor
868 #else /* atomic_fetch_xor_relaxed */
869 
870 #ifndef atomic_fetch_xor_acquire
871 static __always_inline int
872 atomic_fetch_xor_acquire(int i, atomic_t *v)
873 {
874 	int ret = atomic_fetch_xor_relaxed(i, v);
875 	__atomic_acquire_fence();
876 	return ret;
877 }
878 #define atomic_fetch_xor_acquire atomic_fetch_xor_acquire
879 #endif
880 
881 #ifndef atomic_fetch_xor_release
882 static __always_inline int
883 atomic_fetch_xor_release(int i, atomic_t *v)
884 {
885 	__atomic_release_fence();
886 	return atomic_fetch_xor_relaxed(i, v);
887 }
888 #define atomic_fetch_xor_release atomic_fetch_xor_release
889 #endif
890 
891 #ifndef atomic_fetch_xor
892 static __always_inline int
893 atomic_fetch_xor(int i, atomic_t *v)
894 {
895 	int ret;
896 	__atomic_pre_full_fence();
897 	ret = atomic_fetch_xor_relaxed(i, v);
898 	__atomic_post_full_fence();
899 	return ret;
900 }
901 #define atomic_fetch_xor atomic_fetch_xor
902 #endif
903 
904 #endif /* atomic_fetch_xor_relaxed */
905 
906 #define arch_atomic_xchg atomic_xchg
907 #define arch_atomic_xchg_acquire atomic_xchg_acquire
908 #define arch_atomic_xchg_release atomic_xchg_release
909 #define arch_atomic_xchg_relaxed atomic_xchg_relaxed
910 
911 #ifndef atomic_xchg_relaxed
912 #define atomic_xchg_acquire atomic_xchg
913 #define atomic_xchg_release atomic_xchg
914 #define atomic_xchg_relaxed atomic_xchg
915 #else /* atomic_xchg_relaxed */
916 
917 #ifndef atomic_xchg_acquire
918 static __always_inline int
919 atomic_xchg_acquire(atomic_t *v, int i)
920 {
921 	int ret = atomic_xchg_relaxed(v, i);
922 	__atomic_acquire_fence();
923 	return ret;
924 }
925 #define atomic_xchg_acquire atomic_xchg_acquire
926 #endif
927 
928 #ifndef atomic_xchg_release
929 static __always_inline int
930 atomic_xchg_release(atomic_t *v, int i)
931 {
932 	__atomic_release_fence();
933 	return atomic_xchg_relaxed(v, i);
934 }
935 #define atomic_xchg_release atomic_xchg_release
936 #endif
937 
938 #ifndef atomic_xchg
939 static __always_inline int
940 atomic_xchg(atomic_t *v, int i)
941 {
942 	int ret;
943 	__atomic_pre_full_fence();
944 	ret = atomic_xchg_relaxed(v, i);
945 	__atomic_post_full_fence();
946 	return ret;
947 }
948 #define atomic_xchg atomic_xchg
949 #endif
950 
951 #endif /* atomic_xchg_relaxed */
952 
953 #define arch_atomic_cmpxchg atomic_cmpxchg
954 #define arch_atomic_cmpxchg_acquire atomic_cmpxchg_acquire
955 #define arch_atomic_cmpxchg_release atomic_cmpxchg_release
956 #define arch_atomic_cmpxchg_relaxed atomic_cmpxchg_relaxed
957 
958 #ifndef atomic_cmpxchg_relaxed
959 #define atomic_cmpxchg_acquire atomic_cmpxchg
960 #define atomic_cmpxchg_release atomic_cmpxchg
961 #define atomic_cmpxchg_relaxed atomic_cmpxchg
962 #else /* atomic_cmpxchg_relaxed */
963 
964 #ifndef atomic_cmpxchg_acquire
965 static __always_inline int
966 atomic_cmpxchg_acquire(atomic_t *v, int old, int new)
967 {
968 	int ret = atomic_cmpxchg_relaxed(v, old, new);
969 	__atomic_acquire_fence();
970 	return ret;
971 }
972 #define atomic_cmpxchg_acquire atomic_cmpxchg_acquire
973 #endif
974 
975 #ifndef atomic_cmpxchg_release
976 static __always_inline int
977 atomic_cmpxchg_release(atomic_t *v, int old, int new)
978 {
979 	__atomic_release_fence();
980 	return atomic_cmpxchg_relaxed(v, old, new);
981 }
982 #define atomic_cmpxchg_release atomic_cmpxchg_release
983 #endif
984 
985 #ifndef atomic_cmpxchg
986 static __always_inline int
987 atomic_cmpxchg(atomic_t *v, int old, int new)
988 {
989 	int ret;
990 	__atomic_pre_full_fence();
991 	ret = atomic_cmpxchg_relaxed(v, old, new);
992 	__atomic_post_full_fence();
993 	return ret;
994 }
995 #define atomic_cmpxchg atomic_cmpxchg
996 #endif
997 
998 #endif /* atomic_cmpxchg_relaxed */
999 
1000 #define arch_atomic_try_cmpxchg atomic_try_cmpxchg
1001 #define arch_atomic_try_cmpxchg_acquire atomic_try_cmpxchg_acquire
1002 #define arch_atomic_try_cmpxchg_release atomic_try_cmpxchg_release
1003 #define arch_atomic_try_cmpxchg_relaxed atomic_try_cmpxchg_relaxed
1004 
1005 #ifndef atomic_try_cmpxchg_relaxed
1006 #ifdef atomic_try_cmpxchg
1007 #define atomic_try_cmpxchg_acquire atomic_try_cmpxchg
1008 #define atomic_try_cmpxchg_release atomic_try_cmpxchg
1009 #define atomic_try_cmpxchg_relaxed atomic_try_cmpxchg
1010 #endif /* atomic_try_cmpxchg */
1011 
1012 #ifndef atomic_try_cmpxchg
1013 static __always_inline bool
1014 atomic_try_cmpxchg(atomic_t *v, int *old, int new)
1015 {
1016 	int r, o = *old;
1017 	r = atomic_cmpxchg(v, o, new);
1018 	if (unlikely(r != o))
1019 		*old = r;
1020 	return likely(r == o);
1021 }
1022 #define atomic_try_cmpxchg atomic_try_cmpxchg
1023 #endif
1024 
1025 #ifndef atomic_try_cmpxchg_acquire
1026 static __always_inline bool
1027 atomic_try_cmpxchg_acquire(atomic_t *v, int *old, int new)
1028 {
1029 	int r, o = *old;
1030 	r = atomic_cmpxchg_acquire(v, o, new);
1031 	if (unlikely(r != o))
1032 		*old = r;
1033 	return likely(r == o);
1034 }
1035 #define atomic_try_cmpxchg_acquire atomic_try_cmpxchg_acquire
1036 #endif
1037 
1038 #ifndef atomic_try_cmpxchg_release
1039 static __always_inline bool
1040 atomic_try_cmpxchg_release(atomic_t *v, int *old, int new)
1041 {
1042 	int r, o = *old;
1043 	r = atomic_cmpxchg_release(v, o, new);
1044 	if (unlikely(r != o))
1045 		*old = r;
1046 	return likely(r == o);
1047 }
1048 #define atomic_try_cmpxchg_release atomic_try_cmpxchg_release
1049 #endif
1050 
1051 #ifndef atomic_try_cmpxchg_relaxed
1052 static __always_inline bool
1053 atomic_try_cmpxchg_relaxed(atomic_t *v, int *old, int new)
1054 {
1055 	int r, o = *old;
1056 	r = atomic_cmpxchg_relaxed(v, o, new);
1057 	if (unlikely(r != o))
1058 		*old = r;
1059 	return likely(r == o);
1060 }
1061 #define atomic_try_cmpxchg_relaxed atomic_try_cmpxchg_relaxed
1062 #endif
1063 
1064 #else /* atomic_try_cmpxchg_relaxed */
1065 
1066 #ifndef atomic_try_cmpxchg_acquire
1067 static __always_inline bool
1068 atomic_try_cmpxchg_acquire(atomic_t *v, int *old, int new)
1069 {
1070 	bool ret = atomic_try_cmpxchg_relaxed(v, old, new);
1071 	__atomic_acquire_fence();
1072 	return ret;
1073 }
1074 #define atomic_try_cmpxchg_acquire atomic_try_cmpxchg_acquire
1075 #endif
1076 
1077 #ifndef atomic_try_cmpxchg_release
1078 static __always_inline bool
1079 atomic_try_cmpxchg_release(atomic_t *v, int *old, int new)
1080 {
1081 	__atomic_release_fence();
1082 	return atomic_try_cmpxchg_relaxed(v, old, new);
1083 }
1084 #define atomic_try_cmpxchg_release atomic_try_cmpxchg_release
1085 #endif
1086 
1087 #ifndef atomic_try_cmpxchg
1088 static __always_inline bool
1089 atomic_try_cmpxchg(atomic_t *v, int *old, int new)
1090 {
1091 	bool ret;
1092 	__atomic_pre_full_fence();
1093 	ret = atomic_try_cmpxchg_relaxed(v, old, new);
1094 	__atomic_post_full_fence();
1095 	return ret;
1096 }
1097 #define atomic_try_cmpxchg atomic_try_cmpxchg
1098 #endif
1099 
1100 #endif /* atomic_try_cmpxchg_relaxed */
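/*
 * Editorial usage note: atomic_try_cmpxchg() writes the observed value back
 * through @old on failure, so callers can loop without re-reading the
 * variable. The conditional helpers later in this file follow that pattern,
 * e.g. (sketch modelled on atomic_fetch_add_unless() below):
 *
 *	int c = atomic_read(v);
 *	do {
 *		if (unlikely(c == u))
 *			break;
 *	} while (!atomic_try_cmpxchg(v, &c, c + a));
 */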
1101 
1102 #define arch_atomic_sub_and_test atomic_sub_and_test
1103 
1104 #ifndef atomic_sub_and_test
1105 /**
1106  * atomic_sub_and_test - subtract value from variable and test result
1107  * @i: integer value to subtract
1108  * @v: pointer of type atomic_t
1109  *
1110  * Atomically subtracts @i from @v and returns
1111  * true if the result is zero, or false for all
1112  * other cases.
1113  */
1114 static __always_inline bool
1115 atomic_sub_and_test(int i, atomic_t *v)
1116 {
1117 	return atomic_sub_return(i, v) == 0;
1118 }
1119 #define atomic_sub_and_test atomic_sub_and_test
1120 #endif
1121 
1122 #define arch_atomic_dec_and_test atomic_dec_and_test
1123 
1124 #ifndef atomic_dec_and_test
1125 /**
1126  * atomic_dec_and_test - decrement and test
1127  * @v: pointer of type atomic_t
1128  *
1129  * Atomically decrements @v by 1 and
1130  * returns true if the result is 0, or false for all other
1131  * cases.
1132  */
1133 static __always_inline bool
1134 atomic_dec_and_test(atomic_t *v)
1135 {
1136 	return atomic_dec_return(v) == 0;
1137 }
1138 #define atomic_dec_and_test atomic_dec_and_test
1139 #endif
1140 
1141 #define arch_atomic_inc_and_test atomic_inc_and_test
1142 
1143 #ifndef atomic_inc_and_test
1144 /**
1145  * atomic_inc_and_test - increment and test
1146  * @v: pointer of type atomic_t
1147  *
1148  * Atomically increments @v by 1
1149  * and returns true if the result is zero, or false for all
1150  * other cases.
1151  */
1152 static __always_inline bool
1153 atomic_inc_and_test(atomic_t *v)
1154 {
1155 	return atomic_inc_return(v) == 0;
1156 }
1157 #define atomic_inc_and_test atomic_inc_and_test
1158 #endif
1159 
1160 #define arch_atomic_add_negative atomic_add_negative
1161 
1162 #ifndef atomic_add_negative
1163 /**
1164  * atomic_add_negative - add and test if negative
1165  * @i: integer value to add
1166  * @v: pointer of type atomic_t
1167  *
1168  * Atomically adds @i to @v and returns true
1169  * if the result is negative, or false when
1170  * result is greater than or equal to zero.
1171  */
1172 static __always_inline bool
1173 atomic_add_negative(int i, atomic_t *v)
1174 {
1175 	return atomic_add_return(i, v) < 0;
1176 }
1177 #define atomic_add_negative atomic_add_negative
1178 #endif
1179 
1180 #define arch_atomic_fetch_add_unless atomic_fetch_add_unless
1181 
1182 #ifndef atomic_fetch_add_unless
1183 /**
1184  * atomic_fetch_add_unless - add unless the number is already a given value
1185  * @v: pointer of type atomic_t
1186  * @a: the amount to add to v...
1187  * @u: ...unless v is equal to u.
1188  *
1189  * Atomically adds @a to @v, so long as @v was not already @u.
1190  * Returns original value of @v
1191  */
1192 static __always_inline int
1193 atomic_fetch_add_unless(atomic_t *v, int a, int u)
1194 {
1195 	int c = atomic_read(v);
1196 
1197 	do {
1198 		if (unlikely(c == u))
1199 			break;
1200 	} while (!atomic_try_cmpxchg(v, &c, c + a));
1201 
1202 	return c;
1203 }
1204 #define atomic_fetch_add_unless atomic_fetch_add_unless
1205 #endif
1206 
1207 #define arch_atomic_add_unless atomic_add_unless
1208 
1209 #ifndef atomic_add_unless
1210 /**
1211  * atomic_add_unless - add unless the number is already a given value
1212  * @v: pointer of type atomic_t
1213  * @a: the amount to add to v...
1214  * @u: ...unless v is equal to u.
1215  *
1216  * Atomically adds @a to @v, if @v was not already @u.
1217  * Returns true if the addition was done.
1218  */
1219 static __always_inline bool
1220 atomic_add_unless(atomic_t *v, int a, int u)
1221 {
1222 	return atomic_fetch_add_unless(v, a, u) != u;
1223 }
1224 #define atomic_add_unless atomic_add_unless
1225 #endif
1226 
1227 #define arch_atomic_inc_not_zero atomic_inc_not_zero
1228 
1229 #ifndef atomic_inc_not_zero
1230 /**
1231  * atomic_inc_not_zero - increment unless the number is zero
1232  * @v: pointer of type atomic_t
1233  *
1234  * Atomically increments @v by 1, if @v is non-zero.
1235  * Returns true if the increment was done.
1236  */
1237 static __always_inline bool
1238 atomic_inc_not_zero(atomic_t *v)
1239 {
1240 	return atomic_add_unless(v, 1, 0);
1241 }
1242 #define atomic_inc_not_zero atomic_inc_not_zero
1243 #endif
1244 
1245 #define arch_atomic_inc_unless_negative atomic_inc_unless_negative
1246 
1247 #ifndef atomic_inc_unless_negative
1248 static __always_inline bool
1249 atomic_inc_unless_negative(atomic_t *v)
1250 {
1251 	int c = atomic_read(v);
1252 
1253 	do {
1254 		if (unlikely(c < 0))
1255 			return false;
1256 	} while (!atomic_try_cmpxchg(v, &c, c + 1));
1257 
1258 	return true;
1259 }
1260 #define atomic_inc_unless_negative atomic_inc_unless_negative
1261 #endif
1262 
1263 #define arch_atomic_dec_unless_positive atomic_dec_unless_positive
1264 
1265 #ifndef atomic_dec_unless_positive
1266 static __always_inline bool
1267 atomic_dec_unless_positive(atomic_t *v)
1268 {
1269 	int c = atomic_read(v);
1270 
1271 	do {
1272 		if (unlikely(c > 0))
1273 			return false;
1274 	} while (!atomic_try_cmpxchg(v, &c, c - 1));
1275 
1276 	return true;
1277 }
1278 #define atomic_dec_unless_positive atomic_dec_unless_positive
1279 #endif
1280 
1281 #define arch_atomic_dec_if_positive atomic_dec_if_positive
1282 
1283 #ifndef atomic_dec_if_positive
1284 static __always_inline int
1285 atomic_dec_if_positive(atomic_t *v)
1286 {
1287 	int dec, c = atomic_read(v);
1288 
1289 	do {
1290 		dec = c - 1;
1291 		if (unlikely(dec < 0))
1292 			break;
1293 	} while (!atomic_try_cmpxchg(v, &c, dec));
1294 
1295 	return dec;
1296 }
1297 #define atomic_dec_if_positive atomic_dec_if_positive
1298 #endif
1299 
1300 #ifdef CONFIG_GENERIC_ATOMIC64
1301 #include <asm-generic/atomic64.h>
1302 #endif
1303 
1304 #define arch_atomic64_read atomic64_read
1305 #define arch_atomic64_read_acquire atomic64_read_acquire
1306 
1307 #ifndef atomic64_read_acquire
1308 static __always_inline s64
1309 atomic64_read_acquire(const atomic64_t *v)
1310 {
1311 	return smp_load_acquire(&(v)->counter);
1312 }
1313 #define atomic64_read_acquire atomic64_read_acquire
1314 #endif
1315 
1316 #define arch_atomic64_set atomic64_set
1317 #define arch_atomic64_set_release atomic64_set_release
1318 
1319 #ifndef atomic64_set_release
1320 static __always_inline void
1321 atomic64_set_release(atomic64_t *v, s64 i)
1322 {
1323 	smp_store_release(&(v)->counter, i);
1324 }
1325 #define atomic64_set_release atomic64_set_release
1326 #endif
1327 
1328 #define arch_atomic64_add atomic64_add
1329 
1330 #define arch_atomic64_add_return atomic64_add_return
1331 #define arch_atomic64_add_return_acquire atomic64_add_return_acquire
1332 #define arch_atomic64_add_return_release atomic64_add_return_release
1333 #define arch_atomic64_add_return_relaxed atomic64_add_return_relaxed
1334 
1335 #ifndef atomic64_add_return_relaxed
1336 #define atomic64_add_return_acquire atomic64_add_return
1337 #define atomic64_add_return_release atomic64_add_return
1338 #define atomic64_add_return_relaxed atomic64_add_return
1339 #else /* atomic64_add_return_relaxed */
1340 
1341 #ifndef atomic64_add_return_acquire
1342 static __always_inline s64
1343 atomic64_add_return_acquire(s64 i, atomic64_t *v)
1344 {
1345 	s64 ret = atomic64_add_return_relaxed(i, v);
1346 	__atomic_acquire_fence();
1347 	return ret;
1348 }
1349 #define atomic64_add_return_acquire atomic64_add_return_acquire
1350 #endif
1351 
1352 #ifndef atomic64_add_return_release
1353 static __always_inline s64
1354 atomic64_add_return_release(s64 i, atomic64_t *v)
1355 {
1356 	__atomic_release_fence();
1357 	return atomic64_add_return_relaxed(i, v);
1358 }
1359 #define atomic64_add_return_release atomic64_add_return_release
1360 #endif
1361 
1362 #ifndef atomic64_add_return
1363 static __always_inline s64
1364 atomic64_add_return(s64 i, atomic64_t *v)
1365 {
1366 	s64 ret;
1367 	__atomic_pre_full_fence();
1368 	ret = atomic64_add_return_relaxed(i, v);
1369 	__atomic_post_full_fence();
1370 	return ret;
1371 }
1372 #define atomic64_add_return atomic64_add_return
1373 #endif
1374 
1375 #endif /* atomic64_add_return_relaxed */
1376 
1377 #define arch_atomic64_fetch_add atomic64_fetch_add
1378 #define arch_atomic64_fetch_add_acquire atomic64_fetch_add_acquire
1379 #define arch_atomic64_fetch_add_release atomic64_fetch_add_release
1380 #define arch_atomic64_fetch_add_relaxed atomic64_fetch_add_relaxed
1381 
1382 #ifndef atomic64_fetch_add_relaxed
1383 #define atomic64_fetch_add_acquire atomic64_fetch_add
1384 #define atomic64_fetch_add_release atomic64_fetch_add
1385 #define atomic64_fetch_add_relaxed atomic64_fetch_add
1386 #else /* atomic64_fetch_add_relaxed */
1387 
1388 #ifndef atomic64_fetch_add_acquire
1389 static __always_inline s64
1390 atomic64_fetch_add_acquire(s64 i, atomic64_t *v)
1391 {
1392 	s64 ret = atomic64_fetch_add_relaxed(i, v);
1393 	__atomic_acquire_fence();
1394 	return ret;
1395 }
1396 #define atomic64_fetch_add_acquire atomic64_fetch_add_acquire
1397 #endif
1398 
1399 #ifndef atomic64_fetch_add_release
1400 static __always_inline s64
1401 atomic64_fetch_add_release(s64 i, atomic64_t *v)
1402 {
1403 	__atomic_release_fence();
1404 	return atomic64_fetch_add_relaxed(i, v);
1405 }
1406 #define atomic64_fetch_add_release atomic64_fetch_add_release
1407 #endif
1408 
1409 #ifndef atomic64_fetch_add
1410 static __always_inline s64
1411 atomic64_fetch_add(s64 i, atomic64_t *v)
1412 {
1413 	s64 ret;
1414 	__atomic_pre_full_fence();
1415 	ret = atomic64_fetch_add_relaxed(i, v);
1416 	__atomic_post_full_fence();
1417 	return ret;
1418 }
1419 #define atomic64_fetch_add atomic64_fetch_add
1420 #endif
1421 
1422 #endif /* atomic64_fetch_add_relaxed */
1423 
1424 #define arch_atomic64_sub atomic64_sub
1425 
1426 #define arch_atomic64_sub_return atomic64_sub_return
1427 #define arch_atomic64_sub_return_acquire atomic64_sub_return_acquire
1428 #define arch_atomic64_sub_return_release atomic64_sub_return_release
1429 #define arch_atomic64_sub_return_relaxed atomic64_sub_return_relaxed
1430 
1431 #ifndef atomic64_sub_return_relaxed
1432 #define atomic64_sub_return_acquire atomic64_sub_return
1433 #define atomic64_sub_return_release atomic64_sub_return
1434 #define atomic64_sub_return_relaxed atomic64_sub_return
1435 #else /* atomic64_sub_return_relaxed */
1436 
1437 #ifndef atomic64_sub_return_acquire
1438 static __always_inline s64
1439 atomic64_sub_return_acquire(s64 i, atomic64_t *v)
1440 {
1441 	s64 ret = atomic64_sub_return_relaxed(i, v);
1442 	__atomic_acquire_fence();
1443 	return ret;
1444 }
1445 #define atomic64_sub_return_acquire atomic64_sub_return_acquire
1446 #endif
1447 
1448 #ifndef atomic64_sub_return_release
1449 static __always_inline s64
1450 atomic64_sub_return_release(s64 i, atomic64_t *v)
1451 {
1452 	__atomic_release_fence();
1453 	return atomic64_sub_return_relaxed(i, v);
1454 }
1455 #define atomic64_sub_return_release atomic64_sub_return_release
1456 #endif
1457 
1458 #ifndef atomic64_sub_return
1459 static __always_inline s64
1460 atomic64_sub_return(s64 i, atomic64_t *v)
1461 {
1462 	s64 ret;
1463 	__atomic_pre_full_fence();
1464 	ret = atomic64_sub_return_relaxed(i, v);
1465 	__atomic_post_full_fence();
1466 	return ret;
1467 }
1468 #define atomic64_sub_return atomic64_sub_return
1469 #endif
1470 
1471 #endif /* atomic64_sub_return_relaxed */
1472 
1473 #define arch_atomic64_fetch_sub atomic64_fetch_sub
1474 #define arch_atomic64_fetch_sub_acquire atomic64_fetch_sub_acquire
1475 #define arch_atomic64_fetch_sub_release atomic64_fetch_sub_release
1476 #define arch_atomic64_fetch_sub_relaxed atomic64_fetch_sub_relaxed
1477 
1478 #ifndef atomic64_fetch_sub_relaxed
1479 #define atomic64_fetch_sub_acquire atomic64_fetch_sub
1480 #define atomic64_fetch_sub_release atomic64_fetch_sub
1481 #define atomic64_fetch_sub_relaxed atomic64_fetch_sub
1482 #else /* atomic64_fetch_sub_relaxed */
1483 
1484 #ifndef atomic64_fetch_sub_acquire
1485 static __always_inline s64
1486 atomic64_fetch_sub_acquire(s64 i, atomic64_t *v)
1487 {
1488 	s64 ret = atomic64_fetch_sub_relaxed(i, v);
1489 	__atomic_acquire_fence();
1490 	return ret;
1491 }
1492 #define atomic64_fetch_sub_acquire atomic64_fetch_sub_acquire
1493 #endif
1494 
1495 #ifndef atomic64_fetch_sub_release
1496 static __always_inline s64
1497 atomic64_fetch_sub_release(s64 i, atomic64_t *v)
1498 {
1499 	__atomic_release_fence();
1500 	return atomic64_fetch_sub_relaxed(i, v);
1501 }
1502 #define atomic64_fetch_sub_release atomic64_fetch_sub_release
1503 #endif
1504 
1505 #ifndef atomic64_fetch_sub
1506 static __always_inline s64
1507 atomic64_fetch_sub(s64 i, atomic64_t *v)
1508 {
1509 	s64 ret;
1510 	__atomic_pre_full_fence();
1511 	ret = atomic64_fetch_sub_relaxed(i, v);
1512 	__atomic_post_full_fence();
1513 	return ret;
1514 }
1515 #define atomic64_fetch_sub atomic64_fetch_sub
1516 #endif
1517 
1518 #endif /* atomic64_fetch_sub_relaxed */
1519 
1520 #define arch_atomic64_inc atomic64_inc
1521 
1522 #ifndef atomic64_inc
1523 static __always_inline void
1524 atomic64_inc(atomic64_t *v)
1525 {
1526 	atomic64_add(1, v);
1527 }
1528 #define atomic64_inc atomic64_inc
1529 #endif
1530 
1531 #define arch_atomic64_inc_return atomic64_inc_return
1532 #define arch_atomic64_inc_return_acquire atomic64_inc_return_acquire
1533 #define arch_atomic64_inc_return_release atomic64_inc_return_release
1534 #define arch_atomic64_inc_return_relaxed atomic64_inc_return_relaxed
1535 
1536 #ifndef atomic64_inc_return_relaxed
1537 #ifdef atomic64_inc_return
1538 #define atomic64_inc_return_acquire atomic64_inc_return
1539 #define atomic64_inc_return_release atomic64_inc_return
1540 #define atomic64_inc_return_relaxed atomic64_inc_return
1541 #endif /* atomic64_inc_return */
1542 
1543 #ifndef atomic64_inc_return
1544 static __always_inline s64
1545 atomic64_inc_return(atomic64_t *v)
1546 {
1547 	return atomic64_add_return(1, v);
1548 }
1549 #define atomic64_inc_return atomic64_inc_return
1550 #endif
1551 
1552 #ifndef atomic64_inc_return_acquire
1553 static __always_inline s64
1554 atomic64_inc_return_acquire(atomic64_t *v)
1555 {
1556 	return atomic64_add_return_acquire(1, v);
1557 }
1558 #define atomic64_inc_return_acquire atomic64_inc_return_acquire
1559 #endif
1560 
1561 #ifndef atomic64_inc_return_release
1562 static __always_inline s64
1563 atomic64_inc_return_release(atomic64_t *v)
1564 {
1565 	return atomic64_add_return_release(1, v);
1566 }
1567 #define atomic64_inc_return_release atomic64_inc_return_release
1568 #endif
1569 
1570 #ifndef atomic64_inc_return_relaxed
1571 static __always_inline s64
1572 atomic64_inc_return_relaxed(atomic64_t *v)
1573 {
1574 	return atomic64_add_return_relaxed(1, v);
1575 }
1576 #define atomic64_inc_return_relaxed atomic64_inc_return_relaxed
1577 #endif
1578 
1579 #else /* atomic64_inc_return_relaxed */
1580 
1581 #ifndef atomic64_inc_return_acquire
1582 static __always_inline s64
1583 atomic64_inc_return_acquire(atomic64_t *v)
1584 {
1585 	s64 ret = atomic64_inc_return_relaxed(v);
1586 	__atomic_acquire_fence();
1587 	return ret;
1588 }
1589 #define atomic64_inc_return_acquire atomic64_inc_return_acquire
1590 #endif
1591 
1592 #ifndef atomic64_inc_return_release
1593 static __always_inline s64
1594 atomic64_inc_return_release(atomic64_t *v)
1595 {
1596 	__atomic_release_fence();
1597 	return atomic64_inc_return_relaxed(v);
1598 }
1599 #define atomic64_inc_return_release atomic64_inc_return_release
1600 #endif
1601 
1602 #ifndef atomic64_inc_return
1603 static __always_inline s64
1604 atomic64_inc_return(atomic64_t *v)
1605 {
1606 	s64 ret;
1607 	__atomic_pre_full_fence();
1608 	ret = atomic64_inc_return_relaxed(v);
1609 	__atomic_post_full_fence();
1610 	return ret;
1611 }
1612 #define atomic64_inc_return atomic64_inc_return
1613 #endif
1614 
1615 #endif /* atomic64_inc_return_relaxed */
1616 
1617 #define arch_atomic64_fetch_inc atomic64_fetch_inc
1618 #define arch_atomic64_fetch_inc_acquire atomic64_fetch_inc_acquire
1619 #define arch_atomic64_fetch_inc_release atomic64_fetch_inc_release
1620 #define arch_atomic64_fetch_inc_relaxed atomic64_fetch_inc_relaxed
1621 
1622 #ifndef atomic64_fetch_inc_relaxed
1623 #ifdef atomic64_fetch_inc
1624 #define atomic64_fetch_inc_acquire atomic64_fetch_inc
1625 #define atomic64_fetch_inc_release atomic64_fetch_inc
1626 #define atomic64_fetch_inc_relaxed atomic64_fetch_inc
1627 #endif /* atomic64_fetch_inc */
1628 
1629 #ifndef atomic64_fetch_inc
1630 static __always_inline s64
1631 atomic64_fetch_inc(atomic64_t *v)
1632 {
1633 	return atomic64_fetch_add(1, v);
1634 }
1635 #define atomic64_fetch_inc atomic64_fetch_inc
1636 #endif
1637 
1638 #ifndef atomic64_fetch_inc_acquire
1639 static __always_inline s64
1640 atomic64_fetch_inc_acquire(atomic64_t *v)
1641 {
1642 	return atomic64_fetch_add_acquire(1, v);
1643 }
1644 #define atomic64_fetch_inc_acquire atomic64_fetch_inc_acquire
1645 #endif
1646 
1647 #ifndef atomic64_fetch_inc_release
1648 static __always_inline s64
1649 atomic64_fetch_inc_release(atomic64_t *v)
1650 {
1651 	return atomic64_fetch_add_release(1, v);
1652 }
1653 #define atomic64_fetch_inc_release atomic64_fetch_inc_release
1654 #endif
1655 
1656 #ifndef atomic64_fetch_inc_relaxed
1657 static __always_inline s64
1658 atomic64_fetch_inc_relaxed(atomic64_t *v)
1659 {
1660 	return atomic64_fetch_add_relaxed(1, v);
1661 }
1662 #define atomic64_fetch_inc_relaxed atomic64_fetch_inc_relaxed
1663 #endif
1664 
1665 #else /* atomic64_fetch_inc_relaxed */
1666 
1667 #ifndef atomic64_fetch_inc_acquire
1668 static __always_inline s64
1669 atomic64_fetch_inc_acquire(atomic64_t *v)
1670 {
1671 	s64 ret = atomic64_fetch_inc_relaxed(v);
1672 	__atomic_acquire_fence();
1673 	return ret;
1674 }
1675 #define atomic64_fetch_inc_acquire atomic64_fetch_inc_acquire
1676 #endif
1677 
1678 #ifndef atomic64_fetch_inc_release
1679 static __always_inline s64
1680 atomic64_fetch_inc_release(atomic64_t *v)
1681 {
1682 	__atomic_release_fence();
1683 	return atomic64_fetch_inc_relaxed(v);
1684 }
1685 #define atomic64_fetch_inc_release atomic64_fetch_inc_release
1686 #endif
1687 
1688 #ifndef atomic64_fetch_inc
1689 static __always_inline s64
1690 atomic64_fetch_inc(atomic64_t *v)
1691 {
1692 	s64 ret;
1693 	__atomic_pre_full_fence();
1694 	ret = atomic64_fetch_inc_relaxed(v);
1695 	__atomic_post_full_fence();
1696 	return ret;
1697 }
1698 #define atomic64_fetch_inc atomic64_fetch_inc
1699 #endif
1700 
1701 #endif /* atomic64_fetch_inc_relaxed */
1702 
1703 #define arch_atomic64_dec atomic64_dec
1704 
1705 #ifndef atomic64_dec
1706 static __always_inline void
1707 atomic64_dec(atomic64_t *v)
1708 {
1709 	atomic64_sub(1, v);
1710 }
1711 #define atomic64_dec atomic64_dec
1712 #endif
1713 
1714 #define arch_atomic64_dec_return atomic64_dec_return
1715 #define arch_atomic64_dec_return_acquire atomic64_dec_return_acquire
1716 #define arch_atomic64_dec_return_release atomic64_dec_return_release
1717 #define arch_atomic64_dec_return_relaxed atomic64_dec_return_relaxed
1718 
1719 #ifndef atomic64_dec_return_relaxed
1720 #ifdef atomic64_dec_return
1721 #define atomic64_dec_return_acquire atomic64_dec_return
1722 #define atomic64_dec_return_release atomic64_dec_return
1723 #define atomic64_dec_return_relaxed atomic64_dec_return
1724 #endif /* atomic64_dec_return */
1725 
1726 #ifndef atomic64_dec_return
1727 static __always_inline s64
1728 atomic64_dec_return(atomic64_t *v)
1729 {
1730 	return atomic64_sub_return(1, v);
1731 }
1732 #define atomic64_dec_return atomic64_dec_return
1733 #endif
1734 
1735 #ifndef atomic64_dec_return_acquire
1736 static __always_inline s64
1737 atomic64_dec_return_acquire(atomic64_t *v)
1738 {
1739 	return atomic64_sub_return_acquire(1, v);
1740 }
1741 #define atomic64_dec_return_acquire atomic64_dec_return_acquire
1742 #endif
1743 
1744 #ifndef atomic64_dec_return_release
1745 static __always_inline s64
1746 atomic64_dec_return_release(atomic64_t *v)
1747 {
1748 	return atomic64_sub_return_release(1, v);
1749 }
1750 #define atomic64_dec_return_release atomic64_dec_return_release
1751 #endif
1752 
1753 #ifndef atomic64_dec_return_relaxed
1754 static __always_inline s64
1755 atomic64_dec_return_relaxed(atomic64_t *v)
1756 {
1757 	return atomic64_sub_return_relaxed(1, v);
1758 }
1759 #define atomic64_dec_return_relaxed atomic64_dec_return_relaxed
1760 #endif
1761 
1762 #else /* atomic64_dec_return_relaxed */
1763 
1764 #ifndef atomic64_dec_return_acquire
1765 static __always_inline s64
atomic64_dec_return_acquire(atomic64_t * v)1766 atomic64_dec_return_acquire(atomic64_t *v)
1767 {
1768 	s64 ret = atomic64_dec_return_relaxed(v);
1769 	__atomic_acquire_fence();
1770 	return ret;
1771 }
1772 #define atomic64_dec_return_acquire atomic64_dec_return_acquire
1773 #endif
1774 
1775 #ifndef atomic64_dec_return_release
1776 static __always_inline s64
atomic64_dec_return_release(atomic64_t * v)1777 atomic64_dec_return_release(atomic64_t *v)
1778 {
1779 	__atomic_release_fence();
1780 	return atomic64_dec_return_relaxed(v);
1781 }
1782 #define atomic64_dec_return_release atomic64_dec_return_release
1783 #endif
1784 
1785 #ifndef atomic64_dec_return
1786 static __always_inline s64
atomic64_dec_return(atomic64_t * v)1787 atomic64_dec_return(atomic64_t *v)
1788 {
1789 	s64 ret;
1790 	__atomic_pre_full_fence();
1791 	ret = atomic64_dec_return_relaxed(v);
1792 	__atomic_post_full_fence();
1793 	return ret;
1794 }
1795 #define atomic64_dec_return atomic64_dec_return
1796 #endif
1797 
1798 #endif /* atomic64_dec_return_relaxed */
1799 
1800 #define arch_atomic64_fetch_dec atomic64_fetch_dec
1801 #define arch_atomic64_fetch_dec_acquire atomic64_fetch_dec_acquire
1802 #define arch_atomic64_fetch_dec_release atomic64_fetch_dec_release
1803 #define arch_atomic64_fetch_dec_relaxed atomic64_fetch_dec_relaxed
1804 
1805 #ifndef atomic64_fetch_dec_relaxed
1806 #ifdef atomic64_fetch_dec
1807 #define atomic64_fetch_dec_acquire atomic64_fetch_dec
1808 #define atomic64_fetch_dec_release atomic64_fetch_dec
1809 #define atomic64_fetch_dec_relaxed atomic64_fetch_dec
1810 #endif /* atomic64_fetch_dec */
1811 
1812 #ifndef atomic64_fetch_dec
1813 static __always_inline s64
atomic64_fetch_dec(atomic64_t * v)1814 atomic64_fetch_dec(atomic64_t *v)
1815 {
1816 	return atomic64_fetch_sub(1, v);
1817 }
1818 #define atomic64_fetch_dec atomic64_fetch_dec
1819 #endif
1820 
1821 #ifndef atomic64_fetch_dec_acquire
1822 static __always_inline s64
atomic64_fetch_dec_acquire(atomic64_t * v)1823 atomic64_fetch_dec_acquire(atomic64_t *v)
1824 {
1825 	return atomic64_fetch_sub_acquire(1, v);
1826 }
1827 #define atomic64_fetch_dec_acquire atomic64_fetch_dec_acquire
1828 #endif
1829 
1830 #ifndef atomic64_fetch_dec_release
1831 static __always_inline s64
atomic64_fetch_dec_release(atomic64_t * v)1832 atomic64_fetch_dec_release(atomic64_t *v)
1833 {
1834 	return atomic64_fetch_sub_release(1, v);
1835 }
1836 #define atomic64_fetch_dec_release atomic64_fetch_dec_release
1837 #endif
1838 
1839 #ifndef atomic64_fetch_dec_relaxed
1840 static __always_inline s64
atomic64_fetch_dec_relaxed(atomic64_t * v)1841 atomic64_fetch_dec_relaxed(atomic64_t *v)
1842 {
1843 	return atomic64_fetch_sub_relaxed(1, v);
1844 }
1845 #define atomic64_fetch_dec_relaxed atomic64_fetch_dec_relaxed
1846 #endif
1847 
1848 #else /* atomic64_fetch_dec_relaxed */
1849 
1850 #ifndef atomic64_fetch_dec_acquire
1851 static __always_inline s64
atomic64_fetch_dec_acquire(atomic64_t * v)1852 atomic64_fetch_dec_acquire(atomic64_t *v)
1853 {
1854 	s64 ret = atomic64_fetch_dec_relaxed(v);
1855 	__atomic_acquire_fence();
1856 	return ret;
1857 }
1858 #define atomic64_fetch_dec_acquire atomic64_fetch_dec_acquire
1859 #endif
1860 
1861 #ifndef atomic64_fetch_dec_release
1862 static __always_inline s64
atomic64_fetch_dec_release(atomic64_t * v)1863 atomic64_fetch_dec_release(atomic64_t *v)
1864 {
1865 	__atomic_release_fence();
1866 	return atomic64_fetch_dec_relaxed(v);
1867 }
1868 #define atomic64_fetch_dec_release atomic64_fetch_dec_release
1869 #endif
1870 
1871 #ifndef atomic64_fetch_dec
1872 static __always_inline s64
atomic64_fetch_dec(atomic64_t * v)1873 atomic64_fetch_dec(atomic64_t *v)
1874 {
1875 	s64 ret;
1876 	__atomic_pre_full_fence();
1877 	ret = atomic64_fetch_dec_relaxed(v);
1878 	__atomic_post_full_fence();
1879 	return ret;
1880 }
1881 #define atomic64_fetch_dec atomic64_fetch_dec
1882 #endif
1883 
1884 #endif /* atomic64_fetch_dec_relaxed */
1885 
1886 #define arch_atomic64_and atomic64_and
1887 
1888 #define arch_atomic64_fetch_and atomic64_fetch_and
1889 #define arch_atomic64_fetch_and_acquire atomic64_fetch_and_acquire
1890 #define arch_atomic64_fetch_and_release atomic64_fetch_and_release
1891 #define arch_atomic64_fetch_and_relaxed atomic64_fetch_and_relaxed
1892 
1893 #ifndef atomic64_fetch_and_relaxed
1894 #define atomic64_fetch_and_acquire atomic64_fetch_and
1895 #define atomic64_fetch_and_release atomic64_fetch_and
1896 #define atomic64_fetch_and_relaxed atomic64_fetch_and
1897 #else /* atomic64_fetch_and_relaxed */
1898 
1899 #ifndef atomic64_fetch_and_acquire
1900 static __always_inline s64
atomic64_fetch_and_acquire(s64 i,atomic64_t * v)1901 atomic64_fetch_and_acquire(s64 i, atomic64_t *v)
1902 {
1903 	s64 ret = atomic64_fetch_and_relaxed(i, v);
1904 	__atomic_acquire_fence();
1905 	return ret;
1906 }
1907 #define atomic64_fetch_and_acquire atomic64_fetch_and_acquire
1908 #endif
1909 
1910 #ifndef atomic64_fetch_and_release
1911 static __always_inline s64
atomic64_fetch_and_release(s64 i,atomic64_t * v)1912 atomic64_fetch_and_release(s64 i, atomic64_t *v)
1913 {
1914 	__atomic_release_fence();
1915 	return atomic64_fetch_and_relaxed(i, v);
1916 }
1917 #define atomic64_fetch_and_release atomic64_fetch_and_release
1918 #endif
1919 
1920 #ifndef atomic64_fetch_and
1921 static __always_inline s64
atomic64_fetch_and(s64 i,atomic64_t * v)1922 atomic64_fetch_and(s64 i, atomic64_t *v)
1923 {
1924 	s64 ret;
1925 	__atomic_pre_full_fence();
1926 	ret = atomic64_fetch_and_relaxed(i, v);
1927 	__atomic_post_full_fence();
1928 	return ret;
1929 }
1930 #define atomic64_fetch_and atomic64_fetch_and
1931 #endif
1932 
1933 #endif /* atomic64_fetch_and_relaxed */
1934 
1935 #define arch_atomic64_andnot atomic64_andnot
1936 
1937 #ifndef atomic64_andnot
1938 static __always_inline void
atomic64_andnot(s64 i,atomic64_t * v)1939 atomic64_andnot(s64 i, atomic64_t *v)
1940 {
1941 	atomic64_and(~i, v);
1942 }
1943 #define atomic64_andnot atomic64_andnot
1944 #endif
1945 
1946 #define arch_atomic64_fetch_andnot atomic64_fetch_andnot
1947 #define arch_atomic64_fetch_andnot_acquire atomic64_fetch_andnot_acquire
1948 #define arch_atomic64_fetch_andnot_release atomic64_fetch_andnot_release
1949 #define arch_atomic64_fetch_andnot_relaxed atomic64_fetch_andnot_relaxed
1950 
1951 #ifndef atomic64_fetch_andnot_relaxed
1952 #ifdef atomic64_fetch_andnot
1953 #define atomic64_fetch_andnot_acquire atomic64_fetch_andnot
1954 #define atomic64_fetch_andnot_release atomic64_fetch_andnot
1955 #define atomic64_fetch_andnot_relaxed atomic64_fetch_andnot
1956 #endif /* atomic64_fetch_andnot */
1957 
1958 #ifndef atomic64_fetch_andnot
1959 static __always_inline s64
atomic64_fetch_andnot(s64 i,atomic64_t * v)1960 atomic64_fetch_andnot(s64 i, atomic64_t *v)
1961 {
1962 	return atomic64_fetch_and(~i, v);
1963 }
1964 #define atomic64_fetch_andnot atomic64_fetch_andnot
1965 #endif
1966 
1967 #ifndef atomic64_fetch_andnot_acquire
1968 static __always_inline s64
atomic64_fetch_andnot_acquire(s64 i,atomic64_t * v)1969 atomic64_fetch_andnot_acquire(s64 i, atomic64_t *v)
1970 {
1971 	return atomic64_fetch_and_acquire(~i, v);
1972 }
1973 #define atomic64_fetch_andnot_acquire atomic64_fetch_andnot_acquire
1974 #endif
1975 
1976 #ifndef atomic64_fetch_andnot_release
1977 static __always_inline s64
atomic64_fetch_andnot_release(s64 i,atomic64_t * v)1978 atomic64_fetch_andnot_release(s64 i, atomic64_t *v)
1979 {
1980 	return atomic64_fetch_and_release(~i, v);
1981 }
1982 #define atomic64_fetch_andnot_release atomic64_fetch_andnot_release
1983 #endif
1984 
1985 #ifndef atomic64_fetch_andnot_relaxed
1986 static __always_inline s64
atomic64_fetch_andnot_relaxed(s64 i,atomic64_t * v)1987 atomic64_fetch_andnot_relaxed(s64 i, atomic64_t *v)
1988 {
1989 	return atomic64_fetch_and_relaxed(~i, v);
1990 }
1991 #define atomic64_fetch_andnot_relaxed atomic64_fetch_andnot_relaxed
1992 #endif
1993 
1994 #else /* atomic64_fetch_andnot_relaxed */
1995 
1996 #ifndef atomic64_fetch_andnot_acquire
1997 static __always_inline s64
atomic64_fetch_andnot_acquire(s64 i,atomic64_t * v)1998 atomic64_fetch_andnot_acquire(s64 i, atomic64_t *v)
1999 {
2000 	s64 ret = atomic64_fetch_andnot_relaxed(i, v);
2001 	__atomic_acquire_fence();
2002 	return ret;
2003 }
2004 #define atomic64_fetch_andnot_acquire atomic64_fetch_andnot_acquire
2005 #endif
2006 
2007 #ifndef atomic64_fetch_andnot_release
2008 static __always_inline s64
atomic64_fetch_andnot_release(s64 i,atomic64_t * v)2009 atomic64_fetch_andnot_release(s64 i, atomic64_t *v)
2010 {
2011 	__atomic_release_fence();
2012 	return atomic64_fetch_andnot_relaxed(i, v);
2013 }
2014 #define atomic64_fetch_andnot_release atomic64_fetch_andnot_release
2015 #endif
2016 
2017 #ifndef atomic64_fetch_andnot
2018 static __always_inline s64
atomic64_fetch_andnot(s64 i,atomic64_t * v)2019 atomic64_fetch_andnot(s64 i, atomic64_t *v)
2020 {
2021 	s64 ret;
2022 	__atomic_pre_full_fence();
2023 	ret = atomic64_fetch_andnot_relaxed(i, v);
2024 	__atomic_post_full_fence();
2025 	return ret;
2026 }
2027 #define atomic64_fetch_andnot atomic64_fetch_andnot
2028 #endif
2029 
2030 #endif /* atomic64_fetch_andnot_relaxed */
2031 
2032 #define arch_atomic64_or atomic64_or
2033 
2034 #define arch_atomic64_fetch_or atomic64_fetch_or
2035 #define arch_atomic64_fetch_or_acquire atomic64_fetch_or_acquire
2036 #define arch_atomic64_fetch_or_release atomic64_fetch_or_release
2037 #define arch_atomic64_fetch_or_relaxed atomic64_fetch_or_relaxed
2038 
2039 #ifndef atomic64_fetch_or_relaxed
2040 #define atomic64_fetch_or_acquire atomic64_fetch_or
2041 #define atomic64_fetch_or_release atomic64_fetch_or
2042 #define atomic64_fetch_or_relaxed atomic64_fetch_or
2043 #else /* atomic64_fetch_or_relaxed */
2044 
2045 #ifndef atomic64_fetch_or_acquire
2046 static __always_inline s64
atomic64_fetch_or_acquire(s64 i,atomic64_t * v)2047 atomic64_fetch_or_acquire(s64 i, atomic64_t *v)
2048 {
2049 	s64 ret = atomic64_fetch_or_relaxed(i, v);
2050 	__atomic_acquire_fence();
2051 	return ret;
2052 }
2053 #define atomic64_fetch_or_acquire atomic64_fetch_or_acquire
2054 #endif
2055 
2056 #ifndef atomic64_fetch_or_release
2057 static __always_inline s64
atomic64_fetch_or_release(s64 i,atomic64_t * v)2058 atomic64_fetch_or_release(s64 i, atomic64_t *v)
2059 {
2060 	__atomic_release_fence();
2061 	return atomic64_fetch_or_relaxed(i, v);
2062 }
2063 #define atomic64_fetch_or_release atomic64_fetch_or_release
2064 #endif
2065 
2066 #ifndef atomic64_fetch_or
2067 static __always_inline s64
atomic64_fetch_or(s64 i,atomic64_t * v)2068 atomic64_fetch_or(s64 i, atomic64_t *v)
2069 {
2070 	s64 ret;
2071 	__atomic_pre_full_fence();
2072 	ret = atomic64_fetch_or_relaxed(i, v);
2073 	__atomic_post_full_fence();
2074 	return ret;
2075 }
2076 #define atomic64_fetch_or atomic64_fetch_or
2077 #endif
2078 
2079 #endif /* atomic64_fetch_or_relaxed */
2080 
2081 #define arch_atomic64_xor atomic64_xor
2082 
2083 #define arch_atomic64_fetch_xor atomic64_fetch_xor
2084 #define arch_atomic64_fetch_xor_acquire atomic64_fetch_xor_acquire
2085 #define arch_atomic64_fetch_xor_release atomic64_fetch_xor_release
2086 #define arch_atomic64_fetch_xor_relaxed atomic64_fetch_xor_relaxed
2087 
2088 #ifndef atomic64_fetch_xor_relaxed
2089 #define atomic64_fetch_xor_acquire atomic64_fetch_xor
2090 #define atomic64_fetch_xor_release atomic64_fetch_xor
2091 #define atomic64_fetch_xor_relaxed atomic64_fetch_xor
2092 #else /* atomic64_fetch_xor_relaxed */
2093 
2094 #ifndef atomic64_fetch_xor_acquire
2095 static __always_inline s64
atomic64_fetch_xor_acquire(s64 i,atomic64_t * v)2096 atomic64_fetch_xor_acquire(s64 i, atomic64_t *v)
2097 {
2098 	s64 ret = atomic64_fetch_xor_relaxed(i, v);
2099 	__atomic_acquire_fence();
2100 	return ret;
2101 }
2102 #define atomic64_fetch_xor_acquire atomic64_fetch_xor_acquire
2103 #endif
2104 
2105 #ifndef atomic64_fetch_xor_release
2106 static __always_inline s64
atomic64_fetch_xor_release(s64 i,atomic64_t * v)2107 atomic64_fetch_xor_release(s64 i, atomic64_t *v)
2108 {
2109 	__atomic_release_fence();
2110 	return atomic64_fetch_xor_relaxed(i, v);
2111 }
2112 #define atomic64_fetch_xor_release atomic64_fetch_xor_release
2113 #endif
2114 
2115 #ifndef atomic64_fetch_xor
2116 static __always_inline s64
atomic64_fetch_xor(s64 i,atomic64_t * v)2117 atomic64_fetch_xor(s64 i, atomic64_t *v)
2118 {
2119 	s64 ret;
2120 	__atomic_pre_full_fence();
2121 	ret = atomic64_fetch_xor_relaxed(i, v);
2122 	__atomic_post_full_fence();
2123 	return ret;
2124 }
2125 #define atomic64_fetch_xor atomic64_fetch_xor
2126 #endif
2127 
2128 #endif /* atomic64_fetch_xor_relaxed */
2129 
2130 #define arch_atomic64_xchg atomic64_xchg
2131 #define arch_atomic64_xchg_acquire atomic64_xchg_acquire
2132 #define arch_atomic64_xchg_release atomic64_xchg_release
2133 #define arch_atomic64_xchg_relaxed atomic64_xchg_relaxed
2134 
2135 #ifndef atomic64_xchg_relaxed
2136 #define atomic64_xchg_acquire atomic64_xchg
2137 #define atomic64_xchg_release atomic64_xchg
2138 #define atomic64_xchg_relaxed atomic64_xchg
2139 #else /* atomic64_xchg_relaxed */
2140 
2141 #ifndef atomic64_xchg_acquire
2142 static __always_inline s64
atomic64_xchg_acquire(atomic64_t * v,s64 i)2143 atomic64_xchg_acquire(atomic64_t *v, s64 i)
2144 {
2145 	s64 ret = atomic64_xchg_relaxed(v, i);
2146 	__atomic_acquire_fence();
2147 	return ret;
2148 }
2149 #define atomic64_xchg_acquire atomic64_xchg_acquire
2150 #endif
2151 
2152 #ifndef atomic64_xchg_release
2153 static __always_inline s64
atomic64_xchg_release(atomic64_t * v,s64 i)2154 atomic64_xchg_release(atomic64_t *v, s64 i)
2155 {
2156 	__atomic_release_fence();
2157 	return atomic64_xchg_relaxed(v, i);
2158 }
2159 #define atomic64_xchg_release atomic64_xchg_release
2160 #endif
2161 
2162 #ifndef atomic64_xchg
2163 static __always_inline s64
atomic64_xchg(atomic64_t * v,s64 i)2164 atomic64_xchg(atomic64_t *v, s64 i)
2165 {
2166 	s64 ret;
2167 	__atomic_pre_full_fence();
2168 	ret = atomic64_xchg_relaxed(v, i);
2169 	__atomic_post_full_fence();
2170 	return ret;
2171 }
2172 #define atomic64_xchg atomic64_xchg
2173 #endif
2174 
2175 #endif /* atomic64_xchg_relaxed */
2176 
2177 #define arch_atomic64_cmpxchg atomic64_cmpxchg
2178 #define arch_atomic64_cmpxchg_acquire atomic64_cmpxchg_acquire
2179 #define arch_atomic64_cmpxchg_release atomic64_cmpxchg_release
2180 #define arch_atomic64_cmpxchg_relaxed atomic64_cmpxchg_relaxed
2181 
2182 #ifndef atomic64_cmpxchg_relaxed
2183 #define atomic64_cmpxchg_acquire atomic64_cmpxchg
2184 #define atomic64_cmpxchg_release atomic64_cmpxchg
2185 #define atomic64_cmpxchg_relaxed atomic64_cmpxchg
2186 #else /* atomic64_cmpxchg_relaxed */
2187 
2188 #ifndef atomic64_cmpxchg_acquire
2189 static __always_inline s64
atomic64_cmpxchg_acquire(atomic64_t * v,s64 old,s64 new)2190 atomic64_cmpxchg_acquire(atomic64_t *v, s64 old, s64 new)
2191 {
2192 	s64 ret = atomic64_cmpxchg_relaxed(v, old, new);
2193 	__atomic_acquire_fence();
2194 	return ret;
2195 }
2196 #define atomic64_cmpxchg_acquire atomic64_cmpxchg_acquire
2197 #endif
2198 
2199 #ifndef atomic64_cmpxchg_release
2200 static __always_inline s64
atomic64_cmpxchg_release(atomic64_t * v,s64 old,s64 new)2201 atomic64_cmpxchg_release(atomic64_t *v, s64 old, s64 new)
2202 {
2203 	__atomic_release_fence();
2204 	return atomic64_cmpxchg_relaxed(v, old, new);
2205 }
2206 #define atomic64_cmpxchg_release atomic64_cmpxchg_release
2207 #endif
2208 
2209 #ifndef atomic64_cmpxchg
2210 static __always_inline s64
atomic64_cmpxchg(atomic64_t * v,s64 old,s64 new)2211 atomic64_cmpxchg(atomic64_t *v, s64 old, s64 new)
2212 {
2213 	s64 ret;
2214 	__atomic_pre_full_fence();
2215 	ret = atomic64_cmpxchg_relaxed(v, old, new);
2216 	__atomic_post_full_fence();
2217 	return ret;
2218 }
2219 #define atomic64_cmpxchg atomic64_cmpxchg
2220 #endif
2221 
2222 #endif /* atomic64_cmpxchg_relaxed */
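
/*
 * Editorial note (not emitted by the generator): the try_cmpxchg() family
 * below differs from cmpxchg() in that it returns a bool and, on failure,
 * writes the value actually observed back through @old. A retry loop can
 * therefore avoid an extra atomic64_read() per iteration; the
 * atomic64_fetch_add_unless() fallback further down shows the idiom.
 */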

#define arch_atomic64_try_cmpxchg atomic64_try_cmpxchg
#define arch_atomic64_try_cmpxchg_acquire atomic64_try_cmpxchg_acquire
#define arch_atomic64_try_cmpxchg_release atomic64_try_cmpxchg_release
#define arch_atomic64_try_cmpxchg_relaxed atomic64_try_cmpxchg_relaxed

#ifndef atomic64_try_cmpxchg_relaxed
#ifdef atomic64_try_cmpxchg
#define atomic64_try_cmpxchg_acquire atomic64_try_cmpxchg
#define atomic64_try_cmpxchg_release atomic64_try_cmpxchg
#define atomic64_try_cmpxchg_relaxed atomic64_try_cmpxchg
#endif /* atomic64_try_cmpxchg */

#ifndef atomic64_try_cmpxchg
static __always_inline bool
atomic64_try_cmpxchg(atomic64_t *v, s64 *old, s64 new)
{
	s64 r, o = *old;
	r = atomic64_cmpxchg(v, o, new);
	if (unlikely(r != o))
		*old = r;
	return likely(r == o);
}
#define atomic64_try_cmpxchg atomic64_try_cmpxchg
#endif

#ifndef atomic64_try_cmpxchg_acquire
static __always_inline bool
atomic64_try_cmpxchg_acquire(atomic64_t *v, s64 *old, s64 new)
{
	s64 r, o = *old;
	r = atomic64_cmpxchg_acquire(v, o, new);
	if (unlikely(r != o))
		*old = r;
	return likely(r == o);
}
#define atomic64_try_cmpxchg_acquire atomic64_try_cmpxchg_acquire
#endif

#ifndef atomic64_try_cmpxchg_release
static __always_inline bool
atomic64_try_cmpxchg_release(atomic64_t *v, s64 *old, s64 new)
{
	s64 r, o = *old;
	r = atomic64_cmpxchg_release(v, o, new);
	if (unlikely(r != o))
		*old = r;
	return likely(r == o);
}
#define atomic64_try_cmpxchg_release atomic64_try_cmpxchg_release
#endif

#ifndef atomic64_try_cmpxchg_relaxed
static __always_inline bool
atomic64_try_cmpxchg_relaxed(atomic64_t *v, s64 *old, s64 new)
{
	s64 r, o = *old;
	r = atomic64_cmpxchg_relaxed(v, o, new);
	if (unlikely(r != o))
		*old = r;
	return likely(r == o);
}
#define atomic64_try_cmpxchg_relaxed atomic64_try_cmpxchg_relaxed
#endif

#else /* atomic64_try_cmpxchg_relaxed */

#ifndef atomic64_try_cmpxchg_acquire
static __always_inline bool
atomic64_try_cmpxchg_acquire(atomic64_t *v, s64 *old, s64 new)
{
	bool ret = atomic64_try_cmpxchg_relaxed(v, old, new);
	__atomic_acquire_fence();
	return ret;
}
#define atomic64_try_cmpxchg_acquire atomic64_try_cmpxchg_acquire
#endif

#ifndef atomic64_try_cmpxchg_release
static __always_inline bool
atomic64_try_cmpxchg_release(atomic64_t *v, s64 *old, s64 new)
{
	__atomic_release_fence();
	return atomic64_try_cmpxchg_relaxed(v, old, new);
}
#define atomic64_try_cmpxchg_release atomic64_try_cmpxchg_release
#endif

#ifndef atomic64_try_cmpxchg
static __always_inline bool
atomic64_try_cmpxchg(atomic64_t *v, s64 *old, s64 new)
{
	bool ret;
	__atomic_pre_full_fence();
	ret = atomic64_try_cmpxchg_relaxed(v, old, new);
	__atomic_post_full_fence();
	return ret;
}
#define atomic64_try_cmpxchg atomic64_try_cmpxchg
#endif

#endif /* atomic64_try_cmpxchg_relaxed */
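
/*
 * Illustrative sketch (not part of the generated API; the helper name and
 * the cap parameter are invented for illustration): the canonical
 * try_cmpxchg() retry loop, here adding to a 64-bit counter only while the
 * result stays at or below a caller-supplied cap.
 */
static __always_inline bool
example_atomic64_add_capped(atomic64_t *v, s64 a, s64 cap)
{
	s64 c = atomic64_read(v);

	do {
		/* give up without modifying the counter if it would overshoot */
		if (unlikely(c + a > cap))
			return false;
	} while (!atomic64_try_cmpxchg(v, &c, c + a));

	return true;
}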

#define arch_atomic64_sub_and_test atomic64_sub_and_test

#ifndef atomic64_sub_and_test
/**
 * atomic64_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @v: pointer of type atomic64_t
 *
 * Atomically subtracts @i from @v and returns true if the result is zero,
 * or false for all other cases.
 */
static __always_inline bool
atomic64_sub_and_test(s64 i, atomic64_t *v)
{
	return atomic64_sub_return(i, v) == 0;
}
#define atomic64_sub_and_test atomic64_sub_and_test
#endif

#define arch_atomic64_dec_and_test atomic64_dec_and_test

#ifndef atomic64_dec_and_test
/**
 * atomic64_dec_and_test - decrement and test
 * @v: pointer of type atomic64_t
 *
 * Atomically decrements @v by 1 and returns true if the result is 0,
 * or false for all other cases.
 */
static __always_inline bool
atomic64_dec_and_test(atomic64_t *v)
{
	return atomic64_dec_return(v) == 0;
}
#define atomic64_dec_and_test atomic64_dec_and_test
#endif
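
/*
 * Illustrative sketch (hypothetical type and helper, not part of this
 * header): the usual reference-counting "put" built on
 * atomic64_dec_and_test(). Only the caller that drops the final reference
 * sees true and may free the object.
 */
struct example_ref {
	atomic64_t refs;
};

static __always_inline bool example_ref_put(struct example_ref *r)
{
	return atomic64_dec_and_test(&r->refs);
}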

#define arch_atomic64_inc_and_test atomic64_inc_and_test

#ifndef atomic64_inc_and_test
/**
 * atomic64_inc_and_test - increment and test
 * @v: pointer of type atomic64_t
 *
 * Atomically increments @v by 1 and returns true if the result is zero,
 * or false for all other cases.
 */
static __always_inline bool
atomic64_inc_and_test(atomic64_t *v)
{
	return atomic64_inc_return(v) == 0;
}
#define atomic64_inc_and_test atomic64_inc_and_test
#endif

#define arch_atomic64_add_negative atomic64_add_negative

#ifndef atomic64_add_negative
/**
 * atomic64_add_negative - add and test if negative
 * @i: integer value to add
 * @v: pointer of type atomic64_t
 *
 * Atomically adds @i to @v and returns true if the result is negative,
 * or false when the result is greater than or equal to zero.
 */
static __always_inline bool
atomic64_add_negative(s64 i, atomic64_t *v)
{
	return atomic64_add_return(i, v) < 0;
}
#define atomic64_add_negative atomic64_add_negative
#endif
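
/*
 * Illustrative sketch (invented helper, for illustration only): using
 * atomic64_add_negative() to debit a signed budget and report whether the
 * budget is negative after the debit, e.g. to trigger throttling.
 */
static __always_inline bool
example_budget_debit(atomic64_t *budget, s64 cost)
{
	/* true when the budget is below zero once @cost has been subtracted */
	return atomic64_add_negative(-cost, budget);
}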

#define arch_atomic64_fetch_add_unless atomic64_fetch_add_unless

#ifndef atomic64_fetch_add_unless
/**
 * atomic64_fetch_add_unless - add unless the number is already a given value
 * @v: pointer of type atomic64_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, so long as @v was not already @u.
 * Returns the original value of @v.
 */
static __always_inline s64
atomic64_fetch_add_unless(atomic64_t *v, s64 a, s64 u)
{
	s64 c = atomic64_read(v);

	do {
		if (unlikely(c == u))
			break;
	} while (!atomic64_try_cmpxchg(v, &c, c + a));

	return c;
}
#define atomic64_fetch_add_unless atomic64_fetch_add_unless
#endif

#define arch_atomic64_add_unless atomic64_add_unless

#ifndef atomic64_add_unless
/**
 * atomic64_add_unless - add unless the number is already a given value
 * @v: pointer of type atomic64_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, if @v was not already @u.
 * Returns true if the addition was done.
 */
static __always_inline bool
atomic64_add_unless(atomic64_t *v, s64 a, s64 u)
{
	return atomic64_fetch_add_unless(v, a, u) != u;
}
#define atomic64_add_unless atomic64_add_unless
#endif

#define arch_atomic64_inc_not_zero atomic64_inc_not_zero

#ifndef atomic64_inc_not_zero
/**
 * atomic64_inc_not_zero - increment unless the number is zero
 * @v: pointer of type atomic64_t
 *
 * Atomically increments @v by 1, if @v is non-zero.
 * Returns true if the increment was done.
 */
static __always_inline bool
atomic64_inc_not_zero(atomic64_t *v)
{
	return atomic64_add_unless(v, 1, 0);
}
#define atomic64_inc_not_zero atomic64_inc_not_zero
#endif
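
/*
 * Illustrative sketch (hypothetical type and helper, not part of this
 * header): taking a new reference only while at least one reference is
 * still held, via atomic64_inc_not_zero(), so that a refcount that has
 * already dropped to zero is never resurrected.
 */
struct example_obj {
	atomic64_t refs;
};

static __always_inline bool example_obj_tryget(struct example_obj *obj)
{
	return atomic64_inc_not_zero(&obj->refs);
}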

#define arch_atomic64_inc_unless_negative atomic64_inc_unless_negative

#ifndef atomic64_inc_unless_negative
static __always_inline bool
atomic64_inc_unless_negative(atomic64_t *v)
{
	s64 c = atomic64_read(v);

	do {
		if (unlikely(c < 0))
			return false;
	} while (!atomic64_try_cmpxchg(v, &c, c + 1));

	return true;
}
#define atomic64_inc_unless_negative atomic64_inc_unless_negative
#endif

#define arch_atomic64_dec_unless_positive atomic64_dec_unless_positive

#ifndef atomic64_dec_unless_positive
static __always_inline bool
atomic64_dec_unless_positive(atomic64_t *v)
{
	s64 c = atomic64_read(v);

	do {
		if (unlikely(c > 0))
			return false;
	} while (!atomic64_try_cmpxchg(v, &c, c - 1));

	return true;
}
#define atomic64_dec_unless_positive atomic64_dec_unless_positive
#endif

#define arch_atomic64_dec_if_positive atomic64_dec_if_positive

#ifndef atomic64_dec_if_positive
static __always_inline s64
atomic64_dec_if_positive(atomic64_t *v)
{
	s64 dec, c = atomic64_read(v);

	do {
		dec = c - 1;
		if (unlikely(dec < 0))
			break;
	} while (!atomic64_try_cmpxchg(v, &c, dec));

	return dec;
}
#define atomic64_dec_if_positive atomic64_dec_if_positive
#endif
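
/*
 * Illustrative sketch (invented helper, for illustration only):
 * atomic64_dec_if_positive() as a one-shot "take a token if any are left".
 * On success the counter is decremented and the new (non-negative) value is
 * returned; otherwise a negative value is returned and the counter is left
 * untouched.
 */
static __always_inline bool example_take_token(atomic64_t *tokens)
{
	return atomic64_dec_if_positive(tokens) >= 0;
}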

#endif /* _LINUX_ATOMIC_FALLBACK_H */
// 9d95b56f98d82a2a26c7b79ccdd0c47572d50a6f