// SPDX-License-Identifier: GPL-2.0

// Generated by scripts/atomic/gen-atomic-instrumented.sh
// DO NOT MODIFY THIS FILE DIRECTLY

/*
 * This file provides wrappers with KASAN instrumentation for atomic operations.
 * To use this functionality an arch's atomic.h file needs to define all
 * atomic operations with an arch_ prefix (e.g. arch_atomic_read()) and include
 * this file at the end. This file then provides atomic_read(), which forwards
 * to arch_atomic_read() for the actual atomic operation.
 * Note: if an arch atomic operation is implemented by means of other atomic
 * operations (e.g. an atomic_read()/atomic_cmpxchg() loop), it needs to use
 * the arch_ variants (i.e. arch_atomic_read()/arch_atomic_cmpxchg()) to avoid
 * double instrumentation.
 */
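
/*
 * Illustrative sketch (not part of the generated wrappers): under the scheme
 * described above, a hypothetical arch atomic.h would supply the arch_
 * operations and then include this file last, roughly:
 *
 *	static __always_inline int arch_atomic_read(const atomic_t *v)
 *	{
 *		return READ_ONCE(v->counter);
 *	}
 *	// ... remaining arch_atomic_*() / arch_atomic64_*() definitions ...
 *	// ... then include this instrumented-wrapper header at the end ...
 *
 * Any arch operation that is itself built from atomic operations (e.g. a
 * cmpxchg loop) must call the arch_ variants directly; otherwise each
 * logical access would be instrumented twice.
 */
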
#ifndef _LINUX_ATOMIC_INSTRUMENTED_H
#define _LINUX_ATOMIC_INSTRUMENTED_H

#include <linux/build_bug.h>
#include <linux/compiler.h>
#include <linux/instrumented.h>

static __always_inline int
atomic_read(const atomic_t *v)
{
	instrument_atomic_read(v, sizeof(*v));
	return arch_atomic_read(v);
}

static __always_inline int
atomic_read_acquire(const atomic_t *v)
{
	instrument_atomic_read(v, sizeof(*v));
	return arch_atomic_read_acquire(v);
}

static __always_inline void
atomic_set(atomic_t *v, int i)
{
	instrument_atomic_write(v, sizeof(*v));
	arch_atomic_set(v, i);
}

static __always_inline void
atomic_set_release(atomic_t *v, int i)
{
	instrument_atomic_write(v, sizeof(*v));
	arch_atomic_set_release(v, i);
}

static __always_inline void
atomic_add(int i, atomic_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));
	arch_atomic_add(i, v);
}

static __always_inline int
atomic_add_return(int i, atomic_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic_add_return(i, v);
}

static __always_inline int
atomic_add_return_acquire(int i, atomic_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic_add_return_acquire(i, v);
}

static __always_inline int
atomic_add_return_release(int i, atomic_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic_add_return_release(i, v);
}

static __always_inline int
atomic_add_return_relaxed(int i, atomic_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic_add_return_relaxed(i, v);
}

static __always_inline int
atomic_fetch_add(int i, atomic_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic_fetch_add(i, v);
}

static __always_inline int
atomic_fetch_add_acquire(int i, atomic_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic_fetch_add_acquire(i, v);
}

static __always_inline int
atomic_fetch_add_release(int i, atomic_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic_fetch_add_release(i, v);
}

static __always_inline int
atomic_fetch_add_relaxed(int i, atomic_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic_fetch_add_relaxed(i, v);
}

static __always_inline void
atomic_sub(int i, atomic_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));
	arch_atomic_sub(i, v);
}

static __always_inline int
atomic_sub_return(int i, atomic_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic_sub_return(i, v);
}

static __always_inline int
atomic_sub_return_acquire(int i, atomic_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic_sub_return_acquire(i, v);
}

static __always_inline int
atomic_sub_return_release(int i, atomic_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic_sub_return_release(i, v);
}

static __always_inline int
atomic_sub_return_relaxed(int i, atomic_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic_sub_return_relaxed(i, v);
}

static __always_inline int
atomic_fetch_sub(int i, atomic_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic_fetch_sub(i, v);
}

static __always_inline int
atomic_fetch_sub_acquire(int i, atomic_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic_fetch_sub_acquire(i, v);
}

static __always_inline int
atomic_fetch_sub_release(int i, atomic_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic_fetch_sub_release(i, v);
}

static __always_inline int
atomic_fetch_sub_relaxed(int i, atomic_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic_fetch_sub_relaxed(i, v);
}

static __always_inline void
atomic_inc(atomic_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));
	arch_atomic_inc(v);
}

static __always_inline int
atomic_inc_return(atomic_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic_inc_return(v);
}

static __always_inline int
atomic_inc_return_acquire(atomic_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic_inc_return_acquire(v);
}

static __always_inline int
atomic_inc_return_release(atomic_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic_inc_return_release(v);
}

static __always_inline int
atomic_inc_return_relaxed(atomic_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic_inc_return_relaxed(v);
}

static __always_inline int
atomic_fetch_inc(atomic_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic_fetch_inc(v);
}

static __always_inline int
atomic_fetch_inc_acquire(atomic_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic_fetch_inc_acquire(v);
}

static __always_inline int
atomic_fetch_inc_release(atomic_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic_fetch_inc_release(v);
}

static __always_inline int
atomic_fetch_inc_relaxed(atomic_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic_fetch_inc_relaxed(v);
}

static __always_inline void
atomic_dec(atomic_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));
	arch_atomic_dec(v);
}

static __always_inline int
atomic_dec_return(atomic_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic_dec_return(v);
}

static __always_inline int
atomic_dec_return_acquire(atomic_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic_dec_return_acquire(v);
}

static __always_inline int
atomic_dec_return_release(atomic_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic_dec_return_release(v);
}

static __always_inline int
atomic_dec_return_relaxed(atomic_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic_dec_return_relaxed(v);
}

static __always_inline int
atomic_fetch_dec(atomic_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic_fetch_dec(v);
}

static __always_inline int
atomic_fetch_dec_acquire(atomic_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic_fetch_dec_acquire(v);
}

static __always_inline int
atomic_fetch_dec_release(atomic_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic_fetch_dec_release(v);
}

static __always_inline int
atomic_fetch_dec_relaxed(atomic_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic_fetch_dec_relaxed(v);
}

static __always_inline void
atomic_and(int i, atomic_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));
	arch_atomic_and(i, v);
}

static __always_inline int
atomic_fetch_and(int i, atomic_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic_fetch_and(i, v);
}

static __always_inline int
atomic_fetch_and_acquire(int i, atomic_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic_fetch_and_acquire(i, v);
}

static __always_inline int
atomic_fetch_and_release(int i, atomic_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic_fetch_and_release(i, v);
}

static __always_inline int
atomic_fetch_and_relaxed(int i, atomic_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic_fetch_and_relaxed(i, v);
}

static __always_inline void
atomic_andnot(int i, atomic_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));
	arch_atomic_andnot(i, v);
}

static __always_inline int
atomic_fetch_andnot(int i, atomic_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic_fetch_andnot(i, v);
}

static __always_inline int
atomic_fetch_andnot_acquire(int i, atomic_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic_fetch_andnot_acquire(i, v);
}

static __always_inline int
atomic_fetch_andnot_release(int i, atomic_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic_fetch_andnot_release(i, v);
}

static __always_inline int
atomic_fetch_andnot_relaxed(int i, atomic_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic_fetch_andnot_relaxed(i, v);
}

static __always_inline void
atomic_or(int i, atomic_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));
	arch_atomic_or(i, v);
}

static __always_inline int
atomic_fetch_or(int i, atomic_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic_fetch_or(i, v);
}

static __always_inline int
atomic_fetch_or_acquire(int i, atomic_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic_fetch_or_acquire(i, v);
}

static __always_inline int
atomic_fetch_or_release(int i, atomic_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic_fetch_or_release(i, v);
}

static __always_inline int
atomic_fetch_or_relaxed(int i, atomic_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic_fetch_or_relaxed(i, v);
}

static __always_inline void
atomic_xor(int i, atomic_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));
	arch_atomic_xor(i, v);
}

static __always_inline int
atomic_fetch_xor(int i, atomic_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic_fetch_xor(i, v);
}

static __always_inline int
atomic_fetch_xor_acquire(int i, atomic_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic_fetch_xor_acquire(i, v);
}

static __always_inline int
atomic_fetch_xor_release(int i, atomic_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic_fetch_xor_release(i, v);
}

static __always_inline int
atomic_fetch_xor_relaxed(int i, atomic_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic_fetch_xor_relaxed(i, v);
}

static __always_inline int
atomic_xchg(atomic_t *v, int i)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic_xchg(v, i);
}

static __always_inline int
atomic_xchg_acquire(atomic_t *v, int i)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic_xchg_acquire(v, i);
}

static __always_inline int
atomic_xchg_release(atomic_t *v, int i)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic_xchg_release(v, i);
}

static __always_inline int
atomic_xchg_relaxed(atomic_t *v, int i)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic_xchg_relaxed(v, i);
}

static __always_inline int
atomic_cmpxchg(atomic_t *v, int old, int new)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic_cmpxchg(v, old, new);
}

static __always_inline int
atomic_cmpxchg_acquire(atomic_t *v, int old, int new)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic_cmpxchg_acquire(v, old, new);
}

static __always_inline int
atomic_cmpxchg_release(atomic_t *v, int old, int new)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic_cmpxchg_release(v, old, new);
}

static __always_inline int
atomic_cmpxchg_relaxed(atomic_t *v, int old, int new)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic_cmpxchg_relaxed(v, old, new);
}

static __always_inline bool
atomic_try_cmpxchg(atomic_t *v, int *old, int new)
{
	instrument_atomic_read_write(v, sizeof(*v));
	instrument_atomic_read_write(old, sizeof(*old));
	return arch_atomic_try_cmpxchg(v, old, new);
}

static __always_inline bool
atomic_try_cmpxchg_acquire(atomic_t *v, int *old, int new)
{
	instrument_atomic_read_write(v, sizeof(*v));
	instrument_atomic_read_write(old, sizeof(*old));
	return arch_atomic_try_cmpxchg_acquire(v, old, new);
}

static __always_inline bool
atomic_try_cmpxchg_release(atomic_t *v, int *old, int new)
{
	instrument_atomic_read_write(v, sizeof(*v));
	instrument_atomic_read_write(old, sizeof(*old));
	return arch_atomic_try_cmpxchg_release(v, old, new);
}

static __always_inline bool
atomic_try_cmpxchg_relaxed(atomic_t *v, int *old, int new)
{
	instrument_atomic_read_write(v, sizeof(*v));
	instrument_atomic_read_write(old, sizeof(*old));
	return arch_atomic_try_cmpxchg_relaxed(v, old, new);
}

static __always_inline bool
atomic_sub_and_test(int i, atomic_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic_sub_and_test(i, v);
}

static __always_inline bool
atomic_dec_and_test(atomic_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic_dec_and_test(v);
}

static __always_inline bool
atomic_inc_and_test(atomic_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic_inc_and_test(v);
}

static __always_inline bool
atomic_add_negative(int i, atomic_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic_add_negative(i, v);
}

static __always_inline int
atomic_fetch_add_unless(atomic_t *v, int a, int u)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic_fetch_add_unless(v, a, u);
}

static __always_inline bool
atomic_add_unless(atomic_t *v, int a, int u)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic_add_unless(v, a, u);
}

static __always_inline bool
atomic_inc_not_zero(atomic_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic_inc_not_zero(v);
}

static __always_inline bool
atomic_inc_unless_negative(atomic_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic_inc_unless_negative(v);
}

static __always_inline bool
atomic_dec_unless_positive(atomic_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic_dec_unless_positive(v);
}

static __always_inline int
atomic_dec_if_positive(atomic_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic_dec_if_positive(v);
}

static __always_inline s64
atomic64_read(const atomic64_t *v)
{
	instrument_atomic_read(v, sizeof(*v));
	return arch_atomic64_read(v);
}

static __always_inline s64
atomic64_read_acquire(const atomic64_t *v)
{
	instrument_atomic_read(v, sizeof(*v));
	return arch_atomic64_read_acquire(v);
}

static __always_inline void
atomic64_set(atomic64_t *v, s64 i)
{
	instrument_atomic_write(v, sizeof(*v));
	arch_atomic64_set(v, i);
}

static __always_inline void
atomic64_set_release(atomic64_t *v, s64 i)
{
	instrument_atomic_write(v, sizeof(*v));
	arch_atomic64_set_release(v, i);
}

static __always_inline void
atomic64_add(s64 i, atomic64_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));
	arch_atomic64_add(i, v);
}

static __always_inline s64
atomic64_add_return(s64 i, atomic64_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic64_add_return(i, v);
}

static __always_inline s64
atomic64_add_return_acquire(s64 i, atomic64_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic64_add_return_acquire(i, v);
}

static __always_inline s64
atomic64_add_return_release(s64 i, atomic64_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic64_add_return_release(i, v);
}

static __always_inline s64
atomic64_add_return_relaxed(s64 i, atomic64_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic64_add_return_relaxed(i, v);
}

static __always_inline s64
atomic64_fetch_add(s64 i, atomic64_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic64_fetch_add(i, v);
}

static __always_inline s64
atomic64_fetch_add_acquire(s64 i, atomic64_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic64_fetch_add_acquire(i, v);
}

static __always_inline s64
atomic64_fetch_add_release(s64 i, atomic64_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic64_fetch_add_release(i, v);
}

static __always_inline s64
atomic64_fetch_add_relaxed(s64 i, atomic64_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic64_fetch_add_relaxed(i, v);
}

static __always_inline void
atomic64_sub(s64 i, atomic64_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));
	arch_atomic64_sub(i, v);
}

static __always_inline s64
atomic64_sub_return(s64 i, atomic64_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic64_sub_return(i, v);
}

static __always_inline s64
atomic64_sub_return_acquire(s64 i, atomic64_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic64_sub_return_acquire(i, v);
}

static __always_inline s64
atomic64_sub_return_release(s64 i, atomic64_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic64_sub_return_release(i, v);
}

static __always_inline s64
atomic64_sub_return_relaxed(s64 i, atomic64_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic64_sub_return_relaxed(i, v);
}

static __always_inline s64
atomic64_fetch_sub(s64 i, atomic64_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic64_fetch_sub(i, v);
}

static __always_inline s64
atomic64_fetch_sub_acquire(s64 i, atomic64_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic64_fetch_sub_acquire(i, v);
}

static __always_inline s64
atomic64_fetch_sub_release(s64 i, atomic64_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic64_fetch_sub_release(i, v);
}

static __always_inline s64
atomic64_fetch_sub_relaxed(s64 i, atomic64_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic64_fetch_sub_relaxed(i, v);
}

static __always_inline void
atomic64_inc(atomic64_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));
	arch_atomic64_inc(v);
}

static __always_inline s64
atomic64_inc_return(atomic64_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic64_inc_return(v);
}

static __always_inline s64
atomic64_inc_return_acquire(atomic64_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic64_inc_return_acquire(v);
}

static __always_inline s64
atomic64_inc_return_release(atomic64_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic64_inc_return_release(v);
}

static __always_inline s64
atomic64_inc_return_relaxed(atomic64_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic64_inc_return_relaxed(v);
}

static __always_inline s64
atomic64_fetch_inc(atomic64_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic64_fetch_inc(v);
}

static __always_inline s64
atomic64_fetch_inc_acquire(atomic64_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic64_fetch_inc_acquire(v);
}

static __always_inline s64
atomic64_fetch_inc_release(atomic64_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic64_fetch_inc_release(v);
}

static __always_inline s64
atomic64_fetch_inc_relaxed(atomic64_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic64_fetch_inc_relaxed(v);
}

static __always_inline void
atomic64_dec(atomic64_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));
	arch_atomic64_dec(v);
}

static __always_inline s64
atomic64_dec_return(atomic64_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic64_dec_return(v);
}

static __always_inline s64
atomic64_dec_return_acquire(atomic64_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic64_dec_return_acquire(v);
}

static __always_inline s64
atomic64_dec_return_release(atomic64_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic64_dec_return_release(v);
}

static __always_inline s64
atomic64_dec_return_relaxed(atomic64_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic64_dec_return_relaxed(v);
}

static __always_inline s64
atomic64_fetch_dec(atomic64_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic64_fetch_dec(v);
}

static __always_inline s64
atomic64_fetch_dec_acquire(atomic64_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic64_fetch_dec_acquire(v);
}

static __always_inline s64
atomic64_fetch_dec_release(atomic64_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic64_fetch_dec_release(v);
}

static __always_inline s64
atomic64_fetch_dec_relaxed(atomic64_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic64_fetch_dec_relaxed(v);
}

static __always_inline void
atomic64_and(s64 i, atomic64_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));
	arch_atomic64_and(i, v);
}

static __always_inline s64
atomic64_fetch_and(s64 i, atomic64_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic64_fetch_and(i, v);
}

static __always_inline s64
atomic64_fetch_and_acquire(s64 i, atomic64_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic64_fetch_and_acquire(i, v);
}

static __always_inline s64
atomic64_fetch_and_release(s64 i, atomic64_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic64_fetch_and_release(i, v);
}

static __always_inline s64
atomic64_fetch_and_relaxed(s64 i, atomic64_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic64_fetch_and_relaxed(i, v);
}

static __always_inline void
atomic64_andnot(s64 i, atomic64_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));
	arch_atomic64_andnot(i, v);
}

static __always_inline s64
atomic64_fetch_andnot(s64 i, atomic64_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic64_fetch_andnot(i, v);
}

static __always_inline s64
atomic64_fetch_andnot_acquire(s64 i, atomic64_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic64_fetch_andnot_acquire(i, v);
}

static __always_inline s64
atomic64_fetch_andnot_release(s64 i, atomic64_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic64_fetch_andnot_release(i, v);
}

static __always_inline s64
atomic64_fetch_andnot_relaxed(s64 i, atomic64_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic64_fetch_andnot_relaxed(i, v);
}

static __always_inline void
atomic64_or(s64 i, atomic64_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));
	arch_atomic64_or(i, v);
}

static __always_inline s64
atomic64_fetch_or(s64 i, atomic64_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic64_fetch_or(i, v);
}

static __always_inline s64
atomic64_fetch_or_acquire(s64 i, atomic64_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic64_fetch_or_acquire(i, v);
}

static __always_inline s64
atomic64_fetch_or_release(s64 i, atomic64_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic64_fetch_or_release(i, v);
}

static __always_inline s64
atomic64_fetch_or_relaxed(s64 i, atomic64_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic64_fetch_or_relaxed(i, v);
}

static __always_inline void
atomic64_xor(s64 i, atomic64_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));
	arch_atomic64_xor(i, v);
}

static __always_inline s64
atomic64_fetch_xor(s64 i, atomic64_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic64_fetch_xor(i, v);
}

static __always_inline s64
atomic64_fetch_xor_acquire(s64 i, atomic64_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic64_fetch_xor_acquire(i, v);
}

static __always_inline s64
atomic64_fetch_xor_release(s64 i, atomic64_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic64_fetch_xor_release(i, v);
}

static __always_inline s64
atomic64_fetch_xor_relaxed(s64 i, atomic64_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic64_fetch_xor_relaxed(i, v);
}

static __always_inline s64
atomic64_xchg(atomic64_t *v, s64 i)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic64_xchg(v, i);
}

static __always_inline s64
atomic64_xchg_acquire(atomic64_t *v, s64 i)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic64_xchg_acquire(v, i);
}

static __always_inline s64
atomic64_xchg_release(atomic64_t *v, s64 i)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic64_xchg_release(v, i);
}

static __always_inline s64
atomic64_xchg_relaxed(atomic64_t *v, s64 i)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic64_xchg_relaxed(v, i);
}

static __always_inline s64
atomic64_cmpxchg(atomic64_t *v, s64 old, s64 new)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic64_cmpxchg(v, old, new);
}

static __always_inline s64
atomic64_cmpxchg_acquire(atomic64_t *v, s64 old, s64 new)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic64_cmpxchg_acquire(v, old, new);
}

static __always_inline s64
atomic64_cmpxchg_release(atomic64_t *v, s64 old, s64 new)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic64_cmpxchg_release(v, old, new);
}

static __always_inline s64
atomic64_cmpxchg_relaxed(atomic64_t *v, s64 old, s64 new)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic64_cmpxchg_relaxed(v, old, new);
}

static __always_inline bool
atomic64_try_cmpxchg(atomic64_t *v, s64 *old, s64 new)
{
	instrument_atomic_read_write(v, sizeof(*v));
	instrument_atomic_read_write(old, sizeof(*old));
	return arch_atomic64_try_cmpxchg(v, old, new);
}

static __always_inline bool
atomic64_try_cmpxchg_acquire(atomic64_t *v, s64 *old, s64 new)
{
	instrument_atomic_read_write(v, sizeof(*v));
	instrument_atomic_read_write(old, sizeof(*old));
	return arch_atomic64_try_cmpxchg_acquire(v, old, new);
}

static __always_inline bool
atomic64_try_cmpxchg_release(atomic64_t *v, s64 *old, s64 new)
{
	instrument_atomic_read_write(v, sizeof(*v));
	instrument_atomic_read_write(old, sizeof(*old));
	return arch_atomic64_try_cmpxchg_release(v, old, new);
}

static __always_inline bool
atomic64_try_cmpxchg_relaxed(atomic64_t *v, s64 *old, s64 new)
{
	instrument_atomic_read_write(v, sizeof(*v));
	instrument_atomic_read_write(old, sizeof(*old));
	return arch_atomic64_try_cmpxchg_relaxed(v, old, new);
}

static __always_inline bool
atomic64_sub_and_test(s64 i, atomic64_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic64_sub_and_test(i, v);
}

static __always_inline bool
atomic64_dec_and_test(atomic64_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic64_dec_and_test(v);
}

static __always_inline bool
atomic64_inc_and_test(atomic64_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic64_inc_and_test(v);
}

static __always_inline bool
atomic64_add_negative(s64 i, atomic64_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic64_add_negative(i, v);
}

static __always_inline s64
atomic64_fetch_add_unless(atomic64_t *v, s64 a, s64 u)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic64_fetch_add_unless(v, a, u);
}

static __always_inline bool
atomic64_add_unless(atomic64_t *v, s64 a, s64 u)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic64_add_unless(v, a, u);
}

static __always_inline bool
atomic64_inc_not_zero(atomic64_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic64_inc_not_zero(v);
}

static __always_inline bool
atomic64_inc_unless_negative(atomic64_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic64_inc_unless_negative(v);
}

static __always_inline bool
atomic64_dec_unless_positive(atomic64_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic64_dec_unless_positive(v);
}

static __always_inline s64
atomic64_dec_if_positive(atomic64_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic64_dec_if_positive(v);
}

1180 static __always_inline long
atomic_long_read(const atomic_long_t * v)1181 atomic_long_read(const atomic_long_t *v)
1182 {
1183 	instrument_atomic_read(v, sizeof(*v));
1184 	return arch_atomic_long_read(v);
1185 }
1186 
1187 static __always_inline long
atomic_long_read_acquire(const atomic_long_t * v)1188 atomic_long_read_acquire(const atomic_long_t *v)
1189 {
1190 	instrument_atomic_read(v, sizeof(*v));
1191 	return arch_atomic_long_read_acquire(v);
1192 }
1193 
1194 static __always_inline void
atomic_long_set(atomic_long_t * v,long i)1195 atomic_long_set(atomic_long_t *v, long i)
1196 {
1197 	instrument_atomic_write(v, sizeof(*v));
1198 	arch_atomic_long_set(v, i);
1199 }
1200 
1201 static __always_inline void
atomic_long_set_release(atomic_long_t * v,long i)1202 atomic_long_set_release(atomic_long_t *v, long i)
1203 {
1204 	instrument_atomic_write(v, sizeof(*v));
1205 	arch_atomic_long_set_release(v, i);
1206 }
1207 
1208 static __always_inline void
atomic_long_add(long i,atomic_long_t * v)1209 atomic_long_add(long i, atomic_long_t *v)
1210 {
1211 	instrument_atomic_read_write(v, sizeof(*v));
1212 	arch_atomic_long_add(i, v);
1213 }
1214 
1215 static __always_inline long
atomic_long_add_return(long i,atomic_long_t * v)1216 atomic_long_add_return(long i, atomic_long_t *v)
1217 {
1218 	instrument_atomic_read_write(v, sizeof(*v));
1219 	return arch_atomic_long_add_return(i, v);
1220 }
1221 
1222 static __always_inline long
atomic_long_add_return_acquire(long i,atomic_long_t * v)1223 atomic_long_add_return_acquire(long i, atomic_long_t *v)
1224 {
1225 	instrument_atomic_read_write(v, sizeof(*v));
1226 	return arch_atomic_long_add_return_acquire(i, v);
1227 }
1228 
1229 static __always_inline long
atomic_long_add_return_release(long i,atomic_long_t * v)1230 atomic_long_add_return_release(long i, atomic_long_t *v)
1231 {
1232 	instrument_atomic_read_write(v, sizeof(*v));
1233 	return arch_atomic_long_add_return_release(i, v);
1234 }
1235 
1236 static __always_inline long
atomic_long_add_return_relaxed(long i,atomic_long_t * v)1237 atomic_long_add_return_relaxed(long i, atomic_long_t *v)
1238 {
1239 	instrument_atomic_read_write(v, sizeof(*v));
1240 	return arch_atomic_long_add_return_relaxed(i, v);
1241 }
1242 
1243 static __always_inline long
atomic_long_fetch_add(long i,atomic_long_t * v)1244 atomic_long_fetch_add(long i, atomic_long_t *v)
1245 {
1246 	instrument_atomic_read_write(v, sizeof(*v));
1247 	return arch_atomic_long_fetch_add(i, v);
1248 }
1249 
1250 static __always_inline long
atomic_long_fetch_add_acquire(long i,atomic_long_t * v)1251 atomic_long_fetch_add_acquire(long i, atomic_long_t *v)
1252 {
1253 	instrument_atomic_read_write(v, sizeof(*v));
1254 	return arch_atomic_long_fetch_add_acquire(i, v);
1255 }
1256 
1257 static __always_inline long
atomic_long_fetch_add_release(long i,atomic_long_t * v)1258 atomic_long_fetch_add_release(long i, atomic_long_t *v)
1259 {
1260 	instrument_atomic_read_write(v, sizeof(*v));
1261 	return arch_atomic_long_fetch_add_release(i, v);
1262 }
1263 
1264 static __always_inline long
atomic_long_fetch_add_relaxed(long i,atomic_long_t * v)1265 atomic_long_fetch_add_relaxed(long i, atomic_long_t *v)
1266 {
1267 	instrument_atomic_read_write(v, sizeof(*v));
1268 	return arch_atomic_long_fetch_add_relaxed(i, v);
1269 }
1270 
1271 static __always_inline void
atomic_long_sub(long i,atomic_long_t * v)1272 atomic_long_sub(long i, atomic_long_t *v)
1273 {
1274 	instrument_atomic_read_write(v, sizeof(*v));
1275 	arch_atomic_long_sub(i, v);
1276 }
1277 
1278 static __always_inline long
atomic_long_sub_return(long i,atomic_long_t * v)1279 atomic_long_sub_return(long i, atomic_long_t *v)
1280 {
1281 	instrument_atomic_read_write(v, sizeof(*v));
1282 	return arch_atomic_long_sub_return(i, v);
1283 }
1284 
1285 static __always_inline long
atomic_long_sub_return_acquire(long i,atomic_long_t * v)1286 atomic_long_sub_return_acquire(long i, atomic_long_t *v)
1287 {
1288 	instrument_atomic_read_write(v, sizeof(*v));
1289 	return arch_atomic_long_sub_return_acquire(i, v);
1290 }
1291 
1292 static __always_inline long
atomic_long_sub_return_release(long i,atomic_long_t * v)1293 atomic_long_sub_return_release(long i, atomic_long_t *v)
1294 {
1295 	instrument_atomic_read_write(v, sizeof(*v));
1296 	return arch_atomic_long_sub_return_release(i, v);
1297 }
1298 
1299 static __always_inline long
atomic_long_sub_return_relaxed(long i,atomic_long_t * v)1300 atomic_long_sub_return_relaxed(long i, atomic_long_t *v)
1301 {
1302 	instrument_atomic_read_write(v, sizeof(*v));
1303 	return arch_atomic_long_sub_return_relaxed(i, v);
1304 }
1305 
1306 static __always_inline long
atomic_long_fetch_sub(long i,atomic_long_t * v)1307 atomic_long_fetch_sub(long i, atomic_long_t *v)
1308 {
1309 	instrument_atomic_read_write(v, sizeof(*v));
1310 	return arch_atomic_long_fetch_sub(i, v);
1311 }
1312 
1313 static __always_inline long
atomic_long_fetch_sub_acquire(long i,atomic_long_t * v)1314 atomic_long_fetch_sub_acquire(long i, atomic_long_t *v)
1315 {
1316 	instrument_atomic_read_write(v, sizeof(*v));
1317 	return arch_atomic_long_fetch_sub_acquire(i, v);
1318 }
1319 
1320 static __always_inline long
atomic_long_fetch_sub_release(long i,atomic_long_t * v)1321 atomic_long_fetch_sub_release(long i, atomic_long_t *v)
1322 {
1323 	instrument_atomic_read_write(v, sizeof(*v));
1324 	return arch_atomic_long_fetch_sub_release(i, v);
1325 }
1326 
1327 static __always_inline long
atomic_long_fetch_sub_relaxed(long i,atomic_long_t * v)1328 atomic_long_fetch_sub_relaxed(long i, atomic_long_t *v)
1329 {
1330 	instrument_atomic_read_write(v, sizeof(*v));
1331 	return arch_atomic_long_fetch_sub_relaxed(i, v);
1332 }
1333 
1334 static __always_inline void
atomic_long_inc(atomic_long_t * v)1335 atomic_long_inc(atomic_long_t *v)
1336 {
1337 	instrument_atomic_read_write(v, sizeof(*v));
1338 	arch_atomic_long_inc(v);
1339 }
1340 
1341 static __always_inline long
atomic_long_inc_return(atomic_long_t * v)1342 atomic_long_inc_return(atomic_long_t *v)
1343 {
1344 	instrument_atomic_read_write(v, sizeof(*v));
1345 	return arch_atomic_long_inc_return(v);
1346 }
1347 
1348 static __always_inline long
atomic_long_inc_return_acquire(atomic_long_t * v)1349 atomic_long_inc_return_acquire(atomic_long_t *v)
1350 {
1351 	instrument_atomic_read_write(v, sizeof(*v));
1352 	return arch_atomic_long_inc_return_acquire(v);
1353 }
1354 
1355 static __always_inline long
atomic_long_inc_return_release(atomic_long_t * v)1356 atomic_long_inc_return_release(atomic_long_t *v)
1357 {
1358 	instrument_atomic_read_write(v, sizeof(*v));
1359 	return arch_atomic_long_inc_return_release(v);
1360 }
1361 
1362 static __always_inline long
atomic_long_inc_return_relaxed(atomic_long_t * v)1363 atomic_long_inc_return_relaxed(atomic_long_t *v)
1364 {
1365 	instrument_atomic_read_write(v, sizeof(*v));
1366 	return arch_atomic_long_inc_return_relaxed(v);
1367 }
1368 
1369 static __always_inline long
atomic_long_fetch_inc(atomic_long_t * v)1370 atomic_long_fetch_inc(atomic_long_t *v)
1371 {
1372 	instrument_atomic_read_write(v, sizeof(*v));
1373 	return arch_atomic_long_fetch_inc(v);
1374 }
1375 
1376 static __always_inline long
atomic_long_fetch_inc_acquire(atomic_long_t * v)1377 atomic_long_fetch_inc_acquire(atomic_long_t *v)
1378 {
1379 	instrument_atomic_read_write(v, sizeof(*v));
1380 	return arch_atomic_long_fetch_inc_acquire(v);
1381 }
1382 
1383 static __always_inline long
atomic_long_fetch_inc_release(atomic_long_t * v)1384 atomic_long_fetch_inc_release(atomic_long_t *v)
1385 {
1386 	instrument_atomic_read_write(v, sizeof(*v));
1387 	return arch_atomic_long_fetch_inc_release(v);
1388 }
1389 
1390 static __always_inline long
atomic_long_fetch_inc_relaxed(atomic_long_t * v)1391 atomic_long_fetch_inc_relaxed(atomic_long_t *v)
1392 {
1393 	instrument_atomic_read_write(v, sizeof(*v));
1394 	return arch_atomic_long_fetch_inc_relaxed(v);
1395 }
1396 
1397 static __always_inline void
atomic_long_dec(atomic_long_t * v)1398 atomic_long_dec(atomic_long_t *v)
1399 {
1400 	instrument_atomic_read_write(v, sizeof(*v));
1401 	arch_atomic_long_dec(v);
1402 }
1403 
1404 static __always_inline long
atomic_long_dec_return(atomic_long_t * v)1405 atomic_long_dec_return(atomic_long_t *v)
1406 {
1407 	instrument_atomic_read_write(v, sizeof(*v));
1408 	return arch_atomic_long_dec_return(v);
1409 }
1410 
1411 static __always_inline long
atomic_long_dec_return_acquire(atomic_long_t * v)1412 atomic_long_dec_return_acquire(atomic_long_t *v)
1413 {
1414 	instrument_atomic_read_write(v, sizeof(*v));
1415 	return arch_atomic_long_dec_return_acquire(v);
1416 }
1417 
1418 static __always_inline long
atomic_long_dec_return_release(atomic_long_t * v)1419 atomic_long_dec_return_release(atomic_long_t *v)
1420 {
1421 	instrument_atomic_read_write(v, sizeof(*v));
1422 	return arch_atomic_long_dec_return_release(v);
1423 }
1424 
1425 static __always_inline long
atomic_long_dec_return_relaxed(atomic_long_t * v)1426 atomic_long_dec_return_relaxed(atomic_long_t *v)
1427 {
1428 	instrument_atomic_read_write(v, sizeof(*v));
1429 	return arch_atomic_long_dec_return_relaxed(v);
1430 }
1431 
1432 static __always_inline long
atomic_long_fetch_dec(atomic_long_t * v)1433 atomic_long_fetch_dec(atomic_long_t *v)
1434 {
1435 	instrument_atomic_read_write(v, sizeof(*v));
1436 	return arch_atomic_long_fetch_dec(v);
1437 }
1438 
1439 static __always_inline long
atomic_long_fetch_dec_acquire(atomic_long_t * v)1440 atomic_long_fetch_dec_acquire(atomic_long_t *v)
1441 {
1442 	instrument_atomic_read_write(v, sizeof(*v));
1443 	return arch_atomic_long_fetch_dec_acquire(v);
1444 }
1445 
1446 static __always_inline long
atomic_long_fetch_dec_release(atomic_long_t * v)1447 atomic_long_fetch_dec_release(atomic_long_t *v)
1448 {
1449 	instrument_atomic_read_write(v, sizeof(*v));
1450 	return arch_atomic_long_fetch_dec_release(v);
1451 }
1452 
1453 static __always_inline long
atomic_long_fetch_dec_relaxed(atomic_long_t * v)1454 atomic_long_fetch_dec_relaxed(atomic_long_t *v)
1455 {
1456 	instrument_atomic_read_write(v, sizeof(*v));
1457 	return arch_atomic_long_fetch_dec_relaxed(v);
1458 }
1459 
1460 static __always_inline void
atomic_long_and(long i,atomic_long_t * v)1461 atomic_long_and(long i, atomic_long_t *v)
1462 {
1463 	instrument_atomic_read_write(v, sizeof(*v));
1464 	arch_atomic_long_and(i, v);
1465 }
1466 
1467 static __always_inline long
atomic_long_fetch_and(long i,atomic_long_t * v)1468 atomic_long_fetch_and(long i, atomic_long_t *v)
1469 {
1470 	instrument_atomic_read_write(v, sizeof(*v));
1471 	return arch_atomic_long_fetch_and(i, v);
1472 }
1473 
1474 static __always_inline long
atomic_long_fetch_and_acquire(long i,atomic_long_t * v)1475 atomic_long_fetch_and_acquire(long i, atomic_long_t *v)
1476 {
1477 	instrument_atomic_read_write(v, sizeof(*v));
1478 	return arch_atomic_long_fetch_and_acquire(i, v);
1479 }
1480 
1481 static __always_inline long
atomic_long_fetch_and_release(long i,atomic_long_t * v)1482 atomic_long_fetch_and_release(long i, atomic_long_t *v)
1483 {
1484 	instrument_atomic_read_write(v, sizeof(*v));
1485 	return arch_atomic_long_fetch_and_release(i, v);
1486 }
1487 
1488 static __always_inline long
atomic_long_fetch_and_relaxed(long i,atomic_long_t * v)1489 atomic_long_fetch_and_relaxed(long i, atomic_long_t *v)
1490 {
1491 	instrument_atomic_read_write(v, sizeof(*v));
1492 	return arch_atomic_long_fetch_and_relaxed(i, v);
1493 }
1494 
1495 static __always_inline void
atomic_long_andnot(long i,atomic_long_t * v)1496 atomic_long_andnot(long i, atomic_long_t *v)
1497 {
1498 	instrument_atomic_read_write(v, sizeof(*v));
1499 	arch_atomic_long_andnot(i, v);
1500 }
1501 
1502 static __always_inline long
atomic_long_fetch_andnot(long i,atomic_long_t * v)1503 atomic_long_fetch_andnot(long i, atomic_long_t *v)
1504 {
1505 	instrument_atomic_read_write(v, sizeof(*v));
1506 	return arch_atomic_long_fetch_andnot(i, v);
1507 }
1508 
1509 static __always_inline long
atomic_long_fetch_andnot_acquire(long i,atomic_long_t * v)1510 atomic_long_fetch_andnot_acquire(long i, atomic_long_t *v)
1511 {
1512 	instrument_atomic_read_write(v, sizeof(*v));
1513 	return arch_atomic_long_fetch_andnot_acquire(i, v);
1514 }
1515 
1516 static __always_inline long
atomic_long_fetch_andnot_release(long i,atomic_long_t * v)1517 atomic_long_fetch_andnot_release(long i, atomic_long_t *v)
1518 {
1519 	instrument_atomic_read_write(v, sizeof(*v));
1520 	return arch_atomic_long_fetch_andnot_release(i, v);
1521 }
1522 
1523 static __always_inline long
atomic_long_fetch_andnot_relaxed(long i,atomic_long_t * v)1524 atomic_long_fetch_andnot_relaxed(long i, atomic_long_t *v)
1525 {
1526 	instrument_atomic_read_write(v, sizeof(*v));
1527 	return arch_atomic_long_fetch_andnot_relaxed(i, v);
1528 }
1529 
1530 static __always_inline void
atomic_long_or(long i,atomic_long_t * v)1531 atomic_long_or(long i, atomic_long_t *v)
1532 {
1533 	instrument_atomic_read_write(v, sizeof(*v));
1534 	arch_atomic_long_or(i, v);
1535 }
1536 
1537 static __always_inline long
atomic_long_fetch_or(long i,atomic_long_t * v)1538 atomic_long_fetch_or(long i, atomic_long_t *v)
1539 {
1540 	instrument_atomic_read_write(v, sizeof(*v));
1541 	return arch_atomic_long_fetch_or(i, v);
1542 }
1543 
1544 static __always_inline long
atomic_long_fetch_or_acquire(long i,atomic_long_t * v)1545 atomic_long_fetch_or_acquire(long i, atomic_long_t *v)
1546 {
1547 	instrument_atomic_read_write(v, sizeof(*v));
1548 	return arch_atomic_long_fetch_or_acquire(i, v);
1549 }
1550 
1551 static __always_inline long
atomic_long_fetch_or_release(long i,atomic_long_t * v)1552 atomic_long_fetch_or_release(long i, atomic_long_t *v)
1553 {
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic_long_fetch_or_release(i, v);
}

static __always_inline long
atomic_long_fetch_or_relaxed(long i, atomic_long_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic_long_fetch_or_relaxed(i, v);
}

static __always_inline void
atomic_long_xor(long i, atomic_long_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));
	arch_atomic_long_xor(i, v);
}

static __always_inline long
atomic_long_fetch_xor(long i, atomic_long_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic_long_fetch_xor(i, v);
}

static __always_inline long
atomic_long_fetch_xor_acquire(long i, atomic_long_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic_long_fetch_xor_acquire(i, v);
}

static __always_inline long
atomic_long_fetch_xor_release(long i, atomic_long_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic_long_fetch_xor_release(i, v);
}

static __always_inline long
atomic_long_fetch_xor_relaxed(long i, atomic_long_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic_long_fetch_xor_relaxed(i, v);
}

static __always_inline long
atomic_long_xchg(atomic_long_t *v, long i)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic_long_xchg(v, i);
}

static __always_inline long
atomic_long_xchg_acquire(atomic_long_t *v, long i)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic_long_xchg_acquire(v, i);
}

static __always_inline long
atomic_long_xchg_release(atomic_long_t *v, long i)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic_long_xchg_release(v, i);
}

static __always_inline long
atomic_long_xchg_relaxed(atomic_long_t *v, long i)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic_long_xchg_relaxed(v, i);
}

static __always_inline long
atomic_long_cmpxchg(atomic_long_t *v, long old, long new)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic_long_cmpxchg(v, old, new);
}

static __always_inline long
atomic_long_cmpxchg_acquire(atomic_long_t *v, long old, long new)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic_long_cmpxchg_acquire(v, old, new);
}

static __always_inline long
atomic_long_cmpxchg_release(atomic_long_t *v, long old, long new)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic_long_cmpxchg_release(v, old, new);
}

static __always_inline long
atomic_long_cmpxchg_relaxed(atomic_long_t *v, long old, long new)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic_long_cmpxchg_relaxed(v, old, new);
}

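/*
 * The try_cmpxchg() variants instrument both @v and @old: on a failed
 * exchange the current value of @v is written back through @old, so both
 * locations are accessed read-write.
 */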
static __always_inline bool
atomic_long_try_cmpxchg(atomic_long_t *v, long *old, long new)
{
	instrument_atomic_read_write(v, sizeof(*v));
	instrument_atomic_read_write(old, sizeof(*old));
	return arch_atomic_long_try_cmpxchg(v, old, new);
}

static __always_inline bool
atomic_long_try_cmpxchg_acquire(atomic_long_t *v, long *old, long new)
{
	instrument_atomic_read_write(v, sizeof(*v));
	instrument_atomic_read_write(old, sizeof(*old));
	return arch_atomic_long_try_cmpxchg_acquire(v, old, new);
}

static __always_inline bool
atomic_long_try_cmpxchg_release(atomic_long_t *v, long *old, long new)
{
	instrument_atomic_read_write(v, sizeof(*v));
	instrument_atomic_read_write(old, sizeof(*old));
	return arch_atomic_long_try_cmpxchg_release(v, old, new);
}

static __always_inline bool
atomic_long_try_cmpxchg_relaxed(atomic_long_t *v, long *old, long new)
{
	instrument_atomic_read_write(v, sizeof(*v));
	instrument_atomic_read_write(old, sizeof(*old));
	return arch_atomic_long_try_cmpxchg_relaxed(v, old, new);
}

static __always_inline bool
atomic_long_sub_and_test(long i, atomic_long_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic_long_sub_and_test(i, v);
}

static __always_inline bool
atomic_long_dec_and_test(atomic_long_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic_long_dec_and_test(v);
}

static __always_inline bool
atomic_long_inc_and_test(atomic_long_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic_long_inc_and_test(v);
}

static __always_inline bool
atomic_long_add_negative(long i, atomic_long_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic_long_add_negative(i, v);
}

static __always_inline long
atomic_long_fetch_add_unless(atomic_long_t *v, long a, long u)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic_long_fetch_add_unless(v, a, u);
}

static __always_inline bool
atomic_long_add_unless(atomic_long_t *v, long a, long u)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic_long_add_unless(v, a, u);
}

static __always_inline bool
atomic_long_inc_not_zero(atomic_long_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic_long_inc_not_zero(v);
}

static __always_inline bool
atomic_long_inc_unless_negative(atomic_long_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic_long_inc_unless_negative(v);
}

static __always_inline bool
atomic_long_dec_unless_positive(atomic_long_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic_long_dec_unless_positive(v);
}

static __always_inline long
atomic_long_dec_if_positive(atomic_long_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic_long_dec_if_positive(v);
}

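/*
 * Instrumented wrappers for the xchg()/cmpxchg() macro family. Each macro
 * evaluates @ptr exactly once, reports the access via
 * instrument_atomic_write(), and then forwards to the corresponding
 * arch_ implementation.
 */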
#define xchg(ptr, ...) \
({ \
	typeof(ptr) __ai_ptr = (ptr); \
	instrument_atomic_write(__ai_ptr, sizeof(*__ai_ptr)); \
	arch_xchg(__ai_ptr, __VA_ARGS__); \
})

#define xchg_acquire(ptr, ...) \
({ \
	typeof(ptr) __ai_ptr = (ptr); \
	instrument_atomic_write(__ai_ptr, sizeof(*__ai_ptr)); \
	arch_xchg_acquire(__ai_ptr, __VA_ARGS__); \
})

#define xchg_release(ptr, ...) \
({ \
	typeof(ptr) __ai_ptr = (ptr); \
	instrument_atomic_write(__ai_ptr, sizeof(*__ai_ptr)); \
	arch_xchg_release(__ai_ptr, __VA_ARGS__); \
})

#define xchg_relaxed(ptr, ...) \
({ \
	typeof(ptr) __ai_ptr = (ptr); \
	instrument_atomic_write(__ai_ptr, sizeof(*__ai_ptr)); \
	arch_xchg_relaxed(__ai_ptr, __VA_ARGS__); \
})

#define cmpxchg(ptr, ...) \
({ \
	typeof(ptr) __ai_ptr = (ptr); \
	instrument_atomic_write(__ai_ptr, sizeof(*__ai_ptr)); \
	arch_cmpxchg(__ai_ptr, __VA_ARGS__); \
})

#define cmpxchg_acquire(ptr, ...) \
({ \
	typeof(ptr) __ai_ptr = (ptr); \
	instrument_atomic_write(__ai_ptr, sizeof(*__ai_ptr)); \
	arch_cmpxchg_acquire(__ai_ptr, __VA_ARGS__); \
})

#define cmpxchg_release(ptr, ...) \
({ \
	typeof(ptr) __ai_ptr = (ptr); \
	instrument_atomic_write(__ai_ptr, sizeof(*__ai_ptr)); \
	arch_cmpxchg_release(__ai_ptr, __VA_ARGS__); \
})

#define cmpxchg_relaxed(ptr, ...) \
({ \
	typeof(ptr) __ai_ptr = (ptr); \
	instrument_atomic_write(__ai_ptr, sizeof(*__ai_ptr)); \
	arch_cmpxchg_relaxed(__ai_ptr, __VA_ARGS__); \
})

#define cmpxchg64(ptr, ...) \
({ \
	typeof(ptr) __ai_ptr = (ptr); \
	instrument_atomic_write(__ai_ptr, sizeof(*__ai_ptr)); \
	arch_cmpxchg64(__ai_ptr, __VA_ARGS__); \
})

#define cmpxchg64_acquire(ptr, ...) \
({ \
	typeof(ptr) __ai_ptr = (ptr); \
	instrument_atomic_write(__ai_ptr, sizeof(*__ai_ptr)); \
	arch_cmpxchg64_acquire(__ai_ptr, __VA_ARGS__); \
})

#define cmpxchg64_release(ptr, ...) \
({ \
	typeof(ptr) __ai_ptr = (ptr); \
	instrument_atomic_write(__ai_ptr, sizeof(*__ai_ptr)); \
	arch_cmpxchg64_release(__ai_ptr, __VA_ARGS__); \
})

#define cmpxchg64_relaxed(ptr, ...) \
({ \
	typeof(ptr) __ai_ptr = (ptr); \
	instrument_atomic_write(__ai_ptr, sizeof(*__ai_ptr)); \
	arch_cmpxchg64_relaxed(__ai_ptr, __VA_ARGS__); \
})

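/*
 * try_cmpxchg() takes an extra @oldp argument; both @ptr and @oldp are
 * instrumented, since a failed exchange updates *@oldp with the value
 * currently stored at *@ptr.
 */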
#define try_cmpxchg(ptr, oldp, ...) \
({ \
	typeof(ptr) __ai_ptr = (ptr); \
	typeof(oldp) __ai_oldp = (oldp); \
	instrument_atomic_write(__ai_ptr, sizeof(*__ai_ptr)); \
	instrument_atomic_write(__ai_oldp, sizeof(*__ai_oldp)); \
	arch_try_cmpxchg(__ai_ptr, __ai_oldp, __VA_ARGS__); \
})

#define try_cmpxchg_acquire(ptr, oldp, ...) \
({ \
	typeof(ptr) __ai_ptr = (ptr); \
	typeof(oldp) __ai_oldp = (oldp); \
	instrument_atomic_write(__ai_ptr, sizeof(*__ai_ptr)); \
	instrument_atomic_write(__ai_oldp, sizeof(*__ai_oldp)); \
	arch_try_cmpxchg_acquire(__ai_ptr, __ai_oldp, __VA_ARGS__); \
})

#define try_cmpxchg_release(ptr, oldp, ...) \
({ \
	typeof(ptr) __ai_ptr = (ptr); \
	typeof(oldp) __ai_oldp = (oldp); \
	instrument_atomic_write(__ai_ptr, sizeof(*__ai_ptr)); \
	instrument_atomic_write(__ai_oldp, sizeof(*__ai_oldp)); \
	arch_try_cmpxchg_release(__ai_ptr, __ai_oldp, __VA_ARGS__); \
})

#define try_cmpxchg_relaxed(ptr, oldp, ...) \
({ \
	typeof(ptr) __ai_ptr = (ptr); \
	typeof(oldp) __ai_oldp = (oldp); \
	instrument_atomic_write(__ai_ptr, sizeof(*__ai_ptr)); \
	instrument_atomic_write(__ai_oldp, sizeof(*__ai_oldp)); \
	arch_try_cmpxchg_relaxed(__ai_ptr, __ai_oldp, __VA_ARGS__); \
})

#define cmpxchg_local(ptr, ...) \
({ \
	typeof(ptr) __ai_ptr = (ptr); \
	instrument_atomic_write(__ai_ptr, sizeof(*__ai_ptr)); \
	arch_cmpxchg_local(__ai_ptr, __VA_ARGS__); \
})

#define cmpxchg64_local(ptr, ...) \
({ \
	typeof(ptr) __ai_ptr = (ptr); \
	instrument_atomic_write(__ai_ptr, sizeof(*__ai_ptr)); \
	arch_cmpxchg64_local(__ai_ptr, __VA_ARGS__); \
})

#define sync_cmpxchg(ptr, ...) \
({ \
	typeof(ptr) __ai_ptr = (ptr); \
	instrument_atomic_write(__ai_ptr, sizeof(*__ai_ptr)); \
	arch_sync_cmpxchg(__ai_ptr, __VA_ARGS__); \
})

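/*
 * cmpxchg_double() operates on two adjacent machine words, hence the
 * 2 * sizeof(*__ai_ptr) instrumentation size.
 */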
#define cmpxchg_double(ptr, ...) \
({ \
	typeof(ptr) __ai_ptr = (ptr); \
	instrument_atomic_write(__ai_ptr, 2 * sizeof(*__ai_ptr)); \
	arch_cmpxchg_double(__ai_ptr, __VA_ARGS__); \
})

#define cmpxchg_double_local(ptr, ...) \
({ \
	typeof(ptr) __ai_ptr = (ptr); \
	instrument_atomic_write(__ai_ptr, 2 * sizeof(*__ai_ptr)); \
	arch_cmpxchg_double_local(__ai_ptr, __VA_ARGS__); \
})

#endif /* _LINUX_ATOMIC_INSTRUMENTED_H */
// 2a9553f0a9d5619f19151092df5cabbbf16ce835