#ifndef foopulseatomichfoo
#define foopulseatomichfoo

/***
  This file is part of PulseAudio.

  Copyright 2006-2008 Lennart Poettering
  Copyright 2008 Nokia Corporation

  PulseAudio is free software; you can redistribute it and/or modify
  it under the terms of the GNU Lesser General Public License as
  published by the Free Software Foundation; either version 2.1 of the
  License, or (at your option) any later version.

  PulseAudio is distributed in the hope that it will be useful, but
  WITHOUT ANY WARRANTY; without even the implied warranty of
  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
  Lesser General Public License for more details.

  You should have received a copy of the GNU Lesser General Public
  License along with PulseAudio; if not, see <http://www.gnu.org/licenses/>.
***/

#include <pulsecore/macro.h>

/*
 * atomic_ops guarantees us that sizeof(AO_t) == sizeof(void*). It is
 * not guaranteed, however, that sizeof(AO_t) == sizeof(size_t),
 * although that is very likely.
 *
 * For now we do only full memory barriers. Eventually we might want
 * to support more elaborate memory barriers, in which case we will add
 * suffixes to the function names.
 *
 * On gcc >= 4.1 we use the builtin atomic functions, otherwise we use
 * libatomic_ops.
 */
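
/*
 * Illustrative usage only (a sketch, not part of this header): a
 * reference count implemented with these primitives. The names
 * my_object and destroy() are hypothetical.
 *
 *     struct my_object {
 *         pa_atomic_t ref;
 *     };
 *
 *     static void my_object_ref(struct my_object *o) {
 *         pa_atomic_inc(&o->ref);
 *     }
 *
 *     static void my_object_unref(struct my_object *o) {
 *         // pa_atomic_dec() returns the previous value, so 1 means
 *         // we just dropped the last reference.
 *         if (pa_atomic_dec(&o->ref) == 1)
 *             destroy(o);
 *     }
 */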

#ifndef PACKAGE
#error "Please include config.h before including this file!"
#endif

#ifdef HAVE_ATOMIC_BUILTINS

/* __sync based implementation */

typedef struct pa_atomic {
    volatile int value;
} pa_atomic_t;

#define PA_ATOMIC_INIT(v) { .value = (v) }

#ifdef HAVE_ATOMIC_BUILTINS_MEMORY_MODEL

/* __atomic based implementation */

static inline int pa_atomic_load(const pa_atomic_t *a) {
    return __atomic_load_n(&a->value, __ATOMIC_SEQ_CST);
}

static inline void pa_atomic_store(pa_atomic_t *a, int i) {
    __atomic_store_n(&a->value, i, __ATOMIC_SEQ_CST);
}

#else

static inline int pa_atomic_load(const pa_atomic_t *a) {
    __sync_synchronize();
    return a->value;
}

static inline void pa_atomic_store(pa_atomic_t *a, int i) {
    a->value = i;
    __sync_synchronize();
}

#endif

/* Returns the previously set value */
static inline int pa_atomic_add(pa_atomic_t *a, int i) {
    return __sync_fetch_and_add(&a->value, i);
}

/* Returns the previously set value */
static inline int pa_atomic_sub(pa_atomic_t *a, int i) {
    return __sync_fetch_and_sub(&a->value, i);
}

/* Returns the previously set value */
static inline int pa_atomic_inc(pa_atomic_t *a) {
    return pa_atomic_add(a, 1);
}

/* Returns the previously set value */
static inline int pa_atomic_dec(pa_atomic_t *a) {
    return pa_atomic_sub(a, 1);
}

/* Returns true when the operation was successful. */
static inline bool pa_atomic_cmpxchg(pa_atomic_t *a, int old_i, int new_i) {
    return __sync_bool_compare_and_swap(&a->value, old_i, new_i);
}
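
/*
 * A sketch of the usual compare-and-swap retry loop built on
 * pa_atomic_cmpxchg() (illustrative only; atomic_max() is a
 * hypothetical helper, not part of this header):
 *
 *     static void atomic_max(pa_atomic_t *a, int i) {
 *         int old;
 *         do {
 *             old = pa_atomic_load(a);
 *             if (old >= i)
 *                 return;
 *         } while (!pa_atomic_cmpxchg(a, old, i));
 *     }
 */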

typedef struct pa_atomic_ptr {
    volatile unsigned long value;
} pa_atomic_ptr_t;

#define PA_ATOMIC_PTR_INIT(v) { .value = (long) (v) }

#ifdef HAVE_ATOMIC_BUILTINS_MEMORY_MODEL

/* __atomic based implementation */

static inline void* pa_atomic_ptr_load(const pa_atomic_ptr_t *a) {
    return (void*) __atomic_load_n(&a->value, __ATOMIC_SEQ_CST);
}

static inline void pa_atomic_ptr_store(pa_atomic_ptr_t *a, void* p) {
    __atomic_store_n(&a->value, (unsigned long) p, __ATOMIC_SEQ_CST);
}

#else

static inline void* pa_atomic_ptr_load(const pa_atomic_ptr_t *a) {
    __sync_synchronize();
    return (void*) a->value;
}

static inline void pa_atomic_ptr_store(pa_atomic_ptr_t *a, void *p) {
    a->value = (unsigned long) p;
    __sync_synchronize();
}

#endif

static inline bool pa_atomic_ptr_cmpxchg(pa_atomic_ptr_t *a, void *old_p, void* new_p) {
    return __sync_bool_compare_and_swap(&a->value, (long) old_p, (long) new_p);
}

#elif defined(__NetBSD__) && defined(HAVE_SYS_ATOMIC_H)

/* NetBSD 5.0+ atomic_ops(3) implementation */

#include <sys/atomic.h>

typedef struct pa_atomic {
    volatile unsigned int value;
} pa_atomic_t;

#define PA_ATOMIC_INIT(v) { .value = (unsigned int) (v) }

static inline int pa_atomic_load(const pa_atomic_t *a) {
    membar_sync();
    return (int) a->value;
}

static inline void pa_atomic_store(pa_atomic_t *a, int i) {
    a->value = (unsigned int) i;
    membar_sync();
}

/* Returns the previously set value */
static inline int pa_atomic_add(pa_atomic_t *a, int i) {
    int nv = (int) atomic_add_int_nv(&a->value, i);
    return nv - i;
}

/* Returns the previously set value */
static inline int pa_atomic_sub(pa_atomic_t *a, int i) {
    int nv = (int) atomic_add_int_nv(&a->value, -i);
    return nv + i;
}

/* Returns the previously set value */
static inline int pa_atomic_inc(pa_atomic_t *a) {
    int nv = (int) atomic_inc_uint_nv(&a->value);
    return nv - 1;
}

/* Returns the previously set value */
static inline int pa_atomic_dec(pa_atomic_t *a) {
    int nv = (int) atomic_dec_uint_nv(&a->value);
    return nv + 1;
}

/* Returns true when the operation was successful. */
static inline bool pa_atomic_cmpxchg(pa_atomic_t *a, int old_i, int new_i) {
    unsigned int r = atomic_cas_uint(&a->value, (unsigned int) old_i, (unsigned int) new_i);
    return (int) r == old_i;
}

typedef struct pa_atomic_ptr {
    volatile void *value;
} pa_atomic_ptr_t;

#define PA_ATOMIC_PTR_INIT(v) { .value = (v) }

static inline void* pa_atomic_ptr_load(const pa_atomic_ptr_t *a) {
    membar_sync();
    return (void *) a->value;
}

static inline void pa_atomic_ptr_store(pa_atomic_ptr_t *a, void *p) {
    a->value = p;
    membar_sync();
}

static inline bool pa_atomic_ptr_cmpxchg(pa_atomic_ptr_t *a, void *old_p, void* new_p) {
    void *r = atomic_cas_ptr(&a->value, old_p, new_p);
    return r == old_p;
}

#elif defined(__FreeBSD__) || defined(__FreeBSD_kernel__)

#include <sys/cdefs.h>
#include <sys/types.h>
#include <sys/param.h>
#include <machine/atomic.h>

#if __FreeBSD_version < 600000
#if defined(__i386__) || defined(__amd64__)
#if defined(__amd64__)
#define atomic_load_acq_64 atomic_load_acq_long
#endif
static inline u_int atomic_fetchadd_int(volatile u_int *p, u_int v) {
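    /* XADD exchanges its register operand with *p and stores the sum
     * in *p, so v ends up holding the previous value of *p. MPLOCKED
     * supplies the lock prefix where needed. */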
    __asm __volatile(
        "   " __XSTRING(MPLOCKED) " "
        "   xaddl   %0, %1 ;        "
        "#  atomic_fetchadd_int"
        : "+r" (v),
          "=m" (*p)
        : "m" (*p));

    return (v);
}
#elif defined(__sparc__) && defined(__arch64__)
#define atomic_load_acq_64 atomic_load_acq_long
#define atomic_fetchadd_int atomic_add_int
#elif defined(__ia64__)
#define atomic_load_acq_64 atomic_load_acq_long
static inline uint32_t
atomic_fetchadd_int(volatile uint32_t *p, uint32_t v) {
    uint32_t value;

    do {
        value = *p;
    } while (!atomic_cmpset_32(p, value, value + v));
    return (value);
}
#endif
#endif

typedef struct pa_atomic {
    volatile unsigned long value;
} pa_atomic_t;

#define PA_ATOMIC_INIT(v) { .value = (v) }

static inline int pa_atomic_load(const pa_atomic_t *a) {
    return (int) atomic_load_acq_int((unsigned int *) &a->value);
}

static inline void pa_atomic_store(pa_atomic_t *a, int i) {
    atomic_store_rel_int((unsigned int *) &a->value, i);
}

static inline int pa_atomic_add(pa_atomic_t *a, int i) {
    return atomic_fetchadd_int((unsigned int *) &a->value, i);
}

static inline int pa_atomic_sub(pa_atomic_t *a, int i) {
    return atomic_fetchadd_int((unsigned int *) &a->value, -(i));
}

static inline int pa_atomic_inc(pa_atomic_t *a) {
    return atomic_fetchadd_int((unsigned int *) &a->value, 1);
}

static inline int pa_atomic_dec(pa_atomic_t *a) {
    return atomic_fetchadd_int((unsigned int *) &a->value, -1);
}

static inline int pa_atomic_cmpxchg(pa_atomic_t *a, int old_i, int new_i) {
    return atomic_cmpset_int((unsigned int *) &a->value, old_i, new_i);
}

typedef struct pa_atomic_ptr {
    volatile unsigned long value;
} pa_atomic_ptr_t;

#define PA_ATOMIC_PTR_INIT(v) { .value = (unsigned long) (v) }

static inline void* pa_atomic_ptr_load(const pa_atomic_ptr_t *a) {
#ifdef atomic_load_acq_64
    return (void*) atomic_load_acq_ptr((unsigned long *) &a->value);
#else
    return (void*) atomic_load_acq_ptr((unsigned int *) &a->value);
#endif
}

static inline void pa_atomic_ptr_store(pa_atomic_ptr_t *a, void *p) {
#ifdef atomic_load_acq_64
    atomic_store_rel_ptr(&a->value, (unsigned long) p);
#else
    atomic_store_rel_ptr((unsigned int *) &a->value, (unsigned int) p);
#endif
}

static inline int pa_atomic_ptr_cmpxchg(pa_atomic_ptr_t *a, void *old_p, void* new_p) {
#ifdef atomic_load_acq_64
    return atomic_cmpset_ptr(&a->value, (unsigned long) old_p, (unsigned long) new_p);
#else
    return atomic_cmpset_ptr((unsigned int *) &a->value, (unsigned int) old_p, (unsigned int) new_p);
#endif
}

#elif defined(__GNUC__) && (defined(__amd64__) || defined(__x86_64__))

#warning "The native atomic operations implementation for AMD64 has not been tested thoroughly. libatomic_ops is known to not work properly on AMD64 and your gcc version is too old for the gcc-builtin atomic ops support. You have three options now: test the native atomic operations implementation for AMD64, fix libatomic_ops, or upgrade your GCC."

/* Adapted from glibc */

typedef struct pa_atomic {
    volatile int value;
} pa_atomic_t;

#define PA_ATOMIC_INIT(v) { .value = (v) }

static inline int pa_atomic_load(const pa_atomic_t *a) {
    return a->value;
}

static inline void pa_atomic_store(pa_atomic_t *a, int i) {
    a->value = i;
}

static inline int pa_atomic_add(pa_atomic_t *a, int i) {
    int result;

    __asm __volatile ("lock; xaddl %0, %1"
                      : "=r" (result), "=m" (a->value)
                      : "0" (i), "m" (a->value));

    return result;
}

static inline int pa_atomic_sub(pa_atomic_t *a, int i) {
    return pa_atomic_add(a, -i);
}

static inline int pa_atomic_inc(pa_atomic_t *a) {
    return pa_atomic_add(a, 1);
}

static inline int pa_atomic_dec(pa_atomic_t *a) {
    return pa_atomic_sub(a, 1);
}

static inline bool pa_atomic_cmpxchg(pa_atomic_t *a, int old_i, int new_i) {
    int result;

    __asm__ __volatile__ ("lock; cmpxchgl %2, %1"
                          : "=a" (result), "=m" (a->value)
                          : "r" (new_i), "m" (a->value), "0" (old_i));

    return result == old_i;
}

typedef struct pa_atomic_ptr {
    volatile unsigned long value;
} pa_atomic_ptr_t;

#define PA_ATOMIC_PTR_INIT(v) { .value = (long) (v) }

static inline void* pa_atomic_ptr_load(const pa_atomic_ptr_t *a) {
    return (void*) a->value;
}

static inline void pa_atomic_ptr_store(pa_atomic_ptr_t *a, void *p) {
    a->value = (unsigned long) p;
}

static inline bool pa_atomic_ptr_cmpxchg(pa_atomic_ptr_t *a, void *old_p, void* new_p) {
    void *result;

    __asm__ __volatile__ ("lock; cmpxchgq %q2, %1"
                          : "=a" (result), "=m" (a->value)
                          : "r" (new_p), "m" (a->value), "0" (old_p));

    return result == old_p;
}

#elif defined(ATOMIC_ARM_INLINE_ASM)

/*
   These should only be enabled if we have ARMv6 or better.
*/

typedef struct pa_atomic {
    volatile int value;
} pa_atomic_t;

#define PA_ATOMIC_INIT(v) { .value = (v) }

static inline void pa_memory_barrier(void) {
#ifdef ATOMIC_ARM_MEMORY_BARRIER_ENABLED
    asm volatile ("mcr p15, 0, r0, c7, c10, 5 @ dmb");
#endif
}

static inline int pa_atomic_load(const pa_atomic_t *a) {
    pa_memory_barrier();
    return a->value;
}

static inline void pa_atomic_store(pa_atomic_t *a, int i) {
    a->value = i;
    pa_memory_barrier();
}

/* Returns the previously set value */
static inline int pa_atomic_add(pa_atomic_t *a, int i) {
    unsigned long not_exclusive;
    int new_val, old_val;

    pa_memory_barrier();
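    /* ldrex/strex form a load-exclusive/store-exclusive pair: strex
     * writes 0 into not_exclusive on success and 1 if the exclusive
     * reservation was lost, in which case we retry. */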
    do {
        asm volatile ("ldrex %0, [%3]\n"
                      "add %2, %0, %4\n"
                      "strex %1, %2, [%3]\n"
                      : "=&r" (old_val), "=&r" (not_exclusive), "=&r" (new_val)
                      : "r" (&a->value), "Ir" (i)
                      : "cc");
    } while(not_exclusive);
    pa_memory_barrier();

    return old_val;
}

/* Returns the previously set value */
static inline int pa_atomic_sub(pa_atomic_t *a, int i) {
    unsigned long not_exclusive;
    int new_val, old_val;

    pa_memory_barrier();
    do {
        asm volatile ("ldrex %0, [%3]\n"
                      "sub %2, %0, %4\n"
                      "strex %1, %2, [%3]\n"
                      : "=&r" (old_val), "=&r" (not_exclusive), "=&r" (new_val)
                      : "r" (&a->value), "Ir" (i)
                      : "cc");
    } while(not_exclusive);
    pa_memory_barrier();

    return old_val;
}

static inline int pa_atomic_inc(pa_atomic_t *a) {
    return pa_atomic_add(a, 1);
}

static inline int pa_atomic_dec(pa_atomic_t *a) {
    return pa_atomic_sub(a, 1);
}

static inline bool pa_atomic_cmpxchg(pa_atomic_t *a, int old_i, int new_i) {
    unsigned long not_equal, not_exclusive;

    pa_memory_barrier();
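    /* subs sets the condition flags from the difference between the
     * loaded value and old_i; strexeq therefore only attempts the
     * store when the two are equal. */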
    do {
        asm volatile ("ldrex %0, [%2]\n"
                      "subs %0, %0, %3\n"
                      "mov %1, %0\n"
                      "strexeq %0, %4, [%2]\n"
                      : "=&r" (not_exclusive), "=&r" (not_equal)
                      : "r" (&a->value), "Ir" (old_i), "r" (new_i)
                      : "cc");
    } while(not_exclusive && !not_equal);
    pa_memory_barrier();

    return !not_equal;
}

typedef struct pa_atomic_ptr {
    volatile unsigned long value;
} pa_atomic_ptr_t;

#define PA_ATOMIC_PTR_INIT(v) { .value = (long) (v) }

static inline void* pa_atomic_ptr_load(const pa_atomic_ptr_t *a) {
    pa_memory_barrier();
    return (void*) a->value;
}

static inline void pa_atomic_ptr_store(pa_atomic_ptr_t *a, void *p) {
    a->value = (unsigned long) p;
    pa_memory_barrier();
}

static inline bool pa_atomic_ptr_cmpxchg(pa_atomic_ptr_t *a, void *old_p, void* new_p) {
    unsigned long not_equal, not_exclusive;

    pa_memory_barrier();
    do {
        asm volatile ("ldrex %0, [%2]\n"
                      "subs %0, %0, %3\n"
                      "mov %1, %0\n"
                      "strexeq %0, %4, [%2]\n"
                      : "=&r" (not_exclusive), "=&r" (not_equal)
                      : "r" (&a->value), "Ir" (old_p), "r" (new_p)
                      : "cc");
    } while(not_exclusive && !not_equal);
    pa_memory_barrier();

    return !not_equal;
}

#elif defined(ATOMIC_ARM_LINUX_HELPERS)

/* See the file arch/arm/kernel/entry-armv.S in your kernel sources for more
   information about these functions. The ARM kernel helper functions first
   appeared in 2.6.16.
   Pass the --disable-atomic-arm-linux-helpers flag to configure if you prefer
   the inline asm implementation or have an obsolete Linux kernel.
*/
/* Memory barrier */
typedef void (__kernel_dmb_t)(void);
#define __kernel_dmb (*(__kernel_dmb_t *)0xffff0fa0)

static inline void pa_memory_barrier(void) {
#ifndef ATOMIC_ARM_MEMORY_BARRIER_ENABLED
    __kernel_dmb();
#endif
}

/* Atomic exchange (__kernel_cmpxchg_t contains memory barriers if needed) */
typedef int (__kernel_cmpxchg_t)(int oldval, int newval, volatile int *ptr);
#define __kernel_cmpxchg (*(__kernel_cmpxchg_t *)0xffff0fc0)

/* This is just to get rid of all warnings */
typedef int (__kernel_cmpxchg_u_t)(unsigned long oldval, unsigned long newval, volatile unsigned long *ptr);
#define __kernel_cmpxchg_u (*(__kernel_cmpxchg_u_t *)0xffff0fc0)

typedef struct pa_atomic {
    volatile int value;
} pa_atomic_t;

#define PA_ATOMIC_INIT(v) { .value = (v) }

static inline int pa_atomic_load(const pa_atomic_t *a) {
    pa_memory_barrier();
    return a->value;
}

static inline void pa_atomic_store(pa_atomic_t *a, int i) {
    a->value = i;
    pa_memory_barrier();
}

/* Returns the previously set value */
static inline int pa_atomic_add(pa_atomic_t *a, int i) {
    int old_val;
    do {
        old_val = a->value;
    } while(__kernel_cmpxchg(old_val, old_val + i, &a->value));
    return old_val;
}

/* Returns the previously set value */
static inline int pa_atomic_sub(pa_atomic_t *a, int i) {
    int old_val;
    do {
        old_val = a->value;
    } while(__kernel_cmpxchg(old_val, old_val - i, &a->value));
    return old_val;
}

/* Returns the previously set value */
static inline int pa_atomic_inc(pa_atomic_t *a) {
    return pa_atomic_add(a, 1);
}

/* Returns the previously set value */
static inline int pa_atomic_dec(pa_atomic_t *a) {
    return pa_atomic_sub(a, 1);
}

/* Returns true when the operation was successful. */
static inline bool pa_atomic_cmpxchg(pa_atomic_t *a, int old_i, int new_i) {
    bool failed;
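    /* __kernel_cmpxchg() may fail spuriously (on ARMv6+ kernels it is
     * built on ldrex/strex), so retry as long as the current value
     * still matches old_i. */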
    do {
        failed = !!__kernel_cmpxchg(old_i, new_i, &a->value);
    } while(failed && a->value == old_i);
    return !failed;
}

typedef struct pa_atomic_ptr {
    volatile unsigned long value;
} pa_atomic_ptr_t;

#define PA_ATOMIC_PTR_INIT(v) { .value = (unsigned long) (v) }

static inline void* pa_atomic_ptr_load(const pa_atomic_ptr_t *a) {
    pa_memory_barrier();
    return (void*) a->value;
}

static inline void pa_atomic_ptr_store(pa_atomic_ptr_t *a, void *p) {
    a->value = (unsigned long) p;
    pa_memory_barrier();
}

static inline bool pa_atomic_ptr_cmpxchg(pa_atomic_ptr_t *a, void *old_p, void* new_p) {
    bool failed;
    do {
        failed = !!__kernel_cmpxchg_u((unsigned long) old_p, (unsigned long) new_p, &a->value);
    } while(failed && a->value == (unsigned long) old_p);
    return !failed;
}

#else

/* libatomic_ops based implementation */

#include <atomic_ops.h>

typedef struct pa_atomic {
    volatile AO_t value;
} pa_atomic_t;

#define PA_ATOMIC_INIT(v) { .value = (AO_t) (v) }

static inline int pa_atomic_load(const pa_atomic_t *a) {
    return (int) AO_load_full((AO_t*) &a->value);
}

static inline void pa_atomic_store(pa_atomic_t *a, int i) {
    AO_store_full(&a->value, (AO_t) i);
}

static inline int pa_atomic_add(pa_atomic_t *a, int i) {
    return (int) AO_fetch_and_add_full(&a->value, (AO_t) i);
}

static inline int pa_atomic_sub(pa_atomic_t *a, int i) {
    return (int) AO_fetch_and_add_full(&a->value, (AO_t) -i);
}

static inline int pa_atomic_inc(pa_atomic_t *a) {
    return (int) AO_fetch_and_add1_full(&a->value);
}

static inline int pa_atomic_dec(pa_atomic_t *a) {
    return (int) AO_fetch_and_sub1_full(&a->value);
}

static inline bool pa_atomic_cmpxchg(pa_atomic_t *a, int old_i, int new_i) {
    return AO_compare_and_swap_full(&a->value, (unsigned long) old_i, (unsigned long) new_i);
}

typedef struct pa_atomic_ptr {
    volatile AO_t value;
} pa_atomic_ptr_t;

#define PA_ATOMIC_PTR_INIT(v) { .value = (AO_t) (v) }

static inline void* pa_atomic_ptr_load(const pa_atomic_ptr_t *a) {
    return (void*) AO_load_full((AO_t*) &a->value);
}

static inline void pa_atomic_ptr_store(pa_atomic_ptr_t *a, void *p) {
    AO_store_full(&a->value, (AO_t) p);
}

static inline bool pa_atomic_ptr_cmpxchg(pa_atomic_ptr_t *a, void *old_p, void* new_p) {
    return AO_compare_and_swap_full(&a->value, (AO_t) old_p, (AO_t) new_p);
}

#endif

#endif