/arch/sh/include/asm/ |
D | atomic.h |  39  #define atomic_cmpxchg(v, o, n) (cmpxchg(&((v)->counter), (o), (n)))  macro
             |  57  old = atomic_cmpxchg((v), c, c + (a));  in __atomic_add_unless()
|
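Most of the hits in this list share the two shapes visible in the sh entry above: atomic_cmpxchg() is a thin wrapper around cmpxchg() on v->counter, and __atomic_add_unless() uses it in a retry loop. A minimal standalone sketch of that pattern, with C11 atomics standing in for the kernel's cmpxchg(); the real per-architecture versions differ in memory-ordering and barrier details:

#include <stdatomic.h>
#include <stdio.h>

typedef struct { atomic_int counter; } atomic_t;

static int atomic_cmpxchg(atomic_t *v, int old, int new)
{
        /* On failure atomic_compare_exchange_strong() writes the observed
         * value into old, so returning old always reports what was found,
         * matching the kernel convention: the swap happened iff the return
         * value equals the caller's old. */
        atomic_compare_exchange_strong(&v->counter, &old, new);
        return old;
}

static int __atomic_add_unless(atomic_t *v, int a, int u)
{
        int c = atomic_load(&v->counter);
        int old;

        /* Retry until the add sticks or the forbidden value u shows up. */
        while (c != u && (old = atomic_cmpxchg(v, c, c + a)) != c)
                c = old;        /* lost a race: restart from the fresh value */
        return c;               /* old value; v was bumped iff c != u */
}

int main(void)
{
        atomic_t v = { 5 };

        printf("%d\n", __atomic_add_unless(&v, 1, 5));  /* 5, v stays 5 */
        printf("%d\n", __atomic_add_unless(&v, 1, 0));  /* 5, v becomes 6 */
        printf("%d\n", atomic_load(&v.counter));        /* 6 */
        return 0;
}

Note that the loop restarts from the value returned by the failed compare-and-swap rather than re-reading v, which is what the "c = old" step in the listed __atomic_add_unless() hits does.
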
/arch/m68k/include/asm/ |
D | atomic.h | 120  #define atomic_cmpxchg(v, o, n) ((int)cmpxchg(&((v)->counter), (o), (n)))  macro
             | 125  static inline int atomic_cmpxchg(atomic_t *v, int old, int new)  in atomic_cmpxchg() function
             | 190  old = atomic_cmpxchg((v), c, c + (a));  in __atomic_add_unless()
|
/arch/sh/kernel/ |
D | ftrace.c | 122  old = atomic_cmpxchg(&nmi_running, old, new);  in clear_mod_flag()
             | 162  if (!atomic_cmpxchg(&nmi_running, 0, MOD_CODE_WRITE_FLAG))  in wait_for_nmi_and_set_mod_flag()
             | 167  } while (atomic_cmpxchg(&nmi_running, 0, MOD_CODE_WRITE_FLAG));  in wait_for_nmi_and_set_mod_flag()
|
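The sh ftrace hits above use the same primitive as a gate: the code-patching path may set MOD_CODE_WRITE_FLAG only while nmi_running is exactly 0, i.e. no NMI is executing the code being modified. A simplified standalone sketch of that claim/release pattern; cmpxchg_int() and the flag value are illustrative, and the real code also counts in-flight NMIs in the same word and calls cpu_relax() while spinning:

#include <stdatomic.h>
#include <stdio.h>

#define MOD_CODE_WRITE_FLAG (1 << 30)   /* illustrative bit value */

static atomic_int nmi_running;

/* Compare-and-swap returning the previous value, like atomic_cmpxchg(). */
static int cmpxchg_int(atomic_int *v, int old, int new)
{
        atomic_compare_exchange_strong(v, &old, new);
        return old;
}

static void wait_for_nmi_and_set_mod_flag(void)
{
        /* The swap succeeds only if the counter was exactly 0 (no NMI in
         * the code being patched); a non-zero return means an NMI is in
         * flight, so keep retrying. */
        while (cmpxchg_int(&nmi_running, 0, MOD_CODE_WRITE_FLAG))
                ;       /* the real loop calls cpu_relax() here */
}

static void clear_mod_flag(void)
{
        /* Drop the flag bit while keeping anything else stored in the same
         * word; the cmpxchg loop at ftrace.c:122 does this by hand. */
        atomic_fetch_and(&nmi_running, ~MOD_CODE_WRITE_FLAG);
}

int main(void)
{
        wait_for_nmi_and_set_mod_flag();        /* counter was 0: flag set */
        printf("%#x\n", (unsigned)atomic_load(&nmi_running));
        clear_mod_flag();
        printf("%#x\n", (unsigned)atomic_load(&nmi_running));
        return 0;
}
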
/arch/x86/include/asm/ |
D | atomic.h | 175  static inline int atomic_cmpxchg(atomic_t *v, int old, int new)  in atomic_cmpxchg() function
             | 201  old = atomic_cmpxchg((v), c, c + (a));  in __atomic_add_unless()
|
D | mutex_32.h | 104 if (likely(atomic_cmpxchg(count, 1, 0) == 1)) in __mutex_fastpath_trylock()
|
D | mutex_64.h | 121 if (likely(atomic_cmpxchg(count, 1, 0) == 1)) in __mutex_fastpath_trylock()
|
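The two mutex hits above show the trylock fastpath: the count is 1 when the mutex is unlocked, so a single compare-and-swap of 1 to 0 either takes the lock or leaves it untouched. A standalone sketch of that check (the kernel helpers also take a fail_fn slowpath argument, omitted here, and mutex_fastpath_trylock() is a renamed stand-in):

#include <stdatomic.h>
#include <stdio.h>

static int cmpxchg_int(atomic_int *v, int old, int new)
{
        atomic_compare_exchange_strong(v, &old, new);
        return old;
}

/* Returns 1 if the lock was acquired, 0 otherwise. */
static int mutex_fastpath_trylock(atomic_int *count)
{
        /* Only a transition from 1 (unlocked) to 0 (locked) counts as
         * success; any other old value means someone already holds it. */
        return cmpxchg_int(count, 1, 0) == 1;
}

int main(void)
{
        atomic_int count = 1;           /* 1 == unlocked */

        printf("%d\n", mutex_fastpath_trylock(&count)); /* 1: acquired */
        printf("%d\n", mutex_fastpath_trylock(&count)); /* 0: already held */
        return 0;
}
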
/arch/mn10300/include/asm/ |
D | atomic.h | 122  while (c != (u) && (old = atomic_cmpxchg((v), c, c + (a))) != c) \
             | 128  #define atomic_cmpxchg(v, old, new) (cmpxchg(&((v)->counter), (old), (new)))  macro
|
/arch/sparc/include/asm/ |
D | atomic_64.h |  72  #define atomic_cmpxchg(v, o, n) (cmpxchg(&((v)->counter), (o), (n)))  in ATOMIC_OPS() macro
                |  82  old = atomic_cmpxchg((v), c, c + (a));  in ATOMIC_OPS()
|
D | atomic_32.h | 24 int atomic_cmpxchg(atomic_t *, int, int);
|
/arch/xtensa/include/asm/ |
D | atomic.h | 226  #define atomic_cmpxchg(v, o, n) ((int)cmpxchg(&((v)->counter), (o), (n)))  in ATOMIC_OPS() macro
             | 245  old = atomic_cmpxchg((v), c, c + (a));  in ATOMIC_OPS()
|
/arch/m32r/include/asm/ |
D | atomic.h | 215  #define atomic_cmpxchg(v, o, n) ((int)cmpxchg(&((v)->counter), (o), (n)))  macro
             | 234  old = atomic_cmpxchg((v), c, c + (a));  in __atomic_add_unless()
|
/arch/frv/include/asm/ |
D | atomic.h | 179  #define atomic_cmpxchg(v, old, new) (cmpxchg(&(v)->counter, old, new))  macro
             | 191  old = atomic_cmpxchg((v), c, c + (a));  in __atomic_add_unless()
|
/arch/sparc/lib/ |
D | atomic32.c |  61  int atomic_cmpxchg(atomic_t *v, int old, int new)  in atomic_cmpxchg() function
               |  74  EXPORT_SYMBOL(atomic_cmpxchg);
|
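The sparc32 entry above is the one place in this list where atomic_cmpxchg() is an out-of-line, exported function rather than a macro or inline: the 32-bit port emulates the operation under a lock (the real atomic32.c hashes the address to one of a small set of spinlocks). A minimal pthread-based sketch of that shape, with a single mutex standing in for the hashed locks:

#include <pthread.h>
#include <stdio.h>

typedef struct { int counter; } atomic_t;

static pthread_mutex_t atomic_lock = PTHREAD_MUTEX_INITIALIZER;

int atomic_cmpxchg(atomic_t *v, int old, int new)
{
        int ret;

        pthread_mutex_lock(&atomic_lock);
        ret = v->counter;
        if (ret == old)
                v->counter = new;       /* swap only if the value still matches */
        pthread_mutex_unlock(&atomic_lock);

        return ret;                     /* always report what was found */
}

int main(void)
{
        atomic_t v = { 3 };

        printf("%d\n", atomic_cmpxchg(&v, 3, 7));       /* 3: swap happened */
        printf("%d\n", v.counter);                      /* 7 */
        printf("%d\n", atomic_cmpxchg(&v, 3, 9));       /* 7: no swap */
        return 0;
}
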
/arch/arm64/include/asm/ |
D | atomic.h |  92  static inline int atomic_cmpxchg(atomic_t *ptr, int old, int new)  in ATOMIC_OPS()
             | 121  while (c != u && (old = atomic_cmpxchg((v), c, c + a)) != c)  in __atomic_add_unless()
|
/arch/arc/include/asm/ |
D | cmpxchg.h | 81 #define atomic_cmpxchg(v, o, n) ((int)cmpxchg(&((v)->counter), (o), (n))) macro
|
D | atomic.h | 164 while (c != (u) && (old = atomic_cmpxchg((v), c, c + (a))) != c)\
|
/arch/tile/lib/ |
D | spinlock_64.c | 28 return atomic_cmpxchg((atomic_t *)lock, -1, -1); in arch_spin_read_noalloc()
|
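The tile spinlock hit above relies on a small idiom: a compare-and-swap whose old and new arguments are equal never modifies the word but always returns its current contents, so the read goes through the same atomic path as writers. A sketch of just that idiom (read_via_cmpxchg() is an illustrative name; the cache-allocation behaviour the tile code is actually after is hardware-specific and not modelled):

#include <stdatomic.h>
#include <stdio.h>

static int cmpxchg_int(atomic_int *v, int old, int new)
{
        atomic_compare_exchange_strong(v, &old, new);
        return old;
}

static int read_via_cmpxchg(atomic_int *v)
{
        /* -1 mirrors the sentinel used in the hit; any value works, because
         * old == new means the stored word is never changed, while the
         * return value is always what was found there. */
        return cmpxchg_int(v, -1, -1);
}

int main(void)
{
        atomic_int lock = 42;

        printf("%d\n", read_via_cmpxchg(&lock));        /* 42, lock unchanged */
        return 0;
}
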
/arch/arm/include/asm/ |
D | atomic.h |  83  static inline int atomic_cmpxchg(atomic_t *ptr, int old, int new)  in atomic_cmpxchg() function
             | 164  static inline int atomic_cmpxchg(atomic_t *v, int old, int new)  in atomic_cmpxchg() function
             | 183  while (c != u && (old = atomic_cmpxchg((v), c, c + a)) != c)  in __atomic_add_unless()
|
/arch/tile/include/asm/ |
D | atomic.h | 137 static inline int atomic_cmpxchg(atomic_t *v, int o, int n) in atomic_cmpxchg() function
|
/arch/ia64/include/asm/ |
D | atomic.h | 118  #define atomic_cmpxchg(v, old, new) (cmpxchg(&((v)->counter), old, new))  macro
             | 132  old = atomic_cmpxchg((v), c, c + (a));  in __atomic_add_unless()
|
/arch/hexagon/include/asm/ |
D | atomic.h | 78 static inline int atomic_cmpxchg(atomic_t *v, int old, int new) in atomic_cmpxchg() function
|
/arch/metag/include/asm/ |
D | atomic_lnkget.h | 115 static inline int atomic_cmpxchg(atomic_t *v, int old, int new) in atomic_cmpxchg() function
|
D | atomic_lock1.h | 96 static inline int atomic_cmpxchg(atomic_t *v, int old, int new) in atomic_cmpxchg() function
|
/arch/parisc/include/asm/ |
D | atomic.h |  74  #define atomic_cmpxchg(v, o, n) (cmpxchg(&((v)->counter), (o), (n)))  macro
             |  93  old = atomic_cmpxchg((v), c, c + (a));  in __atomic_add_unless()
|
/arch/avr32/include/asm/ |
D | atomic.h | 170 #define atomic_cmpxchg(v, o, n) (cmpxchg(&((v)->counter), (o), (n))) macro
|