/arch/arc/include/asm/
  spinlock.h | 32, 45: smp_mb() in arch_spin_lock(); 53, 69: in arch_spin_trylock(); 76, 80: in arch_spin_unlock(); 92, 116: in arch_read_lock(); 124, 142: in arch_read_trylock() [all …]
  futex.h | 23, 45, 51, 72: smp_mb() in macro bodies; 135, 162: in futex_atomic_cmpxchg_inatomic()
  cmpxchg.h | 28, 42: smp_mb() in __cmpxchg(); 94, 102: in __xchg()
  atomic.h | 85, 100, 207, 213: smp_mb() in macro bodies
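What the spinlock.h, cmpxchg.h and atomic.h references above have in common is that the full barrier sits inside the primitive, so the caller's critical section or value-returning operation cannot leak past it. A minimal sketch of that placement (one barrier after the acquire, one before the release), with relaxed C11 atomics standing in for the real ARC primitives; the names and stand-in macros below are illustrative, not the kernel's:

#include <stdatomic.h>

#define smp_mb()  atomic_thread_fence(memory_order_seq_cst)   /* stand-in */

typedef struct { atomic_uint slock; } sketch_spinlock_t;

static inline void sketch_spin_lock(sketch_spinlock_t *lock)
{
        while (atomic_exchange_explicit(&lock->slock, 1,
                                        memory_order_relaxed))
                ;               /* spin until the previous value was 0 */
        smp_mb();               /* keep the critical section after the acquire */
}

static inline void sketch_spin_unlock(sketch_spinlock_t *lock)
{
        smp_mb();               /* finish the critical section before the release */
        atomic_store_explicit(&lock->slock, 0, memory_order_relaxed);
}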
/arch/metag/include/asm/
  spinlock_lnkget.h | 41: smp_mb() in arch_spin_lock(); 63: in arch_spin_trylock(); 70: in arch_spin_unlock(); 103: in arch_write_lock(); 124: in arch_write_trylock(); 131: in arch_write_unlock(); 182: in arch_read_lock(); 189: in arch_read_unlock(); 222: in arch_read_trylock()
  cmpxchg_lnkget.h | 8, 25: smp_mb() in xchg_u32(); 34, 51: in xchg_u8(); 61, 81: in __cmpxchg_u32()
  barrier.h | 52: #define smp_mb() barrier(); 73: #define smp_mb() fence(); 78: #define smp_mb() barrier(); 87: #define smp_store_mb(var, value) do { WRITE_ONCE(var, value); smp_mb(); } while (0); 92, 100: smp_mb() in macro bodies
  atomic_lnkget.h | 53, 67: smp_mb() in macro bodies; 89, 105: in ATOMIC_OPS(); 132, 149: in __atomic_add_unless()
/arch/tile/include/asm/
  cmpxchg.h | 47, 55, 64, 73, 83, 98, 106, 121: smp_mb() in macro bodies
  barrier.h | 82, 85: #define smp_mb__before_atomic() smp_mb(); 86: #define smp_mb__after_atomic() smp_mb()
  bitops_32.h | 87: smp_mb() in test_and_set_bit(); 103: in test_and_clear_bit(); 120: in test_and_change_bit() (each commented "barrier for proper semantics")
  atomic_32.h | 59: smp_mb() in ATOMIC_OP(); 74: in __atomic_add_unless(); 149: in ATOMIC64_OP(); 165: in atomic64_add_unless() (each commented "barrier for proper semantics")
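The tile barrier.h rows map smp_mb__before_atomic() and smp_mb__after_atomic() straight onto smp_mb(), and the bitops and atomic rows add a full barrier so the value-returning operations order the code around them. A hedged usage sketch of the before/after pair turning a relaxed counter update into a fully ordered publish step (stand-in definitions, hypothetical names):

#include <stdatomic.h>

#define smp_mb()                 atomic_thread_fence(memory_order_seq_cst)
#define smp_mb__before_atomic()  smp_mb()
#define smp_mb__after_atomic()   smp_mb()

static atomic_int pending;      /* hypothetical counter another CPU polls */
static int payload;             /* data that must be visible first */

static void sketch_publish(void)
{
        payload = 42;                   /* plain store */
        smp_mb__before_atomic();        /* payload visible before the count */
        atomic_fetch_add_explicit(&pending, 1, memory_order_relaxed);
        smp_mb__after_atomic();         /* count visible before later accesses */
}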
/arch/arm/include/asm/
  barrier.h | 79: #define smp_mb() barrier(); 83: #define smp_mb() dmb(ish); 84: #define smp_rmb() smp_mb(); 91, 99: smp_mb() in macro bodies; 106: #define smp_store_mb(var, value) do { WRITE_ONCE(var, value); smp_mb(); } while (0); 108: #define smp_mb__before_atomic() smp_mb(); 109: #define smp_mb__after_atomic() smp_mb()
  spinlock.h | 80: smp_mb() in arch_spin_lock(); 102: in arch_spin_trylock(); 111: in arch_spin_unlock(); 157: in arch_write_lock(); 177: in arch_write_trylock(); 186: in arch_write_unlock(); 228: in arch_read_lock(); 235: in arch_read_unlock(); 270: in arch_read_trylock()
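The arm barrier.h rows show the usual two-sided definition: a plain compiler barrier when the kernel is built for a single processor, a dmb ish instruction when it is built SMP, with smp_rmb() reusing smp_mb(). A sketch of that conditional shape (the surrounding configuration tests are simplified and the exact context is an assumption):

#define barrier()       __asm__ __volatile__("" : : : "memory")

#ifndef CONFIG_SMP
#define smp_mb()        barrier()       /* UP: a compiler barrier is enough */
#else
#define dmb(opt)        __asm__ __volatile__("dmb " #opt : : : "memory")
#define smp_mb()        dmb(ish)        /* SMP: inner-shareable data memory barrier */
#define smp_rmb()       smp_mb()
#endif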
/arch/alpha/include/asm/
  xchg.h | 25, 50, 75, 96: smp_mb() in ____xchg(); 151, 179, 207, 231: in ____cmpxchg()
  atomic.h | 52, 64, 88, 100: smp_mb() in macro bodies; 149, 164: in ATOMIC_OPS(); 181, 196: in atomic64_add_unless(); 210, 224: in atomic64_dec_if_positive()
/arch/mips/include/asm/
  barrier.h | 95: # define smp_mb() __sync(); 99: # define smp_mb() __asm__ __volatile__("sync" : : :"memory"); 104: #define smp_mb() barrier(); 116: do { WRITE_ONCE(var, value); smp_mb(); } while (0); 135, 143: smp_mb() in macro bodies
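Line 116 above is the body of a store-then-barrier helper, the same do { WRITE_ONCE(var, value); smp_mb(); } while (0) shape that metag, arm and arm64 define as smp_store_mb(). A hedged sketch of the classic way such a helper is used, publishing a "waiting" state before re-reading the wakeup condition so it pairs with the waker's barrier (stand-in definitions, hypothetical names):

#include <stdatomic.h>

#define smp_mb()        atomic_thread_fence(memory_order_seq_cst)
#define WRITE_ONCE(x, v) atomic_store_explicit(&(x), (v), memory_order_relaxed)
#define smp_store_mb(var, value) \
        do { WRITE_ONCE(var, value); smp_mb(); } while (0)

static atomic_int task_state;   /* hypothetical "am I waiting?" flag */
static atomic_int wake_cond;    /* hypothetical wakeup condition */

static void sketch_prepare_to_wait(void)
{
        smp_store_mb(task_state, 1);    /* mark ourselves waiting, then full barrier */
        /* The barrier orders the state store before this read, pairing with a
         * waker that sets wake_cond and then issues smp_mb() before reading
         * task_state: one side or the other always observes the hand-off. */
        if (!atomic_load_explicit(&wake_cond, memory_order_relaxed))
                ;                       /* would block here */
}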
/arch/ia64/include/asm/
  barrier.h | 46: # define smp_mb() mb(); 48: # define smp_mb() barrier(); 51: #define smp_rmb() smp_mb(); 52: #define smp_wmb() smp_mb()
/arch/powerpc/include/asm/
  barrier.h | 55: #define smp_mb() mb(); 61: #define smp_mb() barrier(); 93: #define smp_mb__before_atomic() smp_mb(); 94: #define smp_mb__after_atomic() smp_mb(); 95: #define smp_mb__before_spinlock() smp_mb()
/arch/x86/include/asm/
  barrier.h | 63: #define smp_mb() mb(); 68: #define smp_mb() barrier(); 87, 95: smp_mb() in macro bodies
/arch/arm64/include/asm/
  barrier.h | 41: #define smp_mb() dmb(ish); 110: #define smp_store_mb(var, value) do { WRITE_ONCE(var, value); smp_mb(); } while (0); 113: #define smp_mb__before_atomic() smp_mb(); 114: #define smp_mb__after_atomic() smp_mb()
/arch/s390/include/asm/
  barrier.h | 29: #define smp_mb() mb(); 36: #define smp_mb__before_atomic() smp_mb(); 37: #define smp_mb__after_atomic() smp_mb()
/arch/sh/kernel/
  ftrace.c | 150: smp_mb() in ftrace_nmi_enter(); 156: in ftrace_nmi_exit(); 191, 196, 201: in do_ftrace_mod_code()
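The sh references are not locking code: ftrace patches call sites while NMIs may still run through them, so do_ftrace_mod_code() and the NMI entry/exit hooks use smp_mb() to keep a flag and the data it guards ordered on both sides. An illustrative two-sided sketch of that pairing (hypothetical names; the real ftrace.c protocol has more states than this):

#include <stdatomic.h>

#define smp_mb()  atomic_thread_fence(memory_order_seq_cst)   /* stand-in */

static atomic_int patch_pending;        /* flag the NMI side polls */
static int patch_data;                  /* payload guarded by the flag */

static void sketch_mod_code(int new_data)       /* patcher side */
{
        patch_data = new_data;
        smp_mb();               /* payload visible before the flag is raised */
        atomic_store_explicit(&patch_pending, 1, memory_order_relaxed);
        smp_mb();               /* flag visible before the patcher carries on */
}

static void sketch_nmi_enter(void)              /* NMI side */
{
        if (atomic_load_explicit(&patch_pending, memory_order_relaxed)) {
                smp_mb();       /* read the flag before reading the payload */
                (void)patch_data;       /* safe to act on the published data */
        }
}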
/arch/sparc/include/asm/
  barrier_64.h | 47: #define smp_mb() mb(); 51: #define smp_mb() __asm__ __volatile__("":::"memory")
/arch/powerpc/lib/
  locks.c | 74, 83: smp_mb() in arch_spin_unlock_wait()
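The last two references bracket arch_spin_unlock_wait(): a full barrier before and after waiting for the lock word to clear, so the caller's accesses are ordered against the critical section it waited out. A minimal sketch of that placement, assuming a simple lock word and eliding whatever the real routine does while it spins (e.g. yielding to the hypervisor):

#include <stdatomic.h>

#define smp_mb()        atomic_thread_fence(memory_order_seq_cst)   /* stand-in */
#define cpu_relax()     do { } while (0)                            /* stand-in */

typedef struct { atomic_uint slock; } sketch_arch_spinlock_t;

static inline void sketch_spin_unlock_wait(sketch_arch_spinlock_t *lock)
{
        smp_mb();       /* order prior accesses before sampling the lock */
        while (atomic_load_explicit(&lock->slock, memory_order_relaxed))
                cpu_relax();
        smp_mb();       /* order the observed release before later accesses */
}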