/*
 * Copyright (C) 2007, 2008, 2010, 2012 Apple Inc. All rights reserved.
 * Copyright (C) 2007 Justin Haygood (jhaygood@reaktix.com)
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 *
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 * 3. Neither the name of Apple Computer, Inc. ("Apple") nor the names of
 *    its contributors may be used to endorse or promote products derived
 *    from this software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE AND ITS CONTRIBUTORS "AS IS" AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
 * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
 * DISCLAIMED. IN NO EVENT SHALL APPLE OR ITS CONTRIBUTORS BE LIABLE FOR ANY
 * DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
 * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
 * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
 * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
 * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#ifndef Atomics_h
#define Atomics_h

#include "wtf/Assertions.h"
#include "wtf/CPU.h"

#include <stdint.h>

#if COMPILER(MSVC)
#include <windows.h>
#endif

#if defined(THREAD_SANITIZER)
#include <sanitizer/tsan_interface_atomic.h>
#endif

#if defined(ADDRESS_SANITIZER)
#include <sanitizer/asan_interface.h>
#endif

namespace WTF {

#if COMPILER(MSVC)

// atomicAdd returns the result of the addition.
ALWAYS_INLINE int atomicAdd(int volatile* addend, int increment)
{
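    // InterlockedExchangeAdd returns the value the addend held before the
    // addition, so add the increment again to produce the post-add result.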
    return InterlockedExchangeAdd(reinterpret_cast<long volatile*>(addend), static_cast<long>(increment)) + increment;
}

// atomicSubtract returns the result of the subtraction.
ALWAYS_INLINE int atomicSubtract(int volatile* addend, int decrement)
{
    return InterlockedExchangeAdd(reinterpret_cast<long volatile*>(addend), static_cast<long>(-decrement)) - decrement;
}

ALWAYS_INLINE int atomicIncrement(int volatile* addend) { return InterlockedIncrement(reinterpret_cast<long volatile*>(addend)); }
ALWAYS_INLINE int atomicDecrement(int volatile* addend) { return InterlockedDecrement(reinterpret_cast<long volatile*>(addend)); }

ALWAYS_INLINE int64_t atomicIncrement(int64_t volatile* addend) { return InterlockedIncrement64(reinterpret_cast<long long volatile*>(addend)); }
ALWAYS_INLINE int64_t atomicDecrement(int64_t volatile* addend) { return InterlockedDecrement64(reinterpret_cast<long long volatile*>(addend)); }

ALWAYS_INLINE int atomicTestAndSetToOne(int volatile* ptr)
{
    int ret = InterlockedExchange(reinterpret_cast<long volatile*>(ptr), 1);
    ASSERT(!ret || ret == 1);
    return ret;
}

ALWAYS_INLINE void atomicSetOneToZero(int volatile* ptr)
{
    ASSERT(*ptr == 1);
    InterlockedExchange(reinterpret_cast<long volatile*>(ptr), 0);
}

#else

// atomicAdd returns the result of the addition.
ALWAYS_INLINE int atomicAdd(int volatile* addend, int increment) { return __sync_add_and_fetch(addend, increment); }
// atomicSubtract returns the result of the subtraction.
ALWAYS_INLINE int atomicSubtract(int volatile* addend, int decrement) { return __sync_sub_and_fetch(addend, decrement); }

ALWAYS_INLINE int atomicIncrement(int volatile* addend) { return atomicAdd(addend, 1); }
ALWAYS_INLINE int atomicDecrement(int volatile* addend) { return atomicSubtract(addend, 1); }

ALWAYS_INLINE int64_t atomicIncrement(int64_t volatile* addend) { return __sync_add_and_fetch(addend, 1); }
ALWAYS_INLINE int64_t atomicDecrement(int64_t volatile* addend) { return __sync_sub_and_fetch(addend, 1); }

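// __sync_lock_test_and_set is an atomic exchange with acquire semantics: it
// returns the previous value, and later memory operations cannot be reordered
// before it.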
ALWAYS_INLINE int atomicTestAndSetToOne(int volatile* ptr)
{
    int ret = __sync_lock_test_and_set(ptr, 1);
    ASSERT(!ret || ret == 1);
    return ret;
}

ALWAYS_INLINE void atomicSetOneToZero(int volatile* ptr)
{
    ASSERT(*ptr == 1);
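    // __sync_lock_release stores 0 with release semantics, so earlier memory
    // operations cannot be reordered past the unlock.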
    __sync_lock_release(ptr);
}
#endif
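
// Illustrative usage sketch (not part of this header's API; the names s_lock
// and doCriticalWork are hypothetical): atomicTestAndSetToOne and
// atomicSetOneToZero pair naturally as a simple spinlock, since the
// test-and-set acquires and the set-to-zero releases.
//
//     static int volatile s_lock = 0;
//
//     void doCriticalWork()
//     {
//         while (atomicTestAndSetToOne(&s_lock)) { } // Spin while the previous value was 1.
//         // ... critical section ...
//         atomicSetOneToZero(&s_lock); // Unlock: 1 -> 0.
//     }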

#if defined(THREAD_SANITIZER)

ALWAYS_INLINE void releaseStore(volatile int* ptr, int value)
{
    __tsan_atomic32_store(ptr, value, __tsan_memory_order_release);
}

ALWAYS_INLINE int acquireLoad(volatile const int* ptr)
{
    return __tsan_atomic32_load(ptr, __tsan_memory_order_acquire);
}

ALWAYS_INLINE void releaseStore(volatile unsigned* ptr, unsigned value)
{
    __tsan_atomic32_store(reinterpret_cast<volatile int*>(ptr), static_cast<int>(value), __tsan_memory_order_release);
}

ALWAYS_INLINE unsigned acquireLoad(volatile const unsigned* ptr)
{
    return static_cast<unsigned>(__tsan_atomic32_load(reinterpret_cast<volatile const int*>(ptr), __tsan_memory_order_acquire));
}

#else

#if CPU(X86) || CPU(X86_64)
// Only a compiler barrier is needed.
#if COMPILER(MSVC)
// Starting with Visual Studio 2005, the compiler guarantees acquire and
// release semantics for operations on volatile variables. See the MSDN entry
// for the MemoryBarrier macro.
#define MEMORY_BARRIER()
#else
#define MEMORY_BARRIER() __asm__ __volatile__("" : : : "memory")
#endif
#elif CPU(ARM) && (OS(LINUX) || OS(ANDROID))
// On ARM, __sync_synchronize generates a dmb instruction, which is very
// expensive on single-core devices that don't actually need it. Avoid the
// cost by calling into the kuser_memory_barrier kernel helper instead.
inline void memoryBarrier()
{
    // Note: This is a function call, which is also an implicit compiler barrier.
    typedef void (*KernelMemoryBarrierFunc)();
    ((KernelMemoryBarrierFunc)0xffff0fa0)();
}
#define MEMORY_BARRIER() memoryBarrier()
#else
// Fallback to the compiler intrinsic on all other platforms.
#define MEMORY_BARRIER() __sync_synchronize()
#endif

ALWAYS_INLINE void releaseStore(volatile int* ptr, int value)
{
    MEMORY_BARRIER();
    *ptr = value;
}

ALWAYS_INLINE int acquireLoad(volatile const int* ptr)
{
    int value = *ptr;
    MEMORY_BARRIER();
    return value;
}

ALWAYS_INLINE void releaseStore(volatile unsigned* ptr, unsigned value)
{
    MEMORY_BARRIER();
    *ptr = value;
}

ALWAYS_INLINE unsigned acquireLoad(volatile const unsigned* ptr)
{
    unsigned value = *ptr;
    MEMORY_BARRIER();
    return value;
}

#if defined(ADDRESS_SANITIZER)

// FIXME: See comment on NO_SANITIZE_ADDRESS in platform/heap/AddressSanitizer.h
#if !OS(WIN) || COMPILER(CLANG)
#define NO_SANITIZE_ADDRESS_ATOMICS __attribute__((no_sanitize_address))
#else
#define NO_SANITIZE_ADDRESS_ATOMICS
#endif

NO_SANITIZE_ADDRESS_ATOMICS ALWAYS_INLINE void asanUnsafeReleaseStore(volatile unsigned* ptr, unsigned value)
{
    MEMORY_BARRIER();
    *ptr = value;
}

NO_SANITIZE_ADDRESS_ATOMICS ALWAYS_INLINE unsigned asanUnsafeAcquireLoad(volatile const unsigned* ptr)
{
    unsigned value = *ptr;
    MEMORY_BARRIER();
    return value;
}

#undef NO_SANITIZE_ADDRESS_ATOMICS

#endif // defined(ADDRESS_SANITIZER)

#undef MEMORY_BARRIER

#endif
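
// Illustrative usage sketch (not part of this header's API; the names s_data,
// s_ready, publish, and consume are hypothetical): releaseStore and
// acquireLoad implement the classic publish/consume pattern.
//
//     static int s_data = 0;
//     static unsigned volatile s_ready = 0;
//
//     void publish()
//     {
//         s_data = 42; // Plain store; the release below orders it first.
//         releaseStore(&s_ready, 1);
//     }
//
//     void consume()
//     {
//         if (acquireLoad(&s_ready))
//             ASSERT(s_data == 42); // The acquire makes the store to s_data visible.
//     }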

#if !defined(ADDRESS_SANITIZER)

ALWAYS_INLINE void asanUnsafeReleaseStore(volatile unsigned* ptr, unsigned value)
{
    releaseStore(ptr, value);
}

ALWAYS_INLINE unsigned asanUnsafeAcquireLoad(volatile const unsigned* ptr)
{
    return acquireLoad(ptr);
}

#endif

} // namespace WTF

using WTF::atomicAdd;
using WTF::atomicSubtract;
using WTF::atomicDecrement;
using WTF::atomicIncrement;
using WTF::atomicTestAndSetToOne;
using WTF::atomicSetOneToZero;
using WTF::acquireLoad;
using WTF::releaseStore;

// These functions allow loading from and storing to poisoned memory. Only use
// them if you know what you are doing, since they will silence
// use-after-poison errors from ASan.
using WTF::asanUnsafeAcquireLoad;
using WTF::asanUnsafeReleaseStore;
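
// Illustrative sketch (hypothetical names): reading a flag word that may sit
// in an ASan-poisoned region of the heap without tripping a use-after-poison
// report.
//
//     unsigned flags = asanUnsafeAcquireLoad(&header->m_poisonedFlags);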

#endif // Atomics_h