/*
 * Copyright (C) 2007, 2008, 2010, 2012 Apple Inc. All rights reserved.
 * Copyright (C) 2007 Justin Haygood (jhaygood@reaktix.com)
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 *
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 * 3. Neither the name of Apple Computer, Inc. ("Apple") nor the names of
 *    its contributors may be used to endorse or promote products derived
 *    from this software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE AND ITS CONTRIBUTORS "AS IS" AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
 * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
 * DISCLAIMED. IN NO EVENT SHALL APPLE OR ITS CONTRIBUTORS BE LIABLE FOR ANY
 * DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
 * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
 * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
 * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
 * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#ifndef Atomics_h
#define Atomics_h

#include "wtf/Assertions.h"
#include "wtf/CPU.h"

#include <stdint.h>

#if COMPILER(MSVC)
#include <windows.h>
#endif

#if defined(THREAD_SANITIZER)
#include <sanitizer/tsan_interface_atomic.h>
#endif

namespace WTF {

#if COMPILER(MSVC)

// atomicAdd returns the result of the addition.
// InterlockedExchangeAdd returns the original value, so the increment is added
// back in to produce the new value.
ALWAYS_INLINE int atomicAdd(int volatile* addend, int increment)
{
    return InterlockedExchangeAdd(reinterpret_cast<long volatile*>(addend), static_cast<long>(increment)) + increment;
}

// atomicSubtract returns the result of the subtraction.
ALWAYS_INLINE int atomicSubtract(int volatile* addend, int decrement)
{
    return InterlockedExchangeAdd(reinterpret_cast<long volatile*>(addend), static_cast<long>(-decrement)) - decrement;
}

// InterlockedIncrement/InterlockedDecrement return the resulting value,
// matching the GCC variants below.
ALWAYS_INLINE int atomicIncrement(int volatile* addend) { return InterlockedIncrement(reinterpret_cast<long volatile*>(addend)); }
ALWAYS_INLINE int atomicDecrement(int volatile* addend) { return InterlockedDecrement(reinterpret_cast<long volatile*>(addend)); }

ALWAYS_INLINE int64_t atomicIncrement(int64_t volatile* addend) { return InterlockedIncrement64(reinterpret_cast<long long volatile*>(addend)); }
ALWAYS_INLINE int64_t atomicDecrement(int64_t volatile* addend) { return InterlockedDecrement64(reinterpret_cast<long long volatile*>(addend)); }

// Returns the previous value of *ptr (0 or 1) and sets it to 1.
ALWAYS_INLINE int atomicTestAndSetToOne(int volatile* ptr)
{
    int ret = InterlockedExchange(reinterpret_cast<long volatile*>(ptr), 1);
    ASSERT(!ret || ret == 1);
    return ret;
}

ALWAYS_INLINE void atomicSetOneToZero(int volatile* ptr)
{
    ASSERT(*ptr == 1);
    InterlockedExchange(reinterpret_cast<long volatile*>(ptr), 0);
}

#else

// atomicAdd returns the result of the addition.
ALWAYS_INLINE int atomicAdd(int volatile* addend, int increment) { return __sync_add_and_fetch(addend, increment); }
// atomicSubtract returns the result of the subtraction.
ALWAYS_INLINE int atomicSubtract(int volatile* addend, int decrement) { return __sync_sub_and_fetch(addend, decrement); }

ALWAYS_INLINE int atomicIncrement(int volatile* addend) { return atomicAdd(addend, 1); }
ALWAYS_INLINE int atomicDecrement(int volatile* addend) { return atomicSubtract(addend, 1); }

ALWAYS_INLINE int64_t atomicIncrement(int64_t volatile* addend) { return __sync_add_and_fetch(addend, 1); }
ALWAYS_INLINE int64_t atomicDecrement(int64_t volatile* addend) { return __sync_sub_and_fetch(addend, 1); }

// Returns the previous value of *ptr (0 or 1) and sets it to 1.
// __sync_lock_test_and_set is an acquire barrier.
ALWAYS_INLINE int atomicTestAndSetToOne(int volatile* ptr)
{
    int ret = __sync_lock_test_and_set(ptr, 1);
    ASSERT(!ret || ret == 1);
    return ret;
}

// __sync_lock_release writes 0 with release semantics.
ALWAYS_INLINE void atomicSetOneToZero(int volatile* ptr)
{
    ASSERT(*ptr == 1);
    __sync_lock_release(ptr);
}
#endif

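// Illustrative sketch, not part of this header's API: atomicTestAndSetToOne()
// and atomicSetOneToZero() can be paired as a simple test-and-set spin lock.
// The MSVC path uses InterlockedExchange (a full barrier) and the GCC path
// uses __sync_lock_test_and_set / __sync_lock_release (acquire / release), so
// the critical section is fenced on both. The names s_lock and withLockHeld()
// below are hypothetical placeholders.
//
//   static int volatile s_lock = 0;
//
//   void withLockHeld()
//   {
//       while (atomicTestAndSetToOne(&s_lock)) { } // spin until we flip 0 -> 1
//       // ... critical section ...
//       atomicSetOneToZero(&s_lock); // release the lock
//   }
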
#if defined(THREAD_SANITIZER)
ALWAYS_INLINE void releaseStore(volatile int* ptr, int value)
{
    __tsan_atomic32_store(ptr, value, __tsan_memory_order_release);
}

ALWAYS_INLINE int acquireLoad(volatile const int* ptr)
{
    return __tsan_atomic32_load(ptr, __tsan_memory_order_acquire);
}
#else

#if CPU(X86) || CPU(X86_64)
// Only a compiler barrier is needed.
#if COMPILER(MSVC)
// Starting with Visual Studio 2005, the compiler guarantees acquire and
// release semantics for operations on volatile variables. See the MSDN entry
// for the MemoryBarrier macro.
#define MEMORY_BARRIER()
#else
#define MEMORY_BARRIER() __asm__ __volatile__("" : : : "memory")
#endif
#elif CPU(ARM) && (OS(LINUX) || OS(ANDROID))
// On ARM, __sync_synchronize generates a dmb instruction, which is very
// expensive on single-core devices that don't actually need it. Avoid the
// cost by calling into the kuser_memory_barrier helper.
inline void memoryBarrier()
{
    // Note: This is a function call, which is also an implicit compiler barrier.
    typedef void (*KernelMemoryBarrierFunc)();
    ((KernelMemoryBarrierFunc)0xffff0fa0)();
}
#define MEMORY_BARRIER() memoryBarrier()
#else
// Fall back to the compiler intrinsic on all other platforms.
#define MEMORY_BARRIER() __sync_synchronize()
#endif

ALWAYS_INLINE void releaseStore(volatile int* ptr, int value)
{
    MEMORY_BARRIER();
    *ptr = value;
}

ALWAYS_INLINE int acquireLoad(volatile const int* ptr)
{
    int value = *ptr;
    MEMORY_BARRIER();
    return value;
}

#undef MEMORY_BARRIER

#endif

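// Illustrative sketch, not part of this header's API: releaseStore() and
// acquireLoad() pair up to publish data from one thread to another. The
// producer's writes to the payload happen before the flag store, so a consumer
// that observes the flag also observes the payload. The names s_sharedData,
// s_ready, computeData() and use() below are hypothetical placeholders.
//
//   static int s_sharedData;
//   static int volatile s_ready = 0;
//
//   // Producer thread:
//   s_sharedData = computeData();
//   releaseStore(&s_ready, 1);
//
//   // Consumer thread:
//   if (acquireLoad(&s_ready))
//       use(s_sharedData);
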
} // namespace WTF

using WTF::atomicAdd;
using WTF::atomicSubtract;
using WTF::atomicDecrement;
using WTF::atomicIncrement;
using WTF::atomicTestAndSetToOne;
using WTF::atomicSetOneToZero;
using WTF::acquireLoad;
using WTF::releaseStore;

#endif // Atomics_h