1 // Copyright 2017 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4
5 #ifndef V8_BASE_ATOMICOPS_INTERNALS_STD_H_
6 #define V8_BASE_ATOMICOPS_INTERNALS_STD_H_
7
8 #include <atomic>
9
10 #include "src/base/build_config.h"
11 #include "src/base/macros.h"
12
13 namespace v8 {
14 namespace base {
15
namespace helper {
// Reinterprets a raw atomic storage location as the equivalently sized
// std::atomic so the <atomic> operations can be applied to it.
template <typename U>
volatile std::atomic<U>* to_std_atomic(volatile U* ptr) {
  return reinterpret_cast<volatile std::atomic<U>*>(ptr);
}
// Const-qualified variant for read-only accesses.
template <typename U>
volatile const std::atomic<U>* to_std_atomic_const(volatile const U* ptr) {
  return reinterpret_cast<volatile const std::atomic<U>*>(ptr);
}
}  // namespace helper
26
// Emits a full, sequentially consistent memory barrier.
inline void SeqCst_MemoryFence() {
  std::atomic_thread_fence(std::memory_order_seq_cst);
}
30
// Atomically swaps *ptr to new_value iff it currently equals old_value,
// with no ordering constraints. Returns the value observed in *ptr before
// the operation (== old_value iff the swap took place).
inline Atomic8 Relaxed_CompareAndSwap(volatile Atomic8* ptr, Atomic8 old_value,
                                      Atomic8 new_value) {
  // On failure, compare_exchange_strong stores the observed value into
  // old_value, which is exactly what we return.
  helper::to_std_atomic(ptr)->compare_exchange_strong(
      old_value, new_value, std::memory_order_relaxed,
      std::memory_order_relaxed);
  return old_value;
}
38
// 16-bit relaxed compare-and-swap; see the Atomic8 overload for semantics.
inline Atomic16 Relaxed_CompareAndSwap(volatile Atomic16* ptr,
                                       Atomic16 old_value, Atomic16 new_value) {
  helper::to_std_atomic(ptr)->compare_exchange_strong(
      old_value, new_value, std::memory_order_relaxed,
      std::memory_order_relaxed);
  return old_value;
}
46
// 32-bit relaxed compare-and-swap; returns the previously observed value.
inline Atomic32 Relaxed_CompareAndSwap(volatile Atomic32* ptr,
                                       Atomic32 old_value, Atomic32 new_value) {
  helper::to_std_atomic(ptr)->compare_exchange_strong(
      old_value, new_value, std::memory_order_relaxed,
      std::memory_order_relaxed);
  return old_value;
}
54
// Atomically replaces *ptr with new_value and returns the old contents,
// with no ordering constraints.
inline Atomic32 Relaxed_AtomicExchange(volatile Atomic32* ptr,
                                       Atomic32 new_value) {
  return helper::to_std_atomic(ptr)->exchange(new_value,
                                              std::memory_order_relaxed);
}
60
// Atomically adds increment to *ptr and returns the *new* value, with no
// ordering constraints (fetch_add yields the old value, so add increment).
inline Atomic32 Relaxed_AtomicIncrement(volatile Atomic32* ptr,
                                        Atomic32 increment) {
  Atomic32 previous =
      helper::to_std_atomic(ptr)->fetch_add(increment,
                                            std::memory_order_relaxed);
  return previous + increment;
}
67
// Compare-and-swap with acquire semantics: the load part of the operation
// synchronizes-with a prior release write to *ptr. Returns the value
// observed in *ptr before the operation (== old_value iff the swap
// happened).
inline Atomic32 Acquire_CompareAndSwap(volatile Atomic32* ptr,
                                       Atomic32 old_value, Atomic32 new_value) {
  // Explicitly std::-qualified for consistency with the other CAS helpers
  // in this file; the unqualified call only resolved via ADL.
  std::atomic_compare_exchange_strong_explicit(
      helper::to_std_atomic(ptr), &old_value, new_value,
      std::memory_order_acquire, std::memory_order_acquire);
  return old_value;
}
75
// 8-bit compare-and-swap with release semantics on success and relaxed
// ordering on failure. Returns the value observed in *ptr before the
// operation (== old_value iff the swap happened).
inline Atomic8 Release_CompareAndSwap(volatile Atomic8* ptr, Atomic8 old_value,
                                      Atomic8 new_value) {
  // Explicitly std::-qualified for consistency with the other CAS helpers
  // in this file; the unqualified call only resolved via ADL.
  bool result = std::atomic_compare_exchange_strong_explicit(
      helper::to_std_atomic(ptr), &old_value, new_value,
      std::memory_order_release, std::memory_order_relaxed);
  USE(result);  // Make gcc compiler happy.
  return old_value;
}
84
// 32-bit compare-and-swap with release semantics on success and relaxed
// ordering on failure. Returns the value observed in *ptr before the
// operation (== old_value iff the swap happened).
inline Atomic32 Release_CompareAndSwap(volatile Atomic32* ptr,
                                       Atomic32 old_value, Atomic32 new_value) {
  // Explicitly std::-qualified for consistency with the other CAS helpers
  // in this file; the unqualified call only resolved via ADL.
  std::atomic_compare_exchange_strong_explicit(
      helper::to_std_atomic(ptr), &old_value, new_value,
      std::memory_order_release, std::memory_order_relaxed);
  return old_value;
}
92
// 32-bit compare-and-swap with acquire-release semantics on success and
// acquire ordering on failure. Returns the value observed in *ptr before
// the operation (== old_value iff the swap happened).
inline Atomic32 AcquireRelease_CompareAndSwap(volatile Atomic32* ptr,
                                              Atomic32 old_value,
                                              Atomic32 new_value) {
  // Explicitly std::-qualified for consistency with the other CAS helpers
  // in this file; the unqualified call only resolved via ADL.
  std::atomic_compare_exchange_strong_explicit(
      helper::to_std_atomic(ptr), &old_value, new_value,
      std::memory_order_acq_rel, std::memory_order_acquire);
  return old_value;
}
101
Relaxed_Store(volatile Atomic8 * ptr,Atomic8 value)102 inline void Relaxed_Store(volatile Atomic8* ptr, Atomic8 value) {
103 std::atomic_store_explicit(helper::to_std_atomic(ptr), value,
104 std::memory_order_relaxed);
105 }
106
Relaxed_Store(volatile Atomic16 * ptr,Atomic16 value)107 inline void Relaxed_Store(volatile Atomic16* ptr, Atomic16 value) {
108 std::atomic_store_explicit(helper::to_std_atomic(ptr), value,
109 std::memory_order_relaxed);
110 }
111
Relaxed_Store(volatile Atomic32 * ptr,Atomic32 value)112 inline void Relaxed_Store(volatile Atomic32* ptr, Atomic32 value) {
113 std::atomic_store_explicit(helper::to_std_atomic(ptr), value,
114 std::memory_order_relaxed);
115 }
116
Release_Store(volatile Atomic32 * ptr,Atomic32 value)117 inline void Release_Store(volatile Atomic32* ptr, Atomic32 value) {
118 std::atomic_store_explicit(helper::to_std_atomic(ptr), value,
119 std::memory_order_release);
120 }
121
// Atomically loads *ptr with no ordering constraints.
inline Atomic8 Relaxed_Load(volatile const Atomic8* ptr) {
  return helper::to_std_atomic_const(ptr)->load(std::memory_order_relaxed);
}
126
// 16-bit relaxed atomic load.
inline Atomic16 Relaxed_Load(volatile const Atomic16* ptr) {
  return helper::to_std_atomic_const(ptr)->load(std::memory_order_relaxed);
}
131
// 32-bit relaxed atomic load.
inline Atomic32 Relaxed_Load(volatile const Atomic32* ptr) {
  return helper::to_std_atomic_const(ptr)->load(std::memory_order_relaxed);
}
136
// Atomically loads *ptr with acquire semantics: synchronizes-with a
// release store of the observed value.
inline Atomic32 Acquire_Load(volatile const Atomic32* ptr) {
  return helper::to_std_atomic_const(ptr)->load(std::memory_order_acquire);
}
141
142 #if defined(V8_HOST_ARCH_64_BIT)
143
// 64-bit relaxed compare-and-swap; returns the value observed in *ptr
// before the operation (== old_value iff the swap took place).
inline Atomic64 Relaxed_CompareAndSwap(volatile Atomic64* ptr,
                                       Atomic64 old_value, Atomic64 new_value) {
  helper::to_std_atomic(ptr)->compare_exchange_strong(
      old_value, new_value, std::memory_order_relaxed,
      std::memory_order_relaxed);
  return old_value;
}
151
// Atomically replaces *ptr with new_value and returns the old contents,
// with no ordering constraints.
inline Atomic64 Relaxed_AtomicExchange(volatile Atomic64* ptr,
                                       Atomic64 new_value) {
  return helper::to_std_atomic(ptr)->exchange(new_value,
                                              std::memory_order_relaxed);
}
157
// Atomically adds increment to *ptr and returns the *new* value, with no
// ordering constraints (fetch_add yields the old value, so add increment).
inline Atomic64 Relaxed_AtomicIncrement(volatile Atomic64* ptr,
                                        Atomic64 increment) {
  Atomic64 previous =
      helper::to_std_atomic(ptr)->fetch_add(increment,
                                            std::memory_order_relaxed);
  return previous + increment;
}
164
// 64-bit compare-and-swap with acquire semantics; returns the value
// observed in *ptr before the operation.
inline Atomic64 Acquire_CompareAndSwap(volatile Atomic64* ptr,
                                       Atomic64 old_value, Atomic64 new_value) {
  helper::to_std_atomic(ptr)->compare_exchange_strong(
      old_value, new_value, std::memory_order_acquire,
      std::memory_order_acquire);
  return old_value;
}
172
// 64-bit compare-and-swap with release semantics on success and relaxed
// ordering on failure; returns the value observed in *ptr before the
// operation.
inline Atomic64 Release_CompareAndSwap(volatile Atomic64* ptr,
                                       Atomic64 old_value, Atomic64 new_value) {
  helper::to_std_atomic(ptr)->compare_exchange_strong(
      old_value, new_value, std::memory_order_release,
      std::memory_order_relaxed);
  return old_value;
}
180
// 64-bit compare-and-swap with acquire-release semantics on success and
// acquire ordering on failure; returns the value observed in *ptr before
// the operation.
inline Atomic64 AcquireRelease_CompareAndSwap(volatile Atomic64* ptr,
                                              Atomic64 old_value,
                                              Atomic64 new_value) {
  helper::to_std_atomic(ptr)->compare_exchange_strong(
      old_value, new_value, std::memory_order_acq_rel,
      std::memory_order_acquire);
  return old_value;
}
189
Relaxed_Store(volatile Atomic64 * ptr,Atomic64 value)190 inline void Relaxed_Store(volatile Atomic64* ptr, Atomic64 value) {
191 std::atomic_store_explicit(helper::to_std_atomic(ptr), value,
192 std::memory_order_relaxed);
193 }
194
Release_Store(volatile Atomic64 * ptr,Atomic64 value)195 inline void Release_Store(volatile Atomic64* ptr, Atomic64 value) {
196 std::atomic_store_explicit(helper::to_std_atomic(ptr), value,
197 std::memory_order_release);
198 }
199
// 64-bit relaxed atomic load.
inline Atomic64 Relaxed_Load(volatile const Atomic64* ptr) {
  return helper::to_std_atomic_const(ptr)->load(std::memory_order_relaxed);
}
204
// 64-bit atomic load with acquire semantics.
inline Atomic64 Acquire_Load(volatile const Atomic64* ptr) {
  return helper::to_std_atomic_const(ptr)->load(std::memory_order_acquire);
}
209
210 #endif // defined(V8_HOST_ARCH_64_BIT)
211 } // namespace base
212 } // namespace v8
213
214 #endif // V8_BASE_ATOMICOPS_INTERNALS_STD_H_
215