• Home
  • Line#
  • Scopes#
  • Navigate#
  • Raw
  • Download
1 //===----------------------------------------------------------------------===//
2 //
3 // Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
4 // See https://llvm.org/LICENSE.txt for license information.
5 // SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
6 //
7 //===----------------------------------------------------------------------===//
8 
9 #ifndef _LIBCPP___ATOMIC_CXX_ATOMIC_IMPL_H
10 #define _LIBCPP___ATOMIC_CXX_ATOMIC_IMPL_H
11 
12 #include <__atomic/is_always_lock_free.h>
13 #include <__atomic/memory_order.h>
14 #include <__config>
15 #include <__memory/addressof.h>
16 #include <__type_traits/conditional.h>
17 #include <__type_traits/is_assignable.h>
18 #include <__type_traits/is_trivially_copyable.h>
19 #include <__type_traits/remove_const.h>
20 #include <cstddef>
21 #include <cstring>
22 
23 #if !defined(_LIBCPP_HAS_NO_PRAGMA_SYSTEM_HEADER)
24 #  pragma GCC system_header
25 #endif
26 
27 _LIBCPP_BEGIN_NAMESPACE_STD
28 
29 #if defined(_LIBCPP_HAS_GCC_ATOMIC_IMP) || \
30     defined(_LIBCPP_ATOMIC_ONLY_USE_BUILTINS)
31 
32 // [atomics.types.generic]p1 guarantees _Tp is trivially copyable. Because
33 // the default operator= in an object is not volatile, a byte-by-byte copy
34 // is required.
35 template <typename _Tp, typename _Tv, __enable_if_t<is_assignable<_Tp&, _Tv>::value, int> = 0> _LIBCPP_HIDE_FROM_ABI
__cxx_atomic_assign_volatile(_Tp & __a_value,_Tv const & __val)36 void __cxx_atomic_assign_volatile(_Tp& __a_value, _Tv const& __val) {
37   __a_value = __val;
38 }
39 template <typename _Tp, typename _Tv, __enable_if_t<is_assignable<_Tp&, _Tv>::value, int> = 0> _LIBCPP_HIDE_FROM_ABI
__cxx_atomic_assign_volatile(_Tp volatile & __a_value,_Tv volatile const & __val)40 void __cxx_atomic_assign_volatile(_Tp volatile& __a_value, _Tv volatile const& __val) {
41   volatile char* __to         = reinterpret_cast<volatile char*>(std::addressof(__a_value));
42   volatile char* __end = __to + sizeof(_Tp);
43   volatile const char* __from = reinterpret_cast<volatile const char*>(std::addressof(__val));
44   while (__to != __end)
45     *__to++ = *__from++;
46 }
47 
48 #endif
49 
50 #if defined(_LIBCPP_HAS_GCC_ATOMIC_IMP)
51 
52 template <typename _Tp>
53 struct __cxx_atomic_base_impl {
54 
55   _LIBCPP_HIDE_FROM_ABI
56 #ifndef _LIBCPP_CXX03_LANG
57     __cxx_atomic_base_impl() _NOEXCEPT = default;
58 #else
59     __cxx_atomic_base_impl() _NOEXCEPT : __a_value() {}
60 #endif // _LIBCPP_CXX03_LANG
__cxx_atomic_base_impl__cxx_atomic_base_impl61   _LIBCPP_CONSTEXPR explicit __cxx_atomic_base_impl(_Tp value) _NOEXCEPT
62     : __a_value(value) {}
63   _Tp __a_value;
64 };
65 
__to_gcc_order(memory_order __order)66 _LIBCPP_HIDE_FROM_ABI inline _LIBCPP_CONSTEXPR int __to_gcc_order(memory_order __order) {
67   // Avoid switch statement to make this a constexpr.
68   return __order == memory_order_relaxed ? __ATOMIC_RELAXED:
69          (__order == memory_order_acquire ? __ATOMIC_ACQUIRE:
70           (__order == memory_order_release ? __ATOMIC_RELEASE:
71            (__order == memory_order_seq_cst ? __ATOMIC_SEQ_CST:
72             (__order == memory_order_acq_rel ? __ATOMIC_ACQ_REL:
73               __ATOMIC_CONSUME))));
74 }
75 
__to_gcc_failure_order(memory_order __order)76 _LIBCPP_HIDE_FROM_ABI inline _LIBCPP_CONSTEXPR int __to_gcc_failure_order(memory_order __order) {
77   // Avoid switch statement to make this a constexpr.
78   return __order == memory_order_relaxed ? __ATOMIC_RELAXED:
79          (__order == memory_order_acquire ? __ATOMIC_ACQUIRE:
80           (__order == memory_order_release ? __ATOMIC_RELAXED:
81            (__order == memory_order_seq_cst ? __ATOMIC_SEQ_CST:
82             (__order == memory_order_acq_rel ? __ATOMIC_ACQUIRE:
83               __ATOMIC_CONSUME))));
84 }
85 
86 template <typename _Tp>
87 _LIBCPP_HIDE_FROM_ABI
__cxx_atomic_init(volatile __cxx_atomic_base_impl<_Tp> * __a,_Tp __val)88 void __cxx_atomic_init(volatile __cxx_atomic_base_impl<_Tp>* __a,  _Tp __val) {
89   __cxx_atomic_assign_volatile(__a->__a_value, __val);
90 }
91 
92 template <typename _Tp>
93 _LIBCPP_HIDE_FROM_ABI
__cxx_atomic_init(__cxx_atomic_base_impl<_Tp> * __a,_Tp __val)94 void __cxx_atomic_init(__cxx_atomic_base_impl<_Tp>* __a,  _Tp __val) {
95   __a->__a_value = __val;
96 }
97 
98 _LIBCPP_HIDE_FROM_ABI inline
__cxx_atomic_thread_fence(memory_order __order)99 void __cxx_atomic_thread_fence(memory_order __order) {
100   __atomic_thread_fence(__to_gcc_order(__order));
101 }
102 
103 _LIBCPP_HIDE_FROM_ABI inline
__cxx_atomic_signal_fence(memory_order __order)104 void __cxx_atomic_signal_fence(memory_order __order) {
105   __atomic_signal_fence(__to_gcc_order(__order));
106 }
107 
108 template <typename _Tp>
109 _LIBCPP_HIDE_FROM_ABI
__cxx_atomic_store(volatile __cxx_atomic_base_impl<_Tp> * __a,_Tp __val,memory_order __order)110 void __cxx_atomic_store(volatile __cxx_atomic_base_impl<_Tp>* __a,  _Tp __val,
111                         memory_order __order) {
112   __atomic_store(std::addressof(__a->__a_value), std::addressof(__val), __to_gcc_order(__order));
113 }
114 
115 template <typename _Tp>
116 _LIBCPP_HIDE_FROM_ABI
__cxx_atomic_store(__cxx_atomic_base_impl<_Tp> * __a,_Tp __val,memory_order __order)117 void __cxx_atomic_store(__cxx_atomic_base_impl<_Tp>* __a,  _Tp __val,
118                         memory_order __order) {
119   __atomic_store(std::addressof(__a->__a_value), std::addressof(__val), __to_gcc_order(__order));
120 }
121 
122 template <typename _Tp>
123 _LIBCPP_HIDE_FROM_ABI
__cxx_atomic_load(const volatile __cxx_atomic_base_impl<_Tp> * __a,memory_order __order)124 _Tp __cxx_atomic_load(const volatile __cxx_atomic_base_impl<_Tp>* __a,
125                       memory_order __order) {
126   _Tp __ret;
127   __atomic_load(std::addressof(__a->__a_value), std::addressof(__ret), __to_gcc_order(__order));
128   return __ret;
129 }
130 
131 template <typename _Tp>
132 _LIBCPP_HIDE_FROM_ABI void
__cxx_atomic_load_inplace(const volatile __cxx_atomic_base_impl<_Tp> * __a,_Tp * __dst,memory_order __order)133 __cxx_atomic_load_inplace(const volatile __cxx_atomic_base_impl<_Tp>* __a, _Tp* __dst, memory_order __order) {
134   __atomic_load(std::addressof(__a->__a_value), __dst, __to_gcc_order(__order));
135 }
136 
137 template <typename _Tp>
138 _LIBCPP_HIDE_FROM_ABI void
__cxx_atomic_load_inplace(const __cxx_atomic_base_impl<_Tp> * __a,_Tp * __dst,memory_order __order)139 __cxx_atomic_load_inplace(const __cxx_atomic_base_impl<_Tp>* __a, _Tp* __dst, memory_order __order) {
140   __atomic_load(std::addressof(__a->__a_value), __dst, __to_gcc_order(__order));
141 }
142 
143 template <typename _Tp>
144 _LIBCPP_HIDE_FROM_ABI
__cxx_atomic_load(const __cxx_atomic_base_impl<_Tp> * __a,memory_order __order)145 _Tp __cxx_atomic_load(const __cxx_atomic_base_impl<_Tp>* __a, memory_order __order) {
146   _Tp __ret;
147   __atomic_load(std::addressof(__a->__a_value), std::addressof(__ret), __to_gcc_order(__order));
148   return __ret;
149 }
150 
151 template <typename _Tp>
152 _LIBCPP_HIDE_FROM_ABI
__cxx_atomic_exchange(volatile __cxx_atomic_base_impl<_Tp> * __a,_Tp __value,memory_order __order)153 _Tp __cxx_atomic_exchange(volatile __cxx_atomic_base_impl<_Tp>* __a,
154                           _Tp __value, memory_order __order) {
155   _Tp __ret;
156   __atomic_exchange(
157       std::addressof(__a->__a_value), std::addressof(__value), std::addressof(__ret), __to_gcc_order(__order));
158   return __ret;
159 }
160 
161 template <typename _Tp>
162 _LIBCPP_HIDE_FROM_ABI
__cxx_atomic_exchange(__cxx_atomic_base_impl<_Tp> * __a,_Tp __value,memory_order __order)163 _Tp __cxx_atomic_exchange(__cxx_atomic_base_impl<_Tp>* __a, _Tp __value,
164                           memory_order __order) {
165   _Tp __ret;
166   __atomic_exchange(
167       std::addressof(__a->__a_value), std::addressof(__value), std::addressof(__ret), __to_gcc_order(__order));
168   return __ret;
169 }
170 
171 template <typename _Tp>
172 _LIBCPP_HIDE_FROM_ABI
__cxx_atomic_compare_exchange_strong(volatile __cxx_atomic_base_impl<_Tp> * __a,_Tp * __expected,_Tp __value,memory_order __success,memory_order __failure)173 bool __cxx_atomic_compare_exchange_strong(
174     volatile __cxx_atomic_base_impl<_Tp>* __a, _Tp* __expected, _Tp __value,
175     memory_order __success, memory_order __failure) {
176   return __atomic_compare_exchange(
177       std::addressof(__a->__a_value),
178       __expected,
179       std::addressof(__value),
180       false,
181       __to_gcc_order(__success),
182       __to_gcc_failure_order(__failure));
183 }
184 
185 template <typename _Tp>
186 _LIBCPP_HIDE_FROM_ABI
__cxx_atomic_compare_exchange_strong(__cxx_atomic_base_impl<_Tp> * __a,_Tp * __expected,_Tp __value,memory_order __success,memory_order __failure)187 bool __cxx_atomic_compare_exchange_strong(
188     __cxx_atomic_base_impl<_Tp>* __a, _Tp* __expected, _Tp __value, memory_order __success,
189     memory_order __failure) {
190   return __atomic_compare_exchange(
191       std::addressof(__a->__a_value),
192       __expected,
193       std::addressof(__value),
194       false,
195       __to_gcc_order(__success),
196       __to_gcc_failure_order(__failure));
197 }
198 
199 template <typename _Tp>
200 _LIBCPP_HIDE_FROM_ABI
__cxx_atomic_compare_exchange_weak(volatile __cxx_atomic_base_impl<_Tp> * __a,_Tp * __expected,_Tp __value,memory_order __success,memory_order __failure)201 bool __cxx_atomic_compare_exchange_weak(
202     volatile __cxx_atomic_base_impl<_Tp>* __a, _Tp* __expected, _Tp __value,
203     memory_order __success, memory_order __failure) {
204   return __atomic_compare_exchange(
205       std::addressof(__a->__a_value),
206       __expected,
207       std::addressof(__value),
208       true,
209       __to_gcc_order(__success),
210       __to_gcc_failure_order(__failure));
211 }
212 
213 template <typename _Tp>
214 _LIBCPP_HIDE_FROM_ABI
__cxx_atomic_compare_exchange_weak(__cxx_atomic_base_impl<_Tp> * __a,_Tp * __expected,_Tp __value,memory_order __success,memory_order __failure)215 bool __cxx_atomic_compare_exchange_weak(
216     __cxx_atomic_base_impl<_Tp>* __a, _Tp* __expected, _Tp __value, memory_order __success,
217     memory_order __failure) {
218   return __atomic_compare_exchange(
219       std::addressof(__a->__a_value),
220       __expected,
221       std::addressof(__value),
222       true,
223       __to_gcc_order(__success),
224       __to_gcc_failure_order(__failure));
225 }
226 
// Scale factor applied to fetch_add/fetch_sub deltas before they reach the
// __atomic_* builtins: 1 for arithmetic types, sizeof(pointee) for pointers
// (the builtins perform byte arithmetic on pointer operands).
template <typename _Tp>
struct __skip_amt {
  enum { value = 1 };
};

template <typename _Tp>
struct __skip_amt<_Tp*> {
  enum { value = sizeof(_Tp) };
};

// FIXME: Haven't figured out what the spec says about using arrays with
// atomic_fetch_add. Force a failure rather than creating bad behavior.
template <typename _Tp>
struct __skip_amt<_Tp[]> {};
template <typename _Tp, int n>
struct __skip_amt<_Tp[n]> {};
239 
240 template <typename _Tp, typename _Td>
241 _LIBCPP_HIDE_FROM_ABI
242 _Tp __cxx_atomic_fetch_add(volatile __cxx_atomic_base_impl<_Tp>* __a,
243                            _Td __delta, memory_order __order) {
244   return __atomic_fetch_add(std::addressof(__a->__a_value), __delta * __skip_amt<_Tp>::value, __to_gcc_order(__order));
245 }
246 
247 template <typename _Tp, typename _Td>
248 _LIBCPP_HIDE_FROM_ABI
249 _Tp __cxx_atomic_fetch_add(__cxx_atomic_base_impl<_Tp>* __a, _Td __delta,
250                            memory_order __order) {
251   return __atomic_fetch_add(std::addressof(__a->__a_value), __delta * __skip_amt<_Tp>::value, __to_gcc_order(__order));
252 }
253 
254 template <typename _Tp, typename _Td>
255 _LIBCPP_HIDE_FROM_ABI
256 _Tp __cxx_atomic_fetch_sub(volatile __cxx_atomic_base_impl<_Tp>* __a,
257                            _Td __delta, memory_order __order) {
258   return __atomic_fetch_sub(std::addressof(__a->__a_value), __delta * __skip_amt<_Tp>::value, __to_gcc_order(__order));
259 }
260 
261 template <typename _Tp, typename _Td>
262 _LIBCPP_HIDE_FROM_ABI
263 _Tp __cxx_atomic_fetch_sub(__cxx_atomic_base_impl<_Tp>* __a, _Td __delta,
264                            memory_order __order) {
265   return __atomic_fetch_sub(std::addressof(__a->__a_value), __delta * __skip_amt<_Tp>::value, __to_gcc_order(__order));
266 }
267 
268 template <typename _Tp>
269 _LIBCPP_HIDE_FROM_ABI
270 _Tp __cxx_atomic_fetch_and(volatile __cxx_atomic_base_impl<_Tp>* __a,
271                            _Tp __pattern, memory_order __order) {
272   return __atomic_fetch_and(std::addressof(__a->__a_value), __pattern, __to_gcc_order(__order));
273 }
274 
275 template <typename _Tp>
276 _LIBCPP_HIDE_FROM_ABI
277 _Tp __cxx_atomic_fetch_and(__cxx_atomic_base_impl<_Tp>* __a,
278                            _Tp __pattern, memory_order __order) {
279   return __atomic_fetch_and(std::addressof(__a->__a_value), __pattern, __to_gcc_order(__order));
280 }
281 
282 template <typename _Tp>
283 _LIBCPP_HIDE_FROM_ABI
284 _Tp __cxx_atomic_fetch_or(volatile __cxx_atomic_base_impl<_Tp>* __a,
285                           _Tp __pattern, memory_order __order) {
286   return __atomic_fetch_or(std::addressof(__a->__a_value), __pattern, __to_gcc_order(__order));
287 }
288 
289 template <typename _Tp>
290 _LIBCPP_HIDE_FROM_ABI
291 _Tp __cxx_atomic_fetch_or(__cxx_atomic_base_impl<_Tp>* __a, _Tp __pattern,
292                           memory_order __order) {
293   return __atomic_fetch_or(std::addressof(__a->__a_value), __pattern, __to_gcc_order(__order));
294 }
295 
296 template <typename _Tp>
297 _LIBCPP_HIDE_FROM_ABI
298 _Tp __cxx_atomic_fetch_xor(volatile __cxx_atomic_base_impl<_Tp>* __a,
299                            _Tp __pattern, memory_order __order) {
300   return __atomic_fetch_xor(std::addressof(__a->__a_value), __pattern, __to_gcc_order(__order));
301 }
302 
303 template <typename _Tp>
304 _LIBCPP_HIDE_FROM_ABI
305 _Tp __cxx_atomic_fetch_xor(__cxx_atomic_base_impl<_Tp>* __a, _Tp __pattern,
306                            memory_order __order) {
307   return __atomic_fetch_xor(std::addressof(__a->__a_value), __pattern, __to_gcc_order(__order));
308 }
309 
// Lock-freedom query by size; the 0 pointer argument means "any suitably
// aligned object of that size".
#define __cxx_atomic_is_lock_free(__s) __atomic_is_lock_free(__s, 0)
311 
312 #elif defined(_LIBCPP_HAS_C_ATOMIC_IMP)
313 
314 template <typename _Tp>
315 struct __cxx_atomic_base_impl {
316 
317   _LIBCPP_HIDE_FROM_ABI
318 #ifndef _LIBCPP_CXX03_LANG
319     __cxx_atomic_base_impl() _NOEXCEPT = default;
320 #else
321     __cxx_atomic_base_impl() _NOEXCEPT : __a_value() {}
322 #endif // _LIBCPP_CXX03_LANG
323   _LIBCPP_CONSTEXPR explicit __cxx_atomic_base_impl(_Tp __value) _NOEXCEPT
324     : __a_value(__value) {}
325   _LIBCPP_DISABLE_EXTENSION_WARNING _Atomic(_Tp) __a_value;
326 };
327 
328 #define __cxx_atomic_is_lock_free(__s) __c11_atomic_is_lock_free(__s)
329 
330 _LIBCPP_HIDE_FROM_ABI inline
331 void __cxx_atomic_thread_fence(memory_order __order) _NOEXCEPT {
332     __c11_atomic_thread_fence(static_cast<__memory_order_underlying_t>(__order));
333 }
334 
335 _LIBCPP_HIDE_FROM_ABI inline
336 void __cxx_atomic_signal_fence(memory_order __order) _NOEXCEPT {
337     __c11_atomic_signal_fence(static_cast<__memory_order_underlying_t>(__order));
338 }
339 
340 template<class _Tp>
341 _LIBCPP_HIDE_FROM_ABI
342 void __cxx_atomic_init(__cxx_atomic_base_impl<_Tp> volatile* __a, _Tp __val) _NOEXCEPT {
343     __c11_atomic_init(std::addressof(__a->__a_value), __val);
344 }
345 template<class _Tp>
346 _LIBCPP_HIDE_FROM_ABI
347 void __cxx_atomic_init(__cxx_atomic_base_impl<_Tp> * __a, _Tp __val) _NOEXCEPT {
348     __c11_atomic_init(std::addressof(__a->__a_value), __val);
349 }
350 
351 template<class _Tp>
352 _LIBCPP_HIDE_FROM_ABI
353 void __cxx_atomic_store(__cxx_atomic_base_impl<_Tp> volatile* __a, _Tp __val, memory_order __order) _NOEXCEPT {
354     __c11_atomic_store(std::addressof(__a->__a_value), __val, static_cast<__memory_order_underlying_t>(__order));
355 }
356 template<class _Tp>
357 _LIBCPP_HIDE_FROM_ABI
358 void __cxx_atomic_store(__cxx_atomic_base_impl<_Tp> * __a, _Tp __val, memory_order __order) _NOEXCEPT {
359     __c11_atomic_store(std::addressof(__a->__a_value), __val, static_cast<__memory_order_underlying_t>(__order));
360 }
361 
362 template<class _Tp>
363 _LIBCPP_HIDE_FROM_ABI
364 _Tp __cxx_atomic_load(__cxx_atomic_base_impl<_Tp> const volatile* __a, memory_order __order) _NOEXCEPT {
365     using __ptr_type = __remove_const_t<decltype(__a->__a_value)>*;
366     return __c11_atomic_load(
367         const_cast<__ptr_type>(std::addressof(__a->__a_value)), static_cast<__memory_order_underlying_t>(__order));
368 }
369 template<class _Tp>
370 _LIBCPP_HIDE_FROM_ABI
371 _Tp __cxx_atomic_load(__cxx_atomic_base_impl<_Tp> const* __a, memory_order __order) _NOEXCEPT {
372     using __ptr_type = __remove_const_t<decltype(__a->__a_value)>*;
373     return __c11_atomic_load(
374         const_cast<__ptr_type>(std::addressof(__a->__a_value)), static_cast<__memory_order_underlying_t>(__order));
375 }
376 
377 template <class _Tp>
378 _LIBCPP_HIDE_FROM_ABI void
379 __cxx_atomic_load_inplace(__cxx_atomic_base_impl<_Tp> const volatile* __a, _Tp* __dst, memory_order __order) _NOEXCEPT {
380     using __ptr_type = __remove_const_t<decltype(__a->__a_value)>*;
381     *__dst           = __c11_atomic_load(
382         const_cast<__ptr_type>(std::addressof(__a->__a_value)), static_cast<__memory_order_underlying_t>(__order));
383 }
384 template <class _Tp>
385 _LIBCPP_HIDE_FROM_ABI void
386 __cxx_atomic_load_inplace(__cxx_atomic_base_impl<_Tp> const* __a, _Tp* __dst, memory_order __order) _NOEXCEPT {
387     using __ptr_type = __remove_const_t<decltype(__a->__a_value)>*;
388     *__dst           = __c11_atomic_load(
389         const_cast<__ptr_type>(std::addressof(__a->__a_value)), static_cast<__memory_order_underlying_t>(__order));
390 }
391 
392 template<class _Tp>
393 _LIBCPP_HIDE_FROM_ABI
394 _Tp __cxx_atomic_exchange(__cxx_atomic_base_impl<_Tp> volatile* __a, _Tp __value, memory_order __order) _NOEXCEPT {
395     return __c11_atomic_exchange(
396         std::addressof(__a->__a_value), __value, static_cast<__memory_order_underlying_t>(__order));
397 }
398 template<class _Tp>
399 _LIBCPP_HIDE_FROM_ABI
400 _Tp __cxx_atomic_exchange(__cxx_atomic_base_impl<_Tp> * __a, _Tp __value, memory_order __order) _NOEXCEPT {
401     return __c11_atomic_exchange(
402         std::addressof(__a->__a_value), __value, static_cast<__memory_order_underlying_t>(__order));
403 }
404 
405 _LIBCPP_HIDE_FROM_ABI inline _LIBCPP_CONSTEXPR memory_order __to_failure_order(memory_order __order) {
406   // Avoid switch statement to make this a constexpr.
407   return __order == memory_order_release ? memory_order_relaxed:
408          (__order == memory_order_acq_rel ? memory_order_acquire:
409              __order);
410 }
411 
412 template<class _Tp>
413 _LIBCPP_HIDE_FROM_ABI
414 bool __cxx_atomic_compare_exchange_strong(__cxx_atomic_base_impl<_Tp> volatile* __a, _Tp* __expected, _Tp __value, memory_order __success, memory_order __failure) _NOEXCEPT {
415   return __c11_atomic_compare_exchange_strong(
416       std::addressof(__a->__a_value),
417       __expected,
418       __value,
419       static_cast<__memory_order_underlying_t>(__success),
420       static_cast<__memory_order_underlying_t>(__to_failure_order(__failure)));
421 }
422 template<class _Tp>
423 _LIBCPP_HIDE_FROM_ABI
424 bool __cxx_atomic_compare_exchange_strong(__cxx_atomic_base_impl<_Tp> * __a, _Tp* __expected, _Tp __value, memory_order __success, memory_order __failure) _NOEXCEPT {
425   return __c11_atomic_compare_exchange_strong(
426       std::addressof(__a->__a_value),
427       __expected,
428       __value,
429       static_cast<__memory_order_underlying_t>(__success),
430       static_cast<__memory_order_underlying_t>(__to_failure_order(__failure)));
431 }
432 
433 template<class _Tp>
434 _LIBCPP_HIDE_FROM_ABI
435 bool __cxx_atomic_compare_exchange_weak(__cxx_atomic_base_impl<_Tp> volatile* __a, _Tp* __expected, _Tp __value, memory_order __success, memory_order __failure) _NOEXCEPT {
436   return __c11_atomic_compare_exchange_weak(
437       std::addressof(__a->__a_value),
438       __expected,
439       __value,
440       static_cast<__memory_order_underlying_t>(__success),
441       static_cast<__memory_order_underlying_t>(__to_failure_order(__failure)));
442 }
443 template<class _Tp>
444 _LIBCPP_HIDE_FROM_ABI
445 bool __cxx_atomic_compare_exchange_weak(__cxx_atomic_base_impl<_Tp> * __a, _Tp* __expected, _Tp __value, memory_order __success, memory_order __failure) _NOEXCEPT {
446   return __c11_atomic_compare_exchange_weak(
447       std::addressof(__a->__a_value),
448       __expected,
449       __value,
450       static_cast<__memory_order_underlying_t>(__success),
451       static_cast<__memory_order_underlying_t>(__to_failure_order(__failure)));
452 }
453 
454 template<class _Tp>
455 _LIBCPP_HIDE_FROM_ABI
456 _Tp __cxx_atomic_fetch_add(__cxx_atomic_base_impl<_Tp> volatile* __a, _Tp __delta, memory_order __order) _NOEXCEPT {
457   return __c11_atomic_fetch_add(
458       std::addressof(__a->__a_value), __delta, static_cast<__memory_order_underlying_t>(__order));
459 }
460 template<class _Tp>
461 _LIBCPP_HIDE_FROM_ABI
462 _Tp __cxx_atomic_fetch_add(__cxx_atomic_base_impl<_Tp> * __a, _Tp __delta, memory_order __order) _NOEXCEPT {
463   return __c11_atomic_fetch_add(
464       std::addressof(__a->__a_value), __delta, static_cast<__memory_order_underlying_t>(__order));
465 }
466 
467 template<class _Tp>
468 _LIBCPP_HIDE_FROM_ABI
469 _Tp* __cxx_atomic_fetch_add(__cxx_atomic_base_impl<_Tp*> volatile* __a, ptrdiff_t __delta, memory_order __order) _NOEXCEPT {
470   return __c11_atomic_fetch_add(
471       std::addressof(__a->__a_value), __delta, static_cast<__memory_order_underlying_t>(__order));
472 }
473 template<class _Tp>
474 _LIBCPP_HIDE_FROM_ABI
475 _Tp* __cxx_atomic_fetch_add(__cxx_atomic_base_impl<_Tp*> * __a, ptrdiff_t __delta, memory_order __order) _NOEXCEPT {
476   return __c11_atomic_fetch_add(
477       std::addressof(__a->__a_value), __delta, static_cast<__memory_order_underlying_t>(__order));
478 }
479 
480 template<class _Tp>
481 _LIBCPP_HIDE_FROM_ABI
482 _Tp __cxx_atomic_fetch_sub(__cxx_atomic_base_impl<_Tp> volatile* __a, _Tp __delta, memory_order __order) _NOEXCEPT {
483   return __c11_atomic_fetch_sub(
484       std::addressof(__a->__a_value), __delta, static_cast<__memory_order_underlying_t>(__order));
485 }
486 template<class _Tp>
487 _LIBCPP_HIDE_FROM_ABI
488 _Tp __cxx_atomic_fetch_sub(__cxx_atomic_base_impl<_Tp> * __a, _Tp __delta, memory_order __order) _NOEXCEPT {
489   return __c11_atomic_fetch_sub(
490       std::addressof(__a->__a_value), __delta, static_cast<__memory_order_underlying_t>(__order));
491 }
492 template<class _Tp>
493 _LIBCPP_HIDE_FROM_ABI
494 _Tp* __cxx_atomic_fetch_sub(__cxx_atomic_base_impl<_Tp*> volatile* __a, ptrdiff_t __delta, memory_order __order) _NOEXCEPT {
495   return __c11_atomic_fetch_sub(
496       std::addressof(__a->__a_value), __delta, static_cast<__memory_order_underlying_t>(__order));
497 }
498 template<class _Tp>
499 _LIBCPP_HIDE_FROM_ABI
500 _Tp* __cxx_atomic_fetch_sub(__cxx_atomic_base_impl<_Tp*> * __a, ptrdiff_t __delta, memory_order __order) _NOEXCEPT {
501   return __c11_atomic_fetch_sub(
502       std::addressof(__a->__a_value), __delta, static_cast<__memory_order_underlying_t>(__order));
503 }
504 
505 template<class _Tp>
506 _LIBCPP_HIDE_FROM_ABI
507 _Tp __cxx_atomic_fetch_and(__cxx_atomic_base_impl<_Tp> volatile* __a, _Tp __pattern, memory_order __order) _NOEXCEPT {
508   return __c11_atomic_fetch_and(
509       std::addressof(__a->__a_value), __pattern, static_cast<__memory_order_underlying_t>(__order));
510 }
511 template<class _Tp>
512 _LIBCPP_HIDE_FROM_ABI
513 _Tp __cxx_atomic_fetch_and(__cxx_atomic_base_impl<_Tp> * __a, _Tp __pattern, memory_order __order) _NOEXCEPT {
514   return __c11_atomic_fetch_and(
515       std::addressof(__a->__a_value), __pattern, static_cast<__memory_order_underlying_t>(__order));
516 }
517 
518 template<class _Tp>
519 _LIBCPP_HIDE_FROM_ABI
520 _Tp __cxx_atomic_fetch_or(__cxx_atomic_base_impl<_Tp> volatile* __a, _Tp __pattern, memory_order __order) _NOEXCEPT {
521   return __c11_atomic_fetch_or(
522       std::addressof(__a->__a_value), __pattern, static_cast<__memory_order_underlying_t>(__order));
523 }
524 template<class _Tp>
525 _LIBCPP_HIDE_FROM_ABI
526 _Tp __cxx_atomic_fetch_or(__cxx_atomic_base_impl<_Tp> * __a, _Tp __pattern, memory_order __order) _NOEXCEPT {
527   return __c11_atomic_fetch_or(
528       std::addressof(__a->__a_value), __pattern, static_cast<__memory_order_underlying_t>(__order));
529 }
530 
531 template<class _Tp>
532 _LIBCPP_HIDE_FROM_ABI
533 _Tp __cxx_atomic_fetch_xor(__cxx_atomic_base_impl<_Tp> volatile* __a, _Tp __pattern, memory_order __order) _NOEXCEPT {
534   return __c11_atomic_fetch_xor(
535       std::addressof(__a->__a_value), __pattern, static_cast<__memory_order_underlying_t>(__order));
536 }
537 template<class _Tp>
538 _LIBCPP_HIDE_FROM_ABI
539 _Tp __cxx_atomic_fetch_xor(__cxx_atomic_base_impl<_Tp> * __a, _Tp __pattern, memory_order __order) _NOEXCEPT {
540   return __c11_atomic_fetch_xor(
541       std::addressof(__a->__a_value), __pattern, static_cast<__memory_order_underlying_t>(__order));
542 }
543 
544 #endif // _LIBCPP_HAS_GCC_ATOMIC_IMP, _LIBCPP_HAS_C_ATOMIC_IMP
545 
546 #ifdef _LIBCPP_ATOMIC_ONLY_USE_BUILTINS
547 
548 template<typename _Tp>
549 struct __cxx_atomic_lock_impl {
550 
551   _LIBCPP_HIDE_FROM_ABI
552   __cxx_atomic_lock_impl() _NOEXCEPT
553     : __a_value(), __a_lock(0) {}
554   _LIBCPP_HIDE_FROM_ABI _LIBCPP_CONSTEXPR explicit
555   __cxx_atomic_lock_impl(_Tp value) _NOEXCEPT
556     : __a_value(value), __a_lock(0) {}
557 
558   _Tp __a_value;
559   mutable __cxx_atomic_base_impl<_LIBCPP_ATOMIC_FLAG_TYPE> __a_lock;
560 
561   _LIBCPP_HIDE_FROM_ABI void __lock() const volatile {
562     while(1 == __cxx_atomic_exchange(&__a_lock, _LIBCPP_ATOMIC_FLAG_TYPE(true), memory_order_acquire))
563         /*spin*/;
564   }
565   _LIBCPP_HIDE_FROM_ABI void __lock() const {
566     while(1 == __cxx_atomic_exchange(&__a_lock, _LIBCPP_ATOMIC_FLAG_TYPE(true), memory_order_acquire))
567         /*spin*/;
568   }
569   _LIBCPP_HIDE_FROM_ABI void __unlock() const volatile {
570     __cxx_atomic_store(&__a_lock, _LIBCPP_ATOMIC_FLAG_TYPE(false), memory_order_release);
571   }
572   _LIBCPP_HIDE_FROM_ABI void __unlock() const {
573     __cxx_atomic_store(&__a_lock, _LIBCPP_ATOMIC_FLAG_TYPE(false), memory_order_release);
574   }
575   _LIBCPP_HIDE_FROM_ABI _Tp __read() const volatile {
576     __lock();
577     _Tp __old;
578     __cxx_atomic_assign_volatile(__old, __a_value);
579     __unlock();
580     return __old;
581   }
582   _LIBCPP_HIDE_FROM_ABI _Tp __read() const {
583     __lock();
584     _Tp __old = __a_value;
585     __unlock();
586     return __old;
587   }
588   _LIBCPP_HIDE_FROM_ABI void __read_inplace(_Tp* __dst) const volatile {
589     __lock();
590     __cxx_atomic_assign_volatile(*__dst, __a_value);
591     __unlock();
592   }
593   _LIBCPP_HIDE_FROM_ABI void __read_inplace(_Tp* __dst) const {
594     __lock();
595     *__dst = __a_value;
596     __unlock();
597   }
598 };
599 
600 template <typename _Tp>
601 _LIBCPP_HIDE_FROM_ABI
602 void __cxx_atomic_init(volatile __cxx_atomic_lock_impl<_Tp>* __a,  _Tp __val) {
603   __cxx_atomic_assign_volatile(__a->__a_value, __val);
604 }
605 template <typename _Tp>
606 _LIBCPP_HIDE_FROM_ABI
607 void __cxx_atomic_init(__cxx_atomic_lock_impl<_Tp>* __a,  _Tp __val) {
608   __a->__a_value = __val;
609 }
610 
611 template <typename _Tp>
612 _LIBCPP_HIDE_FROM_ABI
613 void __cxx_atomic_store(volatile __cxx_atomic_lock_impl<_Tp>* __a,  _Tp __val, memory_order) {
614   __a->__lock();
615   __cxx_atomic_assign_volatile(__a->__a_value, __val);
616   __a->__unlock();
617 }
618 template <typename _Tp>
619 _LIBCPP_HIDE_FROM_ABI
620 void __cxx_atomic_store(__cxx_atomic_lock_impl<_Tp>* __a,  _Tp __val, memory_order) {
621   __a->__lock();
622   __a->__a_value = __val;
623   __a->__unlock();
624 }
625 
626 template <typename _Tp>
627 _LIBCPP_HIDE_FROM_ABI
628 _Tp __cxx_atomic_load(const volatile __cxx_atomic_lock_impl<_Tp>* __a, memory_order) {
629   return __a->__read();
630 }
631 template <typename _Tp>
632 _LIBCPP_HIDE_FROM_ABI
633 _Tp __cxx_atomic_load(const __cxx_atomic_lock_impl<_Tp>* __a, memory_order) {
634   return __a->__read();
635 }
636 
637 template <typename _Tp>
638 _LIBCPP_HIDE_FROM_ABI void
639 __cxx_atomic_load(const volatile __cxx_atomic_lock_impl<_Tp>* __a, _Tp* __dst, memory_order) {
640   __a->__read_inplace(__dst);
641 }
642 template <typename _Tp>
643 _LIBCPP_HIDE_FROM_ABI void __cxx_atomic_load(const __cxx_atomic_lock_impl<_Tp>* __a, _Tp* __dst, memory_order) {
644   __a->__read_inplace(__dst);
645 }
646 
647 template <typename _Tp>
648 _LIBCPP_HIDE_FROM_ABI
649 _Tp __cxx_atomic_exchange(volatile __cxx_atomic_lock_impl<_Tp>* __a, _Tp __value, memory_order) {
650   __a->__lock();
651   _Tp __old;
652   __cxx_atomic_assign_volatile(__old, __a->__a_value);
653   __cxx_atomic_assign_volatile(__a->__a_value, __value);
654   __a->__unlock();
655   return __old;
656 }
657 template <typename _Tp>
658 _LIBCPP_HIDE_FROM_ABI
659 _Tp __cxx_atomic_exchange(__cxx_atomic_lock_impl<_Tp>* __a, _Tp __value, memory_order) {
660   __a->__lock();
661   _Tp __old = __a->__a_value;
662   __a->__a_value = __value;
663   __a->__unlock();
664   return __old;
665 }
666 
667 template <typename _Tp>
668 _LIBCPP_HIDE_FROM_ABI
669 bool __cxx_atomic_compare_exchange_strong(volatile __cxx_atomic_lock_impl<_Tp>* __a,
670                                           _Tp* __expected, _Tp __value, memory_order, memory_order) {
671   _Tp __temp;
672   __a->__lock();
673   __cxx_atomic_assign_volatile(__temp, __a->__a_value);
674   bool __ret = (std::memcmp(&__temp, __expected, sizeof(_Tp)) == 0);
675   if(__ret)
676     __cxx_atomic_assign_volatile(__a->__a_value, __value);
677   else
678     __cxx_atomic_assign_volatile(*__expected, __a->__a_value);
679   __a->__unlock();
680   return __ret;
681 }
682 template <typename _Tp>
683 _LIBCPP_HIDE_FROM_ABI
684 bool __cxx_atomic_compare_exchange_strong(__cxx_atomic_lock_impl<_Tp>* __a,
685                                           _Tp* __expected, _Tp __value, memory_order, memory_order) {
686   __a->__lock();
687   bool __ret = (std::memcmp(&__a->__a_value, __expected, sizeof(_Tp)) == 0);
688   if(__ret)
689     std::memcpy(&__a->__a_value, &__value, sizeof(_Tp));
690   else
691     std::memcpy(__expected, &__a->__a_value, sizeof(_Tp));
692   __a->__unlock();
693   return __ret;
694 }
695 
696 template <typename _Tp>
697 _LIBCPP_HIDE_FROM_ABI
698 bool __cxx_atomic_compare_exchange_weak(volatile __cxx_atomic_lock_impl<_Tp>* __a,
699                                         _Tp* __expected, _Tp __value, memory_order, memory_order) {
700   _Tp __temp;
701   __a->__lock();
702   __cxx_atomic_assign_volatile(__temp, __a->__a_value);
703   bool __ret = (std::memcmp(&__temp, __expected, sizeof(_Tp)) == 0);
704   if(__ret)
705     __cxx_atomic_assign_volatile(__a->__a_value, __value);
706   else
707     __cxx_atomic_assign_volatile(*__expected, __a->__a_value);
708   __a->__unlock();
709   return __ret;
710 }
711 template <typename _Tp>
712 _LIBCPP_HIDE_FROM_ABI
713 bool __cxx_atomic_compare_exchange_weak(__cxx_atomic_lock_impl<_Tp>* __a,
714                                         _Tp* __expected, _Tp __value, memory_order, memory_order) {
715   __a->__lock();
716   bool __ret = (std::memcmp(&__a->__a_value, __expected, sizeof(_Tp)) == 0);
717   if(__ret)
718     std::memcpy(&__a->__a_value, &__value, sizeof(_Tp));
719   else
720     std::memcpy(__expected, &__a->__a_value, sizeof(_Tp));
721   __a->__unlock();
722   return __ret;
723 }
724 
// Lock-guarded fetch_add for volatile atomics: under the spinlock, read the
// old value, store __old + __delta (converted back to _Tp), and return the
// old value. The memory_order argument is ignored; the lock orders accesses.
template <typename _Tp, typename _Td>
_LIBCPP_HIDE_FROM_ABI
_Tp __cxx_atomic_fetch_add(volatile __cxx_atomic_lock_impl<_Tp>* __a,
                           _Td __delta, memory_order) {
  __a->__lock();
  _Tp __old;
  // Byte-wise copies: volatile objects cannot use the default operator=.
  __cxx_atomic_assign_volatile(__old, __a->__a_value);
  __cxx_atomic_assign_volatile(__a->__a_value, _Tp(__old + __delta));
  __a->__unlock();
  return __old;
}
736 template <typename _Tp, typename _Td>
737 _LIBCPP_HIDE_FROM_ABI
738 _Tp __cxx_atomic_fetch_add(__cxx_atomic_lock_impl<_Tp>* __a,
739                            _Td __delta, memory_order) {
740   __a->__lock();
741   _Tp __old = __a->__a_value;
742   __a->__a_value += __delta;
743   __a->__unlock();
744   return __old;
745 }
746 
// Pointer overload of the lock-guarded fetch_add for volatile atomics:
// advances the stored pointer by __delta elements and returns the previous
// pointer. The memory_order argument is ignored.
// NOTE(review): the second template parameter _Td is unused and cannot be
// deduced from the ptrdiff_t parameter -- confirm whether any caller can
// actually select this overload.
template <typename _Tp, typename _Td>
_LIBCPP_HIDE_FROM_ABI
_Tp* __cxx_atomic_fetch_add(volatile __cxx_atomic_lock_impl<_Tp*>* __a,
                           ptrdiff_t __delta, memory_order) {
  __a->__lock();
  _Tp* __old;
  // Byte-wise copies: volatile objects cannot use the default operator=.
  __cxx_atomic_assign_volatile(__old, __a->__a_value);
  __cxx_atomic_assign_volatile(__a->__a_value, __old + __delta);
  __a->__unlock();
  return __old;
}
758 template <typename _Tp, typename _Td>
759 _LIBCPP_HIDE_FROM_ABI
760 _Tp* __cxx_atomic_fetch_add(__cxx_atomic_lock_impl<_Tp*>* __a,
761                            ptrdiff_t __delta, memory_order) {
762   __a->__lock();
763   _Tp* __old = __a->__a_value;
764   __a->__a_value += __delta;
765   __a->__unlock();
766   return __old;
767 }
768 
// Lock-guarded fetch_sub for volatile atomics: under the spinlock, read the
// old value, store __old - __delta (converted back to _Tp), and return the
// old value. The memory_order argument is ignored.
template <typename _Tp, typename _Td>
_LIBCPP_HIDE_FROM_ABI
_Tp __cxx_atomic_fetch_sub(volatile __cxx_atomic_lock_impl<_Tp>* __a,
                           _Td __delta, memory_order) {
  __a->__lock();
  _Tp __old;
  // Byte-wise copies: volatile objects cannot use the default operator=.
  __cxx_atomic_assign_volatile(__old, __a->__a_value);
  __cxx_atomic_assign_volatile(__a->__a_value, _Tp(__old - __delta));
  __a->__unlock();
  return __old;
}
780 template <typename _Tp, typename _Td>
781 _LIBCPP_HIDE_FROM_ABI
782 _Tp __cxx_atomic_fetch_sub(__cxx_atomic_lock_impl<_Tp>* __a,
783                            _Td __delta, memory_order) {
784   __a->__lock();
785   _Tp __old = __a->__a_value;
786   __a->__a_value -= __delta;
787   __a->__unlock();
788   return __old;
789 }
790 
// Lock-guarded fetch_and for volatile atomics: under the spinlock, read the
// old value, store __old & __pattern, and return the old value. The
// memory_order argument is ignored.
template <typename _Tp>
_LIBCPP_HIDE_FROM_ABI
_Tp __cxx_atomic_fetch_and(volatile __cxx_atomic_lock_impl<_Tp>* __a,
                           _Tp __pattern, memory_order) {
  __a->__lock();
  _Tp __old;
  // Byte-wise copies: volatile objects cannot use the default operator=.
  __cxx_atomic_assign_volatile(__old, __a->__a_value);
  __cxx_atomic_assign_volatile(__a->__a_value, _Tp(__old & __pattern));
  __a->__unlock();
  return __old;
}
802 template <typename _Tp>
803 _LIBCPP_HIDE_FROM_ABI
804 _Tp __cxx_atomic_fetch_and(__cxx_atomic_lock_impl<_Tp>* __a,
805                            _Tp __pattern, memory_order) {
806   __a->__lock();
807   _Tp __old = __a->__a_value;
808   __a->__a_value &= __pattern;
809   __a->__unlock();
810   return __old;
811 }
812 
// Lock-guarded fetch_or for volatile atomics: under the spinlock, read the
// old value, store __old | __pattern, and return the old value. The
// memory_order argument is ignored.
template <typename _Tp>
_LIBCPP_HIDE_FROM_ABI
_Tp __cxx_atomic_fetch_or(volatile __cxx_atomic_lock_impl<_Tp>* __a,
                          _Tp __pattern, memory_order) {
  __a->__lock();
  _Tp __old;
  // Byte-wise copies: volatile objects cannot use the default operator=.
  __cxx_atomic_assign_volatile(__old, __a->__a_value);
  __cxx_atomic_assign_volatile(__a->__a_value, _Tp(__old | __pattern));
  __a->__unlock();
  return __old;
}
824 template <typename _Tp>
825 _LIBCPP_HIDE_FROM_ABI
826 _Tp __cxx_atomic_fetch_or(__cxx_atomic_lock_impl<_Tp>* __a,
827                           _Tp __pattern, memory_order) {
828   __a->__lock();
829   _Tp __old = __a->__a_value;
830   __a->__a_value |= __pattern;
831   __a->__unlock();
832   return __old;
833 }
834 
// Lock-guarded fetch_xor for volatile atomics: under the spinlock, read the
// old value, store __old ^ __pattern, and return the old value. The
// memory_order argument is ignored.
template <typename _Tp>
_LIBCPP_HIDE_FROM_ABI
_Tp __cxx_atomic_fetch_xor(volatile __cxx_atomic_lock_impl<_Tp>* __a,
                           _Tp __pattern, memory_order) {
  __a->__lock();
  _Tp __old;
  // Byte-wise copies: volatile objects cannot use the default operator=.
  __cxx_atomic_assign_volatile(__old, __a->__a_value);
  __cxx_atomic_assign_volatile(__a->__a_value, _Tp(__old ^ __pattern));
  __a->__unlock();
  return __old;
}
846 template <typename _Tp>
847 _LIBCPP_HIDE_FROM_ABI
848 _Tp __cxx_atomic_fetch_xor(__cxx_atomic_lock_impl<_Tp>* __a,
849                            _Tp __pattern, memory_order) {
850   __a->__lock();
851   _Tp __old = __a->__a_value;
852   __a->__a_value ^= __pattern;
853   __a->__unlock();
854   return __old;
855 }
856 
// Select the storage implementation for std::atomic<_Tp>: the builtin-based
// __cxx_atomic_base_impl when _Tp is always lock-free, otherwise the
// spinlock-guarded __cxx_atomic_lock_impl fallback defined above.
template <typename _Tp,
          typename _Base = typename conditional<__libcpp_is_always_lock_free<_Tp>::__value,
                                                __cxx_atomic_base_impl<_Tp>,
                                                __cxx_atomic_lock_impl<_Tp> >::type>
#else
// No lock-based fallback in this configuration: always use the builtin-based
// implementation.
template <typename _Tp,
          typename _Base = __cxx_atomic_base_impl<_Tp> >
#endif //_LIBCPP_ATOMIC_ONLY_USE_BUILTINS
struct __cxx_atomic_impl : public _Base {
    // [atomics.types.generic] requires trivially copyable types; enforce it
    // here so the memcpy/memcmp-based fallback above is well-defined.
    static_assert(is_trivially_copyable<_Tp>::value,
      "std::atomic<T> requires that 'T' be a trivially copyable type");

  _LIBCPP_HIDE_FROM_ABI __cxx_atomic_impl() _NOEXCEPT = default;
  _LIBCPP_HIDE_FROM_ABI _LIBCPP_CONSTEXPR explicit __cxx_atomic_impl(_Tp __value) _NOEXCEPT
    : _Base(__value) {}
};
873 
874 _LIBCPP_END_NAMESPACE_STD
875 
876 #endif // _LIBCPP___ATOMIC_CXX_ATOMIC_IMPL_H
877