1// -*- C++ -*-
2//===--------------------------- atomic -----------------------------------===//
3//
4//                     The LLVM Compiler Infrastructure
5//
6// This file is distributed under the University of Illinois Open Source
7// License. See LICENSE.TXT for details.
8//
9//===----------------------------------------------------------------------===//
10
11#ifndef _LIBCPP_ATOMIC
12#define _LIBCPP_ATOMIC
13
14/*
15    atomic synopsis
16
17namespace std
18{
19
20// order and consistency
21
22typedef enum memory_order
23{
24    memory_order_relaxed,
25    memory_order_consume,  // load-consume
26    memory_order_acquire,  // load-acquire
27    memory_order_release,  // store-release
28    memory_order_acq_rel,  // store-release load-acquire
29    memory_order_seq_cst   // store-release load-acquire
30} memory_order;
31
32template <class T> T kill_dependency(T y) noexcept;
33
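A minimal sketch (illustrative only, not part of the synopsis) of the most common
pairing: a store-release publishes data that a load-acquire then observes. The
names data (an int) and ready (an atomic<bool>) are hypothetical.

    // thread 1
    data = 42;                                   // ordinary write
    ready.store(true, memory_order_release);     // publishes the write above

    // thread 2
    while (!ready.load(memory_order_acquire)) {} // waits for the publication
    assert(data == 42);                          // the write to data is visible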
34// lock-free property
35
36#define ATOMIC_BOOL_LOCK_FREE unspecified
37#define ATOMIC_CHAR_LOCK_FREE unspecified
38#define ATOMIC_CHAR16_T_LOCK_FREE unspecified
39#define ATOMIC_CHAR32_T_LOCK_FREE unspecified
40#define ATOMIC_WCHAR_T_LOCK_FREE unspecified
41#define ATOMIC_SHORT_LOCK_FREE unspecified
42#define ATOMIC_INT_LOCK_FREE unspecified
43#define ATOMIC_LONG_LOCK_FREE unspecified
44#define ATOMIC_LLONG_LOCK_FREE unspecified
45#define ATOMIC_POINTER_LOCK_FREE unspecified
46
47// flag type and operations
48
49typedef struct atomic_flag
50{
51    bool test_and_set(memory_order m = memory_order_seq_cst) volatile noexcept;
52    bool test_and_set(memory_order m = memory_order_seq_cst) noexcept;
53    void clear(memory_order m = memory_order_seq_cst) volatile noexcept;
54    void clear(memory_order m = memory_order_seq_cst) noexcept;
55    atomic_flag()  noexcept = default;
56    atomic_flag(const atomic_flag&) = delete;
57    atomic_flag& operator=(const atomic_flag&) = delete;
58    atomic_flag& operator=(const atomic_flag&) volatile = delete;
59} atomic_flag;
60
61bool
62    atomic_flag_test_and_set(volatile atomic_flag* obj) noexcept;
63
64bool
65    atomic_flag_test_and_set(atomic_flag* obj) noexcept;
66
67bool
68    atomic_flag_test_and_set_explicit(volatile atomic_flag* obj,
69                                      memory_order m) noexcept;
70
71bool
72    atomic_flag_test_and_set_explicit(atomic_flag* obj, memory_order m) noexcept;
73
74void
75    atomic_flag_clear(volatile atomic_flag* obj) noexcept;
76
77void
78    atomic_flag_clear(atomic_flag* obj) noexcept;
79
80void
81    atomic_flag_clear_explicit(volatile atomic_flag* obj, memory_order m) noexcept;
82
83void
84    atomic_flag_clear_explicit(atomic_flag* obj, memory_order m) noexcept;
85
86#define ATOMIC_FLAG_INIT see below
87#define ATOMIC_VAR_INIT(value) see below
88
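A minimal sketch (illustrative only, not part of the synopsis): atomic_flag used
as a spin lock. The names lock, enter and leave are hypothetical.

    atomic_flag lock = ATOMIC_FLAG_INIT;
    void enter() { while (lock.test_and_set(memory_order_acquire)) {} } // spin
    void leave() { lock.clear(memory_order_release); }                  // unlock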
89template <class T>
90struct atomic
91{
92    bool is_lock_free() const volatile noexcept;
93    bool is_lock_free() const noexcept;
94    void store(T desr, memory_order m = memory_order_seq_cst) volatile noexcept;
95    void store(T desr, memory_order m = memory_order_seq_cst) noexcept;
96    T load(memory_order m = memory_order_seq_cst) const volatile noexcept;
97    T load(memory_order m = memory_order_seq_cst) const noexcept;
98    operator T() const volatile noexcept;
99    operator T() const noexcept;
100    T exchange(T desr, memory_order m = memory_order_seq_cst) volatile noexcept;
101    T exchange(T desr, memory_order m = memory_order_seq_cst) noexcept;
102    bool compare_exchange_weak(T& expc, T desr,
103                               memory_order s, memory_order f) volatile noexcept;
104    bool compare_exchange_weak(T& expc, T desr, memory_order s, memory_order f) noexcept;
105    bool compare_exchange_strong(T& expc, T desr,
106                                 memory_order s, memory_order f) volatile noexcept;
107    bool compare_exchange_strong(T& expc, T desr,
108                                 memory_order s, memory_order f) noexcept;
109    bool compare_exchange_weak(T& expc, T desr,
110                               memory_order m = memory_order_seq_cst) volatile noexcept;
111    bool compare_exchange_weak(T& expc, T desr,
112                               memory_order m = memory_order_seq_cst) noexcept;
113    bool compare_exchange_strong(T& expc, T desr,
114                                memory_order m = memory_order_seq_cst) volatile noexcept;
115    bool compare_exchange_strong(T& expc, T desr,
116                                 memory_order m = memory_order_seq_cst) noexcept;
117
118    atomic() noexcept = default;
119    constexpr atomic(T desr) noexcept;
120    atomic(const atomic&) = delete;
121    atomic& operator=(const atomic&) = delete;
122    atomic& operator=(const atomic&) volatile = delete;
123    T operator=(T) volatile noexcept;
124    T operator=(T) noexcept;
125};
126
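A minimal sketch (illustrative only, not part of the synopsis) of the canonical
compare_exchange_weak retry loop; counter is a hypothetical atomic<int>. On
failure the current value is written back into expected, so the loop simply
retries with fresh data.

    int expected = counter.load(memory_order_relaxed);
    while (!counter.compare_exchange_weak(expected, expected * 2))
        ;  // expected now holds the value that caused the failure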
127template <>
128struct atomic<integral>
129{
130    bool is_lock_free() const volatile noexcept;
131    bool is_lock_free() const noexcept;
132    void store(integral desr, memory_order m = memory_order_seq_cst) volatile noexcept;
133    void store(integral desr, memory_order m = memory_order_seq_cst) noexcept;
134    integral load(memory_order m = memory_order_seq_cst) const volatile noexcept;
135    integral load(memory_order m = memory_order_seq_cst) const noexcept;
136    operator integral() const volatile noexcept;
137    operator integral() const noexcept;
138    integral exchange(integral desr,
139                      memory_order m = memory_order_seq_cst) volatile noexcept;
140    integral exchange(integral desr, memory_order m = memory_order_seq_cst) noexcept;
141    bool compare_exchange_weak(integral& expc, integral desr,
142                               memory_order s, memory_order f) volatile noexcept;
143    bool compare_exchange_weak(integral& expc, integral desr,
144                               memory_order s, memory_order f) noexcept;
145    bool compare_exchange_strong(integral& expc, integral desr,
146                                 memory_order s, memory_order f) volatile noexcept;
147    bool compare_exchange_strong(integral& expc, integral desr,
148                                 memory_order s, memory_order f) noexcept;
149    bool compare_exchange_weak(integral& expc, integral desr,
150                               memory_order m = memory_order_seq_cst) volatile noexcept;
151    bool compare_exchange_weak(integral& expc, integral desr,
152                               memory_order m = memory_order_seq_cst) noexcept;
153    bool compare_exchange_strong(integral& expc, integral desr,
154                                memory_order m = memory_order_seq_cst) volatile noexcept;
155    bool compare_exchange_strong(integral& expc, integral desr,
156                                 memory_order m = memory_order_seq_cst) noexcept;
157
158    integral
159        fetch_add(integral op, memory_order m = memory_order_seq_cst) volatile noexcept;
160    integral fetch_add(integral op, memory_order m = memory_order_seq_cst) noexcept;
161    integral
162        fetch_sub(integral op, memory_order m = memory_order_seq_cst) volatile noexcept;
163    integral fetch_sub(integral op, memory_order m = memory_order_seq_cst) noexcept;
164    integral
165        fetch_and(integral op, memory_order m = memory_order_seq_cst) volatile noexcept;
166    integral fetch_and(integral op, memory_order m = memory_order_seq_cst) noexcept;
167    integral
168        fetch_or(integral op, memory_order m = memory_order_seq_cst) volatile noexcept;
169    integral fetch_or(integral op, memory_order m = memory_order_seq_cst) noexcept;
170    integral
171        fetch_xor(integral op, memory_order m = memory_order_seq_cst) volatile noexcept;
172    integral fetch_xor(integral op, memory_order m = memory_order_seq_cst) noexcept;
173
174    atomic() noexcept = default;
175    constexpr atomic(integral desr) noexcept;
176    atomic(const atomic&) = delete;
177    atomic& operator=(const atomic&) = delete;
178    atomic& operator=(const atomic&) volatile = delete;
179    integral operator=(integral desr) volatile noexcept;
180    integral operator=(integral desr) noexcept;
181
182    integral operator++(int) volatile noexcept;
183    integral operator++(int) noexcept;
184    integral operator--(int) volatile noexcept;
185    integral operator--(int) noexcept;
186    integral operator++() volatile noexcept;
187    integral operator++() noexcept;
188    integral operator--() volatile noexcept;
189    integral operator--() noexcept;
190    integral operator+=(integral op) volatile noexcept;
191    integral operator+=(integral op) noexcept;
192    integral operator-=(integral op) volatile noexcept;
193    integral operator-=(integral op) noexcept;
194    integral operator&=(integral op) volatile noexcept;
195    integral operator&=(integral op) noexcept;
196    integral operator|=(integral op) volatile noexcept;
197    integral operator|=(integral op) noexcept;
198    integral operator^=(integral op) volatile noexcept;
199    integral operator^=(integral op) noexcept;
200};
201
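A minimal sketch (illustrative only, not part of the synopsis): fetch_add returns
the value held before the operation, while the compound assignment operators
return the new value. hits is a hypothetical atomic<unsigned>.

    atomic<unsigned> hits(0);
    unsigned before = hits.fetch_add(1);  // 0: value prior to the increment
    unsigned after  = (hits += 1);        // 2: value after the increment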
202template <class T>
203struct atomic<T*>
204{
205    bool is_lock_free() const volatile noexcept;
206    bool is_lock_free() const noexcept;
207    void store(T* desr, memory_order m = memory_order_seq_cst) volatile noexcept;
208    void store(T* desr, memory_order m = memory_order_seq_cst) noexcept;
209    T* load(memory_order m = memory_order_seq_cst) const volatile noexcept;
210    T* load(memory_order m = memory_order_seq_cst) const noexcept;
211    operator T*() const volatile noexcept;
212    operator T*() const noexcept;
213    T* exchange(T* desr, memory_order m = memory_order_seq_cst) volatile noexcept;
214    T* exchange(T* desr, memory_order m = memory_order_seq_cst) noexcept;
215    bool compare_exchange_weak(T*& expc, T* desr,
216                               memory_order s, memory_order f) volatile noexcept;
217    bool compare_exchange_weak(T*& expc, T* desr,
218                               memory_order s, memory_order f) noexcept;
219    bool compare_exchange_strong(T*& expc, T* desr,
220                                 memory_order s, memory_order f) volatile noexcept;
221    bool compare_exchange_strong(T*& expc, T* desr,
222                                 memory_order s, memory_order f) noexcept;
223    bool compare_exchange_weak(T*& expc, T* desr,
224                               memory_order m = memory_order_seq_cst) volatile noexcept;
225    bool compare_exchange_weak(T*& expc, T* desr,
226                               memory_order m = memory_order_seq_cst) noexcept;
227    bool compare_exchange_strong(T*& expc, T* desr,
228                                memory_order m = memory_order_seq_cst) volatile noexcept;
229    bool compare_exchange_strong(T*& expc, T* desr,
230                                 memory_order m = memory_order_seq_cst) noexcept;
231    T* fetch_add(ptrdiff_t op, memory_order m = memory_order_seq_cst) volatile noexcept;
232    T* fetch_add(ptrdiff_t op, memory_order m = memory_order_seq_cst) noexcept;
233    T* fetch_sub(ptrdiff_t op, memory_order m = memory_order_seq_cst) volatile noexcept;
234    T* fetch_sub(ptrdiff_t op, memory_order m = memory_order_seq_cst) noexcept;
235
236    atomic() noexcept = default;
237    constexpr atomic(T* desr) noexcept;
238    atomic(const atomic&) = delete;
239    atomic& operator=(const atomic&) = delete;
240    atomic& operator=(const atomic&) volatile = delete;
241
242    T* operator=(T*) volatile noexcept;
243    T* operator=(T*) noexcept;
244    T* operator++(int) volatile noexcept;
245    T* operator++(int) noexcept;
246    T* operator--(int) volatile noexcept;
247    T* operator--(int) noexcept;
248    T* operator++() volatile noexcept;
249    T* operator++() noexcept;
250    T* operator--() volatile noexcept;
251    T* operator--() noexcept;
252    T* operator+=(ptrdiff_t op) volatile noexcept;
253    T* operator+=(ptrdiff_t op) noexcept;
254    T* operator-=(ptrdiff_t op) volatile noexcept;
255    T* operator-=(ptrdiff_t op) noexcept;
256};
257
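A minimal sketch (illustrative only, not part of the synopsis): arithmetic on
atomic<T*> moves by whole elements, exactly like ordinary pointer arithmetic.
buf and cur are hypothetical.

    int buf[8];
    atomic<int*> cur(buf);
    int* prev = cur.fetch_add(2);  // prev == buf, cur now points at buf + 2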
258
259template <class T>
260    bool
261    atomic_is_lock_free(const volatile atomic<T>* obj) noexcept;
262
263template <class T>
264    bool
265    atomic_is_lock_free(const atomic<T>* obj) noexcept;
266
267template <class T>
268    void
269    atomic_init(volatile atomic<T>* obj, T desr) noexcept;
270
271template <class T>
272    void
273    atomic_init(atomic<T>* obj, T desr) noexcept;
274
275template <class T>
276    void
277    atomic_store(volatile atomic<T>* obj, T desr) noexcept;
278
279template <class T>
280    void
281    atomic_store(atomic<T>* obj, T desr) noexcept;
282
283template <class T>
284    void
285    atomic_store_explicit(volatile atomic<T>* obj, T desr, memory_order m) noexcept;
286
287template <class T>
288    void
289    atomic_store_explicit(atomic<T>* obj, T desr, memory_order m) noexcept;
290
291template <class T>
292    T
293    atomic_load(const volatile atomic<T>* obj) noexcept;
294
295template <class T>
296    T
297    atomic_load(const atomic<T>* obj) noexcept;
298
299template <class T>
300    T
301    atomic_load_explicit(const volatile atomic<T>* obj, memory_order m) noexcept;
302
303template <class T>
304    T
305    atomic_load_explicit(const atomic<T>* obj, memory_order m) noexcept;
306
307template <class T>
308    T
309    atomic_exchange(volatile atomic<T>* obj, T desr) noexcept;
310
311template <class T>
312    T
313    atomic_exchange(atomic<T>* obj, T desr) noexcept;
314
315template <class T>
316    T
317    atomic_exchange_explicit(volatile atomic<T>* obj, T desr, memory_order m) noexcept;
318
319template <class T>
320    T
321    atomic_exchange_explicit(atomic<T>* obj, T desr, memory_order m) noexcept;
322
323template <class T>
324    bool
325    atomic_compare_exchange_weak(volatile atomic<T>* obj, T* expc, T desr) noexcept;
326
327template <class T>
328    bool
329    atomic_compare_exchange_weak(atomic<T>* obj, T* expc, T desr) noexcept;
330
331template <class T>
332    bool
333    atomic_compare_exchange_strong(volatile atomic<T>* obj, T* expc, T desr) noexcept;
334
335template <class T>
336    bool
337    atomic_compare_exchange_strong(atomic<T>* obj, T* expc, T desr) noexcept;
338
339template <class T>
340    bool
341    atomic_compare_exchange_weak_explicit(volatile atomic<T>* obj, T* expc,
342                                          T desr,
343                                          memory_order s, memory_order f) noexcept;
344
345template <class T>
346    bool
347    atomic_compare_exchange_weak_explicit(atomic<T>* obj, T* expc, T desr,
348                                          memory_order s, memory_order f) noexcept;
349
350template <class T>
351    bool
352    atomic_compare_exchange_strong_explicit(volatile atomic<T>* obj,
353                                            T* expc, T desr,
354                                            memory_order s, memory_order f) noexcept;
355
356template <class T>
357    bool
358    atomic_compare_exchange_strong_explicit(atomic<T>* obj, T* expc,
359                                            T desr,
360                                            memory_order s, memory_order f) noexcept;
361
362template <class Integral>
363    Integral
364    atomic_fetch_add(volatile atomic<Integral>* obj, Integral op) noexcept;
365
366template <class Integral>
367    Integral
368    atomic_fetch_add(atomic<Integral>* obj, Integral op) noexcept;
369
370template <class Integral>
371    Integral
372    atomic_fetch_add_explicit(volatile atomic<Integral>* obj, Integral op,
373                              memory_order m) noexcept;
374template <class Integral>
375    Integral
376    atomic_fetch_add_explicit(atomic<Integral>* obj, Integral op,
377                              memory_order m) noexcept;
378template <class Integral>
379    Integral
380    atomic_fetch_sub(volatile atomic<Integral>* obj, Integral op) noexcept;
381
382template <class Integral>
383    Integral
384    atomic_fetch_sub(atomic<Integral>* obj, Integral op) noexcept;
385
386template <class Integral>
387    Integral
388    atomic_fetch_sub_explicit(volatile atomic<Integral>* obj, Integral op,
389                              memory_order m) noexcept;
390template <class Integral>
391    Integral
392    atomic_fetch_sub_explicit(atomic<Integral>* obj, Integral op,
393                              memory_order m) noexcept;
394template <class Integral>
395    Integral
396    atomic_fetch_and(volatile atomic<Integral>* obj, Integral op) noexcept;
397
398template <class Integral>
399    Integral
400    atomic_fetch_and(atomic<Integral>* obj, Integral op) noexcept;
401
402template <class Integral>
403    Integral
404    atomic_fetch_and_explicit(volatile atomic<Integral>* obj, Integral op,
405                              memory_order m) noexcept;
406template <class Integral>
407    Integral
408    atomic_fetch_and_explicit(atomic<Integral>* obj, Integral op,
409                              memory_order m) noexcept;
410template <class Integral>
411    Integral
412    atomic_fetch_or(volatile atomic<Integral>* obj, Integral op) noexcept;
413
414template <class Integral>
415    Integral
416    atomic_fetch_or(atomic<Integral>* obj, Integral op) noexcept;
417
418template <class Integral>
419    Integral
420    atomic_fetch_or_explicit(volatile atomic<Integral>* obj, Integral op,
421                             memory_order m) noexcept;
422template <class Integral>
423    Integral
424    atomic_fetch_or_explicit(atomic<Integral>* obj, Integral op,
425                             memory_order m) noexcept;
426template <class Integral>
427    Integral
428    atomic_fetch_xor(volatile atomic<Integral>* obj, Integral op) noexcept;
429
430template <class Integral>
431    Integral
432    atomic_fetch_xor(atomic<Integral>* obj, Integral op) noexcept;
433
434template <class Integral>
435    Integral
436    atomic_fetch_xor_explicit(volatile atomic<Integral>* obj, Integral op,
437                              memory_order m) noexcept;
438template <class Integral>
439    Integral
440    atomic_fetch_xor_explicit(atomic<Integral>* obj, Integral op,
441                              memory_order m) noexcept;
442
443template <class T>
444    T*
445    atomic_fetch_add(volatile atomic<T*>* obj, ptrdiff_t op) noexcept;
446
447template <class T>
448    T*
449    atomic_fetch_add(atomic<T*>* obj, ptrdiff_t op) noexcept;
450
451template <class T>
452    T*
453    atomic_fetch_add_explicit(volatile atomic<T*>* obj, ptrdiff_t op,
454                              memory_order m) noexcept;
455template <class T>
456    T*
457    atomic_fetch_add_explicit(atomic<T*>* obj, ptrdiff_t op, memory_order m) noexcept;
458
459template <class T>
460    T*
461    atomic_fetch_sub(volatile atomic<T*>* obj, ptrdiff_t op) noexcept;
462
463template <class T>
464    T*
465    atomic_fetch_sub(atomic<T*>* obj, ptrdiff_t op) noexcept;
466
467template <class T>
468    T*
469    atomic_fetch_sub_explicit(volatile atomic<T*>* obj, ptrdiff_t op,
470                              memory_order m) noexcept;
471template <class T>
472    T*
473    atomic_fetch_sub_explicit(atomic<T*>* obj, ptrdiff_t op, memory_order m) noexcept;
474
475// Atomics for standard typedef types
476
477typedef atomic<bool>               atomic_bool;
478typedef atomic<char>               atomic_char;
479typedef atomic<signed char>        atomic_schar;
480typedef atomic<unsigned char>      atomic_uchar;
481typedef atomic<short>              atomic_short;
482typedef atomic<unsigned short>     atomic_ushort;
483typedef atomic<int>                atomic_int;
484typedef atomic<unsigned int>       atomic_uint;
485typedef atomic<long>               atomic_long;
486typedef atomic<unsigned long>      atomic_ulong;
487typedef atomic<long long>          atomic_llong;
488typedef atomic<unsigned long long> atomic_ullong;
489typedef atomic<char16_t>           atomic_char16_t;
490typedef atomic<char32_t>           atomic_char32_t;
491typedef atomic<wchar_t>            atomic_wchar_t;
492
493typedef atomic<int_least8_t>   atomic_int_least8_t;
494typedef atomic<uint_least8_t>  atomic_uint_least8_t;
495typedef atomic<int_least16_t>  atomic_int_least16_t;
496typedef atomic<uint_least16_t> atomic_uint_least16_t;
497typedef atomic<int_least32_t>  atomic_int_least32_t;
498typedef atomic<uint_least32_t> atomic_uint_least32_t;
499typedef atomic<int_least64_t>  atomic_int_least64_t;
500typedef atomic<uint_least64_t> atomic_uint_least64_t;
501
502typedef atomic<int_fast8_t>   atomic_int_fast8_t;
503typedef atomic<uint_fast8_t>  atomic_uint_fast8_t;
504typedef atomic<int_fast16_t>  atomic_int_fast16_t;
505typedef atomic<uint_fast16_t> atomic_uint_fast16_t;
506typedef atomic<int_fast32_t>  atomic_int_fast32_t;
507typedef atomic<uint_fast32_t> atomic_uint_fast32_t;
508typedef atomic<int_fast64_t>  atomic_int_fast64_t;
509typedef atomic<uint_fast64_t> atomic_uint_fast64_t;
510
511typedef atomic<intptr_t>  atomic_intptr_t;
512typedef atomic<uintptr_t> atomic_uintptr_t;
513typedef atomic<size_t>    atomic_size_t;
514typedef atomic<ptrdiff_t> atomic_ptrdiff_t;
515typedef atomic<intmax_t>  atomic_intmax_t;
516typedef atomic<uintmax_t> atomic_uintmax_t;
517
518// fences
519
520void atomic_thread_fence(memory_order m) noexcept;
521void atomic_signal_fence(memory_order m) noexcept;
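
A minimal sketch (illustrative only, not part of the synopsis): fences let relaxed
accesses synchronize. The release fence is placed before the relaxed store, the
acquire fence after the relaxed load. data and flag are hypothetical.

    // thread 1
    data = 1;
    atomic_thread_fence(memory_order_release);
    flag.store(1, memory_order_relaxed);

    // thread 2
    if (flag.load(memory_order_relaxed) == 1) {
        atomic_thread_fence(memory_order_acquire);
        assert(data == 1);
    }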
522
523}  // std
524
525*/
526
527#include <__config>
528#include <cstddef>
529#include <cstdint>
530#include <type_traits>
531
532#if !defined(_LIBCPP_HAS_NO_PRAGMA_SYSTEM_HEADER)
533#pragma GCC system_header
534#endif
535
536_LIBCPP_BEGIN_NAMESPACE_STD
537
538#if !__has_feature(cxx_atomic) && _GNUC_VER < 407
539#error <atomic> is not implemented
540#else
541
542typedef enum memory_order
543{
544    memory_order_relaxed, memory_order_consume, memory_order_acquire,
545    memory_order_release, memory_order_acq_rel, memory_order_seq_cst
546} memory_order;
547
548#if _GNUC_VER >= 407
549namespace __gcc_atomic {
550template <typename T>
551struct __gcc_atomic_t {
552  __gcc_atomic_t() _NOEXCEPT {}
553  explicit __gcc_atomic_t(T value) _NOEXCEPT : __a_value(value) {}
554  T __a_value;
555};
556#define _Atomic(x) __gcc_atomic::__gcc_atomic_t<x>
557
558template <typename T> T __create();
559
560template <typename __Tp, typename __Td>
561typename enable_if<sizeof(__Tp()->__a_value = __create<__Td>()), char>::type
562    __test_atomic_assignable(int);
563template <typename T, typename U>
564__two __test_atomic_assignable(...);
565
566template <typename __Tp, typename __Td>
567struct __can_assign {
568  static const bool value =
569      sizeof(__test_atomic_assignable<__Tp, __Td>(1)) == sizeof(char);
570};
571
572static inline constexpr int __to_gcc_order(memory_order __order) {
573  // Avoid switch statement to make this a constexpr.
574  return __order == memory_order_relaxed ? __ATOMIC_RELAXED:
575         (__order == memory_order_acquire ? __ATOMIC_ACQUIRE:
576          (__order == memory_order_release ? __ATOMIC_RELEASE:
577           (__order == memory_order_seq_cst ? __ATOMIC_SEQ_CST:
578            (__order == memory_order_acq_rel ? __ATOMIC_ACQ_REL:
579              __ATOMIC_CONSUME))));
580}
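// (In C++11 a constexpr function body is restricted to essentially a single
// return statement, so the mapping above has to be a conditional expression
// rather than a switch.)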
581
582} // namespace __gcc_atomic
583
584template <typename _Tp>
585static inline
586typename enable_if<
587    __gcc_atomic::__can_assign<volatile _Atomic(_Tp)*, _Tp>::value>::type
588__c11_atomic_init(volatile _Atomic(_Tp)* __a,  _Tp __val) {
589  __a->__a_value = __val;
590}
591
592template <typename _Tp>
593static inline
594typename enable_if<
595    !__gcc_atomic::__can_assign<volatile _Atomic(_Tp)*, _Tp>::value &&
596     __gcc_atomic::__can_assign<         _Atomic(_Tp)*, _Tp>::value>::type
597__c11_atomic_init(volatile _Atomic(_Tp)* __a,  _Tp __val) {
598  // [atomics.types.generic]p1 guarantees _Tp is trivially copyable. Because
599  // the default operator= in an object is not volatile, a byte-by-byte copy
600  // is required.
601  volatile char* to = reinterpret_cast<volatile char*>(&__a->__a_value);
602  volatile char* end = to + sizeof(_Tp);
603  char* from = reinterpret_cast<char*>(&__val);
604  while (to != end) {
605    *to++ = *from++;
606  }
607}
608
609template <typename _Tp>
610static inline void __c11_atomic_init(_Atomic(_Tp)* __a,  _Tp __val) {
611  __a->__a_value = __val;
612}
613
614static inline void __c11_atomic_thread_fence(memory_order __order) {
615  __atomic_thread_fence(__gcc_atomic::__to_gcc_order(__order));
616}
617
618static inline void __c11_atomic_signal_fence(memory_order __order) {
619  __atomic_signal_fence(__gcc_atomic::__to_gcc_order(__order));
620}
621
622static inline bool __c11_atomic_is_lock_free(size_t __size) {
623  return __atomic_is_lock_free(__size, 0);
624}
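// A null pointer as the second argument asks the builtin to assume the typical
// alignment for an object of the given size.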
625
626template <typename _Tp>
627static inline void __c11_atomic_store(volatile _Atomic(_Tp)* __a,  _Tp __val,
628                                      memory_order __order) {
629  return __atomic_store(&__a->__a_value, &__val,
630                        __gcc_atomic::__to_gcc_order(__order));
631}
632
633template <typename _Tp>
634static inline void __c11_atomic_store(_Atomic(_Tp)* __a,  _Tp __val,
635                                      memory_order __order) {
636  return __atomic_store(&__a->__a_value, &__val,
637                        __gcc_atomic::__to_gcc_order(__order));
638}
639
640template <typename _Tp>
641static inline _Tp __c11_atomic_load(volatile _Atomic(_Tp)* __a,
642                                    memory_order __order) {
643  _Tp __ret;
644  __atomic_load(&__a->__a_value, &__ret,
645                __gcc_atomic::__to_gcc_order(__order));
646  return __ret;
647}
648
649template <typename _Tp>
650static inline _Tp __c11_atomic_load(_Atomic(_Tp)* __a, memory_order __order) {
651  _Tp __ret;
652  __atomic_load(&__a->__a_value, &__ret,
653                __gcc_atomic::__to_gcc_order(__order));
654  return __ret;
655}
656
657template <typename _Tp>
658static inline _Tp __c11_atomic_exchange(volatile _Atomic(_Tp)* __a,
659                                        _Tp __value, memory_order __order) {
660  _Tp __ret;
661  __atomic_exchange(&__a->__a_value, &__value, &__ret,
662                    __gcc_atomic::__to_gcc_order(__order));
663  return __ret;
664}
665
666template <typename _Tp>
667static inline _Tp __c11_atomic_exchange(_Atomic(_Tp)* __a, _Tp __value,
668                                        memory_order __order) {
669  _Tp __ret;
670  __atomic_exchange(&__a->__a_value, &__value, &__ret,
671                    __gcc_atomic::__to_gcc_order(__order));
672  return __ret;
673}
674
675template <typename _Tp>
676static inline bool __c11_atomic_compare_exchange_strong(
677    volatile _Atomic(_Tp)* __a, _Tp* __expected, _Tp __value,
678    memory_order __success, memory_order __failure) {
679  return __atomic_compare_exchange(&__a->__a_value, __expected, &__value,
680                                   false,
681                                   __gcc_atomic::__to_gcc_order(__success),
682                                   __gcc_atomic::__to_gcc_order(__failure));
683}
684
685template <typename _Tp>
686static inline bool __c11_atomic_compare_exchange_strong(
687    _Atomic(_Tp)* __a, _Tp* __expected, _Tp __value, memory_order __success,
688    memory_order __failure) {
689  return __atomic_compare_exchange(&__a->__a_value, __expected, &__value,
690                                   false,
691                                   __gcc_atomic::__to_gcc_order(__success),
692                                   __gcc_atomic::__to_gcc_order(__failure));
693}
694
695template <typename _Tp>
696static inline bool __c11_atomic_compare_exchange_weak(
697    volatile _Atomic(_Tp)* __a, _Tp* __expected, _Tp __value,
698    memory_order __success, memory_order __failure) {
699  return __atomic_compare_exchange(&__a->__a_value, __expected, &__value,
700                                   true,
701                                   __gcc_atomic::__to_gcc_order(__success),
702                                   __gcc_atomic::__to_gcc_order(__failure));
703}
704
705template <typename _Tp>
706static inline bool __c11_atomic_compare_exchange_weak(
707    _Atomic(_Tp)* __a, _Tp* __expected, _Tp __value, memory_order __success,
708    memory_order __failure) {
709  return __atomic_compare_exchange(&__a->__a_value, __expected, &__value,
710                                   true,
711                                   __gcc_atomic::__to_gcc_order(__success),
712                                   __gcc_atomic::__to_gcc_order(__failure));
713}
714
715template <typename _Tp>
716struct __skip_amt { enum {value = 1}; };
717
718template <typename _Tp>
719struct __skip_amt<_Tp*> { enum {value = sizeof(_Tp)}; };
720
721// FIXME: Haven't figured out what the spec says about using arrays with
722// atomic_fetch_add. Force a failure rather than creating bad behavior.
723template <typename _Tp>
724struct __skip_amt<_Tp[]> { };
725template <typename _Tp, int n>
726struct __skip_amt<_Tp[n]> { };
727
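// __skip_amt supplies the scaling factor for the delta handed to the GCC
// builtins: they do byte-wise arithmetic on pointer operands, so an
// atomic<_Tp*> must multiply the element count by sizeof(_Tp); for non-pointer
// types the factor is 1.  Illustrative example (hypothetical object):
//   atomic<int*> __p;  __p.fetch_add(2);  // passes 2 * sizeof(int) to the builtin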
728template <typename _Tp, typename _Td>
729static inline _Tp __c11_atomic_fetch_add(volatile _Atomic(_Tp)* __a,
730                                         _Td __delta, memory_order __order) {
731  return __atomic_fetch_add(&__a->__a_value, __delta * __skip_amt<_Tp>::value,
732                            __gcc_atomic::__to_gcc_order(__order));
733}
734
735template <typename _Tp, typename _Td>
736static inline _Tp __c11_atomic_fetch_add(_Atomic(_Tp)* __a, _Td __delta,
737                                         memory_order __order) {
738  return __atomic_fetch_add(&__a->__a_value, __delta * __skip_amt<_Tp>::value,
739                            __gcc_atomic::__to_gcc_order(__order));
740}
741
742template <typename _Tp, typename _Td>
743static inline _Tp __c11_atomic_fetch_sub(volatile _Atomic(_Tp)* __a,
744                                         _Td __delta, memory_order __order) {
745  return __atomic_fetch_sub(&__a->__a_value, __delta * __skip_amt<_Tp>::value,
746                            __gcc_atomic::__to_gcc_order(__order));
747}
748
749template <typename _Tp, typename _Td>
750static inline _Tp __c11_atomic_fetch_sub(_Atomic(_Tp)* __a, _Td __delta,
751                                         memory_order __order) {
752  return __atomic_fetch_sub(&__a->__a_value, __delta * __skip_amt<_Tp>::value,
753                            __gcc_atomic::__to_gcc_order(__order));
754}
755
756template <typename _Tp>
757static inline _Tp __c11_atomic_fetch_and(volatile _Atomic(_Tp)* __a,
758                                         _Tp __pattern, memory_order __order) {
759  return __atomic_fetch_and(&__a->__a_value, __pattern,
760                            __gcc_atomic::__to_gcc_order(__order));
761}
762
763template <typename _Tp>
764static inline _Tp __c11_atomic_fetch_and(_Atomic(_Tp)* __a,
765                                         _Tp __pattern, memory_order __order) {
766  return __atomic_fetch_and(&__a->__a_value, __pattern,
767                            __gcc_atomic::__to_gcc_order(__order));
768}
769
770template <typename _Tp>
771static inline _Tp __c11_atomic_fetch_or(volatile _Atomic(_Tp)* __a,
772                                        _Tp __pattern, memory_order __order) {
773  return __atomic_fetch_or(&__a->__a_value, __pattern,
774                           __gcc_atomic::__to_gcc_order(__order));
775}
776
777template <typename _Tp>
778static inline _Tp __c11_atomic_fetch_or(_Atomic(_Tp)* __a, _Tp __pattern,
779                                        memory_order __order) {
780  return __atomic_fetch_or(&__a->__a_value, __pattern,
781                           __gcc_atomic::__to_gcc_order(__order));
782}
783
784template <typename _Tp>
785static inline _Tp __c11_atomic_fetch_xor(volatile _Atomic(_Tp)* __a,
786                                         _Tp __pattern, memory_order __order) {
787  return __atomic_fetch_xor(&__a->__a_value, __pattern,
788                            __gcc_atomic::__to_gcc_order(__order));
789}
790
791template <typename _Tp>
792static inline _Tp __c11_atomic_fetch_xor(_Atomic(_Tp)* __a, _Tp __pattern,
793                                         memory_order __order) {
794  return __atomic_fetch_xor(&__a->__a_value, __pattern,
795                            __gcc_atomic::__to_gcc_order(__order));
796}
797#endif // _GNUC_VER >= 407
798
799template <class _Tp>
800inline _LIBCPP_INLINE_VISIBILITY
801_Tp
802kill_dependency(_Tp __y) _NOEXCEPT
803{
804    return __y;
805}
806
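// Illustrative note: kill_dependency ends a memory_order_consume dependency
// chain.  In the hypothetical snippet below, the indexed load no longer carries
// a dependency on the consume load once the index passes through kill_dependency:
//   int i = p.load(memory_order_consume);
//   int v = table[kill_dependency(i)];  // dependency intentionally broken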
807// general atomic<T>
808
809template <class _Tp, bool = is_integral<_Tp>::value && !is_same<_Tp, bool>::value>
810struct __atomic_base  // false
811{
812    mutable _Atomic(_Tp) __a_;
813
814    _LIBCPP_INLINE_VISIBILITY
815    bool is_lock_free() const volatile _NOEXCEPT
816        {return __c11_atomic_is_lock_free(sizeof(_Tp));}
817    _LIBCPP_INLINE_VISIBILITY
818    bool is_lock_free() const _NOEXCEPT
819        {return __c11_atomic_is_lock_free(sizeof(_Tp));}
820    _LIBCPP_INLINE_VISIBILITY
821    void store(_Tp __d, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
822        {__c11_atomic_store(&__a_, __d, __m);}
823    _LIBCPP_INLINE_VISIBILITY
824    void store(_Tp __d, memory_order __m = memory_order_seq_cst) _NOEXCEPT
825        {__c11_atomic_store(&__a_, __d, __m);}
826    _LIBCPP_INLINE_VISIBILITY
827    _Tp load(memory_order __m = memory_order_seq_cst) const volatile _NOEXCEPT
828        {return __c11_atomic_load(&__a_, __m);}
829    _LIBCPP_INLINE_VISIBILITY
830    _Tp load(memory_order __m = memory_order_seq_cst) const _NOEXCEPT
831        {return __c11_atomic_load(&__a_, __m);}
832    _LIBCPP_INLINE_VISIBILITY
833    operator _Tp() const volatile _NOEXCEPT {return load();}
834    _LIBCPP_INLINE_VISIBILITY
835    operator _Tp() const _NOEXCEPT          {return load();}
836    _LIBCPP_INLINE_VISIBILITY
837    _Tp exchange(_Tp __d, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
838        {return __c11_atomic_exchange(&__a_, __d, __m);}
839    _LIBCPP_INLINE_VISIBILITY
840    _Tp exchange(_Tp __d, memory_order __m = memory_order_seq_cst) _NOEXCEPT
841        {return __c11_atomic_exchange(&__a_, __d, __m);}
842    _LIBCPP_INLINE_VISIBILITY
843    bool compare_exchange_weak(_Tp& __e, _Tp __d,
844                               memory_order __s, memory_order __f) volatile _NOEXCEPT
845        {return __c11_atomic_compare_exchange_weak(&__a_, &__e, __d, __s, __f);}
846    _LIBCPP_INLINE_VISIBILITY
847    bool compare_exchange_weak(_Tp& __e, _Tp __d,
848                               memory_order __s, memory_order __f) _NOEXCEPT
849        {return __c11_atomic_compare_exchange_weak(&__a_, &__e, __d, __s, __f);}
850    _LIBCPP_INLINE_VISIBILITY
851    bool compare_exchange_strong(_Tp& __e, _Tp __d,
852                                 memory_order __s, memory_order __f) volatile _NOEXCEPT
853        {return __c11_atomic_compare_exchange_strong(&__a_, &__e, __d, __s, __f);}
854    _LIBCPP_INLINE_VISIBILITY
855    bool compare_exchange_strong(_Tp& __e, _Tp __d,
856                                 memory_order __s, memory_order __f) _NOEXCEPT
857        {return __c11_atomic_compare_exchange_strong(&__a_, &__e, __d, __s, __f);}
858    _LIBCPP_INLINE_VISIBILITY
859    bool compare_exchange_weak(_Tp& __e, _Tp __d,
860                              memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
861        {return __c11_atomic_compare_exchange_weak(&__a_, &__e, __d, __m, __m);}
862    _LIBCPP_INLINE_VISIBILITY
863    bool compare_exchange_weak(_Tp& __e, _Tp __d,
864                               memory_order __m = memory_order_seq_cst) _NOEXCEPT
865        {return __c11_atomic_compare_exchange_weak(&__a_, &__e, __d, __m, __m);}
866    _LIBCPP_INLINE_VISIBILITY
867    bool compare_exchange_strong(_Tp& __e, _Tp __d,
868                              memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
869        {return __c11_atomic_compare_exchange_strong(&__a_, &__e, __d, __m, __m);}
870    _LIBCPP_INLINE_VISIBILITY
871    bool compare_exchange_strong(_Tp& __e, _Tp __d,
872                                 memory_order __m = memory_order_seq_cst) _NOEXCEPT
873        {return __c11_atomic_compare_exchange_strong(&__a_, &__e, __d, __m, __m);}
874
875    _LIBCPP_INLINE_VISIBILITY
876#ifndef _LIBCPP_HAS_NO_DEFAULTED_FUNCTIONS
877    __atomic_base() _NOEXCEPT = default;
878#else
879    __atomic_base() _NOEXCEPT : __a_() {}
880#endif // _LIBCPP_HAS_NO_DEFAULTED_FUNCTIONS
881
882    _LIBCPP_INLINE_VISIBILITY
883    _LIBCPP_CONSTEXPR __atomic_base(_Tp __d) _NOEXCEPT : __a_(__d) {}
884#ifndef _LIBCPP_HAS_NO_DELETED_FUNCTIONS
885    __atomic_base(const __atomic_base&) = delete;
886    __atomic_base& operator=(const __atomic_base&) = delete;
887    __atomic_base& operator=(const __atomic_base&) volatile = delete;
888#else  // _LIBCPP_HAS_NO_DELETED_FUNCTIONS
889private:
890    __atomic_base(const __atomic_base&);
891    __atomic_base& operator=(const __atomic_base&);
892    __atomic_base& operator=(const __atomic_base&) volatile;
893#endif  // _LIBCPP_HAS_NO_DELETED_FUNCTIONS
894};
895
896// atomic<Integral>
897
898template <class _Tp>
899struct __atomic_base<_Tp, true>
900    : public __atomic_base<_Tp, false>
901{
902    typedef __atomic_base<_Tp, false> __base;
903    _LIBCPP_INLINE_VISIBILITY
904    __atomic_base() _NOEXCEPT _LIBCPP_DEFAULT
905    _LIBCPP_INLINE_VISIBILITY
906    _LIBCPP_CONSTEXPR __atomic_base(_Tp __d) _NOEXCEPT : __base(__d) {}
907
908    _LIBCPP_INLINE_VISIBILITY
909    _Tp fetch_add(_Tp __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
910        {return __c11_atomic_fetch_add(&this->__a_, __op, __m);}
911    _LIBCPP_INLINE_VISIBILITY
912    _Tp fetch_add(_Tp __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
913        {return __c11_atomic_fetch_add(&this->__a_, __op, __m);}
914    _LIBCPP_INLINE_VISIBILITY
915    _Tp fetch_sub(_Tp __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
916        {return __c11_atomic_fetch_sub(&this->__a_, __op, __m);}
917    _LIBCPP_INLINE_VISIBILITY
918    _Tp fetch_sub(_Tp __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
919        {return __c11_atomic_fetch_sub(&this->__a_, __op, __m);}
920    _LIBCPP_INLINE_VISIBILITY
921    _Tp fetch_and(_Tp __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
922        {return __c11_atomic_fetch_and(&this->__a_, __op, __m);}
923    _LIBCPP_INLINE_VISIBILITY
924    _Tp fetch_and(_Tp __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
925        {return __c11_atomic_fetch_and(&this->__a_, __op, __m);}
926    _LIBCPP_INLINE_VISIBILITY
927    _Tp fetch_or(_Tp __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
928        {return __c11_atomic_fetch_or(&this->__a_, __op, __m);}
929    _LIBCPP_INLINE_VISIBILITY
930    _Tp fetch_or(_Tp __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
931        {return __c11_atomic_fetch_or(&this->__a_, __op, __m);}
932    _LIBCPP_INLINE_VISIBILITY
933    _Tp fetch_xor(_Tp __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
934        {return __c11_atomic_fetch_xor(&this->__a_, __op, __m);}
935    _LIBCPP_INLINE_VISIBILITY
936    _Tp fetch_xor(_Tp __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
937        {return __c11_atomic_fetch_xor(&this->__a_, __op, __m);}
938
939    _LIBCPP_INLINE_VISIBILITY
940    _Tp operator++(int) volatile _NOEXCEPT      {return fetch_add(_Tp(1));}
941    _LIBCPP_INLINE_VISIBILITY
942    _Tp operator++(int) _NOEXCEPT               {return fetch_add(_Tp(1));}
943    _LIBCPP_INLINE_VISIBILITY
944    _Tp operator--(int) volatile _NOEXCEPT      {return fetch_sub(_Tp(1));}
945    _LIBCPP_INLINE_VISIBILITY
946    _Tp operator--(int) _NOEXCEPT               {return fetch_sub(_Tp(1));}
947    _LIBCPP_INLINE_VISIBILITY
948    _Tp operator++() volatile _NOEXCEPT         {return fetch_add(_Tp(1)) + _Tp(1);}
949    _LIBCPP_INLINE_VISIBILITY
950    _Tp operator++() _NOEXCEPT                  {return fetch_add(_Tp(1)) + _Tp(1);}
951    _LIBCPP_INLINE_VISIBILITY
952    _Tp operator--() volatile _NOEXCEPT         {return fetch_sub(_Tp(1)) - _Tp(1);}
953    _LIBCPP_INLINE_VISIBILITY
954    _Tp operator--() _NOEXCEPT                  {return fetch_sub(_Tp(1)) - _Tp(1);}
955    _LIBCPP_INLINE_VISIBILITY
956    _Tp operator+=(_Tp __op) volatile _NOEXCEPT {return fetch_add(__op) + __op;}
957    _LIBCPP_INLINE_VISIBILITY
958    _Tp operator+=(_Tp __op) _NOEXCEPT          {return fetch_add(__op) + __op;}
959    _LIBCPP_INLINE_VISIBILITY
960    _Tp operator-=(_Tp __op) volatile _NOEXCEPT {return fetch_sub(__op) - __op;}
961    _LIBCPP_INLINE_VISIBILITY
962    _Tp operator-=(_Tp __op) _NOEXCEPT          {return fetch_sub(__op) - __op;}
963    _LIBCPP_INLINE_VISIBILITY
964    _Tp operator&=(_Tp __op) volatile _NOEXCEPT {return fetch_and(__op) & __op;}
965    _LIBCPP_INLINE_VISIBILITY
966    _Tp operator&=(_Tp __op) _NOEXCEPT          {return fetch_and(__op) & __op;}
967    _LIBCPP_INLINE_VISIBILITY
968    _Tp operator|=(_Tp __op) volatile _NOEXCEPT {return fetch_or(__op) | __op;}
969    _LIBCPP_INLINE_VISIBILITY
970    _Tp operator|=(_Tp __op) _NOEXCEPT          {return fetch_or(__op) | __op;}
971    _LIBCPP_INLINE_VISIBILITY
972    _Tp operator^=(_Tp __op) volatile _NOEXCEPT {return fetch_xor(__op) ^ __op;}
973    _LIBCPP_INLINE_VISIBILITY
974    _Tp operator^=(_Tp __op) _NOEXCEPT          {return fetch_xor(__op) ^ __op;}
975};
976
977// atomic<T>
978
979template <class _Tp>
980struct atomic
981    : public __atomic_base<_Tp>
982{
983    typedef __atomic_base<_Tp> __base;
984    _LIBCPP_INLINE_VISIBILITY
985    atomic() _NOEXCEPT _LIBCPP_DEFAULT
986    _LIBCPP_INLINE_VISIBILITY
987    _LIBCPP_CONSTEXPR atomic(_Tp __d) _NOEXCEPT : __base(__d) {}
988
989    _LIBCPP_INLINE_VISIBILITY
990    _Tp operator=(_Tp __d) volatile _NOEXCEPT
991        {__base::store(__d); return __d;}
992    _LIBCPP_INLINE_VISIBILITY
993    _Tp operator=(_Tp __d) _NOEXCEPT
994        {__base::store(__d); return __d;}
995};
996
997// atomic<T*>
998
999template <class _Tp>
1000struct atomic<_Tp*>
1001    : public __atomic_base<_Tp*>
1002{
1003    typedef __atomic_base<_Tp*> __base;
1004    _LIBCPP_INLINE_VISIBILITY
1005    atomic() _NOEXCEPT _LIBCPP_DEFAULT
1006    _LIBCPP_INLINE_VISIBILITY
1007    _LIBCPP_CONSTEXPR atomic(_Tp* __d) _NOEXCEPT : __base(__d) {}
1008
1009    _LIBCPP_INLINE_VISIBILITY
1010    _Tp* operator=(_Tp* __d) volatile _NOEXCEPT
1011        {__base::store(__d); return __d;}
1012    _LIBCPP_INLINE_VISIBILITY
1013    _Tp* operator=(_Tp* __d) _NOEXCEPT
1014        {__base::store(__d); return __d;}
1015
1016    _LIBCPP_INLINE_VISIBILITY
1017    _Tp* fetch_add(ptrdiff_t __op, memory_order __m = memory_order_seq_cst)
1018                                                                        volatile _NOEXCEPT
1019        {return __c11_atomic_fetch_add(&this->__a_, __op, __m);}
1020    _LIBCPP_INLINE_VISIBILITY
1021    _Tp* fetch_add(ptrdiff_t __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
1022        {return __c11_atomic_fetch_add(&this->__a_, __op, __m);}
1023    _LIBCPP_INLINE_VISIBILITY
1024    _Tp* fetch_sub(ptrdiff_t __op, memory_order __m = memory_order_seq_cst)
1025                                                                        volatile _NOEXCEPT
1026        {return __c11_atomic_fetch_sub(&this->__a_, __op, __m);}
1027    _LIBCPP_INLINE_VISIBILITY
1028    _Tp* fetch_sub(ptrdiff_t __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
1029        {return __c11_atomic_fetch_sub(&this->__a_, __op, __m);}
1030
1031    _LIBCPP_INLINE_VISIBILITY
1032    _Tp* operator++(int) volatile _NOEXCEPT            {return fetch_add(1);}
1033    _LIBCPP_INLINE_VISIBILITY
1034    _Tp* operator++(int) _NOEXCEPT                     {return fetch_add(1);}
1035    _LIBCPP_INLINE_VISIBILITY
1036    _Tp* operator--(int) volatile _NOEXCEPT            {return fetch_sub(1);}
1037    _LIBCPP_INLINE_VISIBILITY
1038    _Tp* operator--(int) _NOEXCEPT                     {return fetch_sub(1);}
1039    _LIBCPP_INLINE_VISIBILITY
1040    _Tp* operator++() volatile _NOEXCEPT               {return fetch_add(1) + 1;}
1041    _LIBCPP_INLINE_VISIBILITY
1042    _Tp* operator++() _NOEXCEPT                        {return fetch_add(1) + 1;}
1043    _LIBCPP_INLINE_VISIBILITY
1044    _Tp* operator--() volatile _NOEXCEPT               {return fetch_sub(1) - 1;}
1045    _LIBCPP_INLINE_VISIBILITY
1046    _Tp* operator--() _NOEXCEPT                        {return fetch_sub(1) - 1;}
1047    _LIBCPP_INLINE_VISIBILITY
1048    _Tp* operator+=(ptrdiff_t __op) volatile _NOEXCEPT {return fetch_add(__op) + __op;}
1049    _LIBCPP_INLINE_VISIBILITY
1050    _Tp* operator+=(ptrdiff_t __op) _NOEXCEPT          {return fetch_add(__op) + __op;}
1051    _LIBCPP_INLINE_VISIBILITY
1052    _Tp* operator-=(ptrdiff_t __op) volatile _NOEXCEPT {return fetch_sub(__op) - __op;}
1053    _LIBCPP_INLINE_VISIBILITY
1054    _Tp* operator-=(ptrdiff_t __op) _NOEXCEPT          {return fetch_sub(__op) - __op;}
1055};
1056
1057// atomic_is_lock_free
1058
1059template <class _Tp>
1060inline _LIBCPP_INLINE_VISIBILITY
1061bool
1062atomic_is_lock_free(const volatile atomic<_Tp>* __o) _NOEXCEPT
1063{
1064    return __o->is_lock_free();
1065}
1066
1067template <class _Tp>
1068inline _LIBCPP_INLINE_VISIBILITY
1069bool
1070atomic_is_lock_free(const atomic<_Tp>* __o) _NOEXCEPT
1071{
1072    return __o->is_lock_free();
1073}
1074
1075// atomic_init
1076
1077template <class _Tp>
1078inline _LIBCPP_INLINE_VISIBILITY
1079void
1080atomic_init(volatile atomic<_Tp>* __o, _Tp __d) _NOEXCEPT
1081{
1082    __c11_atomic_init(&__o->__a_, __d);
1083}
1084
1085template <class _Tp>
1086inline _LIBCPP_INLINE_VISIBILITY
1087void
1088atomic_init(atomic<_Tp>* __o, _Tp __d) _NOEXCEPT
1089{
1090    __c11_atomic_init(&__o->__a_, __d);
1091}
1092
1093// atomic_store
1094
1095template <class _Tp>
1096inline _LIBCPP_INLINE_VISIBILITY
1097void
1098atomic_store(volatile atomic<_Tp>* __o, _Tp __d) _NOEXCEPT
1099{
1100    __o->store(__d);
1101}
1102
1103template <class _Tp>
1104inline _LIBCPP_INLINE_VISIBILITY
1105void
1106atomic_store(atomic<_Tp>* __o, _Tp __d) _NOEXCEPT
1107{
1108    __o->store(__d);
1109}
1110
1111// atomic_store_explicit
1112
1113template <class _Tp>
1114inline _LIBCPP_INLINE_VISIBILITY
1115void
1116atomic_store_explicit(volatile atomic<_Tp>* __o, _Tp __d, memory_order __m) _NOEXCEPT
1117{
1118    __o->store(__d, __m);
1119}
1120
1121template <class _Tp>
1122inline _LIBCPP_INLINE_VISIBILITY
1123void
1124atomic_store_explicit(atomic<_Tp>* __o, _Tp __d, memory_order __m) _NOEXCEPT
1125{
1126    __o->store(__d, __m);
1127}
1128
1129// atomic_load
1130
1131template <class _Tp>
1132inline _LIBCPP_INLINE_VISIBILITY
1133_Tp
1134atomic_load(const volatile atomic<_Tp>* __o) _NOEXCEPT
1135{
1136    return __o->load();
1137}
1138
1139template <class _Tp>
1140inline _LIBCPP_INLINE_VISIBILITY
1141_Tp
1142atomic_load(const atomic<_Tp>* __o) _NOEXCEPT
1143{
1144    return __o->load();
1145}
1146
1147// atomic_load_explicit
1148
1149template <class _Tp>
1150inline _LIBCPP_INLINE_VISIBILITY
1151_Tp
1152atomic_load_explicit(const volatile atomic<_Tp>* __o, memory_order __m) _NOEXCEPT
1153{
1154    return __o->load(__m);
1155}
1156
1157template <class _Tp>
1158inline _LIBCPP_INLINE_VISIBILITY
1159_Tp
1160atomic_load_explicit(const atomic<_Tp>* __o, memory_order __m) _NOEXCEPT
1161{
1162    return __o->load(__m);
1163}
1164
1165// atomic_exchange
1166
1167template <class _Tp>
1168inline _LIBCPP_INLINE_VISIBILITY
1169_Tp
1170atomic_exchange(volatile atomic<_Tp>* __o, _Tp __d) _NOEXCEPT
1171{
1172    return __o->exchange(__d);
1173}
1174
1175template <class _Tp>
1176inline _LIBCPP_INLINE_VISIBILITY
1177_Tp
1178atomic_exchange(atomic<_Tp>* __o, _Tp __d) _NOEXCEPT
1179{
1180    return __o->exchange(__d);
1181}
1182
1183// atomic_exchange_explicit
1184
1185template <class _Tp>
1186inline _LIBCPP_INLINE_VISIBILITY
1187_Tp
1188atomic_exchange_explicit(volatile atomic<_Tp>* __o, _Tp __d, memory_order __m) _NOEXCEPT
1189{
1190    return __o->exchange(__d, __m);
1191}
1192
1193template <class _Tp>
1194inline _LIBCPP_INLINE_VISIBILITY
1195_Tp
1196atomic_exchange_explicit(atomic<_Tp>* __o, _Tp __d, memory_order __m) _NOEXCEPT
1197{
1198    return __o->exchange(__d, __m);
1199}
1200
1201// atomic_compare_exchange_weak
1202
1203template <class _Tp>
1204inline _LIBCPP_INLINE_VISIBILITY
1205bool
1206atomic_compare_exchange_weak(volatile atomic<_Tp>* __o, _Tp* __e, _Tp __d) _NOEXCEPT
1207{
1208    return __o->compare_exchange_weak(*__e, __d);
1209}
1210
1211template <class _Tp>
1212inline _LIBCPP_INLINE_VISIBILITY
1213bool
1214atomic_compare_exchange_weak(atomic<_Tp>* __o, _Tp* __e, _Tp __d) _NOEXCEPT
1215{
1216    return __o->compare_exchange_weak(*__e, __d);
1217}
1218
1219// atomic_compare_exchange_strong
1220
1221template <class _Tp>
1222inline _LIBCPP_INLINE_VISIBILITY
1223bool
1224atomic_compare_exchange_strong(volatile atomic<_Tp>* __o, _Tp* __e, _Tp __d) _NOEXCEPT
1225{
1226    return __o->compare_exchange_strong(*__e, __d);
1227}
1228
1229template <class _Tp>
1230inline _LIBCPP_INLINE_VISIBILITY
1231bool
1232atomic_compare_exchange_strong(atomic<_Tp>* __o, _Tp* __e, _Tp __d) _NOEXCEPT
1233{
1234    return __o->compare_exchange_strong(*__e, __d);
1235}
1236
1237// atomic_compare_exchange_weak_explicit
1238
1239template <class _Tp>
1240inline _LIBCPP_INLINE_VISIBILITY
1241bool
1242atomic_compare_exchange_weak_explicit(volatile atomic<_Tp>* __o, _Tp* __e,
1243                                      _Tp __d,
1244                                      memory_order __s, memory_order __f) _NOEXCEPT
1245{
1246    return __o->compare_exchange_weak(*__e, __d, __s, __f);
1247}
1248
1249template <class _Tp>
1250inline _LIBCPP_INLINE_VISIBILITY
1251bool
1252atomic_compare_exchange_weak_explicit(atomic<_Tp>* __o, _Tp* __e, _Tp __d,
1253                                      memory_order __s, memory_order __f) _NOEXCEPT
1254{
1255    return __o->compare_exchange_weak(*__e, __d, __s, __f);
1256}
1257
1258// atomic_compare_exchange_strong_explicit
1259
1260template <class _Tp>
1261inline _LIBCPP_INLINE_VISIBILITY
1262bool
1263atomic_compare_exchange_strong_explicit(volatile atomic<_Tp>* __o,
1264                                        _Tp* __e, _Tp __d,
1265                                        memory_order __s, memory_order __f) _NOEXCEPT
1266{
1267    return __o->compare_exchange_strong(*__e, __d, __s, __f);
1268}
1269
1270template <class _Tp>
1271inline _LIBCPP_INLINE_VISIBILITY
1272bool
1273atomic_compare_exchange_strong_explicit(atomic<_Tp>* __o, _Tp* __e,
1274                                        _Tp __d,
1275                                        memory_order __s, memory_order __f) _NOEXCEPT
1276{
1277    return __o->compare_exchange_strong(*__e, __d, __s, __f);
1278}
1279
1280// atomic_fetch_add
1281
1282template <class _Tp>
1283inline _LIBCPP_INLINE_VISIBILITY
1284typename enable_if
1285<
1286    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
1287    _Tp
1288>::type
1289atomic_fetch_add(volatile atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
1290{
1291    return __o->fetch_add(__op);
1292}
1293
1294template <class _Tp>
1295inline _LIBCPP_INLINE_VISIBILITY
1296typename enable_if
1297<
1298    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
1299    _Tp
1300>::type
1301atomic_fetch_add(atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
1302{
1303    return __o->fetch_add(__op);
1304}
1305
1306template <class _Tp>
1307inline _LIBCPP_INLINE_VISIBILITY
1308_Tp*
1309atomic_fetch_add(volatile atomic<_Tp*>* __o, ptrdiff_t __op) _NOEXCEPT
1310{
1311    return __o->fetch_add(__op);
1312}
1313
1314template <class _Tp>
1315inline _LIBCPP_INLINE_VISIBILITY
1316_Tp*
1317atomic_fetch_add(atomic<_Tp*>* __o, ptrdiff_t __op) _NOEXCEPT
1318{
1319    return __o->fetch_add(__op);
1320}
1321
1322// atomic_fetch_add_explicit
1323
1324template <class _Tp>
1325inline _LIBCPP_INLINE_VISIBILITY
1326typename enable_if
1327<
1328    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
1329    _Tp
1330>::type
1331atomic_fetch_add_explicit(volatile atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
1332{
1333    return __o->fetch_add(__op, __m);
1334}
1335
1336template <class _Tp>
1337inline _LIBCPP_INLINE_VISIBILITY
1338typename enable_if
1339<
1340    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
1341    _Tp
1342>::type
1343atomic_fetch_add_explicit(atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
1344{
1345    return __o->fetch_add(__op, __m);
1346}
1347
1348template <class _Tp>
1349inline _LIBCPP_INLINE_VISIBILITY
1350_Tp*
1351atomic_fetch_add_explicit(volatile atomic<_Tp*>* __o, ptrdiff_t __op,
1352                          memory_order __m) _NOEXCEPT
1353{
1354    return __o->fetch_add(__op, __m);
1355}
1356
1357template <class _Tp>
1358inline _LIBCPP_INLINE_VISIBILITY
1359_Tp*
1360atomic_fetch_add_explicit(atomic<_Tp*>* __o, ptrdiff_t __op, memory_order __m) _NOEXCEPT
1361{
1362    return __o->fetch_add(__op, __m);
1363}
1364
1365// atomic_fetch_sub
1366
1367template <class _Tp>
1368inline _LIBCPP_INLINE_VISIBILITY
1369typename enable_if
1370<
1371    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
1372    _Tp
1373>::type
1374atomic_fetch_sub(volatile atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
1375{
1376    return __o->fetch_sub(__op);
1377}
1378
1379template <class _Tp>
1380inline _LIBCPP_INLINE_VISIBILITY
1381typename enable_if
1382<
1383    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
1384    _Tp
1385>::type
1386atomic_fetch_sub(atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
1387{
1388    return __o->fetch_sub(__op);
1389}
1390
1391template <class _Tp>
1392inline _LIBCPP_INLINE_VISIBILITY
1393_Tp*
1394atomic_fetch_sub(volatile atomic<_Tp*>* __o, ptrdiff_t __op) _NOEXCEPT
1395{
1396    return __o->fetch_sub(__op);
1397}
1398
1399template <class _Tp>
1400inline _LIBCPP_INLINE_VISIBILITY
1401_Tp*
1402atomic_fetch_sub(atomic<_Tp*>* __o, ptrdiff_t __op) _NOEXCEPT
1403{
1404    return __o->fetch_sub(__op);
1405}
1406
1407// atomic_fetch_sub_explicit
1408
1409template <class _Tp>
1410inline _LIBCPP_INLINE_VISIBILITY
1411typename enable_if
1412<
1413    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
1414    _Tp
1415>::type
1416atomic_fetch_sub_explicit(volatile atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
1417{
1418    return __o->fetch_sub(__op, __m);
1419}
1420
1421template <class _Tp>
1422inline _LIBCPP_INLINE_VISIBILITY
1423typename enable_if
1424<
1425    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
1426    _Tp
1427>::type
1428atomic_fetch_sub_explicit(atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
1429{
1430    return __o->fetch_sub(__op, __m);
1431}
1432
1433template <class _Tp>
1434inline _LIBCPP_INLINE_VISIBILITY
1435_Tp*
1436atomic_fetch_sub_explicit(volatile atomic<_Tp*>* __o, ptrdiff_t __op,
1437                          memory_order __m) _NOEXCEPT
1438{
1439    return __o->fetch_sub(__op, __m);
1440}
1441
1442template <class _Tp>
1443inline _LIBCPP_INLINE_VISIBILITY
1444_Tp*
1445atomic_fetch_sub_explicit(atomic<_Tp*>* __o, ptrdiff_t __op, memory_order __m) _NOEXCEPT
1446{
1447    return __o->fetch_sub(__op, __m);
1448}
1449
// atomic_fetch_and

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_and(volatile atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
{
    return __o->fetch_and(__op);
}

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_and(atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
{
    return __o->fetch_and(__op);
}

// atomic_fetch_and_explicit

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_and_explicit(volatile atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
{
    return __o->fetch_and(__op, __m);
}

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_and_explicit(atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
{
    return __o->fetch_and(__op, __m);
}

// atomic_fetch_or

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_or(volatile atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
{
    return __o->fetch_or(__op);
}

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_or(atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
{
    return __o->fetch_or(__op);
}

// atomic_fetch_or_explicit

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_or_explicit(volatile atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
{
    return __o->fetch_or(__op, __m);
}

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_or_explicit(atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
{
    return __o->fetch_or(__op, __m);
}

// atomic_fetch_xor

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_xor(volatile atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
{
    return __o->fetch_xor(__op);
}

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_xor(atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
{
    return __o->fetch_xor(__op);
}

// atomic_fetch_xor_explicit

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_xor_explicit(volatile atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
{
    return __o->fetch_xor(__op, __m);
}

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_xor_explicit(atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
{
    return __o->fetch_xor(__op, __m);
}

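// A minimal usage sketch (illustrative only): the fetch_and / fetch_or /
// fetch_xor families are restricted to integral types other than bool and
// return the value held before the operation, which makes them convenient
// for manipulating bit masks.
//
//     std::atomic<unsigned> flags(0x0Fu);
//     unsigned old = std::atomic_fetch_or(&flags, 0x10u);   // old == 0x0F, flags == 0x1F
//     std::atomic_fetch_and_explicit(&flags, ~0x01u, std::memory_order_relaxed);  // clear bit 0
//     std::atomic_fetch_xor(&flags, 0x02u);                 // toggle bit 1
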
// flag type and operations

typedef struct atomic_flag
{
    _Atomic(bool) __a_;

    _LIBCPP_INLINE_VISIBILITY
    bool test_and_set(memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
        {return __c11_atomic_exchange(&__a_, true, __m);}
    _LIBCPP_INLINE_VISIBILITY
    bool test_and_set(memory_order __m = memory_order_seq_cst) _NOEXCEPT
        {return __c11_atomic_exchange(&__a_, true, __m);}
    _LIBCPP_INLINE_VISIBILITY
    void clear(memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
        {__c11_atomic_store(&__a_, false, __m);}
    _LIBCPP_INLINE_VISIBILITY
    void clear(memory_order __m = memory_order_seq_cst) _NOEXCEPT
        {__c11_atomic_store(&__a_, false, __m);}

    _LIBCPP_INLINE_VISIBILITY
#ifndef _LIBCPP_HAS_NO_DEFAULTED_FUNCTIONS
    atomic_flag() _NOEXCEPT = default;
#else
    atomic_flag() _NOEXCEPT : __a_() {}
#endif // _LIBCPP_HAS_NO_DEFAULTED_FUNCTIONS

    _LIBCPP_INLINE_VISIBILITY
    atomic_flag(bool __b) _NOEXCEPT : __a_(__b) {}

#ifndef _LIBCPP_HAS_NO_DELETED_FUNCTIONS
    atomic_flag(const atomic_flag&) = delete;
    atomic_flag& operator=(const atomic_flag&) = delete;
    atomic_flag& operator=(const atomic_flag&) volatile = delete;
#else  // _LIBCPP_HAS_NO_DELETED_FUNCTIONS
private:
    atomic_flag(const atomic_flag&);
    atomic_flag& operator=(const atomic_flag&);
    atomic_flag& operator=(const atomic_flag&) volatile;
#endif  // _LIBCPP_HAS_NO_DELETED_FUNCTIONS
} atomic_flag;

inline _LIBCPP_INLINE_VISIBILITY
bool
atomic_flag_test_and_set(volatile atomic_flag* __o) _NOEXCEPT
{
    return __o->test_and_set();
}

inline _LIBCPP_INLINE_VISIBILITY
bool
atomic_flag_test_and_set(atomic_flag* __o) _NOEXCEPT
{
    return __o->test_and_set();
}

inline _LIBCPP_INLINE_VISIBILITY
bool
atomic_flag_test_and_set_explicit(volatile atomic_flag* __o, memory_order __m) _NOEXCEPT
{
    return __o->test_and_set(__m);
}

inline _LIBCPP_INLINE_VISIBILITY
bool
atomic_flag_test_and_set_explicit(atomic_flag* __o, memory_order __m) _NOEXCEPT
{
    return __o->test_and_set(__m);
}

inline _LIBCPP_INLINE_VISIBILITY
void
atomic_flag_clear(volatile atomic_flag* __o) _NOEXCEPT
{
    __o->clear();
}

inline _LIBCPP_INLINE_VISIBILITY
void
atomic_flag_clear(atomic_flag* __o) _NOEXCEPT
{
    __o->clear();
}

inline _LIBCPP_INLINE_VISIBILITY
void
atomic_flag_clear_explicit(volatile atomic_flag* __o, memory_order __m) _NOEXCEPT
{
    __o->clear(__m);
}

inline _LIBCPP_INLINE_VISIBILITY
void
atomic_flag_clear_explicit(atomic_flag* __o, memory_order __m) _NOEXCEPT
{
    __o->clear(__m);
}

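// A minimal usage sketch (illustrative only): atomic_flag is the one type
// guaranteed to be lock-free and is the usual building block for a simple
// spin lock; test_and_set(acquire) takes the lock and clear(release) drops it.
//
//     std::atomic_flag lock_ = ATOMIC_FLAG_INIT;
//
//     void lock()   { while (lock_.test_and_set(std::memory_order_acquire)) {} }
//     void unlock() { lock_.clear(std::memory_order_release); }
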
// fences

inline _LIBCPP_INLINE_VISIBILITY
void
atomic_thread_fence(memory_order __m) _NOEXCEPT
{
    __c11_atomic_thread_fence(__m);
}

inline _LIBCPP_INLINE_VISIBILITY
void
atomic_signal_fence(memory_order __m) _NOEXCEPT
{
    __c11_atomic_signal_fence(__m);
}

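// A minimal usage sketch (illustrative only): atomic_thread_fence orders the
// relaxed operations around it; a release fence issued before a relaxed store
// synchronizes with an acquire fence issued after a relaxed load that observes
// that store. atomic_signal_fence provides the same ordering, but only with
// respect to a signal handler running in the same thread.
//
//     int data = 0;
//     std::atomic<bool> ready(false);
//
//     // thread 1
//     data = 42;
//     std::atomic_thread_fence(std::memory_order_release);
//     ready.store(true, std::memory_order_relaxed);
//
//     // thread 2
//     while (!ready.load(std::memory_order_relaxed)) {}
//     std::atomic_thread_fence(std::memory_order_acquire);
//     assert(data == 42);
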
// Atomics for standard typedef types

typedef atomic<bool>               atomic_bool;
typedef atomic<char>               atomic_char;
typedef atomic<signed char>        atomic_schar;
typedef atomic<unsigned char>      atomic_uchar;
typedef atomic<short>              atomic_short;
typedef atomic<unsigned short>     atomic_ushort;
typedef atomic<int>                atomic_int;
typedef atomic<unsigned int>       atomic_uint;
typedef atomic<long>               atomic_long;
typedef atomic<unsigned long>      atomic_ulong;
typedef atomic<long long>          atomic_llong;
typedef atomic<unsigned long long> atomic_ullong;
typedef atomic<char16_t>           atomic_char16_t;
typedef atomic<char32_t>           atomic_char32_t;
typedef atomic<wchar_t>            atomic_wchar_t;

typedef atomic<int_least8_t>   atomic_int_least8_t;
typedef atomic<uint_least8_t>  atomic_uint_least8_t;
typedef atomic<int_least16_t>  atomic_int_least16_t;
typedef atomic<uint_least16_t> atomic_uint_least16_t;
typedef atomic<int_least32_t>  atomic_int_least32_t;
typedef atomic<uint_least32_t> atomic_uint_least32_t;
typedef atomic<int_least64_t>  atomic_int_least64_t;
typedef atomic<uint_least64_t> atomic_uint_least64_t;

typedef atomic<int_fast8_t>   atomic_int_fast8_t;
typedef atomic<uint_fast8_t>  atomic_uint_fast8_t;
typedef atomic<int_fast16_t>  atomic_int_fast16_t;
typedef atomic<uint_fast16_t> atomic_uint_fast16_t;
typedef atomic<int_fast32_t>  atomic_int_fast32_t;
typedef atomic<uint_fast32_t> atomic_uint_fast32_t;
typedef atomic<int_fast64_t>  atomic_int_fast64_t;
typedef atomic<uint_fast64_t> atomic_uint_fast64_t;

typedef atomic<intptr_t>  atomic_intptr_t;
typedef atomic<uintptr_t> atomic_uintptr_t;
typedef atomic<size_t>    atomic_size_t;
typedef atomic<ptrdiff_t> atomic_ptrdiff_t;
typedef atomic<intmax_t>  atomic_intmax_t;
typedef atomic<uintmax_t> atomic_uintmax_t;

#define ATOMIC_FLAG_INIT {false}
#define ATOMIC_VAR_INIT(__v) {__v}

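// A minimal usage sketch (illustrative only): ATOMIC_FLAG_INIT and
// ATOMIC_VAR_INIT provide initializers suitable for objects with static
// storage duration.
//
//     std::atomic_flag guard = ATOMIC_FLAG_INIT;       // initialized clear
//     std::atomic<int> counter = ATOMIC_VAR_INIT(5);   // initialized to 5
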
// lock-free property

#define ATOMIC_BOOL_LOCK_FREE      __GCC_ATOMIC_BOOL_LOCK_FREE
#define ATOMIC_CHAR_LOCK_FREE      __GCC_ATOMIC_CHAR_LOCK_FREE
#define ATOMIC_CHAR16_T_LOCK_FREE  __GCC_ATOMIC_CHAR16_T_LOCK_FREE
#define ATOMIC_CHAR32_T_LOCK_FREE  __GCC_ATOMIC_CHAR32_T_LOCK_FREE
#define ATOMIC_WCHAR_T_LOCK_FREE   __GCC_ATOMIC_WCHAR_T_LOCK_FREE
#define ATOMIC_SHORT_LOCK_FREE     __GCC_ATOMIC_SHORT_LOCK_FREE
#define ATOMIC_INT_LOCK_FREE       __GCC_ATOMIC_INT_LOCK_FREE
#define ATOMIC_LONG_LOCK_FREE      __GCC_ATOMIC_LONG_LOCK_FREE
#define ATOMIC_LLONG_LOCK_FREE     __GCC_ATOMIC_LLONG_LOCK_FREE
#define ATOMIC_POINTER_LOCK_FREE   __GCC_ATOMIC_POINTER_LOCK_FREE

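// A minimal usage sketch (illustrative only): each *_LOCK_FREE macro expands
// to 0 (never lock-free), 1 (sometimes lock-free) or 2 (always lock-free) for
// the corresponding type, and may be tested at preprocessing time.
//
//     #if ATOMIC_INT_LOCK_FREE == 2
//     // std::atomic<int> never falls back to a lock on this target
//     #endif
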
#endif  //  !__has_feature(cxx_atomic)

_LIBCPP_END_NAMESPACE_STD

#endif  // _LIBCPP_ATOMIC
