1// -*- C++ -*-
2//===--------------------------- atomic -----------------------------------===//
3//
4//                     The LLVM Compiler Infrastructure
5//
6// This file is distributed under the University of Illinois Open Source
7// License. See LICENSE.TXT for details.
8//
9//===----------------------------------------------------------------------===//
10
11#ifndef _LIBCPP_ATOMIC
12#define _LIBCPP_ATOMIC
13
14/*
15    atomic synopsis
16
17namespace std
18{
19
20// order and consistency
21
22typedef enum memory_order
23{
24    memory_order_relaxed,
25    memory_order_consume,  // load-consume
26    memory_order_acquire,  // load-acquire
27    memory_order_release,  // store-release
28    memory_order_acq_rel,  // store-release load-acquire
29    memory_order_seq_cst   // store-release load-acquire
30} memory_order;
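
Illustrative note (not part of the synopsis): a minimal release/acquire
publication sketch; the names used here are placeholders, not part of
<atomic>.

    #include <atomic>
    #include <cassert>
    #include <thread>

    std::atomic<bool> ready(false);
    int payload = 0;

    void producer() {
        payload = 42;                                  // ordinary write
        ready.store(true, std::memory_order_release);  // publish
    }

    void consumer() {
        while (!ready.load(std::memory_order_acquire)) // synchronizes-with the store
            ;
        assert(payload == 42);                         // the write above is visible
    }

    int main() {
        std::thread t1(producer), t2(consumer);
        t1.join();
        t2.join();
    }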
31
32template <class T> T kill_dependency(T y) noexcept;
33
34// lock-free property
35
36#define ATOMIC_BOOL_LOCK_FREE unspecified
37#define ATOMIC_CHAR_LOCK_FREE unspecified
38#define ATOMIC_CHAR16_T_LOCK_FREE unspecified
39#define ATOMIC_CHAR32_T_LOCK_FREE unspecified
40#define ATOMIC_WCHAR_T_LOCK_FREE unspecified
41#define ATOMIC_SHORT_LOCK_FREE unspecified
42#define ATOMIC_INT_LOCK_FREE unspecified
43#define ATOMIC_LONG_LOCK_FREE unspecified
44#define ATOMIC_LLONG_LOCK_FREE unspecified
45#define ATOMIC_POINTER_LOCK_FREE unspecified
46
47// flag type and operations
48
49typedef struct atomic_flag
50{
51    bool test_and_set(memory_order m = memory_order_seq_cst) volatile noexcept;
52    bool test_and_set(memory_order m = memory_order_seq_cst) noexcept;
53    void clear(memory_order m = memory_order_seq_cst) volatile noexcept;
54    void clear(memory_order m = memory_order_seq_cst) noexcept;
55    atomic_flag()  noexcept = default;
56    atomic_flag(const atomic_flag&) = delete;
57    atomic_flag& operator=(const atomic_flag&) = delete;
58    atomic_flag& operator=(const atomic_flag&) volatile = delete;
59} atomic_flag;
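
Illustrative note (not part of the synopsis): atomic_flag is the only type
guaranteed to be lock-free, which makes it suitable for a minimal spinlock.
A sketch with placeholder names:

    #include <atomic>

    std::atomic_flag lock_ = ATOMIC_FLAG_INIT;

    void lock()   { while (lock_.test_and_set(std::memory_order_acquire)) ; }
    void unlock() { lock_.clear(std::memory_order_release); }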
60
61bool
62    atomic_flag_test_and_set(volatile atomic_flag* obj) noexcept;
63
64bool
65    atomic_flag_test_and_set(atomic_flag* obj) noexcept;
66
67bool
68    atomic_flag_test_and_set_explicit(volatile atomic_flag* obj,
69                                      memory_order m) noexcept;
70
71bool
72    atomic_flag_test_and_set_explicit(atomic_flag* obj, memory_order m) noexcept;
73
74void
75    atomic_flag_clear(volatile atomic_flag* obj) noexcept;
76
77void
78    atomic_flag_clear(atomic_flag* obj) noexcept;
79
80void
81    atomic_flag_clear_explicit(volatile atomic_flag* obj, memory_order m) noexcept;
82
83void
84    atomic_flag_clear_explicit(atomic_flag* obj, memory_order m) noexcept;
85
86#define ATOMIC_FLAG_INIT see below
87#define ATOMIC_VAR_INIT(value) see below
88
89template <class T>
90struct atomic
91{
92    bool is_lock_free() const volatile noexcept;
93    bool is_lock_free() const noexcept;
94    void store(T desr, memory_order m = memory_order_seq_cst) volatile noexcept;
95    void store(T desr, memory_order m = memory_order_seq_cst) noexcept;
96    T load(memory_order m = memory_order_seq_cst) const volatile noexcept;
97    T load(memory_order m = memory_order_seq_cst) const noexcept;
98    operator T() const volatile noexcept;
99    operator T() const noexcept;
100    T exchange(T desr, memory_order m = memory_order_seq_cst) volatile noexcept;
101    T exchange(T desr, memory_order m = memory_order_seq_cst) noexcept;
102    bool compare_exchange_weak(T& expc, T desr,
103                               memory_order s, memory_order f) volatile noexcept;
104    bool compare_exchange_weak(T& expc, T desr, memory_order s, memory_order f) noexcept;
105    bool compare_exchange_strong(T& expc, T desr,
106                                 memory_order s, memory_order f) volatile noexcept;
107    bool compare_exchange_strong(T& expc, T desr,
108                                 memory_order s, memory_order f) noexcept;
109    bool compare_exchange_weak(T& expc, T desr,
110                               memory_order m = memory_order_seq_cst) volatile noexcept;
111    bool compare_exchange_weak(T& expc, T desr,
112                               memory_order m = memory_order_seq_cst) noexcept;
113    bool compare_exchange_strong(T& expc, T desr,
114                                memory_order m = memory_order_seq_cst) volatile noexcept;
115    bool compare_exchange_strong(T& expc, T desr,
116                                 memory_order m = memory_order_seq_cst) noexcept;
117
118    atomic() noexcept = default;
119    constexpr atomic(T desr) noexcept;
120    atomic(const atomic&) = delete;
121    atomic& operator=(const atomic&) = delete;
122    atomic& operator=(const atomic&) volatile = delete;
123    T operator=(T) volatile noexcept;
124    T operator=(T) noexcept;
125};
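
Illustrative note (not part of the synopsis): the primary template works for
any trivially copyable type; compare_exchange_weak may fail spuriously, so it
is normally used in a loop. A sketch with a placeholder struct:

    #include <atomic>

    struct Point { int x; int y; };        // trivially copyable

    std::atomic<Point> pt(Point{0, 0});

    void move_right() {
        Point expected = pt.load();
        Point desired;
        do {
            desired = expected;
            desired.x += 1;
        } while (!pt.compare_exchange_weak(expected, desired));
        // on failure, expected has been refreshed with the current value
    }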
126
127template <>
128struct atomic<integral>
129{
130    bool is_lock_free() const volatile noexcept;
131    bool is_lock_free() const noexcept;
132    void store(integral desr, memory_order m = memory_order_seq_cst) volatile noexcept;
133    void store(integral desr, memory_order m = memory_order_seq_cst) noexcept;
134    integral load(memory_order m = memory_order_seq_cst) const volatile noexcept;
135    integral load(memory_order m = memory_order_seq_cst) const noexcept;
136    operator integral() const volatile noexcept;
137    operator integral() const noexcept;
138    integral exchange(integral desr,
139                      memory_order m = memory_order_seq_cst) volatile noexcept;
140    integral exchange(integral desr, memory_order m = memory_order_seq_cst) noexcept;
141    bool compare_exchange_weak(integral& expc, integral desr,
142                               memory_order s, memory_order f) volatile noexcept;
143    bool compare_exchange_weak(integral& expc, integral desr,
144                               memory_order s, memory_order f) noexcept;
145    bool compare_exchange_strong(integral& expc, integral desr,
146                                 memory_order s, memory_order f) volatile noexcept;
147    bool compare_exchange_strong(integral& expc, integral desr,
148                                 memory_order s, memory_order f) noexcept;
149    bool compare_exchange_weak(integral& expc, integral desr,
150                               memory_order m = memory_order_seq_cst) volatile noexcept;
151    bool compare_exchange_weak(integral& expc, integral desr,
152                               memory_order m = memory_order_seq_cst) noexcept;
153    bool compare_exchange_strong(integral& expc, integral desr,
154                                memory_order m = memory_order_seq_cst) volatile noexcept;
155    bool compare_exchange_strong(integral& expc, integral desr,
156                                 memory_order m = memory_order_seq_cst) noexcept;
157
158    integral
159        fetch_add(integral op, memory_order m = memory_order_seq_cst) volatile noexcept;
160    integral fetch_add(integral op, memory_order m = memory_order_seq_cst) noexcept;
161    integral
162        fetch_sub(integral op, memory_order m = memory_order_seq_cst) volatile noexcept;
163    integral fetch_sub(integral op, memory_order m = memory_order_seq_cst) noexcept;
164    integral
165        fetch_and(integral op, memory_order m = memory_order_seq_cst) volatile noexcept;
166    integral fetch_and(integral op, memory_order m = memory_order_seq_cst) noexcept;
167    integral
168        fetch_or(integral op, memory_order m = memory_order_seq_cst) volatile noexcept;
169    integral fetch_or(integral op, memory_order m = memory_order_seq_cst) noexcept;
170    integral
171        fetch_xor(integral op, memory_order m = memory_order_seq_cst) volatile noexcept;
172    integral fetch_xor(integral op, memory_order m = memory_order_seq_cst) noexcept;
173
174    atomic() noexcept = default;
175    constexpr atomic(integral desr) noexcept;
176    atomic(const atomic&) = delete;
177    atomic& operator=(const atomic&) = delete;
178    atomic& operator=(const atomic&) volatile = delete;
179    integral operator=(integral desr) volatile noexcept;
180    integral operator=(integral desr) noexcept;
181
182    integral operator++(int) volatile noexcept;
183    integral operator++(int) noexcept;
184    integral operator--(int) volatile noexcept;
185    integral operator--(int) noexcept;
186    integral operator++() volatile noexcept;
187    integral operator++() noexcept;
188    integral operator--() volatile noexcept;
189    integral operator--() noexcept;
190    integral operator+=(integral op) volatile noexcept;
191    integral operator+=(integral op) noexcept;
192    integral operator-=(integral op) volatile noexcept;
193    integral operator-=(integral op) noexcept;
194    integral operator&=(integral op) volatile noexcept;
195    integral operator&=(integral op) noexcept;
196    integral operator|=(integral op) volatile noexcept;
197    integral operator|=(integral op) noexcept;
198    integral operator^=(integral op) volatile noexcept;
199    integral operator^=(integral op) noexcept;
200};
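
Illustrative note (not part of the synopsis): the integral specialization adds
the fetch-and-modify operations and the corresponding compound operators.
A relaxed event counter, sketched with placeholder names:

    #include <atomic>

    std::atomic<unsigned> hits(0);

    void record_hit() {
        hits.fetch_add(1, std::memory_order_relaxed);  // counting only, no ordering needed
    }

    unsigned snapshot() {
        return hits.load(std::memory_order_relaxed);
    }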
201
202template <class T>
203struct atomic<T*>
204{
205    bool is_lock_free() const volatile noexcept;
206    bool is_lock_free() const noexcept;
207    void store(T* desr, memory_order m = memory_order_seq_cst) volatile noexcept;
208    void store(T* desr, memory_order m = memory_order_seq_cst) noexcept;
209    T* load(memory_order m = memory_order_seq_cst) const volatile noexcept;
210    T* load(memory_order m = memory_order_seq_cst) const noexcept;
211    operator T*() const volatile noexcept;
212    operator T*() const noexcept;
213    T* exchange(T* desr, memory_order m = memory_order_seq_cst) volatile noexcept;
214    T* exchange(T* desr, memory_order m = memory_order_seq_cst) noexcept;
215    bool compare_exchange_weak(T*& expc, T* desr,
216                               memory_order s, memory_order f) volatile noexcept;
217    bool compare_exchange_weak(T*& expc, T* desr,
218                               memory_order s, memory_order f) noexcept;
219    bool compare_exchange_strong(T*& expc, T* desr,
220                                 memory_order s, memory_order f) volatile noexcept;
221    bool compare_exchange_strong(T*& expc, T* desr,
222                                 memory_order s, memory_order f) noexcept;
223    bool compare_exchange_weak(T*& expc, T* desr,
224                               memory_order m = memory_order_seq_cst) volatile noexcept;
225    bool compare_exchange_weak(T*& expc, T* desr,
226                               memory_order m = memory_order_seq_cst) noexcept;
227    bool compare_exchange_strong(T*& expc, T* desr,
228                                memory_order m = memory_order_seq_cst) volatile noexcept;
229    bool compare_exchange_strong(T*& expc, T* desr,
230                                 memory_order m = memory_order_seq_cst) noexcept;
231    T* fetch_add(ptrdiff_t op, memory_order m = memory_order_seq_cst) volatile noexcept;
232    T* fetch_add(ptrdiff_t op, memory_order m = memory_order_seq_cst) noexcept;
233    T* fetch_sub(ptrdiff_t op, memory_order m = memory_order_seq_cst) volatile noexcept;
234    T* fetch_sub(ptrdiff_t op, memory_order m = memory_order_seq_cst) noexcept;
235
236    atomic() noexcept = default;
237    constexpr atomic(T* desr) noexcept;
238    atomic(const atomic&) = delete;
239    atomic& operator=(const atomic&) = delete;
240    atomic& operator=(const atomic&) volatile = delete;
241
242    T* operator=(T*) volatile noexcept;
243    T* operator=(T*) noexcept;
244    T* operator++(int) volatile noexcept;
245    T* operator++(int) noexcept;
246    T* operator--(int) volatile noexcept;
247    T* operator--(int) noexcept;
248    T* operator++() volatile noexcept;
249    T* operator++() noexcept;
250    T* operator--() volatile noexcept;
251    T* operator--() noexcept;
252    T* operator+=(ptrdiff_t op) volatile noexcept;
253    T* operator+=(ptrdiff_t op) noexcept;
254    T* operator-=(ptrdiff_t op) volatile noexcept;
255    T* operator-=(ptrdiff_t op) noexcept;
256};
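
Illustrative note (not part of the synopsis): the pointer specialization scales
fetch_add/fetch_sub by sizeof(T), exactly like ordinary pointer arithmetic.
A sketch with placeholder names:

    #include <atomic>

    int buffer[64];
    std::atomic<int*> cursor(buffer);

    int* claim_slot() {
        return cursor.fetch_add(1);  // returns the old pointer, advances by one int
    }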
257
258
259template <class T>
260    bool
261    atomic_is_lock_free(const volatile atomic<T>* obj) noexcept;
262
263template <class T>
264    bool
265    atomic_is_lock_free(const atomic<T>* obj) noexcept;
266
267template <class T>
268    void
269    atomic_init(volatile atomic<T>* obj, T desr) noexcept;
270
271template <class T>
272    void
273    atomic_init(atomic<T>* obj, T desr) noexcept;
274
275template <class T>
276    void
277    atomic_store(volatile atomic<T>* obj, T desr) noexcept;
278
279template <class T>
280    void
281    atomic_store(atomic<T>* obj, T desr) noexcept;
282
283template <class T>
284    void
285    atomic_store_explicit(volatile atomic<T>* obj, T desr, memory_order m) noexcept;
286
287template <class T>
288    void
289    atomic_store_explicit(atomic<T>* obj, T desr, memory_order m) noexcept;
290
291template <class T>
292    T
293    atomic_load(const volatile atomic<T>* obj) noexcept;
294
295template <class T>
296    T
297    atomic_load(const atomic<T>* obj) noexcept;
298
299template <class T>
300    T
301    atomic_load_explicit(const volatile atomic<T>* obj, memory_order m) noexcept;
302
303template <class T>
304    T
305    atomic_load_explicit(const atomic<T>* obj, memory_order m) noexcept;
306
307template <class T>
308    T
309    atomic_exchange(volatile atomic<T>* obj, T desr) noexcept;
310
311template <class T>
312    T
313    atomic_exchange(atomic<T>* obj, T desr) noexcept;
314
315template <class T>
316    T
317    atomic_exchange_explicit(volatile atomic<T>* obj, T desr, memory_order m) noexcept;
318
319template <class T>
320    T
321    atomic_exchange_explicit(atomic<T>* obj, T desr, memory_order m) noexcept;
322
323template <class T>
324    bool
325    atomic_compare_exchange_weak(volatile atomic<T>* obj, T* expc, T desr) noexcept;
326
327template <class T>
328    bool
329    atomic_compare_exchange_weak(atomic<T>* obj, T* expc, T desr) noexcept;
330
331template <class T>
332    bool
333    atomic_compare_exchange_strong(volatile atomic<T>* obj, T* expc, T desr) noexcept;
334
335template <class T>
336    bool
337    atomic_compare_exchange_strong(atomic<T>* obj, T* expc, T desr) noexcept;
338
339template <class T>
340    bool
341    atomic_compare_exchange_weak_explicit(volatile atomic<T>* obj, T* expc,
342                                          T desr,
343                                          memory_order s, memory_order f) noexcept;
344
345template <class T>
346    bool
347    atomic_compare_exchange_weak_explicit(atomic<T>* obj, T* expc, T desr,
348                                          memory_order s, memory_order f) noexcept;
349
350template <class T>
351    bool
352    atomic_compare_exchange_strong_explicit(volatile atomic<T>* obj,
353                                            T* expc, T desr,
354                                            memory_order s, memory_order f) noexcept;
355
356template <class T>
357    bool
358    atomic_compare_exchange_strong_explicit(atomic<T>* obj, T* expc,
359                                            T desr,
360                                            memory_order s, memory_order f) noexcept;
361
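Illustrative note (not part of the synopsis): the free functions mirror the
member functions and exist mainly for C compatibility. A sketch with
placeholder names:

    #include <atomic>

    std::atomic<int> value(0);

    bool try_claim(int desired) {
        int expected = 0;
        return std::atomic_compare_exchange_strong(&value, &expected, desired);
    }
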
362template <class Integral>
363    Integral
364    atomic_fetch_add(volatile atomic<Integral>* obj, Integral op) noexcept;
365
366template <class Integral>
367    Integral
368    atomic_fetch_add(atomic<Integral>* obj, Integral op) noexcept;
369
370template <class Integral>
371    Integral
372    atomic_fetch_add_explicit(volatile atomic<Integral>* obj, Integral op,
373                              memory_order m) noexcept;
374template <class Integral>
375    Integral
376    atomic_fetch_add_explicit(atomic<Integral>* obj, Integral op,
377                              memory_order m) noexcept;
378template <class Integral>
379    Integral
380    atomic_fetch_sub(volatile atomic<Integral>* obj, Integral op) noexcept;
381
382template <class Integral>
383    Integral
384    atomic_fetch_sub(atomic<Integral>* obj, Integral op) noexcept;
385
386template <class Integral>
387    Integral
388    atomic_fetch_sub_explicit(volatile atomic<Integral>* obj, Integral op,
389                              memory_order m) noexcept;
390template <class Integral>
391    Integral
392    atomic_fetch_sub_explicit(atomic<Integral>* obj, Integral op,
393                              memory_order m) noexcept;
394template <class Integral>
395    Integral
396    atomic_fetch_and(volatile atomic<Integral>* obj, Integral op) noexcept;
397
398template <class Integral>
399    Integral
400    atomic_fetch_and(atomic<Integral>* obj, Integral op) noexcept;
401
402template <class Integral>
403    Integral
404    atomic_fetch_and_explicit(volatile atomic<Integral>* obj, Integral op,
405                              memory_order m) noexcept;
406template <class Integral>
407    Integral
408    atomic_fetch_and_explicit(atomic<Integral>* obj, Integral op,
409                              memory_order m) noexcept;
410template <class Integral>
411    Integral
412    atomic_fetch_or(volatile atomic<Integral>* obj, Integral op) noexcept;
413
414template <class Integral>
415    Integral
416    atomic_fetch_or(atomic<Integral>* obj, Integral op) noexcept;
417
418template <class Integral>
419    Integral
420    atomic_fetch_or_explicit(volatile atomic<Integral>* obj, Integral op,
421                             memory_order m) noexcept;
422template <class Integral>
423    Integral
424    atomic_fetch_or_explicit(atomic<Integral>* obj, Integral op,
425                             memory_order m) noexcept;
426template <class Integral>
427    Integral
428    atomic_fetch_xor(volatile atomic<Integral>* obj, Integral op) noexcept;
429
430template <class Integral>
431    Integral
432    atomic_fetch_xor(atomic<Integral>* obj, Integral op) noexcept;
433
434template <class Integral>
435    Integral
436    atomic_fetch_xor_explicit(volatile atomic<Integral>* obj, Integral op,
437                              memory_order m) noexcept;
438template <class Integral>
439    Integral
440    atomic_fetch_xor_explicit(atomic<Integral>* obj, Integral op,
441                              memory_order m) noexcept;
442
443template <class T>
444    T*
445    atomic_fetch_add(volatile atomic<T*>* obj, ptrdiff_t op) noexcept;
446
447template <class T>
448    T*
449    atomic_fetch_add(atomic<T*>* obj, ptrdiff_t op) noexcept;
450
451template <class T>
452    T*
453    atomic_fetch_add_explicit(volatile atomic<T*>* obj, ptrdiff_t op,
454                              memory_order m) noexcept;
455template <class T>
456    T*
457    atomic_fetch_add_explicit(atomic<T*>* obj, ptrdiff_t op, memory_order m) noexcept;
458
459template <class T>
460    T*
461    atomic_fetch_sub(volatile atomic<T*>* obj, ptrdiff_t op) noexcept;
462
463template <class T>
464    T*
465    atomic_fetch_sub(atomic<T*>* obj, ptrdiff_t op) noexcept;
466
467template <class T>
468    T*
469    atomic_fetch_sub_explicit(volatile atomic<T*>* obj, ptrdiff_t op,
470                              memory_order m) noexcept;
471template <class T>
472    T*
473    atomic_fetch_sub_explicit(atomic<T*>* obj, ptrdiff_t op, memory_order m) noexcept;
474
475// Atomics for standard typedef types
476
477typedef atomic<bool>               atomic_bool;
478typedef atomic<char>               atomic_char;
479typedef atomic<signed char>        atomic_schar;
480typedef atomic<unsigned char>      atomic_uchar;
481typedef atomic<short>              atomic_short;
482typedef atomic<unsigned short>     atomic_ushort;
483typedef atomic<int>                atomic_int;
484typedef atomic<unsigned int>       atomic_uint;
485typedef atomic<long>               atomic_long;
486typedef atomic<unsigned long>      atomic_ulong;
487typedef atomic<long long>          atomic_llong;
488typedef atomic<unsigned long long> atomic_ullong;
489typedef atomic<char16_t>           atomic_char16_t;
490typedef atomic<char32_t>           atomic_char32_t;
491typedef atomic<wchar_t>            atomic_wchar_t;
492
493typedef atomic<int_least8_t>   atomic_int_least8_t;
494typedef atomic<uint_least8_t>  atomic_uint_least8_t;
495typedef atomic<int_least16_t>  atomic_int_least16_t;
496typedef atomic<uint_least16_t> atomic_uint_least16_t;
497typedef atomic<int_least32_t>  atomic_int_least32_t;
498typedef atomic<uint_least32_t> atomic_uint_least32_t;
499typedef atomic<int_least64_t>  atomic_int_least64_t;
500typedef atomic<uint_least64_t> atomic_uint_least64_t;
501
502typedef atomic<int_fast8_t>   atomic_int_fast8_t;
503typedef atomic<uint_fast8_t>  atomic_uint_fast8_t;
504typedef atomic<int_fast16_t>  atomic_int_fast16_t;
505typedef atomic<uint_fast16_t> atomic_uint_fast16_t;
506typedef atomic<int_fast32_t>  atomic_int_fast32_t;
507typedef atomic<uint_fast32_t> atomic_uint_fast32_t;
508typedef atomic<int_fast64_t>  atomic_int_fast64_t;
509typedef atomic<uint_fast64_t> atomic_uint_fast64_t;
510
511typedef atomic<intptr_t>  atomic_intptr_t;
512typedef atomic<uintptr_t> atomic_uintptr_t;
513typedef atomic<size_t>    atomic_size_t;
514typedef atomic<ptrdiff_t> atomic_ptrdiff_t;
515typedef atomic<intmax_t>  atomic_intmax_t;
516typedef atomic<uintmax_t> atomic_uintmax_t;
517
518// fences
519
520void atomic_thread_fence(memory_order m) noexcept;
521void atomic_signal_fence(memory_order m) noexcept;
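
Illustrative note (not part of the synopsis): a fence sketch in which one
release fence orders a batch of plain writes before a relaxed store; names
are placeholders.

    #include <atomic>

    int a, b;
    std::atomic<bool> ready(false);

    void producer() {
        a = 1;
        b = 2;
        std::atomic_thread_fence(std::memory_order_release);  // orders the writes above
        ready.store(true, std::memory_order_relaxed);
    }

    void consumer() {
        while (!ready.load(std::memory_order_relaxed))
            ;
        std::atomic_thread_fence(std::memory_order_acquire);  // pairs with the release fence
        // here a == 1 and b == 2 are guaranteed to be visible
    }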
522
523}  // std
524
525*/
526
527#include <__config>
528#include <cstddef>
529#include <cstdint>
530#include <type_traits>
531
532#if !defined(_LIBCPP_HAS_NO_PRAGMA_SYSTEM_HEADER)
533#pragma GCC system_header
534#endif
535
536#ifdef _LIBCPP_HAS_NO_THREADS
537#error <atomic> is not supported on this single-threaded system
538#else // !_LIBCPP_HAS_NO_THREADS
539
540_LIBCPP_BEGIN_NAMESPACE_STD
541
542#if !__has_feature(cxx_atomic) && _GNUC_VER < 407
543#error <atomic> is not implemented
544#else
545
546typedef enum memory_order
547{
548    memory_order_relaxed, memory_order_consume, memory_order_acquire,
549    memory_order_release, memory_order_acq_rel, memory_order_seq_cst
550} memory_order;
551
552#if _GNUC_VER >= 407
553namespace __gcc_atomic {
554template <typename _Tp>
555struct __gcc_atomic_t {
556  __gcc_atomic_t() _NOEXCEPT {}
557  _LIBCPP_CONSTEXPR explicit __gcc_atomic_t(_Tp value) _NOEXCEPT
558    : __a_value(value) {}
559  _Tp __a_value;
560};
561#define _Atomic(x) __gcc_atomic::__gcc_atomic_t<x>
562
563template <typename _Tp> _Tp __create();
564
565template <typename _Tp, typename _Td>
566typename enable_if<sizeof(_Tp()->__a_value = __create<_Td>()), char>::type
567    __test_atomic_assignable(int);
568template <typename _Tp, typename _Up>
569__two __test_atomic_assignable(...);
570
571template <typename _Tp, typename _Td>
572struct __can_assign {
573  static const bool value =
574      sizeof(__test_atomic_assignable<_Tp, _Td>(1)) == sizeof(char);
575};
576
577static inline constexpr int __to_gcc_order(memory_order __order) {
578  // Avoid switch statement to make this a constexpr.
579  return __order == memory_order_relaxed ? __ATOMIC_RELAXED:
580         (__order == memory_order_acquire ? __ATOMIC_ACQUIRE:
581          (__order == memory_order_release ? __ATOMIC_RELEASE:
582           (__order == memory_order_seq_cst ? __ATOMIC_SEQ_CST:
583            (__order == memory_order_acq_rel ? __ATOMIC_ACQ_REL:
584              __ATOMIC_CONSUME))));
585}
586
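// A compare-exchange failure ordering may not be memory_order_release or
// memory_order_acq_rel, so the mapping below demotes those two to
// __ATOMIC_RELAXED and __ATOMIC_ACQUIRE respectively.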
587static inline constexpr int __to_gcc_failure_order(memory_order __order) {
588  // Avoid switch statement to make this a constexpr.
589  return __order == memory_order_relaxed ? __ATOMIC_RELAXED:
590         (__order == memory_order_acquire ? __ATOMIC_ACQUIRE:
591          (__order == memory_order_release ? __ATOMIC_RELAXED:
592           (__order == memory_order_seq_cst ? __ATOMIC_SEQ_CST:
593            (__order == memory_order_acq_rel ? __ATOMIC_ACQUIRE:
594              __ATOMIC_CONSUME))));
595}
596
597} // namespace __gcc_atomic
598
599template <typename _Tp>
600static inline
601typename enable_if<
602    __gcc_atomic::__can_assign<volatile _Atomic(_Tp)*, _Tp>::value>::type
603__c11_atomic_init(volatile _Atomic(_Tp)* __a,  _Tp __val) {
604  __a->__a_value = __val;
605}
606
607template <typename _Tp>
608static inline
609typename enable_if<
610    !__gcc_atomic::__can_assign<volatile _Atomic(_Tp)*, _Tp>::value &&
611     __gcc_atomic::__can_assign<         _Atomic(_Tp)*, _Tp>::value>::type
612__c11_atomic_init(volatile _Atomic(_Tp)* __a,  _Tp __val) {
613  // [atomics.types.generic]p1 guarantees _Tp is trivially copyable. Because
614  // the implicitly-declared operator= is not volatile-qualified, a
615  // byte-by-byte copy through volatile char* is required here.
616  volatile char* to = reinterpret_cast<volatile char*>(&__a->__a_value);
617  volatile char* end = to + sizeof(_Tp);
618  char* from = reinterpret_cast<char*>(&__val);
619  while (to != end) {
620    *to++ = *from++;
621  }
622}
623
624template <typename _Tp>
625static inline void __c11_atomic_init(_Atomic(_Tp)* __a,  _Tp __val) {
626  __a->__a_value = __val;
627}
628
629static inline void __c11_atomic_thread_fence(memory_order __order) {
630  __atomic_thread_fence(__gcc_atomic::__to_gcc_order(__order));
631}
632
633static inline void __c11_atomic_signal_fence(memory_order __order) {
634  __atomic_signal_fence(__gcc_atomic::__to_gcc_order(__order));
635}
636
637template <typename _Tp>
638static inline void __c11_atomic_store(volatile _Atomic(_Tp)* __a,  _Tp __val,
639                                      memory_order __order) {
640  return __atomic_store(&__a->__a_value, &__val,
641                        __gcc_atomic::__to_gcc_order(__order));
642}
643
644template <typename _Tp>
645static inline void __c11_atomic_store(_Atomic(_Tp)* __a,  _Tp __val,
646                                      memory_order __order) {
647  __atomic_store(&__a->__a_value, &__val,
648                 __gcc_atomic::__to_gcc_order(__order));
649}
650
651template <typename _Tp>
652static inline _Tp __c11_atomic_load(volatile _Atomic(_Tp)* __a,
653                                    memory_order __order) {
654  _Tp __ret;
655  __atomic_load(&__a->__a_value, &__ret,
656                __gcc_atomic::__to_gcc_order(__order));
657  return __ret;
658}
659
660template <typename _Tp>
661static inline _Tp __c11_atomic_load(_Atomic(_Tp)* __a, memory_order __order) {
662  _Tp __ret;
663  __atomic_load(&__a->__a_value, &__ret,
664                __gcc_atomic::__to_gcc_order(__order));
665  return __ret;
666}
667
668template <typename _Tp>
669static inline _Tp __c11_atomic_exchange(volatile _Atomic(_Tp)* __a,
670                                        _Tp __value, memory_order __order) {
671  _Tp __ret;
672  __atomic_exchange(&__a->__a_value, &__value, &__ret,
673                    __gcc_atomic::__to_gcc_order(__order));
674  return __ret;
675}
676
677template <typename _Tp>
678static inline _Tp __c11_atomic_exchange(_Atomic(_Tp)* __a, _Tp __value,
679                                        memory_order __order) {
680  _Tp __ret;
681  __atomic_exchange(&__a->__a_value, &__value, &__ret,
682                    __gcc_atomic::__to_gcc_order(__order));
683  return __ret;
684}
685
686template <typename _Tp>
687static inline bool __c11_atomic_compare_exchange_strong(
688    volatile _Atomic(_Tp)* __a, _Tp* __expected, _Tp __value,
689    memory_order __success, memory_order __failure) {
690  return __atomic_compare_exchange(&__a->__a_value, __expected, &__value,
691                                   false,
692                                   __gcc_atomic::__to_gcc_order(__success),
693                                   __gcc_atomic::__to_gcc_failure_order(__failure));
694}
695
696template <typename _Tp>
697static inline bool __c11_atomic_compare_exchange_strong(
698    _Atomic(_Tp)* __a, _Tp* __expected, _Tp __value, memory_order __success,
699    memory_order __failure) {
700  return __atomic_compare_exchange(&__a->__a_value, __expected, &__value,
701                                   false,
702                                   __gcc_atomic::__to_gcc_order(__success),
703                                   __gcc_atomic::__to_gcc_failure_order(__failure));
704}
705
706template <typename _Tp>
707static inline bool __c11_atomic_compare_exchange_weak(
708    volatile _Atomic(_Tp)* __a, _Tp* __expected, _Tp __value,
709    memory_order __success, memory_order __failure) {
710  return __atomic_compare_exchange(&__a->__a_value, __expected, &__value,
711                                   true,
712                                   __gcc_atomic::__to_gcc_order(__success),
713                                   __gcc_atomic::__to_gcc_failure_order(__failure));
714}
715
716template <typename _Tp>
717static inline bool __c11_atomic_compare_exchange_weak(
718    _Atomic(_Tp)* __a, _Tp* __expected, _Tp __value, memory_order __success,
719    memory_order __failure) {
720  return __atomic_compare_exchange(&__a->__a_value, __expected, &__value,
721                                   true,
722                                   __gcc_atomic::__to_gcc_order(__success),
723                                   __gcc_atomic::__to_gcc_failure_order(__failure));
724}
725
726template <typename _Tp>
727struct __skip_amt { enum {value = 1}; };
728
729template <typename _Tp>
730struct __skip_amt<_Tp*> { enum {value = sizeof(_Tp)}; };
731
732// FIXME: Haven't figured out what the spec says about using arrays with
733// atomic_fetch_add. Force a failure rather than creating bad behavior.
734template <typename _Tp>
735struct __skip_amt<_Tp[]> { };
736template <typename _Tp, int n>
737struct __skip_amt<_Tp[n]> { };
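
// Note: __skip_amt scales the delta handed to the underlying builtin so that
// fetch_add/fetch_sub on an _Atomic(_Tp*) move the pointer in whole elements
// (sizeof(_Tp) per unit) rather than in bytes; for integral types the
// multiplier is 1. E.g. fetch_add(1) on an atomic<int*> advances by one int.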
738
739template <typename _Tp, typename _Td>
740static inline _Tp __c11_atomic_fetch_add(volatile _Atomic(_Tp)* __a,
741                                         _Td __delta, memory_order __order) {
742  return __atomic_fetch_add(&__a->__a_value, __delta * __skip_amt<_Tp>::value,
743                            __gcc_atomic::__to_gcc_order(__order));
744}
745
746template <typename _Tp, typename _Td>
747static inline _Tp __c11_atomic_fetch_add(_Atomic(_Tp)* __a, _Td __delta,
748                                         memory_order __order) {
749  return __atomic_fetch_add(&__a->__a_value, __delta * __skip_amt<_Tp>::value,
750                            __gcc_atomic::__to_gcc_order(__order));
751}
752
753template <typename _Tp, typename _Td>
754static inline _Tp __c11_atomic_fetch_sub(volatile _Atomic(_Tp)* __a,
755                                         _Td __delta, memory_order __order) {
756  return __atomic_fetch_sub(&__a->__a_value, __delta * __skip_amt<_Tp>::value,
757                            __gcc_atomic::__to_gcc_order(__order));
758}
759
760template <typename _Tp, typename _Td>
761static inline _Tp __c11_atomic_fetch_sub(_Atomic(_Tp)* __a, _Td __delta,
762                                         memory_order __order) {
763  return __atomic_fetch_sub(&__a->__a_value, __delta * __skip_amt<_Tp>::value,
764                            __gcc_atomic::__to_gcc_order(__order));
765}
766
767template <typename _Tp>
768static inline _Tp __c11_atomic_fetch_and(volatile _Atomic(_Tp)* __a,
769                                         _Tp __pattern, memory_order __order) {
770  return __atomic_fetch_and(&__a->__a_value, __pattern,
771                            __gcc_atomic::__to_gcc_order(__order));
772}
773
774template <typename _Tp>
775static inline _Tp __c11_atomic_fetch_and(_Atomic(_Tp)* __a,
776                                         _Tp __pattern, memory_order __order) {
777  return __atomic_fetch_and(&__a->__a_value, __pattern,
778                            __gcc_atomic::__to_gcc_order(__order));
779}
780
781template <typename _Tp>
782static inline _Tp __c11_atomic_fetch_or(volatile _Atomic(_Tp)* __a,
783                                        _Tp __pattern, memory_order __order) {
784  return __atomic_fetch_or(&__a->__a_value, __pattern,
785                           __gcc_atomic::__to_gcc_order(__order));
786}
787
788template <typename _Tp>
789static inline _Tp __c11_atomic_fetch_or(_Atomic(_Tp)* __a, _Tp __pattern,
790                                        memory_order __order) {
791  return __atomic_fetch_or(&__a->__a_value, __pattern,
792                           __gcc_atomic::__to_gcc_order(__order));
793}
794
795template <typename _Tp>
796static inline _Tp __c11_atomic_fetch_xor(volatile _Atomic(_Tp)* __a,
797                                         _Tp __pattern, memory_order __order) {
798  return __atomic_fetch_xor(&__a->__a_value, __pattern,
799                            __gcc_atomic::__to_gcc_order(__order));
800}
801
802template <typename _Tp>
803static inline _Tp __c11_atomic_fetch_xor(_Atomic(_Tp)* __a, _Tp __pattern,
804                                         memory_order __order) {
805  return __atomic_fetch_xor(&__a->__a_value, __pattern,
806                            __gcc_atomic::__to_gcc_order(__order));
807}
808#endif // _GNUC_VER >= 407
809
810template <class _Tp>
811inline _LIBCPP_INLINE_VISIBILITY
812_Tp
813kill_dependency(_Tp __y) _NOEXCEPT
814{
815    return __y;
816}
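
// Illustrative note: kill_dependency terminates a memory_order_consume
// dependency chain. A sketch with placeholder names:
//
//     extern std::atomic<int*> ptr;                 // placeholder
//     int* p = ptr.load(std::memory_order_consume);
//     int  i = *p;                       // carries a dependency from the load
//     int  j = std::kill_dependency(i);  // j does not carry that dependency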
817
818// general atomic<T>
819
820template <class _Tp, bool = is_integral<_Tp>::value && !is_same<_Tp, bool>::value>
821struct __atomic_base  // false
822{
823    mutable _Atomic(_Tp) __a_;
824
825    _LIBCPP_INLINE_VISIBILITY
826    bool is_lock_free() const volatile _NOEXCEPT
827    {
828#if __has_feature(cxx_atomic)
829    return __c11_atomic_is_lock_free(sizeof(_Tp));
830#else
831    return __atomic_is_lock_free(sizeof(_Tp), 0);
832#endif
833    }
834    _LIBCPP_INLINE_VISIBILITY
835    bool is_lock_free() const _NOEXCEPT
836        {return static_cast<__atomic_base const volatile*>(this)->is_lock_free();}
837    _LIBCPP_INLINE_VISIBILITY
838    void store(_Tp __d, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
839        {__c11_atomic_store(&__a_, __d, __m);}
840    _LIBCPP_INLINE_VISIBILITY
841    void store(_Tp __d, memory_order __m = memory_order_seq_cst) _NOEXCEPT
842        {__c11_atomic_store(&__a_, __d, __m);}
843    _LIBCPP_INLINE_VISIBILITY
844    _Tp load(memory_order __m = memory_order_seq_cst) const volatile _NOEXCEPT
845        {return __c11_atomic_load(&__a_, __m);}
846    _LIBCPP_INLINE_VISIBILITY
847    _Tp load(memory_order __m = memory_order_seq_cst) const _NOEXCEPT
848        {return __c11_atomic_load(&__a_, __m);}
849    _LIBCPP_INLINE_VISIBILITY
850    operator _Tp() const volatile _NOEXCEPT {return load();}
851    _LIBCPP_INLINE_VISIBILITY
852    operator _Tp() const _NOEXCEPT          {return load();}
853    _LIBCPP_INLINE_VISIBILITY
854    _Tp exchange(_Tp __d, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
855        {return __c11_atomic_exchange(&__a_, __d, __m);}
856    _LIBCPP_INLINE_VISIBILITY
857    _Tp exchange(_Tp __d, memory_order __m = memory_order_seq_cst) _NOEXCEPT
858        {return __c11_atomic_exchange(&__a_, __d, __m);}
859    _LIBCPP_INLINE_VISIBILITY
860    bool compare_exchange_weak(_Tp& __e, _Tp __d,
861                               memory_order __s, memory_order __f) volatile _NOEXCEPT
862        {return __c11_atomic_compare_exchange_weak(&__a_, &__e, __d, __s, __f);}
863    _LIBCPP_INLINE_VISIBILITY
864    bool compare_exchange_weak(_Tp& __e, _Tp __d,
865                               memory_order __s, memory_order __f) _NOEXCEPT
866        {return __c11_atomic_compare_exchange_weak(&__a_, &__e, __d, __s, __f);}
867    _LIBCPP_INLINE_VISIBILITY
868    bool compare_exchange_strong(_Tp& __e, _Tp __d,
869                                 memory_order __s, memory_order __f) volatile _NOEXCEPT
870        {return __c11_atomic_compare_exchange_strong(&__a_, &__e, __d, __s, __f);}
871    _LIBCPP_INLINE_VISIBILITY
872    bool compare_exchange_strong(_Tp& __e, _Tp __d,
873                                 memory_order __s, memory_order __f) _NOEXCEPT
874        {return __c11_atomic_compare_exchange_strong(&__a_, &__e, __d, __s, __f);}
875    _LIBCPP_INLINE_VISIBILITY
876    bool compare_exchange_weak(_Tp& __e, _Tp __d,
877                              memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
878        {return __c11_atomic_compare_exchange_weak(&__a_, &__e, __d, __m, __m);}
879    _LIBCPP_INLINE_VISIBILITY
880    bool compare_exchange_weak(_Tp& __e, _Tp __d,
881                               memory_order __m = memory_order_seq_cst) _NOEXCEPT
882        {return __c11_atomic_compare_exchange_weak(&__a_, &__e, __d, __m, __m);}
883    _LIBCPP_INLINE_VISIBILITY
884    bool compare_exchange_strong(_Tp& __e, _Tp __d,
885                              memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
886        {return __c11_atomic_compare_exchange_strong(&__a_, &__e, __d, __m, __m);}
887    _LIBCPP_INLINE_VISIBILITY
888    bool compare_exchange_strong(_Tp& __e, _Tp __d,
889                                 memory_order __m = memory_order_seq_cst) _NOEXCEPT
890        {return __c11_atomic_compare_exchange_strong(&__a_, &__e, __d, __m, __m);}
891
892    _LIBCPP_INLINE_VISIBILITY
893#ifndef _LIBCPP_HAS_NO_DEFAULTED_FUNCTIONS
894    __atomic_base() _NOEXCEPT = default;
895#else
896    __atomic_base() _NOEXCEPT : __a_() {}
897#endif // _LIBCPP_HAS_NO_DEFAULTED_FUNCTIONS
898
899    _LIBCPP_INLINE_VISIBILITY
900    _LIBCPP_CONSTEXPR __atomic_base(_Tp __d) _NOEXCEPT : __a_(__d) {}
901#ifndef _LIBCPP_HAS_NO_DELETED_FUNCTIONS
902    __atomic_base(const __atomic_base&) = delete;
903    __atomic_base& operator=(const __atomic_base&) = delete;
904    __atomic_base& operator=(const __atomic_base&) volatile = delete;
905#else  // _LIBCPP_HAS_NO_DELETED_FUNCTIONS
906private:
907    __atomic_base(const __atomic_base&);
908    __atomic_base& operator=(const __atomic_base&);
909    __atomic_base& operator=(const __atomic_base&) volatile;
910#endif  // _LIBCPP_HAS_NO_DELETED_FUNCTIONS
911};
912
913// atomic<Integral>
914
915template <class _Tp>
916struct __atomic_base<_Tp, true>
917    : public __atomic_base<_Tp, false>
918{
919    typedef __atomic_base<_Tp, false> __base;
920    _LIBCPP_INLINE_VISIBILITY
921    __atomic_base() _NOEXCEPT _LIBCPP_DEFAULT
922    _LIBCPP_INLINE_VISIBILITY
923    _LIBCPP_CONSTEXPR __atomic_base(_Tp __d) _NOEXCEPT : __base(__d) {}
924
925    _LIBCPP_INLINE_VISIBILITY
926    _Tp fetch_add(_Tp __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
927        {return __c11_atomic_fetch_add(&this->__a_, __op, __m);}
928    _LIBCPP_INLINE_VISIBILITY
929    _Tp fetch_add(_Tp __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
930        {return __c11_atomic_fetch_add(&this->__a_, __op, __m);}
931    _LIBCPP_INLINE_VISIBILITY
932    _Tp fetch_sub(_Tp __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
933        {return __c11_atomic_fetch_sub(&this->__a_, __op, __m);}
934    _LIBCPP_INLINE_VISIBILITY
935    _Tp fetch_sub(_Tp __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
936        {return __c11_atomic_fetch_sub(&this->__a_, __op, __m);}
937    _LIBCPP_INLINE_VISIBILITY
938    _Tp fetch_and(_Tp __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
939        {return __c11_atomic_fetch_and(&this->__a_, __op, __m);}
940    _LIBCPP_INLINE_VISIBILITY
941    _Tp fetch_and(_Tp __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
942        {return __c11_atomic_fetch_and(&this->__a_, __op, __m);}
943    _LIBCPP_INLINE_VISIBILITY
944    _Tp fetch_or(_Tp __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
945        {return __c11_atomic_fetch_or(&this->__a_, __op, __m);}
946    _LIBCPP_INLINE_VISIBILITY
947    _Tp fetch_or(_Tp __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
948        {return __c11_atomic_fetch_or(&this->__a_, __op, __m);}
949    _LIBCPP_INLINE_VISIBILITY
950    _Tp fetch_xor(_Tp __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
951        {return __c11_atomic_fetch_xor(&this->__a_, __op, __m);}
952    _LIBCPP_INLINE_VISIBILITY
953    _Tp fetch_xor(_Tp __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
954        {return __c11_atomic_fetch_xor(&this->__a_, __op, __m);}
955
956    _LIBCPP_INLINE_VISIBILITY
957    _Tp operator++(int) volatile _NOEXCEPT      {return fetch_add(_Tp(1));}
958    _LIBCPP_INLINE_VISIBILITY
959    _Tp operator++(int) _NOEXCEPT               {return fetch_add(_Tp(1));}
960    _LIBCPP_INLINE_VISIBILITY
961    _Tp operator--(int) volatile _NOEXCEPT      {return fetch_sub(_Tp(1));}
962    _LIBCPP_INLINE_VISIBILITY
963    _Tp operator--(int) _NOEXCEPT               {return fetch_sub(_Tp(1));}
964    _LIBCPP_INLINE_VISIBILITY
965    _Tp operator++() volatile _NOEXCEPT         {return fetch_add(_Tp(1)) + _Tp(1);}
966    _LIBCPP_INLINE_VISIBILITY
967    _Tp operator++() _NOEXCEPT                  {return fetch_add(_Tp(1)) + _Tp(1);}
968    _LIBCPP_INLINE_VISIBILITY
969    _Tp operator--() volatile _NOEXCEPT         {return fetch_sub(_Tp(1)) - _Tp(1);}
970    _LIBCPP_INLINE_VISIBILITY
971    _Tp operator--() _NOEXCEPT                  {return fetch_sub(_Tp(1)) - _Tp(1);}
972    _LIBCPP_INLINE_VISIBILITY
973    _Tp operator+=(_Tp __op) volatile _NOEXCEPT {return fetch_add(__op) + __op;}
974    _LIBCPP_INLINE_VISIBILITY
975    _Tp operator+=(_Tp __op) _NOEXCEPT          {return fetch_add(__op) + __op;}
976    _LIBCPP_INLINE_VISIBILITY
977    _Tp operator-=(_Tp __op) volatile _NOEXCEPT {return fetch_sub(__op) - __op;}
978    _LIBCPP_INLINE_VISIBILITY
979    _Tp operator-=(_Tp __op) _NOEXCEPT          {return fetch_sub(__op) - __op;}
980    _LIBCPP_INLINE_VISIBILITY
981    _Tp operator&=(_Tp __op) volatile _NOEXCEPT {return fetch_and(__op) & __op;}
982    _LIBCPP_INLINE_VISIBILITY
983    _Tp operator&=(_Tp __op) _NOEXCEPT          {return fetch_and(__op) & __op;}
984    _LIBCPP_INLINE_VISIBILITY
985    _Tp operator|=(_Tp __op) volatile _NOEXCEPT {return fetch_or(__op) | __op;}
986    _LIBCPP_INLINE_VISIBILITY
987    _Tp operator|=(_Tp __op) _NOEXCEPT          {return fetch_or(__op) | __op;}
988    _LIBCPP_INLINE_VISIBILITY
989    _Tp operator^=(_Tp __op) volatile _NOEXCEPT {return fetch_xor(__op) ^ __op;}
990    _LIBCPP_INLINE_VISIBILITY
991    _Tp operator^=(_Tp __op) _NOEXCEPT          {return fetch_xor(__op) ^ __op;}
992};
993
994// atomic<T>
995
996template <class _Tp>
997struct atomic
998    : public __atomic_base<_Tp>
999{
1000    typedef __atomic_base<_Tp> __base;
1001    _LIBCPP_INLINE_VISIBILITY
1002    atomic() _NOEXCEPT _LIBCPP_DEFAULT
1003    _LIBCPP_INLINE_VISIBILITY
1004    _LIBCPP_CONSTEXPR atomic(_Tp __d) _NOEXCEPT : __base(__d) {}
1005
1006    _LIBCPP_INLINE_VISIBILITY
1007    _Tp operator=(_Tp __d) volatile _NOEXCEPT
1008        {__base::store(__d); return __d;}
1009    _LIBCPP_INLINE_VISIBILITY
1010    _Tp operator=(_Tp __d) _NOEXCEPT
1011        {__base::store(__d); return __d;}
1012};
1013
1014// atomic<T*>
1015
1016template <class _Tp>
1017struct atomic<_Tp*>
1018    : public __atomic_base<_Tp*>
1019{
1020    typedef __atomic_base<_Tp*> __base;
1021    _LIBCPP_INLINE_VISIBILITY
1022    atomic() _NOEXCEPT _LIBCPP_DEFAULT
1023    _LIBCPP_INLINE_VISIBILITY
1024    _LIBCPP_CONSTEXPR atomic(_Tp* __d) _NOEXCEPT : __base(__d) {}
1025
1026    _LIBCPP_INLINE_VISIBILITY
1027    _Tp* operator=(_Tp* __d) volatile _NOEXCEPT
1028        {__base::store(__d); return __d;}
1029    _LIBCPP_INLINE_VISIBILITY
1030    _Tp* operator=(_Tp* __d) _NOEXCEPT
1031        {__base::store(__d); return __d;}
1032
1033    _LIBCPP_INLINE_VISIBILITY
1034    _Tp* fetch_add(ptrdiff_t __op, memory_order __m = memory_order_seq_cst)
1035                                                                        volatile _NOEXCEPT
1036        {return __c11_atomic_fetch_add(&this->__a_, __op, __m);}
1037    _LIBCPP_INLINE_VISIBILITY
1038    _Tp* fetch_add(ptrdiff_t __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
1039        {return __c11_atomic_fetch_add(&this->__a_, __op, __m);}
1040    _LIBCPP_INLINE_VISIBILITY
1041    _Tp* fetch_sub(ptrdiff_t __op, memory_order __m = memory_order_seq_cst)
1042                                                                        volatile _NOEXCEPT
1043        {return __c11_atomic_fetch_sub(&this->__a_, __op, __m);}
1044    _LIBCPP_INLINE_VISIBILITY
1045    _Tp* fetch_sub(ptrdiff_t __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
1046        {return __c11_atomic_fetch_sub(&this->__a_, __op, __m);}
1047
1048    _LIBCPP_INLINE_VISIBILITY
1049    _Tp* operator++(int) volatile _NOEXCEPT            {return fetch_add(1);}
1050    _LIBCPP_INLINE_VISIBILITY
1051    _Tp* operator++(int) _NOEXCEPT                     {return fetch_add(1);}
1052    _LIBCPP_INLINE_VISIBILITY
1053    _Tp* operator--(int) volatile _NOEXCEPT            {return fetch_sub(1);}
1054    _LIBCPP_INLINE_VISIBILITY
1055    _Tp* operator--(int) _NOEXCEPT                     {return fetch_sub(1);}
1056    _LIBCPP_INLINE_VISIBILITY
1057    _Tp* operator++() volatile _NOEXCEPT               {return fetch_add(1) + 1;}
1058    _LIBCPP_INLINE_VISIBILITY
1059    _Tp* operator++() _NOEXCEPT                        {return fetch_add(1) + 1;}
1060    _LIBCPP_INLINE_VISIBILITY
1061    _Tp* operator--() volatile _NOEXCEPT               {return fetch_sub(1) - 1;}
1062    _LIBCPP_INLINE_VISIBILITY
1063    _Tp* operator--() _NOEXCEPT                        {return fetch_sub(1) - 1;}
1064    _LIBCPP_INLINE_VISIBILITY
1065    _Tp* operator+=(ptrdiff_t __op) volatile _NOEXCEPT {return fetch_add(__op) + __op;}
1066    _LIBCPP_INLINE_VISIBILITY
1067    _Tp* operator+=(ptrdiff_t __op) _NOEXCEPT          {return fetch_add(__op) + __op;}
1068    _LIBCPP_INLINE_VISIBILITY
1069    _Tp* operator-=(ptrdiff_t __op) volatile _NOEXCEPT {return fetch_sub(__op) - __op;}
1070    _LIBCPP_INLINE_VISIBILITY
1071    _Tp* operator-=(ptrdiff_t __op) _NOEXCEPT          {return fetch_sub(__op) - __op;}
1072};
1073
1074// atomic_is_lock_free
1075
1076template <class _Tp>
1077inline _LIBCPP_INLINE_VISIBILITY
1078bool
1079atomic_is_lock_free(const volatile atomic<_Tp>* __o) _NOEXCEPT
1080{
1081    return __o->is_lock_free();
1082}
1083
1084template <class _Tp>
1085inline _LIBCPP_INLINE_VISIBILITY
1086bool
1087atomic_is_lock_free(const atomic<_Tp>* __o) _NOEXCEPT
1088{
1089    return __o->is_lock_free();
1090}
1091
1092// atomic_init
1093
1094template <class _Tp>
1095inline _LIBCPP_INLINE_VISIBILITY
1096void
1097atomic_init(volatile atomic<_Tp>* __o, _Tp __d) _NOEXCEPT
1098{
1099    __c11_atomic_init(&__o->__a_, __d);
1100}
1101
1102template <class _Tp>
1103inline _LIBCPP_INLINE_VISIBILITY
1104void
1105atomic_init(atomic<_Tp>* __o, _Tp __d) _NOEXCEPT
1106{
1107    __c11_atomic_init(&__o->__a_, __d);
1108}
1109
1110// atomic_store
1111
1112template <class _Tp>
1113inline _LIBCPP_INLINE_VISIBILITY
1114void
1115atomic_store(volatile atomic<_Tp>* __o, _Tp __d) _NOEXCEPT
1116{
1117    __o->store(__d);
1118}
1119
1120template <class _Tp>
1121inline _LIBCPP_INLINE_VISIBILITY
1122void
1123atomic_store(atomic<_Tp>* __o, _Tp __d) _NOEXCEPT
1124{
1125    __o->store(__d);
1126}
1127
1128// atomic_store_explicit
1129
1130template <class _Tp>
1131inline _LIBCPP_INLINE_VISIBILITY
1132void
1133atomic_store_explicit(volatile atomic<_Tp>* __o, _Tp __d, memory_order __m) _NOEXCEPT
1134{
1135    __o->store(__d, __m);
1136}
1137
1138template <class _Tp>
1139inline _LIBCPP_INLINE_VISIBILITY
1140void
1141atomic_store_explicit(atomic<_Tp>* __o, _Tp __d, memory_order __m) _NOEXCEPT
1142{
1143    __o->store(__d, __m);
1144}
1145
1146// atomic_load
1147
1148template <class _Tp>
1149inline _LIBCPP_INLINE_VISIBILITY
1150_Tp
1151atomic_load(const volatile atomic<_Tp>* __o) _NOEXCEPT
1152{
1153    return __o->load();
1154}
1155
1156template <class _Tp>
1157inline _LIBCPP_INLINE_VISIBILITY
1158_Tp
1159atomic_load(const atomic<_Tp>* __o) _NOEXCEPT
1160{
1161    return __o->load();
1162}
1163
1164// atomic_load_explicit
1165
1166template <class _Tp>
1167inline _LIBCPP_INLINE_VISIBILITY
1168_Tp
1169atomic_load_explicit(const volatile atomic<_Tp>* __o, memory_order __m) _NOEXCEPT
1170{
1171    return __o->load(__m);
1172}
1173
1174template <class _Tp>
1175inline _LIBCPP_INLINE_VISIBILITY
1176_Tp
1177atomic_load_explicit(const atomic<_Tp>* __o, memory_order __m) _NOEXCEPT
1178{
1179    return __o->load(__m);
1180}
1181
1182// atomic_exchange
1183
1184template <class _Tp>
1185inline _LIBCPP_INLINE_VISIBILITY
1186_Tp
1187atomic_exchange(volatile atomic<_Tp>* __o, _Tp __d) _NOEXCEPT
1188{
1189    return __o->exchange(__d);
1190}
1191
1192template <class _Tp>
1193inline _LIBCPP_INLINE_VISIBILITY
1194_Tp
1195atomic_exchange(atomic<_Tp>* __o, _Tp __d) _NOEXCEPT
1196{
1197    return __o->exchange(__d);
1198}
1199
1200// atomic_exchange_explicit
1201
1202template <class _Tp>
1203inline _LIBCPP_INLINE_VISIBILITY
1204_Tp
1205atomic_exchange_explicit(volatile atomic<_Tp>* __o, _Tp __d, memory_order __m) _NOEXCEPT
1206{
1207    return __o->exchange(__d, __m);
1208}
1209
1210template <class _Tp>
1211inline _LIBCPP_INLINE_VISIBILITY
1212_Tp
1213atomic_exchange_explicit(atomic<_Tp>* __o, _Tp __d, memory_order __m) _NOEXCEPT
1214{
1215    return __o->exchange(__d, __m);
1216}
1217
1218// atomic_compare_exchange_weak
1219
1220template <class _Tp>
1221inline _LIBCPP_INLINE_VISIBILITY
1222bool
1223atomic_compare_exchange_weak(volatile atomic<_Tp>* __o, _Tp* __e, _Tp __d) _NOEXCEPT
1224{
1225    return __o->compare_exchange_weak(*__e, __d);
1226}
1227
1228template <class _Tp>
1229inline _LIBCPP_INLINE_VISIBILITY
1230bool
1231atomic_compare_exchange_weak(atomic<_Tp>* __o, _Tp* __e, _Tp __d) _NOEXCEPT
1232{
1233    return __o->compare_exchange_weak(*__e, __d);
1234}
1235
1236// atomic_compare_exchange_strong
1237
1238template <class _Tp>
1239inline _LIBCPP_INLINE_VISIBILITY
1240bool
1241atomic_compare_exchange_strong(volatile atomic<_Tp>* __o, _Tp* __e, _Tp __d) _NOEXCEPT
1242{
1243    return __o->compare_exchange_strong(*__e, __d);
1244}
1245
1246template <class _Tp>
1247inline _LIBCPP_INLINE_VISIBILITY
1248bool
1249atomic_compare_exchange_strong(atomic<_Tp>* __o, _Tp* __e, _Tp __d) _NOEXCEPT
1250{
1251    return __o->compare_exchange_strong(*__e, __d);
1252}
1253
1254// atomic_compare_exchange_weak_explicit
1255
1256template <class _Tp>
1257inline _LIBCPP_INLINE_VISIBILITY
1258bool
1259atomic_compare_exchange_weak_explicit(volatile atomic<_Tp>* __o, _Tp* __e,
1260                                      _Tp __d,
1261                                      memory_order __s, memory_order __f) _NOEXCEPT
1262{
1263    return __o->compare_exchange_weak(*__e, __d, __s, __f);
1264}
1265
1266template <class _Tp>
1267inline _LIBCPP_INLINE_VISIBILITY
1268bool
1269atomic_compare_exchange_weak_explicit(atomic<_Tp>* __o, _Tp* __e, _Tp __d,
1270                                      memory_order __s, memory_order __f) _NOEXCEPT
1271{
1272    return __o->compare_exchange_weak(*__e, __d, __s, __f);
1273}
1274
1275// atomic_compare_exchange_strong_explicit
1276
1277template <class _Tp>
1278inline _LIBCPP_INLINE_VISIBILITY
1279bool
1280atomic_compare_exchange_strong_explicit(volatile atomic<_Tp>* __o,
1281                                        _Tp* __e, _Tp __d,
1282                                        memory_order __s, memory_order __f) _NOEXCEPT
1283{
1284    return __o->compare_exchange_strong(*__e, __d, __s, __f);
1285}
1286
1287template <class _Tp>
1288inline _LIBCPP_INLINE_VISIBILITY
1289bool
1290atomic_compare_exchange_strong_explicit(atomic<_Tp>* __o, _Tp* __e,
1291                                        _Tp __d,
1292                                        memory_order __s, memory_order __f) _NOEXCEPT
1293{
1294    return __o->compare_exchange_strong(*__e, __d, __s, __f);
1295}
1296
1297// atomic_fetch_add
1298
1299template <class _Tp>
1300inline _LIBCPP_INLINE_VISIBILITY
1301typename enable_if
1302<
1303    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
1304    _Tp
1305>::type
1306atomic_fetch_add(volatile atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
1307{
1308    return __o->fetch_add(__op);
1309}
1310
1311template <class _Tp>
1312inline _LIBCPP_INLINE_VISIBILITY
1313typename enable_if
1314<
1315    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
1316    _Tp
1317>::type
1318atomic_fetch_add(atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
1319{
1320    return __o->fetch_add(__op);
1321}
1322
1323template <class _Tp>
1324inline _LIBCPP_INLINE_VISIBILITY
1325_Tp*
1326atomic_fetch_add(volatile atomic<_Tp*>* __o, ptrdiff_t __op) _NOEXCEPT
1327{
1328    return __o->fetch_add(__op);
1329}
1330
1331template <class _Tp>
1332inline _LIBCPP_INLINE_VISIBILITY
1333_Tp*
1334atomic_fetch_add(atomic<_Tp*>* __o, ptrdiff_t __op) _NOEXCEPT
1335{
1336    return __o->fetch_add(__op);
1337}
1338
1339// atomic_fetch_add_explicit
1340
1341template <class _Tp>
1342inline _LIBCPP_INLINE_VISIBILITY
1343typename enable_if
1344<
1345    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
1346    _Tp
1347>::type
1348atomic_fetch_add_explicit(volatile atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
1349{
1350    return __o->fetch_add(__op, __m);
1351}
1352
1353template <class _Tp>
1354inline _LIBCPP_INLINE_VISIBILITY
1355typename enable_if
1356<
1357    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
1358    _Tp
1359>::type
1360atomic_fetch_add_explicit(atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
1361{
1362    return __o->fetch_add(__op, __m);
1363}
1364
1365template <class _Tp>
1366inline _LIBCPP_INLINE_VISIBILITY
1367_Tp*
1368atomic_fetch_add_explicit(volatile atomic<_Tp*>* __o, ptrdiff_t __op,
1369                          memory_order __m) _NOEXCEPT
1370{
1371    return __o->fetch_add(__op, __m);
1372}
1373
1374template <class _Tp>
1375inline _LIBCPP_INLINE_VISIBILITY
1376_Tp*
1377atomic_fetch_add_explicit(atomic<_Tp*>* __o, ptrdiff_t __op, memory_order __m) _NOEXCEPT
1378{
1379    return __o->fetch_add(__op, __m);
1380}
1381
1382// atomic_fetch_sub
1383
1384template <class _Tp>
1385inline _LIBCPP_INLINE_VISIBILITY
1386typename enable_if
1387<
1388    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
1389    _Tp
1390>::type
1391atomic_fetch_sub(volatile atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
1392{
1393    return __o->fetch_sub(__op);
1394}
1395
1396template <class _Tp>
1397inline _LIBCPP_INLINE_VISIBILITY
1398typename enable_if
1399<
1400    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
1401    _Tp
1402>::type
1403atomic_fetch_sub(atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
1404{
1405    return __o->fetch_sub(__op);
1406}
1407
1408template <class _Tp>
1409inline _LIBCPP_INLINE_VISIBILITY
1410_Tp*
1411atomic_fetch_sub(volatile atomic<_Tp*>* __o, ptrdiff_t __op) _NOEXCEPT
1412{
1413    return __o->fetch_sub(__op);
1414}
1415
1416template <class _Tp>
1417inline _LIBCPP_INLINE_VISIBILITY
1418_Tp*
1419atomic_fetch_sub(atomic<_Tp*>* __o, ptrdiff_t __op) _NOEXCEPT
1420{
1421    return __o->fetch_sub(__op);
1422}
1423
1424// atomic_fetch_sub_explicit
1425
1426template <class _Tp>
1427inline _LIBCPP_INLINE_VISIBILITY
1428typename enable_if
1429<
1430    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
1431    _Tp
1432>::type
1433atomic_fetch_sub_explicit(volatile atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
1434{
1435    return __o->fetch_sub(__op, __m);
1436}
1437
1438template <class _Tp>
1439inline _LIBCPP_INLINE_VISIBILITY
1440typename enable_if
1441<
1442    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
1443    _Tp
1444>::type
1445atomic_fetch_sub_explicit(atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
1446{
1447    return __o->fetch_sub(__op, __m);
1448}
1449
1450template <class _Tp>
1451inline _LIBCPP_INLINE_VISIBILITY
1452_Tp*
1453atomic_fetch_sub_explicit(volatile atomic<_Tp*>* __o, ptrdiff_t __op,
1454                          memory_order __m) _NOEXCEPT
1455{
1456    return __o->fetch_sub(__op, __m);
1457}
1458
1459template <class _Tp>
1460inline _LIBCPP_INLINE_VISIBILITY
1461_Tp*
1462atomic_fetch_sub_explicit(atomic<_Tp*>* __o, ptrdiff_t __op, memory_order __m) _NOEXCEPT
1463{
1464    return __o->fetch_sub(__op, __m);
1465}
1466
1467// atomic_fetch_and
1468
1469template <class _Tp>
1470inline _LIBCPP_INLINE_VISIBILITY
1471typename enable_if
1472<
1473    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
1474    _Tp
1475>::type
1476atomic_fetch_and(volatile atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
1477{
1478    return __o->fetch_and(__op);
1479}
1480
1481template <class _Tp>
1482inline _LIBCPP_INLINE_VISIBILITY
1483typename enable_if
1484<
1485    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
1486    _Tp
1487>::type
1488atomic_fetch_and(atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
1489{
1490    return __o->fetch_and(__op);
1491}

// atomic_fetch_and_explicit

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_and_explicit(volatile atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
{
    return __o->fetch_and(__op, __m);
}

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_and_explicit(atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
{
    return __o->fetch_and(__op, __m);
}

// atomic_fetch_or

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_or(volatile atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
{
    return __o->fetch_or(__op);
}

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_or(atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
{
    return __o->fetch_or(__op);
}

// atomic_fetch_or_explicit

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_or_explicit(volatile atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
{
    return __o->fetch_or(__op, __m);
}

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_or_explicit(atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
{
    return __o->fetch_or(__op, __m);
}

// atomic_fetch_xor

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_xor(volatile atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
{
    return __o->fetch_xor(__op);
}

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_xor(atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
{
    return __o->fetch_xor(__op);
}

// atomic_fetch_xor_explicit

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_xor_explicit(volatile atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
{
    return __o->fetch_xor(__op, __m);
}

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_xor_explicit(atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
{
    return __o->fetch_xor(__op, __m);
}
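
// Note: the enable_if guards above restrict the integral overloads to
// non-bool integral types, mirroring the member functions of the atomic
// integral specializations.  A typical use of the bitwise forms (flags is an
// assumed name, shown only for illustration):
//
//     std::atomic<unsigned> flags(0);
//     std::atomic_fetch_or(&flags, 0x4u);    // set bit 2, returns the old value
//     std::atomic_fetch_and(&flags, ~0x4u);  // clear bit 2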

// flag type and operations

typedef struct atomic_flag
{
    _Atomic(bool) __a_;

    _LIBCPP_INLINE_VISIBILITY
    bool test_and_set(memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
        {return __c11_atomic_exchange(&__a_, true, __m);}
    _LIBCPP_INLINE_VISIBILITY
    bool test_and_set(memory_order __m = memory_order_seq_cst) _NOEXCEPT
        {return __c11_atomic_exchange(&__a_, true, __m);}
    _LIBCPP_INLINE_VISIBILITY
    void clear(memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
        {__c11_atomic_store(&__a_, false, __m);}
    _LIBCPP_INLINE_VISIBILITY
    void clear(memory_order __m = memory_order_seq_cst) _NOEXCEPT
        {__c11_atomic_store(&__a_, false, __m);}

    _LIBCPP_INLINE_VISIBILITY
#ifndef _LIBCPP_HAS_NO_DEFAULTED_FUNCTIONS
    atomic_flag() _NOEXCEPT = default;
#else
    atomic_flag() _NOEXCEPT : __a_() {}
#endif // _LIBCPP_HAS_NO_DEFAULTED_FUNCTIONS

    _LIBCPP_INLINE_VISIBILITY
    atomic_flag(bool __b) _NOEXCEPT : __a_(__b) {}

#ifndef _LIBCPP_HAS_NO_DELETED_FUNCTIONS
    atomic_flag(const atomic_flag&) = delete;
    atomic_flag& operator=(const atomic_flag&) = delete;
    atomic_flag& operator=(const atomic_flag&) volatile = delete;
#else  // _LIBCPP_HAS_NO_DELETED_FUNCTIONS
private:
    atomic_flag(const atomic_flag&);
    atomic_flag& operator=(const atomic_flag&);
    atomic_flag& operator=(const atomic_flag&) volatile;
#endif  // _LIBCPP_HAS_NO_DELETED_FUNCTIONS
} atomic_flag;
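
// Illustrative usage: atomic_flag is the one type guaranteed to be lock-free,
// which makes it suitable for a minimal spin lock (lock, enter, and leave are
// assumed names for this sketch):
//
//     std::atomic_flag lock = ATOMIC_FLAG_INIT;
//
//     void enter() { while (lock.test_and_set(std::memory_order_acquire)) {} }
//     void leave() { lock.clear(std::memory_order_release); }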

inline _LIBCPP_INLINE_VISIBILITY
bool
atomic_flag_test_and_set(volatile atomic_flag* __o) _NOEXCEPT
{
    return __o->test_and_set();
}

inline _LIBCPP_INLINE_VISIBILITY
bool
atomic_flag_test_and_set(atomic_flag* __o) _NOEXCEPT
{
    return __o->test_and_set();
}

inline _LIBCPP_INLINE_VISIBILITY
bool
atomic_flag_test_and_set_explicit(volatile atomic_flag* __o, memory_order __m) _NOEXCEPT
{
    return __o->test_and_set(__m);
}

inline _LIBCPP_INLINE_VISIBILITY
bool
atomic_flag_test_and_set_explicit(atomic_flag* __o, memory_order __m) _NOEXCEPT
{
    return __o->test_and_set(__m);
}

inline _LIBCPP_INLINE_VISIBILITY
void
atomic_flag_clear(volatile atomic_flag* __o) _NOEXCEPT
{
    __o->clear();
}

inline _LIBCPP_INLINE_VISIBILITY
void
atomic_flag_clear(atomic_flag* __o) _NOEXCEPT
{
    __o->clear();
}

inline _LIBCPP_INLINE_VISIBILITY
void
atomic_flag_clear_explicit(volatile atomic_flag* __o, memory_order __m) _NOEXCEPT
{
    __o->clear(__m);
}

inline _LIBCPP_INLINE_VISIBILITY
void
atomic_flag_clear_explicit(atomic_flag* __o, memory_order __m) _NOEXCEPT
{
    __o->clear(__m);
}
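
// The free functions above are the C-compatible spellings of the members; the
// spin lock sketched earlier could equally be written with them:
//
//     while (std::atomic_flag_test_and_set_explicit(&lock, std::memory_order_acquire)) {}
//     // ...critical section...
//     std::atomic_flag_clear_explicit(&lock, std::memory_order_release);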

// fences

inline _LIBCPP_INLINE_VISIBILITY
void
atomic_thread_fence(memory_order __m) _NOEXCEPT
{
    __c11_atomic_thread_fence(__m);
}

inline _LIBCPP_INLINE_VISIBILITY
void
atomic_signal_fence(memory_order __m) _NOEXCEPT
{
    __c11_atomic_signal_fence(__m);
}
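
// Illustrative usage: atomic_thread_fence establishes ordering that is not
// tied to any particular atomic object.  The fence-based publish pattern,
// assuming `int data;` and `std::atomic<bool> ready(false);`:
//
//     // writer thread
//     data = 42;
//     std::atomic_thread_fence(std::memory_order_release);
//     ready.store(true, std::memory_order_relaxed);
//
//     // reader thread
//     if (ready.load(std::memory_order_relaxed)) {
//         std::atomic_thread_fence(std::memory_order_acquire);
//         // data == 42 is guaranteed to be visible here
//     }
//
// atomic_signal_fence constrains only compiler reordering; it is intended for
// synchronizing with a signal handler running on the same thread.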

// Atomics for standard typedef types

typedef atomic<bool>               atomic_bool;
typedef atomic<char>               atomic_char;
typedef atomic<signed char>        atomic_schar;
typedef atomic<unsigned char>      atomic_uchar;
typedef atomic<short>              atomic_short;
typedef atomic<unsigned short>     atomic_ushort;
typedef atomic<int>                atomic_int;
typedef atomic<unsigned int>       atomic_uint;
typedef atomic<long>               atomic_long;
typedef atomic<unsigned long>      atomic_ulong;
typedef atomic<long long>          atomic_llong;
typedef atomic<unsigned long long> atomic_ullong;
typedef atomic<char16_t>           atomic_char16_t;
typedef atomic<char32_t>           atomic_char32_t;
typedef atomic<wchar_t>            atomic_wchar_t;

typedef atomic<int_least8_t>   atomic_int_least8_t;
typedef atomic<uint_least8_t>  atomic_uint_least8_t;
typedef atomic<int_least16_t>  atomic_int_least16_t;
typedef atomic<uint_least16_t> atomic_uint_least16_t;
typedef atomic<int_least32_t>  atomic_int_least32_t;
typedef atomic<uint_least32_t> atomic_uint_least32_t;
typedef atomic<int_least64_t>  atomic_int_least64_t;
typedef atomic<uint_least64_t> atomic_uint_least64_t;

typedef atomic<int_fast8_t>   atomic_int_fast8_t;
typedef atomic<uint_fast8_t>  atomic_uint_fast8_t;
typedef atomic<int_fast16_t>  atomic_int_fast16_t;
typedef atomic<uint_fast16_t> atomic_uint_fast16_t;
typedef atomic<int_fast32_t>  atomic_int_fast32_t;
typedef atomic<uint_fast32_t> atomic_uint_fast32_t;
typedef atomic<int_fast64_t>  atomic_int_fast64_t;
typedef atomic<uint_fast64_t> atomic_uint_fast64_t;

typedef atomic<intptr_t>  atomic_intptr_t;
typedef atomic<uintptr_t> atomic_uintptr_t;
typedef atomic<size_t>    atomic_size_t;
typedef atomic<ptrdiff_t> atomic_ptrdiff_t;
typedef atomic<intmax_t>  atomic_intmax_t;
typedef atomic<uintmax_t> atomic_uintmax_t;

#define ATOMIC_FLAG_INIT {false}
#define ATOMIC_VAR_INIT(__v) {__v}
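
// Illustrative usage: the *_INIT macros provide static initialization that is
// also valid in C (guard and hits are assumed names):
//
//     std::atomic_flag guard = ATOMIC_FLAG_INIT;     // starts cleared
//     std::atomic_int  hits  = ATOMIC_VAR_INIT(0);   // counter starting at 0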

// lock-free property

#define ATOMIC_BOOL_LOCK_FREE      __GCC_ATOMIC_BOOL_LOCK_FREE
#define ATOMIC_CHAR_LOCK_FREE      __GCC_ATOMIC_CHAR_LOCK_FREE
#define ATOMIC_CHAR16_T_LOCK_FREE  __GCC_ATOMIC_CHAR16_T_LOCK_FREE
#define ATOMIC_CHAR32_T_LOCK_FREE  __GCC_ATOMIC_CHAR32_T_LOCK_FREE
#define ATOMIC_WCHAR_T_LOCK_FREE   __GCC_ATOMIC_WCHAR_T_LOCK_FREE
#define ATOMIC_SHORT_LOCK_FREE     __GCC_ATOMIC_SHORT_LOCK_FREE
#define ATOMIC_INT_LOCK_FREE       __GCC_ATOMIC_INT_LOCK_FREE
#define ATOMIC_LONG_LOCK_FREE      __GCC_ATOMIC_LONG_LOCK_FREE
#define ATOMIC_LLONG_LOCK_FREE     __GCC_ATOMIC_LLONG_LOCK_FREE
#define ATOMIC_POINTER_LOCK_FREE   __GCC_ATOMIC_POINTER_LOCK_FREE
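
// Each *_LOCK_FREE macro expands to 0 (never lock-free), 1 (sometimes
// lock-free) or 2 (always lock-free), so an implementation can be selected at
// preprocessing time, for example:
//
//     #if ATOMIC_INT_LOCK_FREE == 2
//     // fast path: atomic<int> never takes a lock
//     #endif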

#endif  //  !__has_feature(cxx_atomic)

_LIBCPP_END_NAMESPACE_STD

#endif  // !_LIBCPP_HAS_NO_THREADS

#endif  // _LIBCPP_ATOMIC
