• Home
  • Line#
  • Scopes#
  • Navigate#
  • Raw
  • Download
1// -*- C++ -*-
2//===--------------------------- atomic -----------------------------------===//
3//
4//                     The LLVM Compiler Infrastructure
5//
6// This file is distributed under the University of Illinois Open Source
7// License. See LICENSE.TXT for details.
8//
9//===----------------------------------------------------------------------===//
10
11#ifndef _LIBCPP_ATOMIC
12#define _LIBCPP_ATOMIC
13
14/*
15    atomic synopsis
16
17namespace std
18{
19
20// feature test macro
21
22#define __cpp_lib_atomic_is_always_lock_free // as specified by SG10
23
24// order and consistency
25
26typedef enum memory_order
27{
28    memory_order_relaxed,
29    memory_order_consume,  // load-consume
30    memory_order_acquire,  // load-acquire
31    memory_order_release,  // store-release
32    memory_order_acq_rel,  // store-release load-acquire
33    memory_order_seq_cst   // store-release load-acquire
34} memory_order;
35
36template <class T> T kill_dependency(T y) noexcept;
37
38// lock-free property
39
40#define ATOMIC_BOOL_LOCK_FREE unspecified
41#define ATOMIC_CHAR_LOCK_FREE unspecified
42#define ATOMIC_CHAR16_T_LOCK_FREE unspecified
43#define ATOMIC_CHAR32_T_LOCK_FREE unspecified
44#define ATOMIC_WCHAR_T_LOCK_FREE unspecified
45#define ATOMIC_SHORT_LOCK_FREE unspecified
46#define ATOMIC_INT_LOCK_FREE unspecified
47#define ATOMIC_LONG_LOCK_FREE unspecified
48#define ATOMIC_LLONG_LOCK_FREE unspecified
49#define ATOMIC_POINTER_LOCK_FREE unspecified
50
51// flag type and operations
52
53typedef struct atomic_flag
54{
55    bool test_and_set(memory_order m = memory_order_seq_cst) volatile noexcept;
56    bool test_and_set(memory_order m = memory_order_seq_cst) noexcept;
57    void clear(memory_order m = memory_order_seq_cst) volatile noexcept;
58    void clear(memory_order m = memory_order_seq_cst) noexcept;
59    atomic_flag()  noexcept = default;
60    atomic_flag(const atomic_flag&) = delete;
61    atomic_flag& operator=(const atomic_flag&) = delete;
62    atomic_flag& operator=(const atomic_flag&) volatile = delete;
63} atomic_flag;
64
65bool
66    atomic_flag_test_and_set(volatile atomic_flag* obj) noexcept;
67
68bool
69    atomic_flag_test_and_set(atomic_flag* obj) noexcept;
70
71bool
72    atomic_flag_test_and_set_explicit(volatile atomic_flag* obj,
73                                      memory_order m) noexcept;
74
75bool
76    atomic_flag_test_and_set_explicit(atomic_flag* obj, memory_order m) noexcept;
77
78void
79    atomic_flag_clear(volatile atomic_flag* obj) noexcept;
80
81void
82    atomic_flag_clear(atomic_flag* obj) noexcept;
83
84void
85    atomic_flag_clear_explicit(volatile atomic_flag* obj, memory_order m) noexcept;
86
87void
88    atomic_flag_clear_explicit(atomic_flag* obj, memory_order m) noexcept;
89
90#define ATOMIC_FLAG_INIT see below
91#define ATOMIC_VAR_INIT(value) see below
92
93template <class T>
94struct atomic
95{
96    static constexpr bool is_always_lock_free;
97    bool is_lock_free() const volatile noexcept;
98    bool is_lock_free() const noexcept;
99    void store(T desr, memory_order m = memory_order_seq_cst) volatile noexcept;
100    void store(T desr, memory_order m = memory_order_seq_cst) noexcept;
101    T load(memory_order m = memory_order_seq_cst) const volatile noexcept;
102    T load(memory_order m = memory_order_seq_cst) const noexcept;
103    operator T() const volatile noexcept;
104    operator T() const noexcept;
105    T exchange(T desr, memory_order m = memory_order_seq_cst) volatile noexcept;
106    T exchange(T desr, memory_order m = memory_order_seq_cst) noexcept;
107    bool compare_exchange_weak(T& expc, T desr,
108                               memory_order s, memory_order f) volatile noexcept;
109    bool compare_exchange_weak(T& expc, T desr, memory_order s, memory_order f) noexcept;
110    bool compare_exchange_strong(T& expc, T desr,
111                                 memory_order s, memory_order f) volatile noexcept;
112    bool compare_exchange_strong(T& expc, T desr,
113                                 memory_order s, memory_order f) noexcept;
114    bool compare_exchange_weak(T& expc, T desr,
115                               memory_order m = memory_order_seq_cst) volatile noexcept;
116    bool compare_exchange_weak(T& expc, T desr,
117                               memory_order m = memory_order_seq_cst) noexcept;
118    bool compare_exchange_strong(T& expc, T desr,
119                                memory_order m = memory_order_seq_cst) volatile noexcept;
120    bool compare_exchange_strong(T& expc, T desr,
121                                 memory_order m = memory_order_seq_cst) noexcept;
122
123    atomic() noexcept = default;
124    constexpr atomic(T desr) noexcept;
125    atomic(const atomic&) = delete;
126    atomic& operator=(const atomic&) = delete;
127    atomic& operator=(const atomic&) volatile = delete;
128    T operator=(T) volatile noexcept;
129    T operator=(T) noexcept;
130};
131
132template <>
133struct atomic<integral>
134{
135    static constexpr bool is_always_lock_free;
136    bool is_lock_free() const volatile noexcept;
137    bool is_lock_free() const noexcept;
138    void store(integral desr, memory_order m = memory_order_seq_cst) volatile noexcept;
139    void store(integral desr, memory_order m = memory_order_seq_cst) noexcept;
140    integral load(memory_order m = memory_order_seq_cst) const volatile noexcept;
141    integral load(memory_order m = memory_order_seq_cst) const noexcept;
142    operator integral() const volatile noexcept;
143    operator integral() const noexcept;
144    integral exchange(integral desr,
145                      memory_order m = memory_order_seq_cst) volatile noexcept;
146    integral exchange(integral desr, memory_order m = memory_order_seq_cst) noexcept;
147    bool compare_exchange_weak(integral& expc, integral desr,
148                               memory_order s, memory_order f) volatile noexcept;
149    bool compare_exchange_weak(integral& expc, integral desr,
150                               memory_order s, memory_order f) noexcept;
151    bool compare_exchange_strong(integral& expc, integral desr,
152                                 memory_order s, memory_order f) volatile noexcept;
153    bool compare_exchange_strong(integral& expc, integral desr,
154                                 memory_order s, memory_order f) noexcept;
155    bool compare_exchange_weak(integral& expc, integral desr,
156                               memory_order m = memory_order_seq_cst) volatile noexcept;
157    bool compare_exchange_weak(integral& expc, integral desr,
158                               memory_order m = memory_order_seq_cst) noexcept;
159    bool compare_exchange_strong(integral& expc, integral desr,
160                                memory_order m = memory_order_seq_cst) volatile noexcept;
161    bool compare_exchange_strong(integral& expc, integral desr,
162                                 memory_order m = memory_order_seq_cst) noexcept;
163
164    integral
165        fetch_add(integral op, memory_order m = memory_order_seq_cst) volatile noexcept;
166    integral fetch_add(integral op, memory_order m = memory_order_seq_cst) noexcept;
167    integral
168        fetch_sub(integral op, memory_order m = memory_order_seq_cst) volatile noexcept;
169    integral fetch_sub(integral op, memory_order m = memory_order_seq_cst) noexcept;
170    integral
171        fetch_and(integral op, memory_order m = memory_order_seq_cst) volatile noexcept;
172    integral fetch_and(integral op, memory_order m = memory_order_seq_cst) noexcept;
173    integral
174        fetch_or(integral op, memory_order m = memory_order_seq_cst) volatile noexcept;
175    integral fetch_or(integral op, memory_order m = memory_order_seq_cst) noexcept;
176    integral
177        fetch_xor(integral op, memory_order m = memory_order_seq_cst) volatile noexcept;
178    integral fetch_xor(integral op, memory_order m = memory_order_seq_cst) noexcept;
179
180    atomic() noexcept = default;
181    constexpr atomic(integral desr) noexcept;
182    atomic(const atomic&) = delete;
183    atomic& operator=(const atomic&) = delete;
184    atomic& operator=(const atomic&) volatile = delete;
185    integral operator=(integral desr) volatile noexcept;
186    integral operator=(integral desr) noexcept;
187
188    integral operator++(int) volatile noexcept;
189    integral operator++(int) noexcept;
190    integral operator--(int) volatile noexcept;
191    integral operator--(int) noexcept;
192    integral operator++() volatile noexcept;
193    integral operator++() noexcept;
194    integral operator--() volatile noexcept;
195    integral operator--() noexcept;
196    integral operator+=(integral op) volatile noexcept;
197    integral operator+=(integral op) noexcept;
198    integral operator-=(integral op) volatile noexcept;
199    integral operator-=(integral op) noexcept;
200    integral operator&=(integral op) volatile noexcept;
201    integral operator&=(integral op) noexcept;
202    integral operator|=(integral op) volatile noexcept;
203    integral operator|=(integral op) noexcept;
204    integral operator^=(integral op) volatile noexcept;
205    integral operator^=(integral op) noexcept;
206};
207
208template <class T>
209struct atomic<T*>
210{
211    static constexpr bool is_always_lock_free;
212    bool is_lock_free() const volatile noexcept;
213    bool is_lock_free() const noexcept;
214    void store(T* desr, memory_order m = memory_order_seq_cst) volatile noexcept;
215    void store(T* desr, memory_order m = memory_order_seq_cst) noexcept;
216    T* load(memory_order m = memory_order_seq_cst) const volatile noexcept;
217    T* load(memory_order m = memory_order_seq_cst) const noexcept;
218    operator T*() const volatile noexcept;
219    operator T*() const noexcept;
220    T* exchange(T* desr, memory_order m = memory_order_seq_cst) volatile noexcept;
221    T* exchange(T* desr, memory_order m = memory_order_seq_cst) noexcept;
222    bool compare_exchange_weak(T*& expc, T* desr,
223                               memory_order s, memory_order f) volatile noexcept;
224    bool compare_exchange_weak(T*& expc, T* desr,
225                               memory_order s, memory_order f) noexcept;
226    bool compare_exchange_strong(T*& expc, T* desr,
227                                 memory_order s, memory_order f) volatile noexcept;
228    bool compare_exchange_strong(T*& expc, T* desr,
229                                 memory_order s, memory_order f) noexcept;
230    bool compare_exchange_weak(T*& expc, T* desr,
231                               memory_order m = memory_order_seq_cst) volatile noexcept;
232    bool compare_exchange_weak(T*& expc, T* desr,
233                               memory_order m = memory_order_seq_cst) noexcept;
234    bool compare_exchange_strong(T*& expc, T* desr,
235                                memory_order m = memory_order_seq_cst) volatile noexcept;
236    bool compare_exchange_strong(T*& expc, T* desr,
237                                 memory_order m = memory_order_seq_cst) noexcept;
238    T* fetch_add(ptrdiff_t op, memory_order m = memory_order_seq_cst) volatile noexcept;
239    T* fetch_add(ptrdiff_t op, memory_order m = memory_order_seq_cst) noexcept;
240    T* fetch_sub(ptrdiff_t op, memory_order m = memory_order_seq_cst) volatile noexcept;
241    T* fetch_sub(ptrdiff_t op, memory_order m = memory_order_seq_cst) noexcept;
242
243    atomic() noexcept = default;
244    constexpr atomic(T* desr) noexcept;
245    atomic(const atomic&) = delete;
246    atomic& operator=(const atomic&) = delete;
247    atomic& operator=(const atomic&) volatile = delete;
248
249    T* operator=(T*) volatile noexcept;
250    T* operator=(T*) noexcept;
251    T* operator++(int) volatile noexcept;
252    T* operator++(int) noexcept;
253    T* operator--(int) volatile noexcept;
254    T* operator--(int) noexcept;
255    T* operator++() volatile noexcept;
256    T* operator++() noexcept;
257    T* operator--() volatile noexcept;
258    T* operator--() noexcept;
259    T* operator+=(ptrdiff_t op) volatile noexcept;
260    T* operator+=(ptrdiff_t op) noexcept;
261    T* operator-=(ptrdiff_t op) volatile noexcept;
262    T* operator-=(ptrdiff_t op) noexcept;
263};
264
265
266template <class T>
267    bool
268    atomic_is_lock_free(const volatile atomic<T>* obj) noexcept;
269
270template <class T>
271    bool
272    atomic_is_lock_free(const atomic<T>* obj) noexcept;
273
274template <class T>
275    void
276    atomic_init(volatile atomic<T>* obj, T desr) noexcept;
277
278template <class T>
279    void
280    atomic_init(atomic<T>* obj, T desr) noexcept;
281
282template <class T>
283    void
284    atomic_store(volatile atomic<T>* obj, T desr) noexcept;
285
286template <class T>
287    void
288    atomic_store(atomic<T>* obj, T desr) noexcept;
289
290template <class T>
291    void
292    atomic_store_explicit(volatile atomic<T>* obj, T desr, memory_order m) noexcept;
293
294template <class T>
295    void
296    atomic_store_explicit(atomic<T>* obj, T desr, memory_order m) noexcept;
297
298template <class T>
299    T
300    atomic_load(const volatile atomic<T>* obj) noexcept;
301
302template <class T>
303    T
304    atomic_load(const atomic<T>* obj) noexcept;
305
306template <class T>
307    T
308    atomic_load_explicit(const volatile atomic<T>* obj, memory_order m) noexcept;
309
310template <class T>
311    T
312    atomic_load_explicit(const atomic<T>* obj, memory_order m) noexcept;
313
314template <class T>
315    T
316    atomic_exchange(volatile atomic<T>* obj, T desr) noexcept;
317
318template <class T>
319    T
320    atomic_exchange(atomic<T>* obj, T desr) noexcept;
321
322template <class T>
323    T
324    atomic_exchange_explicit(volatile atomic<T>* obj, T desr, memory_order m) noexcept;
325
326template <class T>
327    T
328    atomic_exchange_explicit(atomic<T>* obj, T desr, memory_order m) noexcept;
329
330template <class T>
331    bool
332    atomic_compare_exchange_weak(volatile atomic<T>* obj, T* expc, T desr) noexcept;
333
334template <class T>
335    bool
336    atomic_compare_exchange_weak(atomic<T>* obj, T* expc, T desr) noexcept;
337
338template <class T>
339    bool
340    atomic_compare_exchange_strong(volatile atomic<T>* obj, T* expc, T desr) noexcept;
341
342template <class T>
343    bool
344    atomic_compare_exchange_strong(atomic<T>* obj, T* expc, T desr) noexcept;
345
346template <class T>
347    bool
348    atomic_compare_exchange_weak_explicit(volatile atomic<T>* obj, T* expc,
349                                          T desr,
350                                          memory_order s, memory_order f) noexcept;
351
352template <class T>
353    bool
354    atomic_compare_exchange_weak_explicit(atomic<T>* obj, T* expc, T desr,
355                                          memory_order s, memory_order f) noexcept;
356
357template <class T>
358    bool
359    atomic_compare_exchange_strong_explicit(volatile atomic<T>* obj,
360                                            T* expc, T desr,
361                                            memory_order s, memory_order f) noexcept;
362
363template <class T>
364    bool
365    atomic_compare_exchange_strong_explicit(atomic<T>* obj, T* expc,
366                                            T desr,
367                                            memory_order s, memory_order f) noexcept;
368
369template <class Integral>
370    Integral
371    atomic_fetch_add(volatile atomic<Integral>* obj, Integral op) noexcept;
372
373template <class Integral>
374    Integral
375    atomic_fetch_add(atomic<Integral>* obj, Integral op) noexcept;
376
377template <class Integral>
378    Integral
379    atomic_fetch_add_explicit(volatile atomic<Integral>* obj, Integral op,
380                              memory_order m) noexcept;
381template <class Integral>
382    Integral
383    atomic_fetch_add_explicit(atomic<Integral>* obj, Integral op,
384                              memory_order m) noexcept;
385template <class Integral>
386    Integral
387    atomic_fetch_sub(volatile atomic<Integral>* obj, Integral op) noexcept;
388
389template <class Integral>
390    Integral
391    atomic_fetch_sub(atomic<Integral>* obj, Integral op) noexcept;
392
393template <class Integral>
394    Integral
395    atomic_fetch_sub_explicit(volatile atomic<Integral>* obj, Integral op,
396                              memory_order m) noexcept;
397template <class Integral>
398    Integral
399    atomic_fetch_sub_explicit(atomic<Integral>* obj, Integral op,
400                              memory_order m) noexcept;
401template <class Integral>
402    Integral
403    atomic_fetch_and(volatile atomic<Integral>* obj, Integral op) noexcept;
404
405template <class Integral>
406    Integral
407    atomic_fetch_and(atomic<Integral>* obj, Integral op) noexcept;
408
409template <class Integral>
410    Integral
411    atomic_fetch_and_explicit(volatile atomic<Integral>* obj, Integral op,
412                              memory_order m) noexcept;
413template <class Integral>
414    Integral
415    atomic_fetch_and_explicit(atomic<Integral>* obj, Integral op,
416                              memory_order m) noexcept;
417template <class Integral>
418    Integral
419    atomic_fetch_or(volatile atomic<Integral>* obj, Integral op) noexcept;
420
421template <class Integral>
422    Integral
423    atomic_fetch_or(atomic<Integral>* obj, Integral op) noexcept;
424
425template <class Integral>
426    Integral
427    atomic_fetch_or_explicit(volatile atomic<Integral>* obj, Integral op,
428                             memory_order m) noexcept;
429template <class Integral>
430    Integral
431    atomic_fetch_or_explicit(atomic<Integral>* obj, Integral op,
432                             memory_order m) noexcept;
433template <class Integral>
434    Integral
435    atomic_fetch_xor(volatile atomic<Integral>* obj, Integral op) noexcept;
436
437template <class Integral>
438    Integral
439    atomic_fetch_xor(atomic<Integral>* obj, Integral op) noexcept;
440
441template <class Integral>
442    Integral
443    atomic_fetch_xor_explicit(volatile atomic<Integral>* obj, Integral op,
444                              memory_order m) noexcept;
445template <class Integral>
446    Integral
447    atomic_fetch_xor_explicit(atomic<Integral>* obj, Integral op,
448                              memory_order m) noexcept;
449
450template <class T>
451    T*
452    atomic_fetch_add(volatile atomic<T*>* obj, ptrdiff_t op) noexcept;
453
454template <class T>
455    T*
456    atomic_fetch_add(atomic<T*>* obj, ptrdiff_t op) noexcept;
457
458template <class T>
459    T*
460    atomic_fetch_add_explicit(volatile atomic<T*>* obj, ptrdiff_t op,
461                              memory_order m) noexcept;
462template <class T>
463    T*
464    atomic_fetch_add_explicit(atomic<T*>* obj, ptrdiff_t op, memory_order m) noexcept;
465
466template <class T>
467    T*
468    atomic_fetch_sub(volatile atomic<T*>* obj, ptrdiff_t op) noexcept;
469
470template <class T>
471    T*
472    atomic_fetch_sub(atomic<T*>* obj, ptrdiff_t op) noexcept;
473
474template <class T>
475    T*
476    atomic_fetch_sub_explicit(volatile atomic<T*>* obj, ptrdiff_t op,
477                              memory_order m) noexcept;
478template <class T>
479    T*
480    atomic_fetch_sub_explicit(atomic<T*>* obj, ptrdiff_t op, memory_order m) noexcept;
481
482// Atomics for standard typedef types
483
484typedef atomic<bool>               atomic_bool;
485typedef atomic<char>               atomic_char;
486typedef atomic<signed char>        atomic_schar;
487typedef atomic<unsigned char>      atomic_uchar;
488typedef atomic<short>              atomic_short;
489typedef atomic<unsigned short>     atomic_ushort;
490typedef atomic<int>                atomic_int;
491typedef atomic<unsigned int>       atomic_uint;
492typedef atomic<long>               atomic_long;
493typedef atomic<unsigned long>      atomic_ulong;
494typedef atomic<long long>          atomic_llong;
495typedef atomic<unsigned long long> atomic_ullong;
496typedef atomic<char16_t>           atomic_char16_t;
497typedef atomic<char32_t>           atomic_char32_t;
498typedef atomic<wchar_t>            atomic_wchar_t;
499
500typedef atomic<int_least8_t>   atomic_int_least8_t;
501typedef atomic<uint_least8_t>  atomic_uint_least8_t;
502typedef atomic<int_least16_t>  atomic_int_least16_t;
503typedef atomic<uint_least16_t> atomic_uint_least16_t;
504typedef atomic<int_least32_t>  atomic_int_least32_t;
505typedef atomic<uint_least32_t> atomic_uint_least32_t;
506typedef atomic<int_least64_t>  atomic_int_least64_t;
507typedef atomic<uint_least64_t> atomic_uint_least64_t;
508
509typedef atomic<int_fast8_t>   atomic_int_fast8_t;
510typedef atomic<uint_fast8_t>  atomic_uint_fast8_t;
511typedef atomic<int_fast16_t>  atomic_int_fast16_t;
512typedef atomic<uint_fast16_t> atomic_uint_fast16_t;
513typedef atomic<int_fast32_t>  atomic_int_fast32_t;
514typedef atomic<uint_fast32_t> atomic_uint_fast32_t;
515typedef atomic<int_fast64_t>  atomic_int_fast64_t;
516typedef atomic<uint_fast64_t> atomic_uint_fast64_t;
517
518typedef atomic<int8_t>   atomic_int8_t;
519typedef atomic<uint8_t>  atomic_uint8_t;
520typedef atomic<int16_t>  atomic_int16_t;
521typedef atomic<uint16_t> atomic_uint16_t;
522typedef atomic<int32_t>  atomic_int32_t;
523typedef atomic<uint32_t> atomic_uint32_t;
524typedef atomic<int64_t>  atomic_int64_t;
525typedef atomic<uint64_t> atomic_uint64_t;
526
527typedef atomic<intptr_t>  atomic_intptr_t;
528typedef atomic<uintptr_t> atomic_uintptr_t;
529typedef atomic<size_t>    atomic_size_t;
530typedef atomic<ptrdiff_t> atomic_ptrdiff_t;
531typedef atomic<intmax_t>  atomic_intmax_t;
532typedef atomic<uintmax_t> atomic_uintmax_t;
533
534// fences
535
536void atomic_thread_fence(memory_order m) noexcept;
537void atomic_signal_fence(memory_order m) noexcept;
538
539}  // std
540
541*/
542
543#include <__config>
544#include <cstddef>
545#include <cstdint>
546#include <type_traits>
547
548#if !defined(_LIBCPP_HAS_NO_PRAGMA_SYSTEM_HEADER)
549#pragma GCC system_header
550#endif
551
552#ifdef _LIBCPP_HAS_NO_THREADS
553#error <atomic> is not supported on this single threaded system
554#endif
555#if !defined(_LIBCPP_HAS_C_ATOMIC_IMP) && !defined(_LIBCPP_HAS_GCC_ATOMIC_IMP)
556#error <atomic> is not implemented
557#endif
558
559#if _LIBCPP_STD_VER > 14
560# define __cpp_lib_atomic_is_always_lock_free 201603L
561#endif
562
// Compile-time diagnostics for memory_order arguments the standard forbids
// for a given operation (C++11 [atomics.types.operations]).  Each macro
// expands to _LIBCPP_DIAGNOSE_WARNING, which flags the call site when the
// condition can be evaluated at compile time.

// A store may not use consume/acquire/acq_rel ordering.
#define _LIBCPP_CHECK_STORE_MEMORY_ORDER(__m) \
  _LIBCPP_DIAGNOSE_WARNING(__m == memory_order_consume || \
                           __m == memory_order_acquire || \
                           __m == memory_order_acq_rel,   \
                        "memory order argument to atomic operation is invalid")

// A load may not use release/acq_rel ordering.
#define _LIBCPP_CHECK_LOAD_MEMORY_ORDER(__m) \
  _LIBCPP_DIAGNOSE_WARNING(__m == memory_order_release || \
                           __m == memory_order_acq_rel,   \
                        "memory order argument to atomic operation is invalid")

// The failure ordering of a compare-exchange may not be release/acq_rel.
#define _LIBCPP_CHECK_EXCHANGE_MEMORY_ORDER(__m, __f) \
  _LIBCPP_DIAGNOSE_WARNING(__f == memory_order_release || \
                           __f == memory_order_acq_rel,   \
                        "memory order argument to atomic operation is invalid")
578
579_LIBCPP_BEGIN_NAMESPACE_STD
580
// C++11 [atomics.order]: the six memory ordering constraints, declared with
// the C-compatible typedef-enum form used throughout this header.
typedef enum memory_order
{
    memory_order_relaxed,  // no ordering constraint
    memory_order_consume,  // load-consume
    memory_order_acquire,  // load-acquire
    memory_order_release,  // store-release
    memory_order_acq_rel,  // acquire + release
    memory_order_seq_cst   // single total order
} memory_order;
586
587#if defined(_LIBCPP_HAS_GCC_ATOMIC_IMP)
588namespace __gcc_atomic {
589template <typename _Tp>
590struct __gcc_atomic_t {
591
592#if _GNUC_VER >= 501
593    static_assert(is_trivially_copyable<_Tp>::value,
594      "std::atomic<Tp> requires that 'Tp' be a trivially copyable type");
595#endif
596
597  _LIBCPP_INLINE_VISIBILITY
598#ifndef _LIBCPP_CXX03_LANG
599    __gcc_atomic_t() _NOEXCEPT = default;
600#else
601    __gcc_atomic_t() _NOEXCEPT : __a_value() {}
602#endif // _LIBCPP_CXX03_LANG
603  _LIBCPP_CONSTEXPR explicit __gcc_atomic_t(_Tp value) _NOEXCEPT
604    : __a_value(value) {}
605  _Tp __a_value;
606};
607#define _Atomic(x) __gcc_atomic::__gcc_atomic_t<x>
608
609template <typename _Tp> _Tp __create();
610
611template <typename _Tp, typename _Td>
612typename enable_if<sizeof(_Tp()->__a_value = __create<_Td>()), char>::type
613    __test_atomic_assignable(int);
614template <typename _Tp, typename _Up>
615__two __test_atomic_assignable(...);
616
617template <typename _Tp, typename _Td>
618struct __can_assign {
619  static const bool value =
620      sizeof(__test_atomic_assignable<_Tp, _Td>(1)) == sizeof(char);
621};
622
623static inline _LIBCPP_CONSTEXPR int __to_gcc_order(memory_order __order) {
624  // Avoid switch statement to make this a constexpr.
625  return __order == memory_order_relaxed ? __ATOMIC_RELAXED:
626         (__order == memory_order_acquire ? __ATOMIC_ACQUIRE:
627          (__order == memory_order_release ? __ATOMIC_RELEASE:
628           (__order == memory_order_seq_cst ? __ATOMIC_SEQ_CST:
629            (__order == memory_order_acq_rel ? __ATOMIC_ACQ_REL:
630              __ATOMIC_CONSUME))));
631}
632
633static inline _LIBCPP_CONSTEXPR int __to_gcc_failure_order(memory_order __order) {
634  // Avoid switch statement to make this a constexpr.
635  return __order == memory_order_relaxed ? __ATOMIC_RELAXED:
636         (__order == memory_order_acquire ? __ATOMIC_ACQUIRE:
637          (__order == memory_order_release ? __ATOMIC_RELAXED:
638           (__order == memory_order_seq_cst ? __ATOMIC_SEQ_CST:
639            (__order == memory_order_acq_rel ? __ATOMIC_ACQUIRE:
640              __ATOMIC_CONSUME))));
641}
642
643} // namespace __gcc_atomic
644
645template <typename _Tp>
646static inline
647typename enable_if<
648    __gcc_atomic::__can_assign<volatile _Atomic(_Tp)*, _Tp>::value>::type
649__c11_atomic_init(volatile _Atomic(_Tp)* __a,  _Tp __val) {
650  __a->__a_value = __val;
651}
652
653template <typename _Tp>
654static inline
655typename enable_if<
656    !__gcc_atomic::__can_assign<volatile _Atomic(_Tp)*, _Tp>::value &&
657     __gcc_atomic::__can_assign<         _Atomic(_Tp)*, _Tp>::value>::type
658__c11_atomic_init(volatile _Atomic(_Tp)* __a,  _Tp __val) {
659  // [atomics.types.generic]p1 guarantees _Tp is trivially copyable. Because
660  // the default operator= in an object is not volatile, a byte-by-byte copy
661  // is required.
662  volatile char* to = reinterpret_cast<volatile char*>(&__a->__a_value);
663  volatile char* end = to + sizeof(_Tp);
664  char* from = reinterpret_cast<char*>(&__val);
665  while (to != end) {
666    *to++ = *from++;
667  }
668}
669
670template <typename _Tp>
671static inline void __c11_atomic_init(_Atomic(_Tp)* __a,  _Tp __val) {
672  __a->__a_value = __val;
673}
674
675static inline void __c11_atomic_thread_fence(memory_order __order) {
676  __atomic_thread_fence(__gcc_atomic::__to_gcc_order(__order));
677}
678
679static inline void __c11_atomic_signal_fence(memory_order __order) {
680  __atomic_signal_fence(__gcc_atomic::__to_gcc_order(__order));
681}
682
683template <typename _Tp>
684static inline void __c11_atomic_store(volatile _Atomic(_Tp)* __a,  _Tp __val,
685                                      memory_order __order) {
686  return __atomic_store(&__a->__a_value, &__val,
687                        __gcc_atomic::__to_gcc_order(__order));
688}
689
690template <typename _Tp>
691static inline void __c11_atomic_store(_Atomic(_Tp)* __a,  _Tp __val,
692                                      memory_order __order) {
693  __atomic_store(&__a->__a_value, &__val,
694                 __gcc_atomic::__to_gcc_order(__order));
695}
696
697template <typename _Tp>
698static inline _Tp __c11_atomic_load(volatile _Atomic(_Tp)* __a,
699                                    memory_order __order) {
700  _Tp __ret;
701  __atomic_load(&__a->__a_value, &__ret,
702                __gcc_atomic::__to_gcc_order(__order));
703  return __ret;
704}
705
706template <typename _Tp>
707static inline _Tp __c11_atomic_load(_Atomic(_Tp)* __a, memory_order __order) {
708  _Tp __ret;
709  __atomic_load(&__a->__a_value, &__ret,
710                __gcc_atomic::__to_gcc_order(__order));
711  return __ret;
712}
713
714template <typename _Tp>
715static inline _Tp __c11_atomic_exchange(volatile _Atomic(_Tp)* __a,
716                                        _Tp __value, memory_order __order) {
717  _Tp __ret;
718  __atomic_exchange(&__a->__a_value, &__value, &__ret,
719                    __gcc_atomic::__to_gcc_order(__order));
720  return __ret;
721}
722
723template <typename _Tp>
724static inline _Tp __c11_atomic_exchange(_Atomic(_Tp)* __a, _Tp __value,
725                                        memory_order __order) {
726  _Tp __ret;
727  __atomic_exchange(&__a->__a_value, &__value, &__ret,
728                    __gcc_atomic::__to_gcc_order(__order));
729  return __ret;
730}
731
732template <typename _Tp>
733static inline bool __c11_atomic_compare_exchange_strong(
734    volatile _Atomic(_Tp)* __a, _Tp* __expected, _Tp __value,
735    memory_order __success, memory_order __failure) {
736  return __atomic_compare_exchange(&__a->__a_value, __expected, &__value,
737                                   false,
738                                   __gcc_atomic::__to_gcc_order(__success),
739                                   __gcc_atomic::__to_gcc_failure_order(__failure));
740}
741
742template <typename _Tp>
743static inline bool __c11_atomic_compare_exchange_strong(
744    _Atomic(_Tp)* __a, _Tp* __expected, _Tp __value, memory_order __success,
745    memory_order __failure) {
746  return __atomic_compare_exchange(&__a->__a_value, __expected, &__value,
747                                   false,
748                                   __gcc_atomic::__to_gcc_order(__success),
749                                   __gcc_atomic::__to_gcc_failure_order(__failure));
750}
751
752template <typename _Tp>
753static inline bool __c11_atomic_compare_exchange_weak(
754    volatile _Atomic(_Tp)* __a, _Tp* __expected, _Tp __value,
755    memory_order __success, memory_order __failure) {
756  return __atomic_compare_exchange(&__a->__a_value, __expected, &__value,
757                                   true,
758                                   __gcc_atomic::__to_gcc_order(__success),
759                                   __gcc_atomic::__to_gcc_failure_order(__failure));
760}
761
762template <typename _Tp>
763static inline bool __c11_atomic_compare_exchange_weak(
764    _Atomic(_Tp)* __a, _Tp* __expected, _Tp __value, memory_order __success,
765    memory_order __failure) {
766  return __atomic_compare_exchange(&__a->__a_value, __expected, &__value,
767                                   true,
768                                   __gcc_atomic::__to_gcc_order(__success),
769                                   __gcc_atomic::__to_gcc_failure_order(__failure));
770}
771
// Scale factor applied to fetch_add/fetch_sub deltas: 1 for arithmetic
// types, sizeof(_Tp) for _Tp* so pointer atomics advance by whole objects.
template <typename _Tp>
struct __skip_amt { enum {value = 1}; };

template <typename _Tp>
struct __skip_amt<_Tp*> { enum {value = sizeof(_Tp)}; };

// FIXME: Haven't figured out what the spec says about using arrays with
// atomic_fetch_add. Force a failure rather than creating bad behavior.
template <typename _Tp>
struct __skip_amt<_Tp[]> { };
template <typename _Tp, int n>
struct __skip_amt<_Tp[n]> { };
784
// fetch_add in terms of the GCC builtin.  The delta is scaled by
// __skip_amt so one helper serves both integral and pointer atomics
// (for _Tp* the unit is sizeof(_Tp)).  Returns the previous value.
template <typename _Tp, typename _Td>
static inline _Tp __c11_atomic_fetch_add(volatile _Atomic(_Tp)* __a,
                                         _Td __delta, memory_order __order) {
  return __atomic_fetch_add(&__a->__a_value, __delta * __skip_amt<_Tp>::value,
                            __gcc_atomic::__to_gcc_order(__order));
}

// Non-volatile overload.
template <typename _Tp, typename _Td>
static inline _Tp __c11_atomic_fetch_add(_Atomic(_Tp)* __a, _Td __delta,
                                         memory_order __order) {
  return __atomic_fetch_add(&__a->__a_value, __delta * __skip_amt<_Tp>::value,
                            __gcc_atomic::__to_gcc_order(__order));
}
798
// fetch_sub in terms of the GCC builtin; delta scaled by __skip_amt as
// for fetch_add.  Returns the previous value.
template <typename _Tp, typename _Td>
static inline _Tp __c11_atomic_fetch_sub(volatile _Atomic(_Tp)* __a,
                                         _Td __delta, memory_order __order) {
  return __atomic_fetch_sub(&__a->__a_value, __delta * __skip_amt<_Tp>::value,
                            __gcc_atomic::__to_gcc_order(__order));
}

// Non-volatile overload.
template <typename _Tp, typename _Td>
static inline _Tp __c11_atomic_fetch_sub(_Atomic(_Tp)* __a, _Td __delta,
                                         memory_order __order) {
  return __atomic_fetch_sub(&__a->__a_value, __delta * __skip_amt<_Tp>::value,
                            __gcc_atomic::__to_gcc_order(__order));
}
812
// Atomic bitwise AND; returns the previous value.  No __skip_amt scaling:
// bitwise ops are only meaningful for integral types.
template <typename _Tp>
static inline _Tp __c11_atomic_fetch_and(volatile _Atomic(_Tp)* __a,
                                         _Tp __pattern, memory_order __order) {
  return __atomic_fetch_and(&__a->__a_value, __pattern,
                            __gcc_atomic::__to_gcc_order(__order));
}

// Non-volatile overload.
template <typename _Tp>
static inline _Tp __c11_atomic_fetch_and(_Atomic(_Tp)* __a,
                                         _Tp __pattern, memory_order __order) {
  return __atomic_fetch_and(&__a->__a_value, __pattern,
                            __gcc_atomic::__to_gcc_order(__order));
}
826
// Atomic bitwise OR; returns the previous value.
template <typename _Tp>
static inline _Tp __c11_atomic_fetch_or(volatile _Atomic(_Tp)* __a,
                                        _Tp __pattern, memory_order __order) {
  return __atomic_fetch_or(&__a->__a_value, __pattern,
                           __gcc_atomic::__to_gcc_order(__order));
}

// Non-volatile overload.
template <typename _Tp>
static inline _Tp __c11_atomic_fetch_or(_Atomic(_Tp)* __a, _Tp __pattern,
                                        memory_order __order) {
  return __atomic_fetch_or(&__a->__a_value, __pattern,
                           __gcc_atomic::__to_gcc_order(__order));
}
840
// Atomic bitwise XOR; returns the previous value.
template <typename _Tp>
static inline _Tp __c11_atomic_fetch_xor(volatile _Atomic(_Tp)* __a,
                                         _Tp __pattern, memory_order __order) {
  return __atomic_fetch_xor(&__a->__a_value, __pattern,
                            __gcc_atomic::__to_gcc_order(__order));
}

// Non-volatile overload.
template <typename _Tp>
static inline _Tp __c11_atomic_fetch_xor(_Atomic(_Tp)* __a, _Tp __pattern,
                                         memory_order __order) {
  return __atomic_fetch_xor(&__a->__a_value, __pattern,
                            __gcc_atomic::__to_gcc_order(__order));
}
854#endif // _LIBCPP_HAS_GCC_ATOMIC_IMP
855
856template <class _Tp>
857inline _LIBCPP_INLINE_VISIBILITY
858_Tp
859kill_dependency(_Tp __y) _NOEXCEPT
860{
861    return __y;
862}
863
// Standard ATOMIC_*_LOCK_FREE feature macros, mapped onto the
// compiler-provided __GCC_ATOMIC_* values (0 = never lock-free,
// 1 = sometimes lock-free, 2 = always lock-free).
#define ATOMIC_BOOL_LOCK_FREE      __GCC_ATOMIC_BOOL_LOCK_FREE
#define ATOMIC_CHAR_LOCK_FREE      __GCC_ATOMIC_CHAR_LOCK_FREE
#define ATOMIC_CHAR16_T_LOCK_FREE  __GCC_ATOMIC_CHAR16_T_LOCK_FREE
#define ATOMIC_CHAR32_T_LOCK_FREE  __GCC_ATOMIC_CHAR32_T_LOCK_FREE
#define ATOMIC_WCHAR_T_LOCK_FREE   __GCC_ATOMIC_WCHAR_T_LOCK_FREE
#define ATOMIC_SHORT_LOCK_FREE     __GCC_ATOMIC_SHORT_LOCK_FREE
#define ATOMIC_INT_LOCK_FREE       __GCC_ATOMIC_INT_LOCK_FREE
#define ATOMIC_LONG_LOCK_FREE      __GCC_ATOMIC_LONG_LOCK_FREE
#define ATOMIC_LLONG_LOCK_FREE     __GCC_ATOMIC_LLONG_LOCK_FREE
#define ATOMIC_POINTER_LOCK_FREE   __GCC_ATOMIC_POINTER_LOCK_FREE
874
875// general atomic<T>
876
877template <class _Tp, bool = is_integral<_Tp>::value && !is_same<_Tp, bool>::value>
878struct __atomic_base  // false
879{
880    mutable _Atomic(_Tp) __a_;
881
882#if defined(__cpp_lib_atomic_is_always_lock_free)
883  static _LIBCPP_CONSTEXPR bool is_always_lock_free = __atomic_always_lock_free(sizeof(__a_), 0);
884#endif
885
886    _LIBCPP_INLINE_VISIBILITY
887    bool is_lock_free() const volatile _NOEXCEPT
888    {
889#if defined(_LIBCPP_HAS_C_ATOMIC_IMP)
890    return __c11_atomic_is_lock_free(sizeof(_Tp));
891#else
892    return __atomic_is_lock_free(sizeof(_Tp), 0);
893#endif
894    }
895    _LIBCPP_INLINE_VISIBILITY
896    bool is_lock_free() const _NOEXCEPT
897        {return static_cast<__atomic_base const volatile*>(this)->is_lock_free();}
898    _LIBCPP_INLINE_VISIBILITY
899    void store(_Tp __d, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
900      _LIBCPP_CHECK_STORE_MEMORY_ORDER(__m)
901        {__c11_atomic_store(&__a_, __d, __m);}
902    _LIBCPP_INLINE_VISIBILITY
903    void store(_Tp __d, memory_order __m = memory_order_seq_cst) _NOEXCEPT
904      _LIBCPP_CHECK_STORE_MEMORY_ORDER(__m)
905        {__c11_atomic_store(&__a_, __d, __m);}
906    _LIBCPP_INLINE_VISIBILITY
907    _Tp load(memory_order __m = memory_order_seq_cst) const volatile _NOEXCEPT
908      _LIBCPP_CHECK_LOAD_MEMORY_ORDER(__m)
909        {return __c11_atomic_load(&__a_, __m);}
910    _LIBCPP_INLINE_VISIBILITY
911    _Tp load(memory_order __m = memory_order_seq_cst) const _NOEXCEPT
912      _LIBCPP_CHECK_LOAD_MEMORY_ORDER(__m)
913        {return __c11_atomic_load(&__a_, __m);}
914    _LIBCPP_INLINE_VISIBILITY
915    operator _Tp() const volatile _NOEXCEPT {return load();}
916    _LIBCPP_INLINE_VISIBILITY
917    operator _Tp() const _NOEXCEPT          {return load();}
918    _LIBCPP_INLINE_VISIBILITY
919    _Tp exchange(_Tp __d, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
920        {return __c11_atomic_exchange(&__a_, __d, __m);}
921    _LIBCPP_INLINE_VISIBILITY
922    _Tp exchange(_Tp __d, memory_order __m = memory_order_seq_cst) _NOEXCEPT
923        {return __c11_atomic_exchange(&__a_, __d, __m);}
924    _LIBCPP_INLINE_VISIBILITY
925    bool compare_exchange_weak(_Tp& __e, _Tp __d,
926                               memory_order __s, memory_order __f) volatile _NOEXCEPT
927      _LIBCPP_CHECK_EXCHANGE_MEMORY_ORDER(__s, __f)
928        {return __c11_atomic_compare_exchange_weak(&__a_, &__e, __d, __s, __f);}
929    _LIBCPP_INLINE_VISIBILITY
930    bool compare_exchange_weak(_Tp& __e, _Tp __d,
931                               memory_order __s, memory_order __f) _NOEXCEPT
932      _LIBCPP_CHECK_EXCHANGE_MEMORY_ORDER(__s, __f)
933        {return __c11_atomic_compare_exchange_weak(&__a_, &__e, __d, __s, __f);}
934    _LIBCPP_INLINE_VISIBILITY
935    bool compare_exchange_strong(_Tp& __e, _Tp __d,
936                                 memory_order __s, memory_order __f) volatile _NOEXCEPT
937      _LIBCPP_CHECK_EXCHANGE_MEMORY_ORDER(__s, __f)
938        {return __c11_atomic_compare_exchange_strong(&__a_, &__e, __d, __s, __f);}
939    _LIBCPP_INLINE_VISIBILITY
940    bool compare_exchange_strong(_Tp& __e, _Tp __d,
941                                 memory_order __s, memory_order __f) _NOEXCEPT
942      _LIBCPP_CHECK_EXCHANGE_MEMORY_ORDER(__s, __f)
943        {return __c11_atomic_compare_exchange_strong(&__a_, &__e, __d, __s, __f);}
944    _LIBCPP_INLINE_VISIBILITY
945    bool compare_exchange_weak(_Tp& __e, _Tp __d,
946                              memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
947        {return __c11_atomic_compare_exchange_weak(&__a_, &__e, __d, __m, __m);}
948    _LIBCPP_INLINE_VISIBILITY
949    bool compare_exchange_weak(_Tp& __e, _Tp __d,
950                               memory_order __m = memory_order_seq_cst) _NOEXCEPT
951        {return __c11_atomic_compare_exchange_weak(&__a_, &__e, __d, __m, __m);}
952    _LIBCPP_INLINE_VISIBILITY
953    bool compare_exchange_strong(_Tp& __e, _Tp __d,
954                              memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
955        {return __c11_atomic_compare_exchange_strong(&__a_, &__e, __d, __m, __m);}
956    _LIBCPP_INLINE_VISIBILITY
957    bool compare_exchange_strong(_Tp& __e, _Tp __d,
958                                 memory_order __m = memory_order_seq_cst) _NOEXCEPT
959        {return __c11_atomic_compare_exchange_strong(&__a_, &__e, __d, __m, __m);}
960
961    _LIBCPP_INLINE_VISIBILITY
962#ifndef _LIBCPP_CXX03_LANG
963    __atomic_base() _NOEXCEPT = default;
964#else
965    __atomic_base() _NOEXCEPT : __a_() {}
966#endif // _LIBCPP_CXX03_LANG
967
968    _LIBCPP_INLINE_VISIBILITY
969    _LIBCPP_CONSTEXPR __atomic_base(_Tp __d) _NOEXCEPT : __a_(__d) {}
970#ifndef _LIBCPP_CXX03_LANG
971    __atomic_base(const __atomic_base&) = delete;
972    __atomic_base& operator=(const __atomic_base&) = delete;
973    __atomic_base& operator=(const __atomic_base&) volatile = delete;
974#else
975private:
976    __atomic_base(const __atomic_base&);
977    __atomic_base& operator=(const __atomic_base&);
978    __atomic_base& operator=(const __atomic_base&) volatile;
979#endif
980};
981
#if defined(__cpp_lib_atomic_is_always_lock_free)
// Out-of-class definition of the static data member (needed before C++17
// inline variables whenever the member is odr-used).
template <class _Tp, bool __b>
_LIBCPP_CONSTEXPR bool __atomic_base<_Tp, __b>::is_always_lock_free;
#endif
986
// atomic<Integral>

// Specialization for integral types other than bool: layers the
// arithmetic/bitwise fetch operations and the corresponding operators on
// top of the generic base.
template <class _Tp>
struct __atomic_base<_Tp, true>
    : public __atomic_base<_Tp, false>
{
    typedef __atomic_base<_Tp, false> __base;
    _LIBCPP_INLINE_VISIBILITY
    __atomic_base() _NOEXCEPT _LIBCPP_DEFAULT
    _LIBCPP_INLINE_VISIBILITY
    _LIBCPP_CONSTEXPR __atomic_base(_Tp __d) _NOEXCEPT : __base(__d) {}

    // fetch_* atomically apply the operation and return the PREVIOUS value.
    _LIBCPP_INLINE_VISIBILITY
    _Tp fetch_add(_Tp __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
        {return __c11_atomic_fetch_add(&this->__a_, __op, __m);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp fetch_add(_Tp __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
        {return __c11_atomic_fetch_add(&this->__a_, __op, __m);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp fetch_sub(_Tp __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
        {return __c11_atomic_fetch_sub(&this->__a_, __op, __m);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp fetch_sub(_Tp __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
        {return __c11_atomic_fetch_sub(&this->__a_, __op, __m);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp fetch_and(_Tp __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
        {return __c11_atomic_fetch_and(&this->__a_, __op, __m);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp fetch_and(_Tp __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
        {return __c11_atomic_fetch_and(&this->__a_, __op, __m);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp fetch_or(_Tp __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
        {return __c11_atomic_fetch_or(&this->__a_, __op, __m);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp fetch_or(_Tp __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
        {return __c11_atomic_fetch_or(&this->__a_, __op, __m);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp fetch_xor(_Tp __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
        {return __c11_atomic_fetch_xor(&this->__a_, __op, __m);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp fetch_xor(_Tp __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
        {return __c11_atomic_fetch_xor(&this->__a_, __op, __m);}

    // Post-increment/decrement return the OLD value; the pre-forms and the
    // compound assignments return the NEW value, reconstructed by re-applying
    // the operation to the fetched (old) value.  All use seq_cst ordering.
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator++(int) volatile _NOEXCEPT      {return fetch_add(_Tp(1));}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator++(int) _NOEXCEPT               {return fetch_add(_Tp(1));}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator--(int) volatile _NOEXCEPT      {return fetch_sub(_Tp(1));}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator--(int) _NOEXCEPT               {return fetch_sub(_Tp(1));}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator++() volatile _NOEXCEPT         {return fetch_add(_Tp(1)) + _Tp(1);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator++() _NOEXCEPT                  {return fetch_add(_Tp(1)) + _Tp(1);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator--() volatile _NOEXCEPT         {return fetch_sub(_Tp(1)) - _Tp(1);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator--() _NOEXCEPT                  {return fetch_sub(_Tp(1)) - _Tp(1);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator+=(_Tp __op) volatile _NOEXCEPT {return fetch_add(__op) + __op;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator+=(_Tp __op) _NOEXCEPT          {return fetch_add(__op) + __op;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator-=(_Tp __op) volatile _NOEXCEPT {return fetch_sub(__op) - __op;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator-=(_Tp __op) _NOEXCEPT          {return fetch_sub(__op) - __op;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator&=(_Tp __op) volatile _NOEXCEPT {return fetch_and(__op) & __op;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator&=(_Tp __op) _NOEXCEPT          {return fetch_and(__op) & __op;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator|=(_Tp __op) volatile _NOEXCEPT {return fetch_or(__op) | __op;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator|=(_Tp __op) _NOEXCEPT          {return fetch_or(__op) | __op;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator^=(_Tp __op) volatile _NOEXCEPT {return fetch_xor(__op) ^ __op;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator^=(_Tp __op) _NOEXCEPT          {return fetch_xor(__op) ^ __op;}
};
1067
// atomic<T>

// Primary template, usable with any trivially-copyable T.  Inherits all
// operations from __atomic_base and adds assignment from T, which performs
// a seq_cst store and returns the stored value.
template <class _Tp>
struct atomic
    : public __atomic_base<_Tp>
{
    typedef __atomic_base<_Tp> __base;
    _LIBCPP_INLINE_VISIBILITY
    atomic() _NOEXCEPT _LIBCPP_DEFAULT
    _LIBCPP_INLINE_VISIBILITY
    _LIBCPP_CONSTEXPR atomic(_Tp __d) _NOEXCEPT : __base(__d) {}

    _LIBCPP_INLINE_VISIBILITY
    _Tp operator=(_Tp __d) volatile _NOEXCEPT
        {__base::store(__d); return __d;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator=(_Tp __d) _NOEXCEPT
        {__base::store(__d); return __d;}
};
1087
// atomic<T*>

// Partial specialization for pointers: adds pointer arithmetic.
// fetch_add/fetch_sub take a ptrdiff_t measured in elements; scaling to
// bytes happens in the __c11_atomic_fetch_* helpers (via __skip_amt on
// the GCC path).
template <class _Tp>
struct atomic<_Tp*>
    : public __atomic_base<_Tp*>
{
    typedef __atomic_base<_Tp*> __base;
    _LIBCPP_INLINE_VISIBILITY
    atomic() _NOEXCEPT _LIBCPP_DEFAULT
    _LIBCPP_INLINE_VISIBILITY
    _LIBCPP_CONSTEXPR atomic(_Tp* __d) _NOEXCEPT : __base(__d) {}

    _LIBCPP_INLINE_VISIBILITY
    _Tp* operator=(_Tp* __d) volatile _NOEXCEPT
        {__base::store(__d); return __d;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp* operator=(_Tp* __d) _NOEXCEPT
        {__base::store(__d); return __d;}

    // fetch_* return the PREVIOUS pointer value.
    _LIBCPP_INLINE_VISIBILITY
    _Tp* fetch_add(ptrdiff_t __op, memory_order __m = memory_order_seq_cst)
                                                                        volatile _NOEXCEPT
        {return __c11_atomic_fetch_add(&this->__a_, __op, __m);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp* fetch_add(ptrdiff_t __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
        {return __c11_atomic_fetch_add(&this->__a_, __op, __m);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp* fetch_sub(ptrdiff_t __op, memory_order __m = memory_order_seq_cst)
                                                                        volatile _NOEXCEPT
        {return __c11_atomic_fetch_sub(&this->__a_, __op, __m);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp* fetch_sub(ptrdiff_t __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
        {return __c11_atomic_fetch_sub(&this->__a_, __op, __m);}

    // Post-inc/dec return the OLD pointer; pre-forms and compound
    // assignments return the NEW pointer.
    _LIBCPP_INLINE_VISIBILITY
    _Tp* operator++(int) volatile _NOEXCEPT            {return fetch_add(1);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp* operator++(int) _NOEXCEPT                     {return fetch_add(1);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp* operator--(int) volatile _NOEXCEPT            {return fetch_sub(1);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp* operator--(int) _NOEXCEPT                     {return fetch_sub(1);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp* operator++() volatile _NOEXCEPT               {return fetch_add(1) + 1;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp* operator++() _NOEXCEPT                        {return fetch_add(1) + 1;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp* operator--() volatile _NOEXCEPT               {return fetch_sub(1) - 1;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp* operator--() _NOEXCEPT                        {return fetch_sub(1) - 1;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp* operator+=(ptrdiff_t __op) volatile _NOEXCEPT {return fetch_add(__op) + __op;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp* operator+=(ptrdiff_t __op) _NOEXCEPT          {return fetch_add(__op) + __op;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp* operator-=(ptrdiff_t __op) volatile _NOEXCEPT {return fetch_sub(__op) - __op;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp* operator-=(ptrdiff_t __op) _NOEXCEPT          {return fetch_sub(__op) - __op;}
};
1147
1148// atomic_is_lock_free
1149
1150template <class _Tp>
1151inline _LIBCPP_INLINE_VISIBILITY
1152bool
1153atomic_is_lock_free(const volatile atomic<_Tp>* __o) _NOEXCEPT
1154{
1155    return __o->is_lock_free();
1156}
1157
1158template <class _Tp>
1159inline _LIBCPP_INLINE_VISIBILITY
1160bool
1161atomic_is_lock_free(const atomic<_Tp>* __o) _NOEXCEPT
1162{
1163    return __o->is_lock_free();
1164}
1165
// atomic_init

// Non-atomic initialization of *__o to __d: writes the representation
// directly via __c11_atomic_init.  Concurrent access while initializing
// is a data race ([atomics.types.operations]).
template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
void
atomic_init(volatile atomic<_Tp>* __o, _Tp __d) _NOEXCEPT
{
    __c11_atomic_init(&__o->__a_, __d);
}

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
void
atomic_init(atomic<_Tp>* __o, _Tp __d) _NOEXCEPT
{
    __c11_atomic_init(&__o->__a_, __d);
}
1183
// atomic_store

// C-compatible free-function form of atomic<T>::store (seq_cst ordering).
template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
void
atomic_store(volatile atomic<_Tp>* __o, _Tp __d) _NOEXCEPT
{
    __o->store(__d);
}

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
void
atomic_store(atomic<_Tp>* __o, _Tp __d) _NOEXCEPT
{
    __o->store(__d);
}
1201
// atomic_store_explicit

// Free-function store with caller-supplied ordering; the CHECK macro
// diagnoses orders that are invalid for a store (consume/acquire/acq_rel).
template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
void
atomic_store_explicit(volatile atomic<_Tp>* __o, _Tp __d, memory_order __m) _NOEXCEPT
  _LIBCPP_CHECK_STORE_MEMORY_ORDER(__m)
{
    __o->store(__d, __m);
}

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
void
atomic_store_explicit(atomic<_Tp>* __o, _Tp __d, memory_order __m) _NOEXCEPT
  _LIBCPP_CHECK_STORE_MEMORY_ORDER(__m)
{
    __o->store(__d, __m);
}
1221
// atomic_load

// C-compatible free-function form of atomic<T>::load (seq_cst ordering).
template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
_Tp
atomic_load(const volatile atomic<_Tp>* __o) _NOEXCEPT
{
    return __o->load();
}

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
_Tp
atomic_load(const atomic<_Tp>* __o) _NOEXCEPT
{
    return __o->load();
}
1239
// atomic_load_explicit

// Free-function load with caller-supplied ordering; the CHECK macro
// diagnoses orders that are invalid for a load (release/acq_rel).
template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
_Tp
atomic_load_explicit(const volatile atomic<_Tp>* __o, memory_order __m) _NOEXCEPT
  _LIBCPP_CHECK_LOAD_MEMORY_ORDER(__m)
{
    return __o->load(__m);
}

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
_Tp
atomic_load_explicit(const atomic<_Tp>* __o, memory_order __m) _NOEXCEPT
  _LIBCPP_CHECK_LOAD_MEMORY_ORDER(__m)
{
    return __o->load(__m);
}
1259
// atomic_exchange

// Free-function exchange (seq_cst); returns the previous value.
template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
_Tp
atomic_exchange(volatile atomic<_Tp>* __o, _Tp __d) _NOEXCEPT
{
    return __o->exchange(__d);
}

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
_Tp
atomic_exchange(atomic<_Tp>* __o, _Tp __d) _NOEXCEPT
{
    return __o->exchange(__d);
}
1277
// atomic_exchange_explicit

// Free-function exchange with caller-supplied ordering (any order is
// valid for exchange); returns the previous value.
template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
_Tp
atomic_exchange_explicit(volatile atomic<_Tp>* __o, _Tp __d, memory_order __m) _NOEXCEPT
{
    return __o->exchange(__d, __m);
}

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
_Tp
atomic_exchange_explicit(atomic<_Tp>* __o, _Tp __d, memory_order __m) _NOEXCEPT
{
    return __o->exchange(__d, __m);
}
1295
// atomic_compare_exchange_weak

// Free-function weak CAS (seq_cst).  May fail spuriously; on failure
// *__e is updated to the value actually observed.
template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
bool
atomic_compare_exchange_weak(volatile atomic<_Tp>* __o, _Tp* __e, _Tp __d) _NOEXCEPT
{
    return __o->compare_exchange_weak(*__e, __d);
}

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
bool
atomic_compare_exchange_weak(atomic<_Tp>* __o, _Tp* __e, _Tp __d) _NOEXCEPT
{
    return __o->compare_exchange_weak(*__e, __d);
}
1313
// atomic_compare_exchange_strong

// Free-function strong CAS (seq_cst).  Fails only when the stored value
// differs from *__e; on failure *__e receives the observed value.
template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
bool
atomic_compare_exchange_strong(volatile atomic<_Tp>* __o, _Tp* __e, _Tp __d) _NOEXCEPT
{
    return __o->compare_exchange_strong(*__e, __d);
}

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
bool
atomic_compare_exchange_strong(atomic<_Tp>* __o, _Tp* __e, _Tp __d) _NOEXCEPT
{
    return __o->compare_exchange_strong(*__e, __d);
}
1331
// atomic_compare_exchange_weak_explicit

// Weak CAS with separate success (__s) and failure (__f) orderings; the
// CHECK macro diagnoses invalid failure orders.
template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
bool
atomic_compare_exchange_weak_explicit(volatile atomic<_Tp>* __o, _Tp* __e,
                                      _Tp __d,
                                      memory_order __s, memory_order __f) _NOEXCEPT
  _LIBCPP_CHECK_EXCHANGE_MEMORY_ORDER(__s, __f)
{
    return __o->compare_exchange_weak(*__e, __d, __s, __f);
}

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
bool
atomic_compare_exchange_weak_explicit(atomic<_Tp>* __o, _Tp* __e, _Tp __d,
                                      memory_order __s, memory_order __f) _NOEXCEPT
  _LIBCPP_CHECK_EXCHANGE_MEMORY_ORDER(__s, __f)
{
    return __o->compare_exchange_weak(*__e, __d, __s, __f);
}
1354
// atomic_compare_exchange_strong_explicit

// Strong CAS with separate success (__s) and failure (__f) orderings; the
// CHECK macro diagnoses invalid failure orders.
template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
bool
atomic_compare_exchange_strong_explicit(volatile atomic<_Tp>* __o,
                                        _Tp* __e, _Tp __d,
                                        memory_order __s, memory_order __f) _NOEXCEPT
  _LIBCPP_CHECK_EXCHANGE_MEMORY_ORDER(__s, __f)
{
    return __o->compare_exchange_strong(*__e, __d, __s, __f);
}

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
bool
atomic_compare_exchange_strong_explicit(atomic<_Tp>* __o, _Tp* __e,
                                        _Tp __d,
                                        memory_order __s, memory_order __f) _NOEXCEPT
  _LIBCPP_CHECK_EXCHANGE_MEMORY_ORDER(__s, __f)
{
    return __o->compare_exchange_strong(*__e, __d, __s, __f);
}
1378
// atomic_fetch_add

// Integral overloads: restricted via enable_if to integral types other
// than bool.  Return the previous value (seq_cst).
template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_add(volatile atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
{
    return __o->fetch_add(__op);
}

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_add(atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
{
    return __o->fetch_add(__op);
}

// Pointer overloads: advance by __op elements (not bytes).
template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
_Tp*
atomic_fetch_add(volatile atomic<_Tp*>* __o, ptrdiff_t __op) _NOEXCEPT
{
    return __o->fetch_add(__op);
}

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
_Tp*
atomic_fetch_add(atomic<_Tp*>* __o, ptrdiff_t __op) _NOEXCEPT
{
    return __o->fetch_add(__op);
}
1420
// atomic_fetch_add_explicit

// As atomic_fetch_add, but with a caller-supplied memory order.
template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_add_explicit(volatile atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
{
    return __o->fetch_add(__op, __m);
}

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_add_explicit(atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
{
    return __o->fetch_add(__op, __m);
}

// Pointer overloads: advance by __op elements (not bytes).
template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
_Tp*
atomic_fetch_add_explicit(volatile atomic<_Tp*>* __o, ptrdiff_t __op,
                          memory_order __m) _NOEXCEPT
{
    return __o->fetch_add(__op, __m);
}

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
_Tp*
atomic_fetch_add_explicit(atomic<_Tp*>* __o, ptrdiff_t __op, memory_order __m) _NOEXCEPT
{
    return __o->fetch_add(__op, __m);
}
1463
// atomic_fetch_sub

// Integral overloads (integral, non-bool); return the previous value.
template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_sub(volatile atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
{
    return __o->fetch_sub(__op);
}

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_sub(atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
{
    return __o->fetch_sub(__op);
}

// Pointer overloads: retreat by __op elements (not bytes).
template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
_Tp*
atomic_fetch_sub(volatile atomic<_Tp*>* __o, ptrdiff_t __op) _NOEXCEPT
{
    return __o->fetch_sub(__op);
}

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
_Tp*
atomic_fetch_sub(atomic<_Tp*>* __o, ptrdiff_t __op) _NOEXCEPT
{
    return __o->fetch_sub(__op);
}
1505
// atomic_fetch_sub_explicit

// As atomic_fetch_sub, but with a caller-supplied memory order.
template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_sub_explicit(volatile atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
{
    return __o->fetch_sub(__op, __m);
}

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_sub_explicit(atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
{
    return __o->fetch_sub(__op, __m);
}

// Pointer overloads: retreat by __op elements (not bytes).
template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
_Tp*
atomic_fetch_sub_explicit(volatile atomic<_Tp*>* __o, ptrdiff_t __op,
                          memory_order __m) _NOEXCEPT
{
    return __o->fetch_sub(__op, __m);
}

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
_Tp*
atomic_fetch_sub_explicit(atomic<_Tp*>* __o, ptrdiff_t __op, memory_order __m) _NOEXCEPT
{
    return __o->fetch_sub(__op, __m);
}
1548
// atomic_fetch_and

// Atomic bitwise AND (integral, non-bool only); returns the previous value.
template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_and(volatile atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
{
    return __o->fetch_and(__op);
}

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_and(atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
{
    return __o->fetch_and(__op);
}
1574
// atomic_fetch_and_explicit

// As atomic_fetch_and, but with a caller-supplied memory order.
template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_and_explicit(volatile atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
{
    return __o->fetch_and(__op, __m);
}

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_and_explicit(atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
{
    return __o->fetch_and(__op, __m);
}
1600
// atomic_fetch_or

// Atomic bitwise OR (integral, non-bool only); returns the previous value.
template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_or(volatile atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
{
    return __o->fetch_or(__op);
}

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_or(atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
{
    return __o->fetch_or(__op);
}
1626
// atomic_fetch_or_explicit

// As atomic_fetch_or, but with a caller-supplied memory order.
template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_or_explicit(volatile atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
{
    return __o->fetch_or(__op, __m);
}

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_or_explicit(atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
{
    return __o->fetch_or(__op, __m);
}
1652
// atomic_fetch_xor

// Atomic bitwise XOR (integral, non-bool only); returns the previous value.
template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_xor(volatile atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
{
    return __o->fetch_xor(__op);
}

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_xor(atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
{
    return __o->fetch_xor(__op);
}
1678
// atomic_fetch_xor_explicit

// As atomic_fetch_xor, but with a caller-supplied memory order.
template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_xor_explicit(volatile atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
{
    return __o->fetch_xor(__op, __m);
}

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_xor_explicit(atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
{
    return __o->fetch_xor(__op, __m);
}
1704
// flag type and operations

// atomic_flag: the minimal atomic boolean flag type.
typedef struct atomic_flag
{
    _Atomic(bool) __a_;

    // Atomically set the flag to true and return its PREVIOUS value
    // (implemented as an atomic exchange).
    _LIBCPP_INLINE_VISIBILITY
    bool test_and_set(memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
        {return __c11_atomic_exchange(&__a_, true, __m);}
    _LIBCPP_INLINE_VISIBILITY
    bool test_and_set(memory_order __m = memory_order_seq_cst) _NOEXCEPT
        {return __c11_atomic_exchange(&__a_, true, __m);}
    // Atomically reset the flag to false (a plain atomic store).
    _LIBCPP_INLINE_VISIBILITY
    void clear(memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
        {__c11_atomic_store(&__a_, false, __m);}
    _LIBCPP_INLINE_VISIBILITY
    void clear(memory_order __m = memory_order_seq_cst) _NOEXCEPT
        {__c11_atomic_store(&__a_, false, __m);}

    _LIBCPP_INLINE_VISIBILITY
#ifndef _LIBCPP_CXX03_LANG
    atomic_flag() _NOEXCEPT = default;
#else
    atomic_flag() _NOEXCEPT : __a_() {}
#endif // _LIBCPP_CXX03_LANG

    // libc++ extension: construct with an explicit initial value (the
    // standard only provides default construction / ATOMIC_FLAG_INIT).
    _LIBCPP_INLINE_VISIBILITY
    atomic_flag(bool __b) _NOEXCEPT : __a_(__b) {} // EXTENSION

    // Not copyable or copy-assignable.
#ifndef _LIBCPP_CXX03_LANG
    atomic_flag(const atomic_flag&) = delete;
    atomic_flag& operator=(const atomic_flag&) = delete;
    atomic_flag& operator=(const atomic_flag&) volatile = delete;
#else
private:
    atomic_flag(const atomic_flag&);
    atomic_flag& operator=(const atomic_flag&);
    atomic_flag& operator=(const atomic_flag&) volatile;
#endif
} atomic_flag;
1745
1746inline _LIBCPP_INLINE_VISIBILITY
1747bool
1748atomic_flag_test_and_set(volatile atomic_flag* __o) _NOEXCEPT
1749{
1750    return __o->test_and_set();
1751}
1752
1753inline _LIBCPP_INLINE_VISIBILITY
1754bool
1755atomic_flag_test_and_set(atomic_flag* __o) _NOEXCEPT
1756{
1757    return __o->test_and_set();
1758}
1759
1760inline _LIBCPP_INLINE_VISIBILITY
1761bool
1762atomic_flag_test_and_set_explicit(volatile atomic_flag* __o, memory_order __m) _NOEXCEPT
1763{
1764    return __o->test_and_set(__m);
1765}
1766
1767inline _LIBCPP_INLINE_VISIBILITY
1768bool
1769atomic_flag_test_and_set_explicit(atomic_flag* __o, memory_order __m) _NOEXCEPT
1770{
1771    return __o->test_and_set(__m);
1772}
1773
1774inline _LIBCPP_INLINE_VISIBILITY
1775void
1776atomic_flag_clear(volatile atomic_flag* __o) _NOEXCEPT
1777{
1778    __o->clear();
1779}
1780
1781inline _LIBCPP_INLINE_VISIBILITY
1782void
1783atomic_flag_clear(atomic_flag* __o) _NOEXCEPT
1784{
1785    __o->clear();
1786}
1787
1788inline _LIBCPP_INLINE_VISIBILITY
1789void
1790atomic_flag_clear_explicit(volatile atomic_flag* __o, memory_order __m) _NOEXCEPT
1791{
1792    __o->clear(__m);
1793}
1794
1795inline _LIBCPP_INLINE_VISIBILITY
1796void
1797atomic_flag_clear_explicit(atomic_flag* __o, memory_order __m) _NOEXCEPT
1798{
1799    __o->clear(__m);
1800}
1801
1802// fences
1803
1804inline _LIBCPP_INLINE_VISIBILITY
1805void
1806atomic_thread_fence(memory_order __m) _NOEXCEPT
1807{
1808    __c11_atomic_thread_fence(__m);
1809}
1810
1811inline _LIBCPP_INLINE_VISIBILITY
1812void
1813atomic_signal_fence(memory_order __m) _NOEXCEPT
1814{
1815    __c11_atomic_signal_fence(__m);
1816}
1817
1818// Atomics for standard typedef types
1819
// Atomic counterparts of the built-in integer and character types,
// as required by [atomics.syn].
typedef atomic<bool>               atomic_bool;
typedef atomic<char>               atomic_char;
typedef atomic<signed char>        atomic_schar;
typedef atomic<unsigned char>      atomic_uchar;
typedef atomic<short>              atomic_short;
typedef atomic<unsigned short>     atomic_ushort;
typedef atomic<int>                atomic_int;
typedef atomic<unsigned int>       atomic_uint;
typedef atomic<long>               atomic_long;
typedef atomic<unsigned long>      atomic_ulong;
typedef atomic<long long>          atomic_llong;
typedef atomic<unsigned long long> atomic_ullong;
typedef atomic<char16_t>           atomic_char16_t;
typedef atomic<char32_t>           atomic_char32_t;
typedef atomic<wchar_t>            atomic_wchar_t;

// Atomic counterparts of the <cstdint> least-width typedefs.
typedef atomic<int_least8_t>   atomic_int_least8_t;
typedef atomic<uint_least8_t>  atomic_uint_least8_t;
typedef atomic<int_least16_t>  atomic_int_least16_t;
typedef atomic<uint_least16_t> atomic_uint_least16_t;
typedef atomic<int_least32_t>  atomic_int_least32_t;
typedef atomic<uint_least32_t> atomic_uint_least32_t;
typedef atomic<int_least64_t>  atomic_int_least64_t;
typedef atomic<uint_least64_t> atomic_uint_least64_t;

// Atomic counterparts of the <cstdint> fast typedefs.
typedef atomic<int_fast8_t>   atomic_int_fast8_t;
typedef atomic<uint_fast8_t>  atomic_uint_fast8_t;
typedef atomic<int_fast16_t>  atomic_int_fast16_t;
typedef atomic<uint_fast16_t> atomic_uint_fast16_t;
typedef atomic<int_fast32_t>  atomic_int_fast32_t;
typedef atomic<uint_fast32_t> atomic_uint_fast32_t;
typedef atomic<int_fast64_t>  atomic_int_fast64_t;
typedef atomic<uint_fast64_t> atomic_uint_fast64_t;

// Atomic counterparts of the <cstdint> exact-width typedefs.
typedef atomic< int8_t>  atomic_int8_t;
typedef atomic<uint8_t>  atomic_uint8_t;
typedef atomic< int16_t> atomic_int16_t;
typedef atomic<uint16_t> atomic_uint16_t;
typedef atomic< int32_t> atomic_int32_t;
typedef atomic<uint32_t> atomic_uint32_t;
typedef atomic< int64_t> atomic_int64_t;
typedef atomic<uint64_t> atomic_uint64_t;

// Atomic counterparts of the pointer-related and max-width typedefs.
typedef atomic<intptr_t>  atomic_intptr_t;
typedef atomic<uintptr_t> atomic_uintptr_t;
typedef atomic<size_t>    atomic_size_t;
typedef atomic<ptrdiff_t> atomic_ptrdiff_t;
typedef atomic<intmax_t>  atomic_intmax_t;
typedef atomic<uintmax_t> atomic_uintmax_t;

// ATOMIC_FLAG_INIT initializes an atomic_flag to the clear state
// (relies on the atomic_flag(bool) extension constructor above);
// ATOMIC_VAR_INIT performs aggregate-style initialization of an
// atomic object with a given value.
#define ATOMIC_FLAG_INIT {false}
#define ATOMIC_VAR_INIT(__v) {__v}
1872
1873_LIBCPP_END_NAMESPACE_STD
1874
1875#endif  // _LIBCPP_ATOMIC
1876