
Lines Matching refs:_NOEXCEPT

599 __gcc_atomic_t() _NOEXCEPT = default;
601 __gcc_atomic_t() _NOEXCEPT : __a_value() {}
603 _LIBCPP_CONSTEXPR explicit __gcc_atomic_t(_Tp value) _NOEXCEPT
859 kill_dependency(_Tp __y) _NOEXCEPT
887 bool is_lock_free() const volatile _NOEXCEPT
896 bool is_lock_free() const _NOEXCEPT
899 void store(_Tp __d, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
903 void store(_Tp __d, memory_order __m = memory_order_seq_cst) _NOEXCEPT
907 _Tp load(memory_order __m = memory_order_seq_cst) const volatile _NOEXCEPT
911 _Tp load(memory_order __m = memory_order_seq_cst) const _NOEXCEPT
915 operator _Tp() const volatile _NOEXCEPT {return load();}
917 operator _Tp() const _NOEXCEPT {return load();}
919 _Tp exchange(_Tp __d, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
922 _Tp exchange(_Tp __d, memory_order __m = memory_order_seq_cst) _NOEXCEPT
926 memory_order __s, memory_order __f) volatile _NOEXCEPT
931 memory_order __s, memory_order __f) _NOEXCEPT
936 memory_order __s, memory_order __f) volatile _NOEXCEPT
941 memory_order __s, memory_order __f) _NOEXCEPT
946 memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
950 memory_order __m = memory_order_seq_cst) _NOEXCEPT
954 memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
958 memory_order __m = memory_order_seq_cst) _NOEXCEPT
963 __atomic_base() _NOEXCEPT = default;
965 __atomic_base() _NOEXCEPT : __a_() {}
969 _LIBCPP_CONSTEXPR __atomic_base(_Tp __d) _NOEXCEPT : __a_(__d) {}
995 __atomic_base() _NOEXCEPT _LIBCPP_DEFAULT
997 _LIBCPP_CONSTEXPR __atomic_base(_Tp __d) _NOEXCEPT : __base(__d) {}
1000 _Tp fetch_add(_Tp __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
1003 _Tp fetch_add(_Tp __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
1006 _Tp fetch_sub(_Tp __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
1009 _Tp fetch_sub(_Tp __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
1012 _Tp fetch_and(_Tp __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
1015 _Tp fetch_and(_Tp __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
1018 _Tp fetch_or(_Tp __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
1021 _Tp fetch_or(_Tp __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
1024 _Tp fetch_xor(_Tp __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
1027 _Tp fetch_xor(_Tp __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
1031 _Tp operator++(int) volatile _NOEXCEPT {return fetch_add(_Tp(1));}
1033 _Tp operator++(int) _NOEXCEPT {return fetch_add(_Tp(1));}
1035 _Tp operator--(int) volatile _NOEXCEPT {return fetch_sub(_Tp(1));}
1037 _Tp operator--(int) _NOEXCEPT {return fetch_sub(_Tp(1));}
1039 _Tp operator++() volatile _NOEXCEPT {return fetch_add(_Tp(1)) + _Tp(1);}
1041 _Tp operator++() _NOEXCEPT {return fetch_add(_Tp(1)) + _Tp(1);}
1043 _Tp operator--() volatile _NOEXCEPT {return fetch_sub(_Tp(1)) - _Tp(1);}
1045 _Tp operator--() _NOEXCEPT {return fetch_sub(_Tp(1)) - _Tp(1);}
1047 _Tp operator+=(_Tp __op) volatile _NOEXCEPT {return fetch_add(__op) + __op;}
1049 _Tp operator+=(_Tp __op) _NOEXCEPT {return fetch_add(__op) + __op;}
1051 _Tp operator-=(_Tp __op) volatile _NOEXCEPT {return fetch_sub(__op) - __op;}
1053 _Tp operator-=(_Tp __op) _NOEXCEPT {return fetch_sub(__op) - __op;}
1055 _Tp operator&=(_Tp __op) volatile _NOEXCEPT {return fetch_and(__op) & __op;}
1057 _Tp operator&=(_Tp __op) _NOEXCEPT {return fetch_and(__op) & __op;}
1059 _Tp operator|=(_Tp __op) volatile _NOEXCEPT {return fetch_or(__op) | __op;}
1061 _Tp operator|=(_Tp __op) _NOEXCEPT {return fetch_or(__op) | __op;}
1063 _Tp operator^=(_Tp __op) volatile _NOEXCEPT {return fetch_xor(__op) ^ __op;}
1065 _Tp operator^=(_Tp __op) _NOEXCEPT {return fetch_xor(__op) ^ __op;}
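The matches above (lines 887 through 1065) are the libc++ __atomic_base declarations that back the public std::atomic<T> member interface: store, load, exchange, the compare_exchange pair, the fetch_* family, and their operator forms (the listing itself shows, for example, that operator++ is fetch_add(_Tp(1)) + _Tp(1)). A minimal usage sketch against that public interface; the variable names and values are illustrative, not taken from the header:

    #include <atomic>
    #include <cassert>

    int main() {
        std::atomic<int> counter(0);                       // constexpr-constructible, as in __atomic_base(_Tp)
        counter.store(5, std::memory_order_release);       // store with explicit ordering
        int seen = counter.load(std::memory_order_acquire);
        assert(seen == 5);

        int prev = counter.exchange(10);                   // returns the previous value
        assert(prev == 5);

        int expected = 10;
        bool ok = counter.compare_exchange_strong(expected, 11);
        assert(ok && counter.load() == 11);

        counter.fetch_add(4);                              // 11 -> 15
        ++counter;                                         // operator++ is fetch_add(1) + 1
        counter |= 0x10;                                   // operator|= is fetch_or(op) | op
        assert(counter.load() == ((15 + 1) | 0x10));
        return 0;
    }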
1076 atomic() _NOEXCEPT _LIBCPP_DEFAULT
1078 _LIBCPP_CONSTEXPR atomic(_Tp __d) _NOEXCEPT : __base(__d) {}
1081 _Tp operator=(_Tp __d) volatile _NOEXCEPT
1084 _Tp operator=(_Tp __d) _NOEXCEPT
1096 atomic() _NOEXCEPT _LIBCPP_DEFAULT
1098 _LIBCPP_CONSTEXPR atomic(_Tp* __d) _NOEXCEPT : __base(__d) {}
1101 _Tp* operator=(_Tp* __d) volatile _NOEXCEPT
1104 _Tp* operator=(_Tp* __d) _NOEXCEPT
1109 volatile _NOEXCEPT
1112 _Tp* fetch_add(ptrdiff_t __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
1116 volatile _NOEXCEPT
1119 _Tp* fetch_sub(ptrdiff_t __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
1123 _Tp* operator++(int) volatile _NOEXCEPT {return fetch_add(1);}
1125 _Tp* operator++(int) _NOEXCEPT {return fetch_add(1);}
1127 _Tp* operator--(int) volatile _NOEXCEPT {return fetch_sub(1);}
1129 _Tp* operator--(int) _NOEXCEPT {return fetch_sub(1);}
1131 _Tp* operator++() volatile _NOEXCEPT {return fetch_add(1) + 1;}
1133 _Tp* operator++() _NOEXCEPT {return fetch_add(1) + 1;}
1135 _Tp* operator--() volatile _NOEXCEPT {return fetch_sub(1) - 1;}
1137 _Tp* operator--() _NOEXCEPT {return fetch_sub(1) - 1;}
1139 _Tp* operator+=(ptrdiff_t __op) volatile _NOEXCEPT {return fetch_add(__op) + __op;}
1141 _Tp* operator+=(ptrdiff_t __op) _NOEXCEPT {return fetch_add(__op) + __op;}
1143 _Tp* operator-=(ptrdiff_t __op) volatile _NOEXCEPT {return fetch_sub(__op) - __op;}
1145 _Tp* operator-=(ptrdiff_t __op) _NOEXCEPT {return fetch_sub(__op) - __op;}
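Lines 1096 through 1145 are the atomic<_Tp*> partial specialization, which adds pointer arithmetic in units of ptrdiff_t. A small sketch of the corresponding public behaviour; the array and pointer names are illustrative:

    #include <atomic>
    #include <cassert>

    int main() {
        int data[4] = {10, 20, 30, 40};
        std::atomic<int*> p(data);

        int* old = p.fetch_add(2);          // advances by two elements, returns the old pointer
        assert(old == data && p.load() == data + 2);

        ++p;                                // operator++ is fetch_add(1) + 1
        assert(*p.load() == 40);

        p -= 3;                             // operator-= is fetch_sub(op) - op
        assert(p.load() == data);
        return 0;
    }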
1153 atomic_is_lock_free(const volatile atomic<_Tp>* __o) _NOEXCEPT
1161 atomic_is_lock_free(const atomic<_Tp>* __o) _NOEXCEPT
1171 atomic_init(volatile atomic<_Tp>* __o, _Tp __d) _NOEXCEPT
1179 atomic_init(atomic<_Tp>* __o, _Tp __d) _NOEXCEPT
1189 atomic_store(volatile atomic<_Tp>* __o, _Tp __d) _NOEXCEPT
1197 atomic_store(atomic<_Tp>* __o, _Tp __d) _NOEXCEPT
1207 atomic_store_explicit(volatile atomic<_Tp>* __o, _Tp __d, memory_order __m) _NOEXCEPT
1216 atomic_store_explicit(atomic<_Tp>* __o, _Tp __d, memory_order __m) _NOEXCEPT
1227 atomic_load(const volatile atomic<_Tp>* __o) _NOEXCEPT
1235 atomic_load(const atomic<_Tp>* __o) _NOEXCEPT
1245 atomic_load_explicit(const volatile atomic<_Tp>* __o, memory_order __m) _NOEXCEPT
1254 atomic_load_explicit(const atomic<_Tp>* __o, memory_order __m) _NOEXCEPT
1265 atomic_exchange(volatile atomic<_Tp>* __o, _Tp __d) _NOEXCEPT
1273 atomic_exchange(atomic<_Tp>* __o, _Tp __d) _NOEXCEPT
1283 atomic_exchange_explicit(volatile atomic<_Tp>* __o, _Tp __d, memory_order __m) _NOEXCEPT
1291 atomic_exchange_explicit(atomic<_Tp>* __o, _Tp __d, memory_order __m) _NOEXCEPT
1301 atomic_compare_exchange_weak(volatile atomic<_Tp>* __o, _Tp* __e, _Tp __d) _NOEXCEPT
1309 atomic_compare_exchange_weak(atomic<_Tp>* __o, _Tp* __e, _Tp __d) _NOEXCEPT
1319 atomic_compare_exchange_strong(volatile atomic<_Tp>* __o, _Tp* __e, _Tp __d) _NOEXCEPT
1327 atomic_compare_exchange_strong(atomic<_Tp>* __o, _Tp* __e, _Tp __d) _NOEXCEPT
1339 memory_order __s, memory_order __f) _NOEXCEPT
1349 memory_order __s, memory_order __f) _NOEXCEPT
1362 memory_order __s, memory_order __f) _NOEXCEPT
1373 memory_order __s, memory_order __f) _NOEXCEPT
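Lines 1153 through 1373 are the free-function (C-compatible) forms: atomic_is_lock_free, atomic_init, atomic_store/atomic_load, atomic_exchange, and the compare_exchange pair, each with a *_explicit variant taking memory_order arguments. A hedged sketch of a compare-and-swap loop built from these free functions; the names value and expected are illustrative:

    #include <atomic>
    #include <cassert>

    int main() {
        std::atomic<int> value;
        std::atomic_init(&value, 0);                          // non-atomic initialization
        std::atomic_store_explicit(&value, 40, std::memory_order_release);

        // Classic CAS loop: add 2 via compare_exchange_weak, tolerating spurious failures.
        int expected = std::atomic_load_explicit(&value, std::memory_order_relaxed);
        while (!std::atomic_compare_exchange_weak(&value, &expected, expected + 2)) {
            // on failure, expected has been reloaded with the current value; retry
        }
        assert(std::atomic_load(&value) == 42);

        int prev = std::atomic_exchange(&value, 0);
        assert(prev == 42);
        return 0;
    }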
1388 atomic_fetch_add(volatile atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
1400 atomic_fetch_add(atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
1408 atomic_fetch_add(volatile atomic<_Tp*>* __o, ptrdiff_t __op) _NOEXCEPT
1416 atomic_fetch_add(atomic<_Tp*>* __o, ptrdiff_t __op) _NOEXCEPT
1430 atomic_fetch_add_explicit(volatile atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
1442 atomic_fetch_add_explicit(atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
1451 memory_order __m) _NOEXCEPT
1459 atomic_fetch_add_explicit(atomic<_Tp*>* __o, ptrdiff_t __op, memory_order __m) _NOEXCEPT
1473 atomic_fetch_sub(volatile atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
1485 atomic_fetch_sub(atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
1493 atomic_fetch_sub(volatile atomic<_Tp*>* __o, ptrdiff_t __op) _NOEXCEPT
1501 atomic_fetch_sub(atomic<_Tp*>* __o, ptrdiff_t __op) _NOEXCEPT
1515 atomic_fetch_sub_explicit(volatile atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
1527 atomic_fetch_sub_explicit(atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
1536 memory_order __m) _NOEXCEPT
1544 atomic_fetch_sub_explicit(atomic<_Tp*>* __o, ptrdiff_t __op, memory_order __m) _NOEXCEPT
1558 atomic_fetch_and(volatile atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
1570 atomic_fetch_and(atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
1584 atomic_fetch_and_explicit(volatile atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
1596 atomic_fetch_and_explicit(atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
1610 atomic_fetch_or(volatile atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
1622 atomic_fetch_or(atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
1636 atomic_fetch_or_explicit(volatile atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
1648 atomic_fetch_or_explicit(atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
1662 atomic_fetch_xor(volatile atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
1674 atomic_fetch_xor(atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
1688 atomic_fetch_xor_explicit(volatile atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
1700 atomic_fetch_xor_explicit(atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
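The atomic_fetch_add/sub/and/or/xor free functions above (and their *_explicit forms) mirror the member fetch_* operations; the pointer overloads take a ptrdiff_t offset. A brief sketch under those signatures; the flags and cursor names are illustrative:

    #include <atomic>
    #include <cassert>
    #include <cstddef>

    int main() {
        std::atomic<unsigned> flags(0x0Fu);
        std::atomic_fetch_and(&flags, 0x3Cu);                           // 0x0F & 0x3C == 0x0C
        std::atomic_fetch_or_explicit(&flags, 0x40u, std::memory_order_relaxed);
        assert(flags.load() == 0x4Cu);

        int buf[8] = {};
        std::atomic<int*> cursor(buf);
        std::atomic_fetch_add(&cursor, static_cast<std::ptrdiff_t>(3)); // pointer overload
        assert(cursor.load() == buf + 3);
        return 0;
    }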
1712 bool test_and_set(memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
1715 bool test_and_set(memory_order __m = memory_order_seq_cst) _NOEXCEPT
1718 void clear(memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
1721 void clear(memory_order __m = memory_order_seq_cst) _NOEXCEPT
1726 atomic_flag() _NOEXCEPT = default;
1728 atomic_flag() _NOEXCEPT : __a_() {}
1732 atomic_flag(bool __b) _NOEXCEPT : __a_(__b) {} // EXTENSION
1748 atomic_flag_test_and_set(volatile atomic_flag* __o) _NOEXCEPT
1755 atomic_flag_test_and_set(atomic_flag* __o) _NOEXCEPT
1762 atomic_flag_test_and_set_explicit(volatile atomic_flag* __o, memory_order __m) _NOEXCEPT
1769 atomic_flag_test_and_set_explicit(atomic_flag* __o, memory_order __m) _NOEXCEPT
1776 atomic_flag_clear(volatile atomic_flag* __o) _NOEXCEPT
1783 atomic_flag_clear(atomic_flag* __o) _NOEXCEPT
1790 atomic_flag_clear_explicit(volatile atomic_flag* __o, memory_order __m) _NOEXCEPT
1797 atomic_flag_clear_explicit(atomic_flag* __o, memory_order __m) _NOEXCEPT
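atomic_flag, with its test_and_set/clear members and the matching free functions above, is the minimal primitive guaranteed to be lock-free; a common use is a tiny spinlock. A hedged sketch of that pattern; the SpinLock name is illustrative, not from the header:

    #include <atomic>
    #include <thread>
    #include <cassert>

    class SpinLock {
        std::atomic_flag flag_ = ATOMIC_FLAG_INIT;   // starts clear
    public:
        void lock()   { while (flag_.test_and_set(std::memory_order_acquire)) { /* spin */ } }
        void unlock() { flag_.clear(std::memory_order_release); }
    };

    int main() {
        SpinLock lock;
        int shared = 0;
        auto work = [&] {
            for (int i = 0; i < 100000; ++i) {
                lock.lock();
                ++shared;                            // protected by the flag
                lock.unlock();
            }
        };
        std::thread t1(work), t2(work);
        t1.join(); t2.join();
        assert(shared == 200000);
        return 0;
    }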
1806 atomic_thread_fence(memory_order __m) _NOEXCEPT
1813 atomic_signal_fence(memory_order __m) _NOEXCEPT
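Finally, atomic_thread_fence and atomic_signal_fence (lines 1806 and 1813) establish ordering without performing a load or store themselves. A hedged sketch of the classic release-fence / acquire-fence handoff; data and ready are illustrative names:

    #include <atomic>
    #include <thread>
    #include <cassert>

    int data = 0;
    std::atomic<bool> ready(false);

    void producer() {
        data = 42;                                            // plain store...
        std::atomic_thread_fence(std::memory_order_release);  // ...ordered before the flag store
        ready.store(true, std::memory_order_relaxed);
    }

    void consumer() {
        while (!ready.load(std::memory_order_relaxed)) {}     // spin until flagged
        std::atomic_thread_fence(std::memory_order_acquire);  // pairs with the release fence
        assert(data == 42);                                   // the plain store is now visible
    }

    int main() {
        std::thread t1(producer), t2(consumer);
        t1.join(); t2.join();
        return 0;
    }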