//===----------------------------------------------------------------------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//

#ifndef _LIBCPP___ATOMIC_ATOMIC_H
#define _LIBCPP___ATOMIC_ATOMIC_H

#include <__atomic/atomic_sync.h>
#include <__atomic/check_memory_order.h>
#include <__atomic/cxx_atomic_impl.h>
#include <__atomic/is_always_lock_free.h>
#include <__atomic/memory_order.h>
#include <__config>
#include <__cstddef/ptrdiff_t.h>
#include <__functional/operations.h>
#include <__memory/addressof.h>
#include <__type_traits/enable_if.h>
#include <__type_traits/is_floating_point.h>
#include <__type_traits/is_function.h>
#include <__type_traits/is_integral.h>
#include <__type_traits/is_nothrow_constructible.h>
#include <__type_traits/is_same.h>
#include <__type_traits/remove_const.h>
#include <__type_traits/remove_pointer.h>
#include <__type_traits/remove_volatile.h>
#include <__utility/forward.h>
#include <cstring>

#if !defined(_LIBCPP_HAS_NO_PRAGMA_SYSTEM_HEADER)
#  pragma GCC system_header
#endif

_LIBCPP_BEGIN_NAMESPACE_STD

template <class _Tp, bool = is_integral<_Tp>::value && !is_same<_Tp, bool>::value>
struct __atomic_base // false
{
  mutable __cxx_atomic_impl<_Tp> __a_;

#if _LIBCPP_STD_VER >= 17
  static constexpr bool is_always_lock_free = __libcpp_is_always_lock_free<__cxx_atomic_impl<_Tp> >::__value;
#endif

  _LIBCPP_HIDE_FROM_ABI bool is_lock_free() const volatile _NOEXCEPT {
    return __cxx_atomic_is_lock_free(sizeof(__cxx_atomic_impl<_Tp>));
  }
  _LIBCPP_HIDE_FROM_ABI bool is_lock_free() const _NOEXCEPT {
    return static_cast<__atomic_base const volatile*>(this)->is_lock_free();
  }
  _LIBCPP_HIDE_FROM_ABI void store(_Tp __d, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
      _LIBCPP_CHECK_STORE_MEMORY_ORDER(__m) {
    std::__cxx_atomic_store(std::addressof(__a_), __d, __m);
  }
  _LIBCPP_HIDE_FROM_ABI void store(_Tp __d, memory_order __m = memory_order_seq_cst) _NOEXCEPT
      _LIBCPP_CHECK_STORE_MEMORY_ORDER(__m) {
    std::__cxx_atomic_store(std::addressof(__a_), __d, __m);
  }
  _LIBCPP_HIDE_FROM_ABI _Tp load(memory_order __m = memory_order_seq_cst) const volatile _NOEXCEPT
      _LIBCPP_CHECK_LOAD_MEMORY_ORDER(__m) {
    return std::__cxx_atomic_load(std::addressof(__a_), __m);
  }
  _LIBCPP_HIDE_FROM_ABI _Tp load(memory_order __m = memory_order_seq_cst) const _NOEXCEPT
      _LIBCPP_CHECK_LOAD_MEMORY_ORDER(__m) {
    return std::__cxx_atomic_load(std::addressof(__a_), __m);
  }
  _LIBCPP_HIDE_FROM_ABI operator _Tp() const volatile _NOEXCEPT { return load(); }
  _LIBCPP_HIDE_FROM_ABI operator _Tp() const _NOEXCEPT { return load(); }
  _LIBCPP_HIDE_FROM_ABI _Tp exchange(_Tp __d, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT {
    return std::__cxx_atomic_exchange(std::addressof(__a_), __d, __m);
  }
  _LIBCPP_HIDE_FROM_ABI _Tp exchange(_Tp __d, memory_order __m = memory_order_seq_cst) _NOEXCEPT {
    return std::__cxx_atomic_exchange(std::addressof(__a_), __d, __m);
  }
  _LIBCPP_HIDE_FROM_ABI bool
  compare_exchange_weak(_Tp& __e, _Tp __d, memory_order __s, memory_order __f) volatile _NOEXCEPT
      _LIBCPP_CHECK_EXCHANGE_MEMORY_ORDER(__s, __f) {
    return std::__cxx_atomic_compare_exchange_weak(std::addressof(__a_), std::addressof(__e), __d, __s, __f);
  }
  _LIBCPP_HIDE_FROM_ABI bool compare_exchange_weak(_Tp& __e, _Tp __d, memory_order __s, memory_order __f) _NOEXCEPT
      _LIBCPP_CHECK_EXCHANGE_MEMORY_ORDER(__s, __f) {
    return std::__cxx_atomic_compare_exchange_weak(std::addressof(__a_), std::addressof(__e), __d, __s, __f);
  }
  _LIBCPP_HIDE_FROM_ABI bool
  compare_exchange_strong(_Tp& __e, _Tp __d, memory_order __s, memory_order __f) volatile _NOEXCEPT
      _LIBCPP_CHECK_EXCHANGE_MEMORY_ORDER(__s, __f) {
    return std::__cxx_atomic_compare_exchange_strong(std::addressof(__a_), std::addressof(__e), __d, __s, __f);
  }
  _LIBCPP_HIDE_FROM_ABI bool compare_exchange_strong(_Tp& __e, _Tp __d, memory_order __s, memory_order __f) _NOEXCEPT
      _LIBCPP_CHECK_EXCHANGE_MEMORY_ORDER(__s, __f) {
    return std::__cxx_atomic_compare_exchange_strong(std::addressof(__a_), std::addressof(__e), __d, __s, __f);
  }
  _LIBCPP_HIDE_FROM_ABI bool
  compare_exchange_weak(_Tp& __e, _Tp __d, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT {
    return std::__cxx_atomic_compare_exchange_weak(std::addressof(__a_), std::addressof(__e), __d, __m, __m);
  }
  _LIBCPP_HIDE_FROM_ABI bool
  compare_exchange_weak(_Tp& __e, _Tp __d, memory_order __m = memory_order_seq_cst) _NOEXCEPT {
    return std::__cxx_atomic_compare_exchange_weak(std::addressof(__a_), std::addressof(__e), __d, __m, __m);
  }
  _LIBCPP_HIDE_FROM_ABI bool
  compare_exchange_strong(_Tp& __e, _Tp __d, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT {
    return std::__cxx_atomic_compare_exchange_strong(std::addressof(__a_), std::addressof(__e), __d, __m, __m);
  }
  _LIBCPP_HIDE_FROM_ABI bool
  compare_exchange_strong(_Tp& __e, _Tp __d, memory_order __m = memory_order_seq_cst) _NOEXCEPT {
    return std::__cxx_atomic_compare_exchange_strong(std::addressof(__a_), std::addressof(__e), __d, __m, __m);
  }

#if _LIBCPP_STD_VER >= 20
  _LIBCPP_AVAILABILITY_SYNC _LIBCPP_HIDE_FROM_ABI void wait(_Tp __v, memory_order __m = memory_order_seq_cst) const
      volatile _NOEXCEPT {
    std::__atomic_wait(*this, __v, __m);
  }
  _LIBCPP_AVAILABILITY_SYNC _LIBCPP_HIDE_FROM_ABI void
  wait(_Tp __v, memory_order __m = memory_order_seq_cst) const _NOEXCEPT {
    std::__atomic_wait(*this, __v, __m);
  }
  _LIBCPP_AVAILABILITY_SYNC _LIBCPP_HIDE_FROM_ABI void notify_one() volatile _NOEXCEPT {
    std::__atomic_notify_one(*this);
  }
  _LIBCPP_AVAILABILITY_SYNC _LIBCPP_HIDE_FROM_ABI void notify_one() _NOEXCEPT { std::__atomic_notify_one(*this); }
  _LIBCPP_AVAILABILITY_SYNC _LIBCPP_HIDE_FROM_ABI void notify_all() volatile _NOEXCEPT {
    std::__atomic_notify_all(*this);
  }
  _LIBCPP_AVAILABILITY_SYNC _LIBCPP_HIDE_FROM_ABI void notify_all() _NOEXCEPT { std::__atomic_notify_all(*this); }
#endif // _LIBCPP_STD_VER >= 20
#if _LIBCPP_STD_VER >= 20
  _LIBCPP_HIDE_FROM_ABI constexpr __atomic_base() noexcept(is_nothrow_default_constructible_v<_Tp>) : __a_(_Tp()) {}
#else
  _LIBCPP_HIDE_FROM_ABI __atomic_base() _NOEXCEPT = default;
#endif

  _LIBCPP_HIDE_FROM_ABI _LIBCPP_CONSTEXPR __atomic_base(_Tp __d) _NOEXCEPT : __a_(__d) {}

  __atomic_base(const __atomic_base&) = delete;
};

// atomic<Integral>

template <class _Tp>
struct __atomic_base<_Tp, true> : public __atomic_base<_Tp, false> {
  using __base = __atomic_base<_Tp, false>;

  _LIBCPP_HIDE_FROM_ABI _LIBCPP_CONSTEXPR_SINCE_CXX20 __atomic_base() _NOEXCEPT = default;

  _LIBCPP_HIDE_FROM_ABI _LIBCPP_CONSTEXPR __atomic_base(_Tp __d) _NOEXCEPT : __base(__d) {}

  _LIBCPP_HIDE_FROM_ABI _Tp fetch_add(_Tp __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT {
    return std::__cxx_atomic_fetch_add(std::addressof(this->__a_), __op, __m);
  }
  _LIBCPP_HIDE_FROM_ABI _Tp fetch_add(_Tp __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT {
    return std::__cxx_atomic_fetch_add(std::addressof(this->__a_), __op, __m);
  }
  _LIBCPP_HIDE_FROM_ABI _Tp fetch_sub(_Tp __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT {
    return std::__cxx_atomic_fetch_sub(std::addressof(this->__a_), __op, __m);
  }
  _LIBCPP_HIDE_FROM_ABI _Tp fetch_sub(_Tp __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT {
    return std::__cxx_atomic_fetch_sub(std::addressof(this->__a_), __op, __m);
  }
  _LIBCPP_HIDE_FROM_ABI _Tp fetch_and(_Tp __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT {
    return std::__cxx_atomic_fetch_and(std::addressof(this->__a_), __op, __m);
  }
  _LIBCPP_HIDE_FROM_ABI _Tp fetch_and(_Tp __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT {
    return std::__cxx_atomic_fetch_and(std::addressof(this->__a_), __op, __m);
  }
  _LIBCPP_HIDE_FROM_ABI _Tp fetch_or(_Tp __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT {
    return std::__cxx_atomic_fetch_or(std::addressof(this->__a_), __op, __m);
  }
  _LIBCPP_HIDE_FROM_ABI _Tp fetch_or(_Tp __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT {
    return std::__cxx_atomic_fetch_or(std::addressof(this->__a_), __op, __m);
  }
  _LIBCPP_HIDE_FROM_ABI _Tp fetch_xor(_Tp __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT {
    return std::__cxx_atomic_fetch_xor(std::addressof(this->__a_), __op, __m);
  }
  _LIBCPP_HIDE_FROM_ABI _Tp fetch_xor(_Tp __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT {
    return std::__cxx_atomic_fetch_xor(std::addressof(this->__a_), __op, __m);
  }

  _LIBCPP_HIDE_FROM_ABI _Tp operator++(int) volatile _NOEXCEPT { return fetch_add(_Tp(1)); }
  _LIBCPP_HIDE_FROM_ABI _Tp operator++(int) _NOEXCEPT { return fetch_add(_Tp(1)); }
  _LIBCPP_HIDE_FROM_ABI _Tp operator--(int) volatile _NOEXCEPT { return fetch_sub(_Tp(1)); }
  _LIBCPP_HIDE_FROM_ABI _Tp operator--(int) _NOEXCEPT { return fetch_sub(_Tp(1)); }
  _LIBCPP_HIDE_FROM_ABI _Tp operator++() volatile _NOEXCEPT { return fetch_add(_Tp(1)) + _Tp(1); }
  _LIBCPP_HIDE_FROM_ABI _Tp operator++() _NOEXCEPT { return fetch_add(_Tp(1)) + _Tp(1); }
  _LIBCPP_HIDE_FROM_ABI _Tp operator--() volatile _NOEXCEPT { return fetch_sub(_Tp(1)) - _Tp(1); }
  _LIBCPP_HIDE_FROM_ABI _Tp operator--() _NOEXCEPT { return fetch_sub(_Tp(1)) - _Tp(1); }
  _LIBCPP_HIDE_FROM_ABI _Tp operator+=(_Tp __op) volatile _NOEXCEPT { return fetch_add(__op) + __op; }
  _LIBCPP_HIDE_FROM_ABI _Tp operator+=(_Tp __op) _NOEXCEPT { return fetch_add(__op) + __op; }
  _LIBCPP_HIDE_FROM_ABI _Tp operator-=(_Tp __op) volatile _NOEXCEPT { return fetch_sub(__op) - __op; }
  _LIBCPP_HIDE_FROM_ABI _Tp operator-=(_Tp __op) _NOEXCEPT { return fetch_sub(__op) - __op; }
  _LIBCPP_HIDE_FROM_ABI _Tp operator&=(_Tp __op) volatile _NOEXCEPT { return fetch_and(__op) & __op; }
  _LIBCPP_HIDE_FROM_ABI _Tp operator&=(_Tp __op) _NOEXCEPT { return fetch_and(__op) & __op; }
  _LIBCPP_HIDE_FROM_ABI _Tp operator|=(_Tp __op) volatile _NOEXCEPT { return fetch_or(__op) | __op; }
  _LIBCPP_HIDE_FROM_ABI _Tp operator|=(_Tp __op) _NOEXCEPT { return fetch_or(__op) | __op; }
  _LIBCPP_HIDE_FROM_ABI _Tp operator^=(_Tp __op) volatile _NOEXCEPT { return fetch_xor(__op) ^ __op; }
  _LIBCPP_HIDE_FROM_ABI _Tp operator^=(_Tp __op) _NOEXCEPT { return fetch_xor(__op) ^ __op; }
};

// Here we need _IsIntegral because the default template argument is not enough,
// e.g. __atomic_base<int> is __atomic_base<int, true>, which inherits from
// __atomic_base<int, false>, and the caller of the wait function is
// __atomic_base<int, false>. So specializing __atomic_base<_Tp> does not work.
template <class _Tp, bool _IsIntegral>
struct __atomic_waitable_traits<__atomic_base<_Tp, _IsIntegral> > {
  static _LIBCPP_HIDE_FROM_ABI _Tp __atomic_load(const __atomic_base<_Tp, _IsIntegral>& __a, memory_order __order) {
    return __a.load(__order);
  }

  static _LIBCPP_HIDE_FROM_ABI _Tp
  __atomic_load(const volatile __atomic_base<_Tp, _IsIntegral>& __this, memory_order __order) {
    return __this.load(__order);
  }

  static _LIBCPP_HIDE_FROM_ABI const __cxx_atomic_impl<_Tp>*
  __atomic_contention_address(const __atomic_base<_Tp, _IsIntegral>& __a) {
    return std::addressof(__a.__a_);
  }

  static _LIBCPP_HIDE_FROM_ABI const volatile __cxx_atomic_impl<_Tp>*
  __atomic_contention_address(const volatile __atomic_base<_Tp, _IsIntegral>& __this) {
    return std::addressof(__this.__a_);
  }
};

template <class _Tp>
struct atomic : public __atomic_base<_Tp> {
  using __base = __atomic_base<_Tp>;
  using value_type = _Tp;
  using difference_type = value_type;

#if _LIBCPP_STD_VER >= 20
  _LIBCPP_HIDE_FROM_ABI atomic() = default;
#else
  _LIBCPP_HIDE_FROM_ABI atomic() _NOEXCEPT = default;
#endif

  _LIBCPP_HIDE_FROM_ABI _LIBCPP_CONSTEXPR atomic(_Tp __d) _NOEXCEPT : __base(__d) {}

  _LIBCPP_HIDE_FROM_ABI _Tp operator=(_Tp __d) volatile _NOEXCEPT {
    __base::store(__d);
    return __d;
  }
  _LIBCPP_HIDE_FROM_ABI _Tp operator=(_Tp __d) _NOEXCEPT {
    __base::store(__d);
    return __d;
  }

  atomic& operator=(const atomic&) = delete;
  atomic& operator=(const atomic&) volatile = delete;
};

// atomic<T*>

template <class _Tp>
struct atomic<_Tp*> : public __atomic_base<_Tp*> {
  using __base = __atomic_base<_Tp*>;
  using value_type = _Tp*;
  using difference_type = ptrdiff_t;

  _LIBCPP_HIDE_FROM_ABI atomic() _NOEXCEPT = default;

  _LIBCPP_HIDE_FROM_ABI _LIBCPP_CONSTEXPR atomic(_Tp* __d) _NOEXCEPT : __base(__d) {}

  _LIBCPP_HIDE_FROM_ABI _Tp* operator=(_Tp* __d) volatile _NOEXCEPT {
    __base::store(__d);
    return __d;
  }
  _LIBCPP_HIDE_FROM_ABI _Tp* operator=(_Tp* __d) _NOEXCEPT {
    __base::store(__d);
    return __d;
  }
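
  // Illustrative sketch (not part of the upstream header): fetch_add/fetch_sub on
  // atomic<T*> perform pointer arithmetic, i.e. they move the pointer by __op
  // objects of type T, not by __op bytes, mirroring built-in pointer arithmetic.
  // The names __arr, __p and __prev below are purely for illustration:
  //
  //   int __arr[4] = {};
  //   std::atomic<int*> __p(__arr);
  //   int* __prev = __p.fetch_add(2); // __prev == __arr; __p now points to __arr + 2
  //
  // Function pointers are rejected by the static_asserts below because arithmetic
  // on them is not meaningful.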
  _LIBCPP_HIDE_FROM_ABI _Tp* fetch_add(ptrdiff_t __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT {
    // __atomic_fetch_add accepts function pointers; guard against them.
    static_assert(!is_function<__remove_pointer_t<_Tp> >::value, "Pointer to function isn't allowed");
    return std::__cxx_atomic_fetch_add(std::addressof(this->__a_), __op, __m);
  }

  _LIBCPP_HIDE_FROM_ABI _Tp* fetch_add(ptrdiff_t __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT {
    // __atomic_fetch_add accepts function pointers; guard against them.
    static_assert(!is_function<__remove_pointer_t<_Tp> >::value, "Pointer to function isn't allowed");
    return std::__cxx_atomic_fetch_add(std::addressof(this->__a_), __op, __m);
  }

  _LIBCPP_HIDE_FROM_ABI _Tp* fetch_sub(ptrdiff_t __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT {
    // __atomic_fetch_sub accepts function pointers; guard against them.
    static_assert(!is_function<__remove_pointer_t<_Tp> >::value, "Pointer to function isn't allowed");
    return std::__cxx_atomic_fetch_sub(std::addressof(this->__a_), __op, __m);
  }

  _LIBCPP_HIDE_FROM_ABI _Tp* fetch_sub(ptrdiff_t __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT {
    // __atomic_fetch_sub accepts function pointers; guard against them.
    static_assert(!is_function<__remove_pointer_t<_Tp> >::value, "Pointer to function isn't allowed");
    return std::__cxx_atomic_fetch_sub(std::addressof(this->__a_), __op, __m);
  }

  _LIBCPP_HIDE_FROM_ABI _Tp* operator++(int) volatile _NOEXCEPT { return fetch_add(1); }
  _LIBCPP_HIDE_FROM_ABI _Tp* operator++(int) _NOEXCEPT { return fetch_add(1); }
  _LIBCPP_HIDE_FROM_ABI _Tp* operator--(int) volatile _NOEXCEPT { return fetch_sub(1); }
  _LIBCPP_HIDE_FROM_ABI _Tp* operator--(int) _NOEXCEPT { return fetch_sub(1); }
  _LIBCPP_HIDE_FROM_ABI _Tp* operator++() volatile _NOEXCEPT { return fetch_add(1) + 1; }
  _LIBCPP_HIDE_FROM_ABI _Tp* operator++() _NOEXCEPT { return fetch_add(1) + 1; }
  _LIBCPP_HIDE_FROM_ABI _Tp* operator--() volatile _NOEXCEPT { return fetch_sub(1) - 1; }
  _LIBCPP_HIDE_FROM_ABI _Tp* operator--() _NOEXCEPT { return fetch_sub(1) - 1; }
  _LIBCPP_HIDE_FROM_ABI _Tp* operator+=(ptrdiff_t __op) volatile _NOEXCEPT { return fetch_add(__op) + __op; }
  _LIBCPP_HIDE_FROM_ABI _Tp* operator+=(ptrdiff_t __op) _NOEXCEPT { return fetch_add(__op) + __op; }
  _LIBCPP_HIDE_FROM_ABI _Tp* operator-=(ptrdiff_t __op) volatile _NOEXCEPT { return fetch_sub(__op) - __op; }
  _LIBCPP_HIDE_FROM_ABI _Tp* operator-=(ptrdiff_t __op) _NOEXCEPT { return fetch_sub(__op) - __op; }

  atomic& operator=(const atomic&) = delete;
  atomic& operator=(const atomic&) volatile = delete;
};

template <class _Tp>
struct __atomic_waitable_traits<atomic<_Tp> > : __atomic_waitable_traits<__atomic_base<_Tp> > {};

#if _LIBCPP_STD_VER >= 20
template <class _Tp>
  requires is_floating_point_v<_Tp>
struct atomic<_Tp> : __atomic_base<_Tp> {
private:
  _LIBCPP_HIDE_FROM_ABI static constexpr bool __is_fp80_long_double() {
    // Only x87-fp80 long double has a 64-bit mantissa.
    return __LDBL_MANT_DIG__ == 64 && std::is_same_v<_Tp, long double>;
  }

  _LIBCPP_HIDE_FROM_ABI static constexpr bool __has_rmw_builtin() {
#  ifndef _LIBCPP_COMPILER_CLANG_BASED
    return false;
#  else
    // The builtin __cxx_atomic_fetch_add errors out during compilation for
    // long double on platforms with the fp80 format: the LLVM parser does not
    // allow atomicrmw with the x86_fp80 type. For more details, see
    // lib/Sema/SemaChecking.cpp, function IsAllowedValueType:
    //   if (ValType->isSpecificBuiltinType(BuiltinType::LongDouble) &&
    //       &Context.getTargetInfo().getLongDoubleFormat() ==
    //           &llvm::APFloat::x87DoubleExtended())
    // For more info:
    // https://github.com/llvm/llvm-project/issues/68602
    // https://reviews.llvm.org/D53965
    return !__is_fp80_long_double();
#  endif
  }
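
  // Illustrative sketch (not part of the upstream header): when the fetch_add/fetch_sub
  // builtin is unavailable, __rmw_op below falls back to a compare-exchange retry loop
  // that is semantically equivalent to the following, where __self, __operand and __m
  // name the parameters of __rmw_op:
  //
  //   _Tp __old = __self.load(memory_order_relaxed);
  //   while (!__self.compare_exchange_weak(__old, __old + __operand, __m, memory_order_relaxed)) {
  //     // __old has been reloaded with the currently stored value; retry with it.
  //   }
  //   return __old;
  //
  // (The fp80 reload workaround applied in __rmw_op is omitted from this sketch.)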

  template <class _This, class _Operation, class _BuiltinOp>
  _LIBCPP_HIDE_FROM_ABI static _Tp
  __rmw_op(_This&& __self, _Tp __operand, memory_order __m, _Operation __operation, _BuiltinOp __builtin_op) {
    if constexpr (__has_rmw_builtin()) {
      return __builtin_op(std::addressof(std::forward<_This>(__self).__a_), __operand, __m);
    } else {
      _Tp __old = __self.load(memory_order_relaxed);
      _Tp __new = __operation(__old, __operand);
      while (!__self.compare_exchange_weak(__old, __new, __m, memory_order_relaxed)) {
#  ifdef _LIBCPP_COMPILER_CLANG_BASED
        if constexpr (__is_fp80_long_double()) {
          // https://github.com/llvm/llvm-project/issues/47978
          // clang bug: __old is not updated on failure for atomic<long double>::compare_exchange_weak
          // Note: __old = __self.load(memory_order_relaxed) will not work
          std::__cxx_atomic_load_inplace(std::addressof(__self.__a_), &__old, memory_order_relaxed);
        }
#  endif
        __new = __operation(__old, __operand);
      }
      return __old;
    }
  }

  template <class _This>
  _LIBCPP_HIDE_FROM_ABI static _Tp __fetch_add(_This&& __self, _Tp __operand, memory_order __m) {
    auto __builtin_op = [](auto __a, auto __builtin_operand, auto __order) {
      return std::__cxx_atomic_fetch_add(__a, __builtin_operand, __order);
    };
    return __rmw_op(std::forward<_This>(__self), __operand, __m, std::plus<>{}, __builtin_op);
  }

  template <class _This>
  _LIBCPP_HIDE_FROM_ABI static _Tp __fetch_sub(_This&& __self, _Tp __operand, memory_order __m) {
    auto __builtin_op = [](auto __a, auto __builtin_operand, auto __order) {
      return std::__cxx_atomic_fetch_sub(__a, __builtin_operand, __order);
    };
    return __rmw_op(std::forward<_This>(__self), __operand, __m, std::minus<>{}, __builtin_op);
  }

public:
  using __base = __atomic_base<_Tp>;
  using value_type = _Tp;
  using difference_type = value_type;

  _LIBCPP_HIDE_FROM_ABI constexpr atomic() noexcept = default;
  _LIBCPP_HIDE_FROM_ABI constexpr atomic(_Tp __d) noexcept : __base(__d) {}

  atomic(const atomic&) = delete;
  atomic& operator=(const atomic&) = delete;
  atomic& operator=(const atomic&) volatile = delete;

  _LIBCPP_HIDE_FROM_ABI _Tp operator=(_Tp __d) volatile noexcept
    requires __base::is_always_lock_free
  {
    __base::store(__d);
    return __d;
  }
  _LIBCPP_HIDE_FROM_ABI _Tp operator=(_Tp __d) noexcept {
    __base::store(__d);
    return __d;
  }

  _LIBCPP_HIDE_FROM_ABI _Tp fetch_add(_Tp __op, memory_order __m = memory_order_seq_cst) volatile noexcept
    requires __base::is_always_lock_free
  {
    return __fetch_add(*this, __op, __m);
  }

  _LIBCPP_HIDE_FROM_ABI _Tp fetch_add(_Tp __op, memory_order __m = memory_order_seq_cst) noexcept {
    return __fetch_add(*this, __op, __m);
  }

  _LIBCPP_HIDE_FROM_ABI _Tp fetch_sub(_Tp __op, memory_order __m = memory_order_seq_cst) volatile noexcept
    requires __base::is_always_lock_free
  {
    return __fetch_sub(*this, __op, __m);
  }

  _LIBCPP_HIDE_FROM_ABI _Tp fetch_sub(_Tp __op, memory_order __m = memory_order_seq_cst) noexcept {
    return __fetch_sub(*this, __op, __m);
  }

  _LIBCPP_HIDE_FROM_ABI _Tp operator+=(_Tp __op) volatile noexcept
    requires __base::is_always_lock_free
  {
    return fetch_add(__op) + __op;
  }

  _LIBCPP_HIDE_FROM_ABI _Tp operator+=(_Tp __op) noexcept { return fetch_add(__op) + __op; }

  _LIBCPP_HIDE_FROM_ABI _Tp operator-=(_Tp __op) volatile noexcept
    requires __base::is_always_lock_free
  {
    return fetch_sub(__op) - __op;
  }

  _LIBCPP_HIDE_FROM_ABI _Tp operator-=(_Tp __op) noexcept { return fetch_sub(__op) - __op; }
};

#endif // _LIBCPP_STD_VER >= 20

// atomic_is_lock_free

template <class _Tp>
_LIBCPP_HIDE_FROM_ABI bool atomic_is_lock_free(const volatile atomic<_Tp>* __o) _NOEXCEPT {
  return __o->is_lock_free();
}

template <class _Tp>
_LIBCPP_HIDE_FROM_ABI bool atomic_is_lock_free(const atomic<_Tp>* __o) _NOEXCEPT {
  return __o->is_lock_free();
}

// atomic_init

template <class _Tp>
_LIBCPP_DEPRECATED_IN_CXX20 _LIBCPP_HIDE_FROM_ABI void
atomic_init(volatile atomic<_Tp>* __o, typename atomic<_Tp>::value_type __d) _NOEXCEPT {
  std::__cxx_atomic_init(std::addressof(__o->__a_), __d);
}

template <class _Tp>
_LIBCPP_DEPRECATED_IN_CXX20 _LIBCPP_HIDE_FROM_ABI void
atomic_init(atomic<_Tp>* __o, typename atomic<_Tp>::value_type __d) _NOEXCEPT {
  std::__cxx_atomic_init(std::addressof(__o->__a_), __d);
}

// atomic_store

template <class _Tp>
_LIBCPP_HIDE_FROM_ABI void atomic_store(volatile atomic<_Tp>* __o, typename atomic<_Tp>::value_type __d) _NOEXCEPT {
  __o->store(__d);
}

template <class _Tp>
_LIBCPP_HIDE_FROM_ABI void atomic_store(atomic<_Tp>* __o, typename atomic<_Tp>::value_type __d) _NOEXCEPT {
  __o->store(__d);
}

// atomic_store_explicit

template <class _Tp>
_LIBCPP_HIDE_FROM_ABI void
atomic_store_explicit(volatile atomic<_Tp>* __o, typename atomic<_Tp>::value_type __d, memory_order __m) _NOEXCEPT
    _LIBCPP_CHECK_STORE_MEMORY_ORDER(__m) {
  __o->store(__d, __m);
}

template <class _Tp>
_LIBCPP_HIDE_FROM_ABI void
atomic_store_explicit(atomic<_Tp>* __o, typename atomic<_Tp>::value_type __d, memory_order __m) _NOEXCEPT
    _LIBCPP_CHECK_STORE_MEMORY_ORDER(__m) {
  __o->store(__d, __m);
}

// atomic_load

template <class _Tp>
_LIBCPP_HIDE_FROM_ABI _Tp atomic_load(const volatile atomic<_Tp>* __o) _NOEXCEPT {
  return __o->load();
}

template <class _Tp>
_LIBCPP_HIDE_FROM_ABI _Tp atomic_load(const atomic<_Tp>* __o) _NOEXCEPT {
  return __o->load();
}

// atomic_load_explicit

template <class _Tp>
_LIBCPP_HIDE_FROM_ABI _Tp atomic_load_explicit(const volatile atomic<_Tp>* __o, memory_order __m) _NOEXCEPT
    _LIBCPP_CHECK_LOAD_MEMORY_ORDER(__m) {
  return __o->load(__m);
}

template <class _Tp>
_LIBCPP_HIDE_FROM_ABI _Tp atomic_load_explicit(const atomic<_Tp>* __o, memory_order __m) _NOEXCEPT
    _LIBCPP_CHECK_LOAD_MEMORY_ORDER(__m) {
  return __o->load(__m);
}

// atomic_exchange

template <class _Tp>
_LIBCPP_HIDE_FROM_ABI _Tp atomic_exchange(volatile atomic<_Tp>* __o, typename atomic<_Tp>::value_type __d) _NOEXCEPT {
  return __o->exchange(__d);
}

template <class _Tp>
_LIBCPP_HIDE_FROM_ABI _Tp atomic_exchange(atomic<_Tp>* __o, typename atomic<_Tp>::value_type __d) _NOEXCEPT {
  return __o->exchange(__d);
}

// atomic_exchange_explicit

template <class _Tp>
_LIBCPP_HIDE_FROM_ABI _Tp
atomic_exchange_explicit(volatile atomic<_Tp>* __o, typename atomic<_Tp>::value_type __d, memory_order __m) _NOEXCEPT {
  return __o->exchange(__d, __m);
}

template <class _Tp>
_LIBCPP_HIDE_FROM_ABI _Tp
atomic_exchange_explicit(atomic<_Tp>* __o, typename atomic<_Tp>::value_type __d, memory_order __m) _NOEXCEPT {
  return __o->exchange(__d, __m);
}

// atomic_compare_exchange_weak

template <class _Tp>
_LIBCPP_HIDE_FROM_ABI bool atomic_compare_exchange_weak(
    volatile atomic<_Tp>* __o, typename atomic<_Tp>::value_type* __e, typename atomic<_Tp>::value_type __d) _NOEXCEPT {
  return __o->compare_exchange_weak(*__e, __d);
}

template <class _Tp>
_LIBCPP_HIDE_FROM_ABI bool atomic_compare_exchange_weak(
    atomic<_Tp>* __o, typename atomic<_Tp>::value_type* __e, typename atomic<_Tp>::value_type __d) _NOEXCEPT {
  return __o->compare_exchange_weak(*__e, __d);
}

// atomic_compare_exchange_strong

template <class _Tp>
_LIBCPP_HIDE_FROM_ABI bool atomic_compare_exchange_strong(
    volatile atomic<_Tp>* __o, typename atomic<_Tp>::value_type* __e, typename atomic<_Tp>::value_type __d) _NOEXCEPT {
  return __o->compare_exchange_strong(*__e, __d);
}

template <class _Tp>
_LIBCPP_HIDE_FROM_ABI bool atomic_compare_exchange_strong(
    atomic<_Tp>* __o, typename atomic<_Tp>::value_type* __e, typename atomic<_Tp>::value_type __d) _NOEXCEPT {
  return __o->compare_exchange_strong(*__e, __d);
}

// atomic_compare_exchange_weak_explicit

template <class _Tp>
_LIBCPP_HIDE_FROM_ABI bool atomic_compare_exchange_weak_explicit(
    volatile atomic<_Tp>* __o,
    typename atomic<_Tp>::value_type* __e,
    typename atomic<_Tp>::value_type __d,
    memory_order __s,
    memory_order __f) _NOEXCEPT _LIBCPP_CHECK_EXCHANGE_MEMORY_ORDER(__s, __f) {
  return __o->compare_exchange_weak(*__e, __d, __s, __f);
}

template <class _Tp>
_LIBCPP_HIDE_FROM_ABI bool atomic_compare_exchange_weak_explicit(
    atomic<_Tp>* __o,
    typename atomic<_Tp>::value_type* __e,
    typename atomic<_Tp>::value_type __d,
    memory_order __s,
    memory_order __f) _NOEXCEPT _LIBCPP_CHECK_EXCHANGE_MEMORY_ORDER(__s, __f) {
  return __o->compare_exchange_weak(*__e, __d, __s, __f);
}

// atomic_compare_exchange_strong_explicit

template <class _Tp>
_LIBCPP_HIDE_FROM_ABI bool atomic_compare_exchange_strong_explicit(
    volatile atomic<_Tp>* __o,
    typename atomic<_Tp>::value_type* __e,
    typename atomic<_Tp>::value_type __d,
    memory_order __s,
    memory_order __f) _NOEXCEPT _LIBCPP_CHECK_EXCHANGE_MEMORY_ORDER(__s, __f) {
  return __o->compare_exchange_strong(*__e, __d, __s, __f);
}

template <class _Tp>
_LIBCPP_HIDE_FROM_ABI bool atomic_compare_exchange_strong_explicit(
    atomic<_Tp>* __o,
    typename atomic<_Tp>::value_type* __e,
    typename atomic<_Tp>::value_type __d,
    memory_order __s,
    memory_order __f) _NOEXCEPT _LIBCPP_CHECK_EXCHANGE_MEMORY_ORDER(__s, __f) {
  return __o->compare_exchange_strong(*__e, __d, __s, __f);
}
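
// Illustrative sketch (not part of the upstream header): the compare-exchange free
// functions above forward to the member functions. compare_exchange_weak may fail
// spuriously, so it is normally used in a retry loop; on failure the expected value
// is reloaded from the atomic, so no separate load is needed. The names __counter
// and __expected are purely for illustration:
//
//   std::atomic<int> __counter(0);
//   int __expected = __counter.load(std::memory_order_relaxed);
//   while (!__counter.compare_exchange_weak(__expected, __expected * 2)) {
//     // __expected now holds the freshly observed value; retry with it.
//   }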

#if _LIBCPP_STD_VER >= 20

// atomic_wait

template <class _Tp>
_LIBCPP_AVAILABILITY_SYNC _LIBCPP_HIDE_FROM_ABI void
atomic_wait(const volatile atomic<_Tp>* __o, typename atomic<_Tp>::value_type __v) _NOEXCEPT {
  return __o->wait(__v);
}

template <class _Tp>
_LIBCPP_AVAILABILITY_SYNC _LIBCPP_HIDE_FROM_ABI void
atomic_wait(const atomic<_Tp>* __o, typename atomic<_Tp>::value_type __v) _NOEXCEPT {
  return __o->wait(__v);
}

// atomic_wait_explicit

template <class _Tp>
_LIBCPP_AVAILABILITY_SYNC _LIBCPP_HIDE_FROM_ABI void
atomic_wait_explicit(const volatile atomic<_Tp>* __o, typename atomic<_Tp>::value_type __v, memory_order __m) _NOEXCEPT
    _LIBCPP_CHECK_LOAD_MEMORY_ORDER(__m) {
  return __o->wait(__v, __m);
}

template <class _Tp>
_LIBCPP_AVAILABILITY_SYNC _LIBCPP_HIDE_FROM_ABI void
atomic_wait_explicit(const atomic<_Tp>* __o, typename atomic<_Tp>::value_type __v, memory_order __m) _NOEXCEPT
    _LIBCPP_CHECK_LOAD_MEMORY_ORDER(__m) {
  return __o->wait(__v, __m);
}

// atomic_notify_one

template <class _Tp>
_LIBCPP_AVAILABILITY_SYNC _LIBCPP_HIDE_FROM_ABI void atomic_notify_one(volatile atomic<_Tp>* __o) _NOEXCEPT {
  __o->notify_one();
}
template <class _Tp>
_LIBCPP_AVAILABILITY_SYNC _LIBCPP_HIDE_FROM_ABI void atomic_notify_one(atomic<_Tp>* __o) _NOEXCEPT {
  __o->notify_one();
}

// atomic_notify_all

template <class _Tp>
_LIBCPP_AVAILABILITY_SYNC _LIBCPP_HIDE_FROM_ABI void atomic_notify_all(volatile atomic<_Tp>* __o) _NOEXCEPT {
  __o->notify_all();
}
template <class _Tp>
_LIBCPP_AVAILABILITY_SYNC _LIBCPP_HIDE_FROM_ABI void atomic_notify_all(atomic<_Tp>* __o) _NOEXCEPT {
  __o->notify_all();
}

#endif // _LIBCPP_STD_VER >= 20
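
// Illustrative sketch (not part of the upstream header): atomic_wait blocks until the
// stored value is observed to differ from the argument, and atomic_notify_one /
// atomic_notify_all wake threads blocked in such a wait. A minimal flag handoff,
// with __ready as an illustrative name:
//
//   std::atomic<bool> __ready(false);
//   // waiting thread:
//   __ready.wait(false);   // returns once the stored value is no longer false
//   // signalling thread:
//   __ready.store(true);
//   __ready.notify_one();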

// atomic_fetch_add

template <class _Tp>
_LIBCPP_HIDE_FROM_ABI _Tp
atomic_fetch_add(volatile atomic<_Tp>* __o, typename atomic<_Tp>::difference_type __op) _NOEXCEPT {
  return __o->fetch_add(__op);
}

template <class _Tp>
_LIBCPP_HIDE_FROM_ABI _Tp atomic_fetch_add(atomic<_Tp>* __o, typename atomic<_Tp>::difference_type __op) _NOEXCEPT {
  return __o->fetch_add(__op);
}

// atomic_fetch_add_explicit

template <class _Tp>
_LIBCPP_HIDE_FROM_ABI _Tp atomic_fetch_add_explicit(
    volatile atomic<_Tp>* __o, typename atomic<_Tp>::difference_type __op, memory_order __m) _NOEXCEPT {
  return __o->fetch_add(__op, __m);
}

template <class _Tp>
_LIBCPP_HIDE_FROM_ABI _Tp
atomic_fetch_add_explicit(atomic<_Tp>* __o, typename atomic<_Tp>::difference_type __op, memory_order __m) _NOEXCEPT {
  return __o->fetch_add(__op, __m);
}

// atomic_fetch_sub

template <class _Tp>
_LIBCPP_HIDE_FROM_ABI _Tp
atomic_fetch_sub(volatile atomic<_Tp>* __o, typename atomic<_Tp>::difference_type __op) _NOEXCEPT {
  return __o->fetch_sub(__op);
}

template <class _Tp>
_LIBCPP_HIDE_FROM_ABI _Tp atomic_fetch_sub(atomic<_Tp>* __o, typename atomic<_Tp>::difference_type __op) _NOEXCEPT {
  return __o->fetch_sub(__op);
}

// atomic_fetch_sub_explicit

template <class _Tp>
_LIBCPP_HIDE_FROM_ABI _Tp atomic_fetch_sub_explicit(
    volatile atomic<_Tp>* __o, typename atomic<_Tp>::difference_type __op, memory_order __m) _NOEXCEPT {
  return __o->fetch_sub(__op, __m);
}

template <class _Tp>
_LIBCPP_HIDE_FROM_ABI _Tp
atomic_fetch_sub_explicit(atomic<_Tp>* __o, typename atomic<_Tp>::difference_type __op, memory_order __m) _NOEXCEPT {
  return __o->fetch_sub(__op, __m);
}

// atomic_fetch_and

template <class _Tp, __enable_if_t<is_integral<_Tp>::value && !is_same<_Tp, bool>::value, int> = 0>
_LIBCPP_HIDE_FROM_ABI _Tp
atomic_fetch_and(volatile atomic<_Tp>* __o, typename atomic<_Tp>::value_type __op) _NOEXCEPT {
  return __o->fetch_and(__op);
}

template <class _Tp, __enable_if_t<is_integral<_Tp>::value && !is_same<_Tp, bool>::value, int> = 0>
_LIBCPP_HIDE_FROM_ABI _Tp atomic_fetch_and(atomic<_Tp>* __o, typename atomic<_Tp>::value_type __op) _NOEXCEPT {
  return __o->fetch_and(__op);
}

// atomic_fetch_and_explicit

template <class _Tp, __enable_if_t<is_integral<_Tp>::value && !is_same<_Tp, bool>::value, int> = 0>
_LIBCPP_HIDE_FROM_ABI _Tp atomic_fetch_and_explicit(
    volatile atomic<_Tp>* __o, typename atomic<_Tp>::value_type __op, memory_order __m) _NOEXCEPT {
  return __o->fetch_and(__op, __m);
}

template <class _Tp, __enable_if_t<is_integral<_Tp>::value && !is_same<_Tp, bool>::value, int> = 0>
_LIBCPP_HIDE_FROM_ABI _Tp
atomic_fetch_and_explicit(atomic<_Tp>* __o, typename atomic<_Tp>::value_type __op, memory_order __m) _NOEXCEPT {
  return __o->fetch_and(__op, __m);
}

// atomic_fetch_or

template <class _Tp, __enable_if_t<is_integral<_Tp>::value && !is_same<_Tp, bool>::value, int> = 0>
_LIBCPP_HIDE_FROM_ABI _Tp atomic_fetch_or(volatile atomic<_Tp>* __o, typename atomic<_Tp>::value_type __op) _NOEXCEPT {
  return __o->fetch_or(__op);
}

template <class _Tp, __enable_if_t<is_integral<_Tp>::value && !is_same<_Tp, bool>::value, int> = 0>
_LIBCPP_HIDE_FROM_ABI _Tp atomic_fetch_or(atomic<_Tp>* __o, typename atomic<_Tp>::value_type __op) _NOEXCEPT {
  return __o->fetch_or(__op);
}

// atomic_fetch_or_explicit

template <class _Tp, __enable_if_t<is_integral<_Tp>::value && !is_same<_Tp, bool>::value, int> = 0>
_LIBCPP_HIDE_FROM_ABI _Tp
atomic_fetch_or_explicit(volatile atomic<_Tp>* __o, typename atomic<_Tp>::value_type __op, memory_order __m) _NOEXCEPT {
  return __o->fetch_or(__op, __m);
}

template <class _Tp, __enable_if_t<is_integral<_Tp>::value && !is_same<_Tp, bool>::value, int> = 0>
_LIBCPP_HIDE_FROM_ABI _Tp
atomic_fetch_or_explicit(atomic<_Tp>* __o, typename atomic<_Tp>::value_type __op, memory_order __m) _NOEXCEPT {
  return __o->fetch_or(__op, __m);
}

// atomic_fetch_xor

template <class _Tp, __enable_if_t<is_integral<_Tp>::value && !is_same<_Tp, bool>::value, int> = 0>
_LIBCPP_HIDE_FROM_ABI _Tp atomic_fetch_xor(volatile atomic<_Tp>* __o, typename atomic<_Tp>::value_type __op) _NOEXCEPT {
  return __o->fetch_xor(__op);
}

template <class _Tp, __enable_if_t<is_integral<_Tp>::value && !is_same<_Tp, bool>::value, int> = 0>
_LIBCPP_HIDE_FROM_ABI _Tp atomic_fetch_xor(atomic<_Tp>* __o, typename atomic<_Tp>::value_type __op) _NOEXCEPT {
  return __o->fetch_xor(__op);
}

// atomic_fetch_xor_explicit

template <class _Tp, __enable_if_t<is_integral<_Tp>::value && !is_same<_Tp, bool>::value, int> = 0>
_LIBCPP_HIDE_FROM_ABI _Tp atomic_fetch_xor_explicit(
    volatile atomic<_Tp>* __o, typename atomic<_Tp>::value_type __op, memory_order __m) _NOEXCEPT {
  return __o->fetch_xor(__op, __m);
}

template <class _Tp, __enable_if_t<is_integral<_Tp>::value && !is_same<_Tp, bool>::value, int> = 0>
_LIBCPP_HIDE_FROM_ABI _Tp
atomic_fetch_xor_explicit(atomic<_Tp>* __o, typename atomic<_Tp>::value_type __op, memory_order __m) _NOEXCEPT {
  return __o->fetch_xor(__op, __m);
}

_LIBCPP_END_NAMESPACE_STD

#endif // _LIBCPP___ATOMIC_ATOMIC_H