Lines Matching refs:__m

Cross-reference results for __m, the memory_order parameter used throughout libc++'s <atomic> header. The leading numbers are source line numbers in the header; gaps in the numbering are lines the search omitted because they do not mention __m.

563 #define _LIBCPP_CHECK_STORE_MEMORY_ORDER(__m) \
564 _LIBCPP_DIAGNOSE_WARNING(__m == memory_order_consume || \
565 __m == memory_order_acquire || \
566 __m == memory_order_acq_rel, \
569 #define _LIBCPP_CHECK_LOAD_MEMORY_ORDER(__m) \
570 _LIBCPP_DIAGNOSE_WARNING(__m == memory_order_release || \
571 __m == memory_order_acq_rel, \
574 #define _LIBCPP_CHECK_EXCHANGE_MEMORY_ORDER(__m, __f) \
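
These macros feed libc++'s _LIBCPP_DIAGNOSE_WARNING, which expands to clang's diagnose_if attribute where available, so passing a read-only ordering to a store or a write-only ordering to a load is flagged at compile time rather than silently invoking undefined behavior. A minimal sketch of code that would trip each check (illustrative, not from the listing):

#include <atomic>

std::atomic<int> x{0};

void bad_orders() {
    // Trips _LIBCPP_CHECK_STORE_MEMORY_ORDER: acquire is meaningless for a pure store.
    x.store(1, std::memory_order_acquire);       // warning under clang + libc++

    // Trips _LIBCPP_CHECK_LOAD_MEMORY_ORDER: release is meaningless for a pure load.
    int v = x.load(std::memory_order_release);   // warning under clang + libc++
    (void)v;
}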
899 void store(_Tp __d, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
900 _LIBCPP_CHECK_STORE_MEMORY_ORDER(__m)
901 {__c11_atomic_store(&__a_, __d, __m);}
903 void store(_Tp __d, memory_order __m = memory_order_seq_cst) _NOEXCEPT
904 _LIBCPP_CHECK_STORE_MEMORY_ORDER(__m)
905 {__c11_atomic_store(&__a_, __d, __m);}
907 _Tp load(memory_order __m = memory_order_seq_cst) const volatile _NOEXCEPT
908 _LIBCPP_CHECK_LOAD_MEMORY_ORDER(__m)
909 {return __c11_atomic_load(&__a_, __m);}
911 _Tp load(memory_order __m = memory_order_seq_cst) const _NOEXCEPT
912 _LIBCPP_CHECK_LOAD_MEMORY_ORDER(__m)
913 {return __c11_atomic_load(&__a_, __m);}
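
Both the volatile and non-volatile overloads forward straight to the __c11_atomic_store and __c11_atomic_load builtins. A minimal sketch of the release/acquire handoff these orderings exist for (names are illustrative):

#include <atomic>
#include <cassert>
#include <thread>

int payload = 0;
std::atomic<bool> ready{false};

void producer() {
    payload = 42;                                  // plain write
    ready.store(true, std::memory_order_release);  // publishes the write above
}

void consumer() {
    while (!ready.load(std::memory_order_acquire)) // synchronizes with the release store
        ;
    assert(payload == 42);                         // guaranteed visible here
}

int main() {
    std::thread t1(producer), t2(consumer);
    t1.join(); t2.join();
}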
919 _Tp exchange(_Tp __d, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
920 {return __c11_atomic_exchange(&__a_, __d, __m);}
922 _Tp exchange(_Tp __d, memory_order __m = memory_order_seq_cst) _NOEXCEPT
923 {return __c11_atomic_exchange(&__a_, __d, __m);}
946 memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
947 {return __c11_atomic_compare_exchange_weak(&__a_, &__e, __d, __m, __m);}
950 memory_order __m = memory_order_seq_cst) _NOEXCEPT
951 {return __c11_atomic_compare_exchange_weak(&__a_, &__e, __d, __m, __m);}
954 memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
955 {return __c11_atomic_compare_exchange_strong(&__a_, &__e, __d, __m, __m);}
958 memory_order __m = memory_order_seq_cst) _NOEXCEPT
959 {return __c11_atomic_compare_exchange_strong(&__a_, &__e, __d, __m, __m);}
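
Note that these single-order overloads pass __m as both the success and the failure ordering to the builtin; the two-order overloads are the ones checked by _LIBCPP_CHECK_EXCHANGE_MEMORY_ORDER(__m, __f) above. A sketch of a typical compare_exchange_weak retry loop (names are illustrative):

#include <atomic>

std::atomic<int> counter{0};

// Atomically double `counter` using a CAS retry loop.
void atomic_double() {
    int expected = counter.load(std::memory_order_relaxed);
    // compare_exchange_weak may fail spuriously, so it belongs in a loop;
    // on failure it reloads the current value into `expected`.
    while (!counter.compare_exchange_weak(expected, expected * 2,
                                          std::memory_order_seq_cst))
        ;
}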
1000 _Tp fetch_add(_Tp __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
1001 {return __c11_atomic_fetch_add(&this->__a_, __op, __m);}
1003 _Tp fetch_add(_Tp __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
1004 {return __c11_atomic_fetch_add(&this->__a_, __op, __m);}
1006 _Tp fetch_sub(_Tp __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
1007 {return __c11_atomic_fetch_sub(&this->__a_, __op, __m);}
1009 _Tp fetch_sub(_Tp __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
1010 {return __c11_atomic_fetch_sub(&this->__a_, __op, __m);}
1012 _Tp fetch_and(_Tp __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
1013 {return __c11_atomic_fetch_and(&this->__a_, __op, __m);}
1015 _Tp fetch_and(_Tp __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
1016 {return __c11_atomic_fetch_and(&this->__a_, __op, __m);}
1018 _Tp fetch_or(_Tp __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
1019 {return __c11_atomic_fetch_or(&this->__a_, __op, __m);}
1021 _Tp fetch_or(_Tp __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
1022 {return __c11_atomic_fetch_or(&this->__a_, __op, __m);}
1024 _Tp fetch_xor(_Tp __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
1025 {return __c11_atomic_fetch_xor(&this->__a_, __op, __m);}
1027 _Tp fetch_xor(_Tp __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
1028 {return __c11_atomic_fetch_xor(&this->__a_, __op, __m);}
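
Each fetch_* member returns the value held before the modification. A sketch using a relaxed counter and an OR'd flag word (names and masks are illustrative):

#include <atomic>
#include <cstdint>

std::atomic<std::uint32_t> events{0};
std::atomic<std::uint32_t> flags{0};

void record(std::uint32_t flag_bit) {
    // Pure counting needs no ordering; relaxed is sufficient and cheapest.
    events.fetch_add(1, std::memory_order_relaxed);

    // fetch_or returns the previous mask, so the caller can tell
    // whether this bit was already set.
    std::uint32_t prev = flags.fetch_or(flag_bit, std::memory_order_relaxed);
    bool first_time = (prev & flag_bit) == 0;
    (void)first_time;
}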
1108 _Tp* fetch_add(ptrdiff_t __op, memory_order __m = memory_order_seq_cst)
1110 {return __c11_atomic_fetch_add(&this->__a_, __op, __m);}
1112 _Tp* fetch_add(ptrdiff_t __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
1113 {return __c11_atomic_fetch_add(&this->__a_, __op, __m);}
1115 _Tp* fetch_sub(ptrdiff_t __op, memory_order __m = memory_order_seq_cst)
1117 {return __c11_atomic_fetch_sub(&this->__a_, __op, __m);}
1119 _Tp* fetch_sub(ptrdiff_t __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
1120 {return __c11_atomic_fetch_sub(&this->__a_, __op, __m);}
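
For atomic<_Tp*> the offset is a ptrdiff_t counted in elements, exactly like ordinary pointer arithmetic. A sketch of lock-free bump claiming from a shared array (names and sizes are illustrative; no bounds check for brevity):

#include <atomic>

int buffer[1024];
std::atomic<int*> next{buffer};

// Each caller claims one slot; fetch_add returns the old pointer,
// which becomes this caller's private slot.
int* claim_slot() {
    return next.fetch_add(1, std::memory_order_relaxed);
}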
1207 atomic_store_explicit(volatile atomic<_Tp>* __o, _Tp __d, memory_order __m) _NOEXCEPT
1208 _LIBCPP_CHECK_STORE_MEMORY_ORDER(__m)
1210 __o->store(__d, __m);
1216 atomic_store_explicit(atomic<_Tp>* __o, _Tp __d, memory_order __m) _NOEXCEPT
1217 _LIBCPP_CHECK_STORE_MEMORY_ORDER(__m)
1219 __o->store(__d, __m);
1245 atomic_load_explicit(const volatile atomic<_Tp>* __o, memory_order __m) _NOEXCEPT
1246 _LIBCPP_CHECK_LOAD_MEMORY_ORDER(__m)
1248 return __o->load(__m);
1254 atomic_load_explicit(const atomic<_Tp>* __o, memory_order __m) _NOEXCEPT
1255 _LIBCPP_CHECK_LOAD_MEMORY_ORDER(__m)
1257 return __o->load(__m);
1283 atomic_exchange_explicit(volatile atomic<_Tp>* __o, _Tp __d, memory_order __m) _NOEXCEPT
1285 return __o->exchange(__d, __m);
1291 atomic_exchange_explicit(atomic<_Tp>* __o, _Tp __d, memory_order __m) _NOEXCEPT
1293 return __o->exchange(__d, __m);
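
The atomic_*_explicit free functions simply forward to the corresponding member, mirroring C11's <stdatomic.h> spelling, and the store/load forms repeat the same memory-order checks. A sketch of the same operations through this interface (values are illustrative):

#include <atomic>

std::atomic<long> shared{0};

void via_free_functions() {
    std::atomic_store_explicit(&shared, 7L, std::memory_order_release);
    long v = std::atomic_load_explicit(&shared, std::memory_order_acquire);
    long old = std::atomic_exchange_explicit(&shared, v + 1,
                                             std::memory_order_acq_rel);
    (void)old;
}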
1430 atomic_fetch_add_explicit(volatile atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
1432 return __o->fetch_add(__op, __m);
1442 atomic_fetch_add_explicit(atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
1444 return __o->fetch_add(__op, __m);
1451 memory_order __m) _NOEXCEPT
1453 return __o->fetch_add(__op, __m);
1459 atomic_fetch_add_explicit(atomic<_Tp*>* __o, ptrdiff_t __op, memory_order __m) _NOEXCEPT
1461 return __o->fetch_add(__op, __m);
1515 atomic_fetch_sub_explicit(volatile atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
1517 return __o->fetch_sub(__op, __m);
1527 atomic_fetch_sub_explicit(atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
1529 return __o->fetch_sub(__op, __m);
1536 memory_order __m) _NOEXCEPT
1538 return __o->fetch_sub(__op, __m);
1544 atomic_fetch_sub_explicit(atomic<_Tp*>* __o, ptrdiff_t __op, memory_order __m) _NOEXCEPT
1546 return __o->fetch_sub(__op, __m);
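
Note the dedicated atomic<_Tp*> overloads taking ptrdiff_t alongside the integral ones. A sketch exercising both forms (names are illustrative):

#include <atomic>

std::atomic<unsigned> hits{0};
double samples[256];
std::atomic<double*> cursor{samples};

void consume_two() {
    std::atomic_fetch_add_explicit(&hits, 1u, std::memory_order_relaxed);
    // Pointer overload: advances by two doubles, returns the old cursor.
    double* mine = std::atomic_fetch_add_explicit(&cursor, 2,
                                                  std::memory_order_relaxed);
    (void)mine;
    std::atomic_fetch_sub_explicit(&hits, 1u, std::memory_order_relaxed);
}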
1584 atomic_fetch_and_explicit(volatile atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
1586 return __o->fetch_and(__op, __m);
1596 atomic_fetch_and_explicit(atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
1598 return __o->fetch_and(__op, __m);
1636 atomic_fetch_or_explicit(volatile atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
1638 return __o->fetch_or(__op, __m);
1648 atomic_fetch_or_explicit(atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
1650 return __o->fetch_or(__op, __m);
1688 atomic_fetch_xor_explicit(volatile atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
1690 return __o->fetch_xor(__op, __m);
1700 atomic_fetch_xor_explicit(atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
1702 return __o->fetch_xor(__op, __m);
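
A sketch of bit manipulation through the and/or/xor free functions (mask values are illustrative):

#include <atomic>
#include <cstdint>

std::atomic<std::uint8_t> state{0};

void adjust_bits() {
    std::atomic_fetch_or_explicit(&state, std::uint8_t{0x01},
                                  std::memory_order_relaxed);  // set bit 0
    std::atomic_fetch_xor_explicit(&state, std::uint8_t{0x02},
                                   std::memory_order_relaxed); // flip bit 1
    std::atomic_fetch_and_explicit(&state, std::uint8_t{0x03},
                                   std::memory_order_relaxed); // clear all other bits
}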
1712 bool test_and_set(memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
1713 {return __c11_atomic_exchange(&__a_, true, __m);}
1715 bool test_and_set(memory_order __m = memory_order_seq_cst) _NOEXCEPT
1716 {return __c11_atomic_exchange(&__a_, true, __m);}
1718 void clear(memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
1719 {__c11_atomic_store(&__a_, false, __m);}
1721 void clear(memory_order __m = memory_order_seq_cst) _NOEXCEPT
1722 {__c11_atomic_store(&__a_, false, __m);}
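
As the bodies show, test_and_set is an exchange with true and clear is a store of false, reusing the same builtins as atomic<bool>. The canonical use of atomic_flag is a spinlock; a minimal sketch:

#include <atomic>

std::atomic_flag lock_flag = ATOMIC_FLAG_INIT;

void lock() {
    // Spin until we observe the flag clear; acquire pairs with the
    // release in unlock() so the critical section is properly ordered.
    while (lock_flag.test_and_set(std::memory_order_acquire))
        ;
}

void unlock() {
    lock_flag.clear(std::memory_order_release);
}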
1762 atomic_flag_test_and_set_explicit(volatile atomic_flag* __o, memory_order __m) _NOEXCEPT
1764 return __o->test_and_set(__m);
1769 atomic_flag_test_and_set_explicit(atomic_flag* __o, memory_order __m) _NOEXCEPT
1771 return __o->test_and_set(__m);
1790 atomic_flag_clear_explicit(volatile atomic_flag* __o, memory_order __m) _NOEXCEPT
1792 __o->clear(__m);
1797 atomic_flag_clear_explicit(atomic_flag* __o, memory_order __m) _NOEXCEPT
1799 __o->clear(__m);
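
The same spinlock written against the C-style free functions, which forward to the members above:

#include <atomic>

std::atomic_flag guard = ATOMIC_FLAG_INIT;

void lock_c_style() {
    while (std::atomic_flag_test_and_set_explicit(&guard,
                                                  std::memory_order_acquire))
        ;
}

void unlock_c_style() {
    std::atomic_flag_clear_explicit(&guard, std::memory_order_release);
}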
1806 atomic_thread_fence(memory_order __m) _NOEXCEPT
1808 __c11_atomic_thread_fence(__m);
1813 atomic_signal_fence(memory_order __m) _NOEXCEPT
1815 __c11_atomic_signal_fence(__m);
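
atomic_thread_fence orders surrounding atomic operations with respect to other threads; atomic_signal_fence provides the same ordering only against a signal handler on the same thread, constraining the compiler without emitting a hardware fence. A sketch of a fence-based publish using otherwise relaxed operations (names are illustrative):

#include <atomic>

int data = 0;
std::atomic<bool> flag{false};

void publish() {
    data = 1;
    std::atomic_thread_fence(std::memory_order_release); // orders `data` before the store below
    flag.store(true, std::memory_order_relaxed);
}

void observe() {
    if (flag.load(std::memory_order_relaxed)) {
        std::atomic_thread_fence(std::memory_order_acquire); // pairs with the release fence
        // `data == 1` is guaranteed visible from this point on.
    }
}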