/external/libcxx/test/std/atomics/atomics.types.operations/atomics.types.operations.req/
  atomic_fetch_add.pass.cpp
    44  assert(std::atomic_fetch_add(&t, T(2)) == T(1));  in operator()()
    51  assert(std::atomic_fetch_add(&t, T(2)) == T(1));  in operator()()
    65  assert(std::atomic_fetch_add(&t, 2) == T(1*sizeof(X)));  in testp()
    73  assert(std::atomic_fetch_add(&t, 2) == T(1*sizeof(X)));  in testp()

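The assertions above rely on two properties of std::atomic_fetch_add: it returns the value held before the addition, and the atomic<T*> overload advances the pointer by whole elements. A minimal, self-contained sketch of both (not taken from the test itself):

    #include <atomic>
    #include <cassert>

    int main() {
        std::atomic<int> counter(1);
        // Returns the old value (1); counter now holds 3.
        assert(std::atomic_fetch_add(&counter, 2) == 1);
        assert(counter.load() == 3);

        int buf[4] = {0, 1, 2, 3};
        std::atomic<int*> p(buf);
        // Returns the old pointer; p now points 2 elements (2 * sizeof(int) bytes) further.
        assert(std::atomic_fetch_add(&p, 2) == buf);
        assert(p.load() == buf + 2);
    }
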
/external/compiler-rt/lib/tsan/rtl/
  tsan_suppressions.cc
    107  atomic_fetch_add(&(*sp)->hit_count, 1, memory_order_relaxed);  in IsSuppressed()
    145  atomic_fetch_add(&s->hit_count, 1, memory_order_relaxed);  in IsSuppressed()

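Both hits are plain statistics counters: only atomicity is required, not ordering, so memory_order_relaxed is enough. A minimal analogue using std::atomic rather than the sanitizer-internal atomics (Suppression and RecordHit are illustrative names):

    #include <atomic>

    struct Suppression {
        std::atomic<unsigned> hit_count{0};
    };

    void RecordHit(Suppression* s) {
        // Nothing is published through this counter, so relaxed ordering suffices.
        s->hit_count.fetch_add(1, std::memory_order_relaxed);
    }
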
  tsan_interface_ann.cc
    134  atomic_fetch_add(&race->hitcount, 1, memory_order_relaxed);  in CheckContains()
    173  atomic_fetch_add(&(race0->*counter), cnt, memory_order_relaxed);  in CollectMatchedBenignRaces()

  tsan_mutex.cc
    262  uptr prev = atomic_fetch_add(&state_, kReadLock, memory_order_acquire);  in ReadLock()

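Here the fetch_add is the reader-side fast path of a reader/writer spin mutex: the state word packs a writer bit and a reader count, the add registers this reader, and the returned previous value reveals whether a writer currently holds the lock. A hedged sketch of the idiom (constants and layout are illustrative, not tsan's exact ones):

    #include <atomic>
    #include <cstdint>

    class RWSpinLock {
        static constexpr uint64_t kWriteLock = 1;       // low bit: a writer holds the lock
        static constexpr uint64_t kReadLock  = 1 << 1;  // each reader adds this much
        std::atomic<uint64_t> state_{0};

    public:
        void ReadLock() {
            // acquire: pairs with the writer's release so this reader sees its writes.
            uint64_t prev = state_.fetch_add(kReadLock, std::memory_order_acquire);
            if ((prev & kWriteLock) == 0)
                return;  // fast path: no writer, the read lock is already held
            // slow path: the reader is already registered; wait for the writer to leave.
            while (state_.load(std::memory_order_acquire) & kWriteLock) {
                // spin
            }
        }

        void ReadUnlock() {
            state_.fetch_sub(kReadLock, std::memory_order_release);
        }
    };
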
  tsan_sync.cc
    241  const u64 uid = atomic_fetch_add(&uid_gen_, 1, memory_order_relaxed);  in GetAndLock()

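The same call also works as a lock-free ID generator: every caller gets a distinct previous value of a shared counter, and no ordering beyond atomicity is needed. A minimal sketch (uid_gen and NextUid are illustrative names):

    #include <atomic>
    #include <cstdint>

    static std::atomic<uint64_t> uid_gen{0};

    uint64_t NextUid() {
        // Each caller receives a unique value: the counter as it was before its own increment.
        return uid_gen.fetch_add(1, std::memory_order_relaxed);
    }
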
  tsan_rtl_report.cc
    545  atomic_fetch_add(&s->supp->hit_count, 1, memory_order_relaxed);  in IsFiredSuppression()
    561  atomic_fetch_add(&s->supp->hit_count, 1, memory_order_relaxed);  in IsFiredSuppression()

  tsan_fd.cc
    59  atomic_fetch_add(&s->rc, 1, memory_order_relaxed);  in ref()

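This is the reference-counting idiom: taking a new reference only needs a relaxed increment, while the matching decrement is what has to order the final destruction. A hedged sketch (Shared, ref and unref are illustrative, not tsan_fd's actual types):

    #include <atomic>

    struct Shared {
        std::atomic<long> rc{1};
    };

    void ref(Shared* s) {
        s->rc.fetch_add(1, std::memory_order_relaxed);
    }

    bool unref(Shared* s) {
        // acq_rel so the thread dropping the last reference sees all prior writes.
        return s->rc.fetch_sub(1, std::memory_order_acq_rel) == 1;  // true: caller may destroy
    }
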
/external/compiler-rt/lib/sanitizer_common/
  sanitizer_atomic_msvc.h
    115  INLINE u32 atomic_fetch_add(volatile atomic_uint32_t *a,  in atomic_fetch_add() function
    123  INLINE uptr atomic_fetch_add(volatile atomic_uintptr_t *a,  in atomic_fetch_add() function

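These are the MSVC implementations of the sanitizer's own atomic_fetch_add. A hedged sketch of the underlying mapping: the _InterlockedExchangeAdd intrinsic already has fetch-add semantics, returning the value the addend held before the addition (the wrapper shown here is illustrative; the real header works on its own atomic_uint32_t/atomic_uintptr_t types):

    #ifdef _MSC_VER
    #include <intrin.h>

    inline unsigned int fetch_add_u32(volatile long* addend, long value) {
        // Returns the previous value of *addend, like fetch_add.
        return static_cast<unsigned int>(_InterlockedExchangeAdd(addend, value));
    }
    #endif
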
  sanitizer_termination.cc
    73  if (atomic_fetch_add(&num_calls, 1, memory_order_relaxed) > 10) {  in CheckFailed()

  sanitizer_atomic_clang.h
    47  INLINE typename T::Type atomic_fetch_add(volatile T *a,  in atomic_fetch_add() function

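The Clang/GCC counterpart can lean on a compiler builtin that already implements fetch-add with an explicit memory order. A hedged, non-templated sketch of that mapping (the real header is templated over the sanitizer's own atomic types and memory orders):

    #include <cstdint>

    inline uint32_t fetch_add_u32(uint32_t* a, uint32_t v) {
        // __atomic_fetch_add returns the old value and takes the order explicitly.
        return __atomic_fetch_add(a, v, __ATOMIC_RELAXED);
    }
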
  sanitizer_mutex.h
    118  u32 prev = atomic_fetch_add(&state_, kReadLock, memory_order_acquire);  in ReadLock()

  sanitizer_coverage_libcdep.cc
    431  uptr counter = atomic_fetch_add(&coverage_counter, 1, memory_order_relaxed);  in Add()
    451  uptr idx = atomic_fetch_add(&cc_array_index, 1, memory_order_relaxed);  in IndirCall()
    462  atomic_fetch_add(&caller_callee_counter, 1, memory_order_relaxed);  in IndirCall()
    878  if (atomic_fetch_add(&dump_once_guard, 1, memory_order_relaxed))  in DumpAll()

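The DumpAll() use at line 878 is the once-guard idiom: the first caller sees the previous value 0 and proceeds, every later caller sees a nonzero value and returns early. A minimal sketch (names are illustrative):

    #include <atomic>

    static std::atomic<unsigned> dump_once_guard{0};

    void DumpAllOnce() {
        if (dump_once_guard.fetch_add(1, std::memory_order_relaxed))
            return;  // someone already ran (or is running) the dump
        // ... perform the one-time dump ...
    }
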
  sanitizer_tls_get_addr.cc
    60  atomic_fetch_add(&number_of_live_dtls, 1, memory_order_relaxed);  in DTLS_Resize()

  sanitizer_stackdepot.cc
    97  atomic_fetch_add(&node_->hash_and_use_count, 1, memory_order_relaxed) &  in inc_use_count_unsafe()

  sanitizer_stackdepotbase.h
    119  u32 id = atomic_fetch_add(&seq[part], 1, memory_order_relaxed) + 1;  in Put()

  sanitizer_common.cc
    286  atomic_fetch_add(&g_total_mmaped, size, memory_order_relaxed) + size;  in IncreaseTotalMmap()

/external/compiler-rt/lib/dfsan/
  dfsan.cc
    172  atomic_fetch_add(&__dfsan_last_label, 1, memory_order_relaxed) + 1;  in __dfsan_union()
    235  atomic_fetch_add(&__dfsan_last_label, 1, memory_order_relaxed) + 1;  in dfsan_create_label()

/external/libcxx/include/
  atomic
     371  atomic_fetch_add(volatile atomic<Integral>* obj, Integral op) noexcept;
     375  atomic_fetch_add(atomic<Integral>* obj, Integral op) noexcept;
     452  atomic_fetch_add(volatile atomic<T*>* obj, ptrdiff_t op) noexcept;
     456  atomic_fetch_add(atomic<T*>* obj, ptrdiff_t op) noexcept;
     779  // atomic_fetch_add. Force a failure rather than creating bad behavior.
    1392  // atomic_fetch_add
    1401  atomic_fetch_add(volatile atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
    1413  atomic_fetch_add(atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
    1421  atomic_fetch_add(volatile atomic<_Tp*>* __o, ptrdiff_t __op) _NOEXCEPT
    1429  atomic_fetch_add(atomic<_Tp*>* __o, ptrdiff_t __op) _NOEXCEPT

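The four non-member overloads (integral and pointer, volatile and non-volatile) are thin wrappers: they forward to the member fetch_add, which defaults to memory_order_seq_cst and, for atomic<T*>, does pointer arithmetic in elements. A hedged sketch of that forwarding (simplified; the real header also constrains the integral overloads and provides the volatile variants):

    #include <atomic>
    #include <cstddef>

    template <class T>
    T fetch_add_like(std::atomic<T>* obj, T op) {
        return obj->fetch_add(op);  // seq_cst by default; returns the old value
    }

    template <class T>
    T* fetch_add_like(std::atomic<T*>* obj, std::ptrdiff_t op) {
        return obj->fetch_add(op);  // advances by op elements of T
    }
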
/external/compiler-rt/lib/tsan/dd/
  dd_rtl.cc
    102  uptr id = atomic_fetch_add(&id_gen, 1, memory_order_relaxed);  in ThreadInit()

/external/clang/lib/Headers/
  stdatomic.h
    146  #define atomic_fetch_add(object, operand) __c11_atomic_fetch_add(object, operand, __ATOMIC_SEQ_CST)  macro

/external/jemalloc/include/jemalloc/internal/
  atomic.h
    133  return (atomic_fetch_add(a, x) + x);  in atomic_add_uint64()
    357  return (atomic_fetch_add(a, x) + x);  in atomic_add_uint32()

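jemalloc's atomic_add_* wrappers return the new value, so they add x back onto the old value that atomic_fetch_add hands back. A hedged sketch of the same pattern with std::atomic (jemalloc itself defines its own atomic types and fetch_add variants):

    #include <atomic>
    #include <cstdint>

    uint64_t atomic_add_uint64(std::atomic<uint64_t>* a, uint64_t x) {
        // old value + x == the value stored immediately after this addition
        return a->fetch_add(x) + x;
    }
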
/external/clang/test/Sema/
  atomic-ops.c
    244  atomic_fetch_add(&k, n);  // expected-error {{must be a pointer to _Atomic}}  in f()
    245  k = atomic_fetch_add(&n, k);  in f()

/external/compiler-rt/lib/tsan/tests/rtl/
  tsan_test_util_posix.cc
    126  uintptr_t addr = atomic_fetch_add(&uniq, size, memory_order_relaxed);  in allocate_addr()

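allocate_addr() is a tiny lock-free bump allocator: a shared cursor is advanced by size, and the previous value is the start of the region handed to this caller. A hedged sketch (the base address is illustrative; the real helper also maps the memory it hands out):

    #include <atomic>
    #include <cstddef>
    #include <cstdint>

    static std::atomic<uintptr_t> uniq{0x10000000u};  // illustrative base address

    uintptr_t allocate_addr(std::size_t size) {
        // The old cursor value is this caller's region; the cursor moves past it.
        return uniq.fetch_add(size, std::memory_order_relaxed);
    }
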
/external/compiler-rt/lib/asan/
  asan_rtl.cc
    44  if (atomic_fetch_add(&num_calls, 1, memory_order_relaxed) != 0) {  in AsanDie()

/external/v8/src/wasm/
  wasm-interpreter.cc
    1541  ATOMIC_BINOP_CASE(I32AtomicAdd, uint32_t, uint32_t, atomic_fetch_add);  in ExecuteAtomicOp()
    1542  ATOMIC_BINOP_CASE(I32AtomicAdd8U, uint8_t, uint32_t, atomic_fetch_add);  in ExecuteAtomicOp()
    1543  ATOMIC_BINOP_CASE(I32AtomicAdd16U, uint16_t, uint32_t, atomic_fetch_add);  in ExecuteAtomicOp()
    1561  ATOMIC_BINOP_CASE(I64AtomicAdd, uint64_t, uint64_t, atomic_fetch_add);  in ExecuteAtomicOp()
    1562  ATOMIC_BINOP_CASE(I64AtomicAdd8U, uint8_t, uint64_t, atomic_fetch_add);  in ExecuteAtomicOp()
    1563  ATOMIC_BINOP_CASE(I64AtomicAdd16U, uint16_t, uint64_t, atomic_fetch_add);  in ExecuteAtomicOp()
    1564  ATOMIC_BINOP_CASE(I64AtomicAdd32U, uint32_t, uint64_t, atomic_fetch_add);  in ExecuteAtomicOp()

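Each ATOMIC_BINOP_CASE line pairs a wasm opcode with a memory type and a result type: the add is performed atomically at the memory width (uint8_t, uint16_t, ...) and the old value is zero-extended into the wasm result type. A hedged sketch of that shape (illustrative only; the real interpreter operates on raw linear memory through its own macro machinery):

    #include <atomic>
    #include <cstdint>

    // Assumes std::atomic<MemT> is layout-compatible with MemT, as the
    // interpreter does when it treats linear memory as atomic cells.
    template <typename MemT, typename ResultT>
    ResultT AtomicAdd(MemT* addr, ResultT operand) {
        auto* cell = reinterpret_cast<std::atomic<MemT>*>(addr);
        MemT old = cell->fetch_add(static_cast<MemT>(operand), std::memory_order_seq_cst);
        return static_cast<ResultT>(old);  // e.g. I32AtomicAdd8U: 8-bit add, 32-bit result
    }
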