/external/compiler-rt/lib/sanitizer_common/tests/ |
D | sanitizer_atomic_test.cc |
  55 CheckStoreLoad<atomic_uint8_t, memory_order_relaxed, memory_order_relaxed>(); in TEST()
  56 CheckStoreLoad<atomic_uint8_t, memory_order_consume, memory_order_relaxed>(); in TEST()
  57 CheckStoreLoad<atomic_uint8_t, memory_order_acquire, memory_order_relaxed>(); in TEST()
  58 CheckStoreLoad<atomic_uint8_t, memory_order_relaxed, memory_order_release>(); in TEST()
  61 CheckStoreLoad<atomic_uint16_t, memory_order_relaxed, memory_order_relaxed>(); in TEST()
  62 CheckStoreLoad<atomic_uint16_t, memory_order_consume, memory_order_relaxed>(); in TEST()
  63 CheckStoreLoad<atomic_uint16_t, memory_order_acquire, memory_order_relaxed>(); in TEST()
  64 CheckStoreLoad<atomic_uint16_t, memory_order_relaxed, memory_order_release>(); in TEST()
  67 CheckStoreLoad<atomic_uint32_t, memory_order_relaxed, memory_order_relaxed>(); in TEST()
  68 CheckStoreLoad<atomic_uint32_t, memory_order_consume, memory_order_relaxed>(); in TEST()
  [all …]
|
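The sanitizer_atomic_test.cc hits above instantiate a CheckStoreLoad helper for each integer width across a matrix of load/store orders. A minimal sketch of what such a helper can look like, written against std::atomic rather than the sanitizer's own atomic_uintNN_t wrappers; the loop bound and the particular instantiations below are illustrative, not the real test:

    #include <atomic>
    #include <cassert>
    #include <cstdint>

    // Round-trips a handful of values through one atomic object, storing with
    // StoreOrder and loading with LoadOrder; single-threaded, so every valid
    // combination must observe the value just stored.
    template <typename T, std::memory_order LoadOrder, std::memory_order StoreOrder>
    void CheckStoreLoad() {
      std::atomic<T> a{0};
      for (T v = 0; v < 16; ++v) {
        a.store(v, StoreOrder);
        assert(a.load(LoadOrder) == v);
      }
    }

    int main() {
      CheckStoreLoad<std::uint8_t, std::memory_order_relaxed, std::memory_order_relaxed>();
      CheckStoreLoad<std::uint8_t, std::memory_order_acquire, std::memory_order_relaxed>();
      CheckStoreLoad<std::uint8_t, std::memory_order_relaxed, std::memory_order_release>();
      CheckStoreLoad<std::uint16_t, std::memory_order_consume, std::memory_order_relaxed>();
      CheckStoreLoad<std::uint32_t, std::memory_order_seq_cst, std::memory_order_seq_cst>();
    }

Because the check is single-threaded, the order arguments cannot change the observed values; the point of the matrix is that each combination compiles and behaves identically under the sanitizer's atomic wrappers.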
/external/clang/test/Sema/ |
D | atomic-ops.c |
  102 __c11_atomic_store(i, 0, memory_order_relaxed); in f()
  103 …__c11_atomic_store(ci, 0, memory_order_relaxed); // expected-error {{address argument to atomic op… in f()
  110 int load_n_1 = __atomic_load_n(I, memory_order_relaxed); in f()
  111 int *load_n_2 = __atomic_load_n(P, memory_order_relaxed); in f()
  112 …float load_n_3 = __atomic_load_n(D, memory_order_relaxed); // expected-error {{must be a pointer t… in f()
  113 …__atomic_load_n(s1, memory_order_relaxed); // expected-error {{must be a pointer to integer or poi… in f()
  114 load_n_1 = __atomic_load_n(CI, memory_order_relaxed); in f()
  116 …__atomic_load(i, I, memory_order_relaxed); // expected-error {{must be a pointer to a trivially-co… in f()
  117 __atomic_load(CI, I, memory_order_relaxed); in f()
  119 …__atomic_load(I, i, memory_order_relaxed); // expected-warning {{passing '_Atomic(int) *' to param… in f()
  [all …]
|
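The Sema test above exercises clang's diagnostics for the __c11_atomic_* and __atomic_* builtins: wrong address types, operands that are neither integers nor pointers, and _Atomic mismatches. For contrast, a sketch of well-formed uses of the same builtins with a relaxed order; these are GCC/Clang extensions accepted in both C and C++, and the variable names here are illustrative:

    // Builds with clang or gcc; __ATOMIC_RELAXED and the __atomic_* builtins
    // are compiler extensions, not standard C++.
    #include <cassert>

    int main() {
      int i = 0;
      int *p = &i;

      __atomic_store_n(&i, 42, __ATOMIC_RELAXED);      // integer store
      int v = __atomic_load_n(&i, __ATOMIC_RELAXED);   // integer load
      int *q = __atomic_load_n(&p, __ATOMIC_RELAXED);  // pointer loads are also allowed

      int out;
      __atomic_load(&i, &out, __ATOMIC_RELAXED);       // generic form: result written through a pointer

      assert(v == 42 && q == &i && out == 42);
    }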
/external/libchrome/base/ |
D | atomicops_internals_portable.h |
  71 std::memory_order_relaxed, in NoBarrier_CompareAndSwap()
  72 std::memory_order_relaxed); in NoBarrier_CompareAndSwap()
  79 ->exchange(new_value, std::memory_order_relaxed); in NoBarrier_AtomicExchange()
  86 ->fetch_add(increment, std::memory_order_relaxed); in NoBarrier_AtomicIncrement()
  112 std::memory_order_relaxed); in Release_CompareAndSwap()
  117 ((AtomicLocation32)ptr)->store(value, std::memory_order_relaxed); in NoBarrier_Store()
  121 ((AtomicLocation32)ptr)->store(value, std::memory_order_relaxed); in Acquire_Store()
  130 return ((AtomicLocation32)ptr)->load(std::memory_order_relaxed); in NoBarrier_Load()
  139 return ((AtomicLocation32)ptr)->load(std::memory_order_relaxed); in Release_Load()
  154 std::memory_order_relaxed, in NoBarrier_CompareAndSwap()
  [all …]
|
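atomicops_internals_portable.h (and the identical protobuf copy below) layers the legacy Atomic32 API over std::atomic: the raw pointer is cast to an atomic location and every NoBarrier_* entry point passes std::memory_order_relaxed. A simplified sketch of that shape, assuming Atomic32 is a plain 32-bit integer and leaving out the Acquire_/Release_ variants:

    #include <atomic>
    #include <cstdint>

    using Atomic32 = std::int32_t;
    using AtomicLocation32 = volatile std::atomic<Atomic32>*;

    // Compare-and-swap with no ordering guarantees: both the success and the
    // failure order are relaxed, and the value observed in memory is returned.
    inline Atomic32 NoBarrier_CompareAndSwap(volatile Atomic32* ptr,
                                             Atomic32 old_value,
                                             Atomic32 new_value) {
      reinterpret_cast<AtomicLocation32>(ptr)->compare_exchange_strong(
          old_value, new_value,
          std::memory_order_relaxed,
          std::memory_order_relaxed);
      return old_value;  // compare_exchange_strong leaves the observed value here
    }

    inline Atomic32 NoBarrier_AtomicExchange(volatile Atomic32* ptr,
                                             Atomic32 new_value) {
      return reinterpret_cast<AtomicLocation32>(ptr)->exchange(
          new_value, std::memory_order_relaxed);
    }

    inline Atomic32 NoBarrier_AtomicIncrement(volatile Atomic32* ptr,
                                              Atomic32 increment) {
      return increment + reinterpret_cast<AtomicLocation32>(ptr)->fetch_add(
                             increment, std::memory_order_relaxed);
    }

    inline void NoBarrier_Store(volatile Atomic32* ptr, Atomic32 value) {
      reinterpret_cast<AtomicLocation32>(ptr)->store(value, std::memory_order_relaxed);
    }

    inline Atomic32 NoBarrier_Load(volatile Atomic32* ptr) {
      return reinterpret_cast<AtomicLocation32>(ptr)->load(std::memory_order_relaxed);
    }

Reinterpreting a raw pointer as a std::atomic location is not blessed by the standard; it is the pragmatic bridge this compatibility layer relies on, which is why the sketch mirrors it rather than recommending it.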
/external/protobuf/src/google/protobuf/stubs/ |
D | atomicops_internals_pnacl.h |
  71 std::memory_order_relaxed, in NoBarrier_CompareAndSwap()
  72 std::memory_order_relaxed); in NoBarrier_CompareAndSwap()
  79 ->exchange(new_value, std::memory_order_relaxed); in NoBarrier_AtomicExchange()
  86 ->fetch_add(increment, std::memory_order_relaxed); in NoBarrier_AtomicIncrement()
  112 std::memory_order_relaxed); in Release_CompareAndSwap()
  117 ((AtomicLocation32)ptr)->store(value, std::memory_order_relaxed); in NoBarrier_Store()
  121 ((AtomicLocation32)ptr)->store(value, std::memory_order_relaxed); in Acquire_Store()
  130 return ((AtomicLocation32)ptr)->load(std::memory_order_relaxed); in NoBarrier_Load()
  139 return ((AtomicLocation32)ptr)->load(std::memory_order_relaxed); in Release_Load()
  154 std::memory_order_relaxed, in NoBarrier_CompareAndSwap()
  [all …]
|
/external/eigen/unsupported/Eigen/CXX11/src/ThreadPool/ |
D | RunQueue.h |
  47 array_[i].state.store(kEmpty, std::memory_order_relaxed); in RunQueue()
  55 unsigned front = front_.load(std::memory_order_relaxed); in PushFront()
  57 uint8_t s = e->state.load(std::memory_order_relaxed); in PushFront()
  61 front_.store(front + 1 + (kSize << 1), std::memory_order_relaxed); in PushFront()
  70 unsigned front = front_.load(std::memory_order_relaxed); in PopFront()
  72 uint8_t s = e->state.load(std::memory_order_relaxed); in PopFront()
  79 front_.store(front, std::memory_order_relaxed); in PopFront()
  87 unsigned back = back_.load(std::memory_order_relaxed); in PushBack()
  89 uint8_t s = e->state.load(std::memory_order_relaxed); in PushBack()
  94 back_.store(back, std::memory_order_relaxed); in PushBack()
  [all …]
|
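RunQueue.h is Eigen's per-thread work queue: each slot carries a small state byte, the handoff of a work item is ordered through that byte, and the front_/back_ indices that only the owning side writes are read and advanced with relaxed operations. The real queue also supports taking work from the opposite end under a mutex; the sketch below keeps only the single-producer/single-consumer slot-state idea and is not Eigen's algorithm:

    #include <array>
    #include <atomic>
    #include <cstdint>
    #include <utility>

    // Fixed-size single-producer/single-consumer ring. Each slot carries a
    // state byte: the producer publishes a filled slot with a release store,
    // the consumer claims it with an acquire load and hands the empty slot
    // back the same way. The indices are written only by their owning thread,
    // so they are kept with relaxed accesses.
    template <typename Work, unsigned kSize>
    class SpscRing {
      static constexpr uint8_t kEmpty = 0;
      static constexpr uint8_t kReady = 1;
      struct Elem {
        std::atomic<uint8_t> state{kEmpty};
        Work w;
      };
      std::array<Elem, kSize> array_;
      std::atomic<unsigned> back_{0};   // producer-owned
      std::atomic<unsigned> front_{0};  // consumer-owned

     public:
      bool PushBack(Work w) {
        unsigned back = back_.load(std::memory_order_relaxed);
        Elem* e = &array_[back % kSize];
        if (e->state.load(std::memory_order_acquire) != kEmpty)
          return false;                                      // ring is full
        e->w = std::move(w);
        e->state.store(kReady, std::memory_order_release);   // publish the item
        back_.store(back + 1, std::memory_order_relaxed);
        return true;
      }

      bool PopFront(Work* out) {
        unsigned front = front_.load(std::memory_order_relaxed);
        Elem* e = &array_[front % kSize];
        if (e->state.load(std::memory_order_acquire) != kReady)
          return false;                                      // nothing published yet
        *out = std::move(e->w);
        e->state.store(kEmpty, std::memory_order_release);   // return the slot
        front_.store(front + 1, std::memory_order_relaxed);
        return true;
      }
    };

Eigen's real queue packs extra bookkeeping into the indices (note the kSize << 1 in the PushFront hit) and handles contention from stealing threads; only the relaxed treatment of owner-side indices carries over to this sketch.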
D | EventCount.h |
  68 w->epoch = state_.fetch_add(kWaiterInc, std::memory_order_relaxed); in Prewait()
  95 w->next.store(nullptr, std::memory_order_relaxed); in CommitWait()
  97 w->next.store(&waiters_[state & kStackMask], std::memory_order_relaxed); in CommitWait()
  110 uint64_t state = state_.load(std::memory_order_relaxed); in CancelWait()
  116 state = state_.load(std::memory_order_relaxed); in CancelWait()
  124 std::memory_order_relaxed)) in CancelWait()
  149 Waiter* wnext = w->next.load(std::memory_order_relaxed); in Notify()
  163 if (!all) w->next.store(nullptr, std::memory_order_relaxed); in Notify()
  215 next = w->next.load(std::memory_order_relaxed); in Unpark()
|
/external/clang/test/Analysis/ |
D | atomics.c |
  9 memory_order_relaxed = __ATOMIC_RELAXED, enumerator
  27 …ult = __c11_atomic_fetch_add((volatile _Atomic(uint32_t) *)&s->refCount,- 1, memory_order_relaxed); in test_atomic_fetch_add()
  40 …t32_t result = __c11_atomic_load((volatile _Atomic(uint32_t) *)&s->refCount, memory_order_relaxed); in test_atomic_load()
  52 __c11_atomic_store((volatile _Atomic(uint32_t) *)&s->refCount, 2, memory_order_relaxed); in test_atomic_store()
  62 …esult = __c11_atomic_exchange((volatile _Atomic(uint32_t) *)&s->refCount, 2, memory_order_relaxed); in test_atomic_exchange()
  77 … _Atomic(uint32_t) *)&s->refCount, &expected, desired, memory_order_relaxed, memory_order_relaxed); in test_atomic_compare_exchange_strong()
  89 … _Atomic(uint32_t) *)&s->refCount, &expected, desired, memory_order_relaxed, memory_order_relaxed); in test_atomic_compare_exchange_weak()
|
/external/skia/include/private/ |
D | SkWeakRefCnt.h |
  66 fWeakCnt.store(0, std::memory_order_relaxed); in ~SkWeakRefCnt()
  73 return fWeakCnt.load(std::memory_order_relaxed); in getWeakCnt()
  87 int32_t prev = fRefCnt.load(std::memory_order_relaxed); in atomic_conditional_acquire_strong_ref()
  93 std::memory_order_relaxed)); in atomic_conditional_acquire_strong_ref()
  121 (void)fWeakCnt.fetch_add(+1, std::memory_order_relaxed); in weak_ref()
  137 fWeakCnt.store(1, std::memory_order_relaxed); in weak_unref()
  147 return fRefCnt.load(std::memory_order_relaxed) == 0; in weak_expired()
|
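SkWeakRefCnt's atomic_conditional_acquire_strong_ref() is the try-to-promote idiom: read the strong count with a relaxed load, refuse if it is already zero, otherwise CAS it upward, with a relaxed failure order because a failed attempt publishes nothing. A generic sketch of that idiom; the class and member names below are illustrative, not Skia's:

    #include <atomic>
    #include <cstdint>

    class RefCounted {
      mutable std::atomic<int32_t> fRefCnt{1};

     public:
      // Promote a weak reference to a strong one: bump the strong count only
      // if it has not already dropped to zero.
      bool try_ref() const {
        int32_t prev = fRefCnt.load(std::memory_order_relaxed);
        do {
          if (prev == 0)
            return false;  // object is already being destroyed
        } while (!fRefCnt.compare_exchange_weak(prev, prev + 1,
                                                std::memory_order_relaxed,
                                                std::memory_order_relaxed));
        return true;
      }
    };

Plain increments of a reference count can generally stay relaxed; it is the final decrement before destruction that needs acquire/release ordering so the deleting thread sees every write made while the object was alive.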
/external/llvm/include/llvm/ADT/ |
D | Statistic.h |
  46 unsigned getValue() const { return Value.load(std::memory_order_relaxed); } in getValue()
  65 Value.store(Val, std::memory_order_relaxed);
  70 Value.fetch_add(1, std::memory_order_relaxed);
  76 return Value.fetch_add(1, std::memory_order_relaxed);
  80 Value.fetch_sub(1, std::memory_order_relaxed);
  86 return Value.fetch_sub(1, std::memory_order_relaxed);
  92 Value.fetch_add(V, std::memory_order_relaxed);
  99 Value.fetch_sub(V, std::memory_order_relaxed);
|
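llvm::Statistic (and the llvm-subzero copy below) is a counter whose every operation is relaxed: the tallies are independent, nothing else is ordered against them, and they are only read coherently at shutdown, so the goal is simply a cheap thread-safe increment. A stripped-down counter in the same style:

    #include <atomic>

    // Statistics counter in the style of llvm::Statistic: relaxed everywhere,
    // because the value is a monotone tally and never used to publish data.
    class Counter {
      std::atomic<unsigned> Value{0};

     public:
      unsigned get() const { return Value.load(std::memory_order_relaxed); }
      Counter& operator=(unsigned V) {
        Value.store(V, std::memory_order_relaxed);
        return *this;
      }
      Counter& operator++() {        // pre-increment: no old value needed
        Value.fetch_add(1, std::memory_order_relaxed);
        return *this;
      }
      unsigned operator++(int) {     // post-increment: return the old value
        return Value.fetch_add(1, std::memory_order_relaxed);
      }
      Counter& operator+=(unsigned V) {
        Value.fetch_add(V, std::memory_order_relaxed);
        return *this;
      }
      Counter& operator-=(unsigned V) {
        Value.fetch_sub(V, std::memory_order_relaxed);
        return *this;
      }
    };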
/external/swiftshader/third_party/llvm-subzero/include/llvm/ADT/ |
D | Statistic.h |
  47 unsigned getValue() const { return Value.load(std::memory_order_relaxed); } in getValue()
  66 Value.store(Val, std::memory_order_relaxed);
  71 Value.fetch_add(1, std::memory_order_relaxed);
  77 return Value.fetch_add(1, std::memory_order_relaxed);
  81 Value.fetch_sub(1, std::memory_order_relaxed);
  87 return Value.fetch_sub(1, std::memory_order_relaxed);
  93 Value.fetch_add(V, std::memory_order_relaxed);
  100 Value.fetch_sub(V, std::memory_order_relaxed);
|
/external/libchrome/base/metrics/ |
D | persistent_memory_allocator.cc |
  52 uint32_t loaded_flags = flags->load(std::memory_order_relaxed); in CheckFlag()
  57 uint32_t loaded_flags = flags->load(std::memory_order_relaxed); in SetFlag()
  140 if (!block || block->next.load(std::memory_order_relaxed) == 0) { in Iterator()
  197 *type_return = block->type_id.load(std::memory_order_relaxed); in GetNext()
  208 allocator_->shared_meta()->freeptr.load(std::memory_order_relaxed), in GetNext()
  295 shared_meta()->freeptr.load(std::memory_order_relaxed) != 0 || in PersistentMemoryAllocator()
  296 shared_meta()->flags.load(std::memory_order_relaxed) != 0 || in PersistentMemoryAllocator()
  301 shared_meta()->queue.next.load(std::memory_order_relaxed) != 0 || in PersistentMemoryAllocator()
  304 first_block->type_id.load(std::memory_order_relaxed) != 0 || in PersistentMemoryAllocator()
  336 shared_meta()->freeptr.load(std::memory_order_relaxed) == 0 || in PersistentMemoryAllocator()
  [all …]
|
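PersistentMemoryAllocator reads header fields of a shared, possibly memory-mapped region with relaxed loads, and CheckFlag/SetFlag treat one of those fields as a bit set. A sketch of that flag pattern; the CAS loop in SetFlag is an assumption about the shape of the code, not a quote from it:

    #include <atomic>
    #include <cstdint>

    // Reading a flag bit needs no ordering: the bits are self-contained hints.
    inline bool CheckFlag(const std::atomic<uint32_t>* flags, uint32_t flag) {
      uint32_t loaded_flags = flags->load(std::memory_order_relaxed);
      return (loaded_flags & flag) != 0;
    }

    // Setting a bit uses a CAS loop so concurrent setters cannot drop each
    // other's bits; relaxed is enough because no other data hangs off the bit.
    inline void SetFlag(std::atomic<uint32_t>* flags, uint32_t flag) {
      uint32_t loaded_flags = flags->load(std::memory_order_relaxed);
      while (!flags->compare_exchange_weak(loaded_flags, loaded_flags | flag,
                                           std::memory_order_relaxed,
                                           std::memory_order_relaxed)) {
        // loaded_flags was refreshed by the failed CAS; retry with the new value.
      }
    }

A fetch_or with memory_order_relaxed would do the same job in one step; the CAS form matches the loaded_flags variable visible in the hits above.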
/external/compiler-rt/lib/sanitizer_common/ |
D | sanitizer_addrhashmap.h |
  184 if (atomic_load(&b->add, memory_order_relaxed)) { in acquire()
  186 AddBucket *add = (AddBucket*)atomic_load(&b->add, memory_order_relaxed); in acquire()
  189 uptr addr1 = atomic_load(&c->addr, memory_order_relaxed); in acquire()
  205 uptr addr1 = atomic_load(&c->addr, memory_order_relaxed); in acquire()
  217 AddBucket *add = (AddBucket*)atomic_load(&b->add, memory_order_relaxed); in acquire()
  221 uptr addr1 = atomic_load(&c->addr, memory_order_relaxed); in acquire()
  245 uptr addr1 = atomic_load(&c->addr, memory_order_relaxed); in acquire()
  260 atomic_store(&b->add, (uptr)add, memory_order_relaxed); in acquire()
  272 atomic_store(&b->add, (uptr)add1, memory_order_relaxed); in acquire()
  278 CHECK_EQ(atomic_load(&c->addr, memory_order_relaxed), 0); in acquire()
  [all …]
|
D | sanitizer_mutex.h |
  26 atomic_store(&state_, 0, memory_order_relaxed); in Init()
  44 CHECK_EQ(atomic_load(&state_, memory_order_relaxed), 1); in CheckLocked()
  56 if (atomic_load(&state_, memory_order_relaxed) == 0 in LockSlow()
  96 atomic_store(&state_, kUnlocked, memory_order_relaxed); in RWMutex()
  100 CHECK_EQ(atomic_load(&state_, memory_order_relaxed), kUnlocked); in ~RWMutex()
  132 CHECK_NE(atomic_load(&state_, memory_order_relaxed), kUnlocked); in CheckLocked()
  150 u32 cmp = atomic_load(&state_, memory_order_relaxed); in LockSlow()
|
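sanitizer_mutex.h shows where relaxed fits inside a spin lock: initialization and the debug CheckLocked assertions use relaxed accesses, and LockSlow spins on a relaxed load before retrying the locking operation itself, which needs acquire ordering paired with a release store on unlock. A test-and-test-and-set sketch of that split, using std::atomic rather than the sanitizer's wrappers:

    #include <atomic>
    #include <cassert>

    class SpinMutex {
      std::atomic<unsigned> state_{0};

     public:
      void Lock() {
        for (;;) {
          if (state_.exchange(1, std::memory_order_acquire) == 0)
            return;  // acquired: pairs with the release store in Unlock()
          while (state_.load(std::memory_order_relaxed) != 0) {
            // spin on a relaxed load instead of hammering the line with RMWs
          }
        }
      }
      void Unlock() { state_.store(0, std::memory_order_release); }
      void CheckLocked() const {
        // Debug-only sanity check; relaxed is enough because it orders nothing.
        assert(state_.load(std::memory_order_relaxed) == 1);
      }
    };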
D | sanitizer_coverage_libcdep.cc |
  61 return atomic_store(&coverage_counter, 0, memory_order_relaxed); in ResetGlobalCounters()
  62 return atomic_store(&caller_callee_counter, 0, memory_order_relaxed); in ResetGlobalCounters()
  212 atomic_store(&pc_array_index, 0, memory_order_relaxed); in Enable()
  214 atomic_store(&pc_array_size, 0, memory_order_relaxed); in Enable()
  216 atomic_store(&pc_array_size, kPcArrayMaxSize, memory_order_relaxed); in Enable()
  226 atomic_store(&cc_array_size, kCcArrayMaxSize, memory_order_relaxed); in Enable()
  227 atomic_store(&cc_array_index, 0, memory_order_relaxed); in Enable()
  280 atomic_store(&pc_array_index, 0, memory_order_relaxed); in ReinitializeGuards()
  291 uptr size = atomic_load(&pc_array_size, memory_order_relaxed); in ReInit()
  302 CHECK_EQ(atomic_load(&pc_array_index, memory_order_relaxed), 0); in ReInit()
  [all …]
|
D | sanitizer_atomic_clang_x86.h |
  30 DCHECK(mo & (memory_order_relaxed | memory_order_consume in atomic_load()
  37 if (mo == memory_order_relaxed) { in atomic_load()
  76 DCHECK(mo & (memory_order_relaxed | memory_order_release in atomic_store()
  82 if (mo == memory_order_relaxed) { in atomic_store()
|
D | sanitizer_atomic_clang_other.h |
  27 DCHECK(mo & (memory_order_relaxed | memory_order_consume in atomic_load()
  34 if (mo == memory_order_relaxed) { in atomic_load()
  64 DCHECK(mo & (memory_order_relaxed | memory_order_release in atomic_store()
  70 if (mo == memory_order_relaxed) { in atomic_store()
|
D | sanitizer_atomic.h |
  22 memory_order_relaxed = 1 << 0, enumerator
  72 return atomic_load(a, memory_order_relaxed); in atomic_load_relaxed()
  77 atomic_store(a, v, memory_order_relaxed); in atomic_store_relaxed()
|
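sanitizer_atomic.h defines memory_order_relaxed and the other orders as distinct bits (1 << 0 and so on), which is what lets the atomic_clang_* headers above validate an order argument with DCHECK(mo & (memory_order_relaxed | ...)), and it adds atomic_load_relaxed/atomic_store_relaxed shorthands so call sites that deliberately want no ordering say so by name. Equivalent shorthands over std::atomic would look like this; the names mirror the sanitizer's, the bodies are a sketch:

    #include <atomic>

    template <typename T>
    T atomic_load_relaxed(const std::atomic<T>* a) {
      return a->load(std::memory_order_relaxed);
    }

    template <typename T>
    void atomic_store_relaxed(std::atomic<T>* a, T v) {
      a->store(v, std::memory_order_relaxed);
    }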
/external/libcxx/test/std/atomics/atomics.flag/ |
D | atomic_flag_clear_explicit.pass.cpp |
  26 atomic_flag_clear_explicit(&f, std::memory_order_relaxed); in main()
  28 atomic_flag_clear_explicit(&f, std::memory_order_relaxed); in main()
  47 atomic_flag_clear_explicit(&f, std::memory_order_relaxed); in main()
  49 atomic_flag_clear_explicit(&f, std::memory_order_relaxed); in main()
|
D | clear.pass.cpp |
  33 f.clear(std::memory_order_relaxed); in main()
  35 f.clear(std::memory_order_relaxed); in main()
  61 f.clear(std::memory_order_relaxed); in main()
  63 f.clear(std::memory_order_relaxed); in main()
|
D | test_and_set.pass.cpp |
  33 assert(f.test_and_set(std::memory_order_relaxed) == 0); in main()
  34 assert(f.test_and_set(std::memory_order_relaxed) == 1); in main()
  75 assert(f.test_and_set(std::memory_order_relaxed) == 0); in main()
  76 assert(f.test_and_set(std::memory_order_relaxed) == 1); in main()
|
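The three libc++ tests above drive std::atomic_flag's clear() and test_and_set() members and their _explicit free-function forms with memory_order_relaxed. A compressed, self-contained version of what they check:

    #include <atomic>
    #include <cassert>

    int main() {
      std::atomic_flag f = ATOMIC_FLAG_INIT;

      // test_and_set returns the previous value: false the first time, true after.
      assert(f.test_and_set(std::memory_order_relaxed) == false);
      assert(f.test_and_set(std::memory_order_relaxed) == true);

      // clear() resets the flag; the _explicit free functions take it by pointer.
      f.clear(std::memory_order_relaxed);
      std::atomic_flag_test_and_set_explicit(&f, std::memory_order_relaxed);
      std::atomic_flag_clear_explicit(&f, std::memory_order_relaxed);
      assert(f.test_and_set(std::memory_order_relaxed) == false);
    }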
/external/compiler-rt/test/tsan/Darwin/ |
D | libcxx-shared-ptr-stress.mm |
  18 atomic_fetch_add_explicit(&self_counter, 1, memory_order_relaxed);
  19 atomic_fetch_add_explicit(&shared_call_counter, 1, memory_order_relaxed);
  22 atomic_fetch_add_explicit(&weak_call_counter, 1, memory_order_relaxed);
  27 atomic_fetch_add_explicit(&destructor_counter, 1, memory_order_relaxed);
  54 atomic_fetch_add_explicit(&weak_destroyed_counter, 1, memory_order_relaxed);
|
/external/libcxx/test/libcxx/atomics/ |
D | diagnose_invalid_memory_order.fail.cpp |
  31 x.load(std::memory_order_relaxed); in main()
  42 std::atomic_load_explicit(&x, std::memory_order_relaxed); in main()
  56 x.store(42, std::memory_order_relaxed); in main()
  68 std::atomic_store_explicit(&x, 42, std::memory_order_relaxed); in main()
  79 x.compare_exchange_weak(val1, val2, std::memory_order_seq_cst, std::memory_order_relaxed); in main()
  93 …pare_exchange_weak_explicit(&x, &val1, val2, std::memory_order_seq_cst, std::memory_order_relaxed); in main()
  105 x.compare_exchange_strong(val1, val2, std::memory_order_seq_cst, std::memory_order_relaxed); in main()
  119 …re_exchange_strong_explicit(&x, &val1, val2, std::memory_order_seq_cst, std::memory_order_relaxed); in main()
|
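diagnose_invalid_memory_order.fail.cpp checks that libc++ together with clang's diagnostics rejects order arguments that are invalid for the operation; the hits show the valid baseline calls next to which the invalid variants sit. The sketch below restates which orders are allowed where, keeping only well-formed code so it compiles:

    #include <atomic>

    int main() {
      std::atomic<int> x(0);
      int expected = 0;

      x.load(std::memory_order_relaxed);       // loads: relaxed, consume, acquire, seq_cst
      x.store(42, std::memory_order_relaxed);  // stores: relaxed, release, seq_cst
      x.compare_exchange_weak(expected, 1,
                              std::memory_order_seq_cst,   // success order
                              std::memory_order_relaxed);  // failure order: never release/acq_rel
      std::atomic_load_explicit(&x, std::memory_order_relaxed);
      std::atomic_store_explicit(&x, 2, std::memory_order_relaxed);
    }

The complementary invalid choices, such as a load with memory_order_release or a compare_exchange whose failure order is memory_order_acq_rel, are exactly what the .fail test expects the library to diagnose.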
/external/eigen/unsupported/test/ |
D | cxx11_eventcount.cpp |
  50 int val = val_.load(std::memory_order_relaxed); in Push()
  55 if (val_.compare_exchange_weak(val, val + 1, std::memory_order_relaxed)) in Push()
  61 int val = val_.load(std::memory_order_relaxed); in Pop()
  66 if (val_.compare_exchange_weak(val, val - 1, std::memory_order_relaxed)) in Pop()
  71 bool Empty() { return val_.load(std::memory_order_relaxed) == 0; } in Empty()
|
/external/compiler-rt/lib/tsan/rtl/ |
D | tsan_fd.cc |
  53 atomic_store(&s->rc, 1, memory_order_relaxed); in allocsync()
  58 if (s && atomic_load(&s->rc, memory_order_relaxed) != (u64)-1) in ref()
  59 atomic_fetch_add(&s->rc, 1, memory_order_relaxed); in ref()
  64 if (s && atomic_load(&s->rc, memory_order_relaxed) != (u64)-1) { in unref()
  123 atomic_store(&fdctx.globsync.rc, (u64)-1, memory_order_relaxed); in FdInit()
  124 atomic_store(&fdctx.filesync.rc, (u64)-1, memory_order_relaxed); in FdInit()
  125 atomic_store(&fdctx.socksync.rc, (u64)-1, memory_order_relaxed); in FdInit()
  133 FdDesc *tab = (FdDesc*)atomic_load(&fdctx.tab[l1], memory_order_relaxed); in FdOnFork()
  145 FdDesc *tab = (FdDesc*)atomic_load(&fdctx.tab[l1], memory_order_relaxed); in FdLocation()
|
/external/compiler-rt/lib/esan/ |
D | esan_sideline_linux.cpp |
  48 if (atomic_load(&Thread->SidelineExit, memory_order_relaxed) != 0) in handleSidelineSignal()
  91 while (atomic_load(&TheThread->SidelineExit, memory_order_relaxed) == 0) in runSideline()
  112 atomic_store(&SidelineExit, 0, memory_order_relaxed); in launchThread()
  136 atomic_store(&SidelineExit, 1, memory_order_relaxed); in joinThread()
|
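esan's sideline thread polls a SidelineExit flag with relaxed loads and the controlling thread sets it with relaxed stores. That works because the flag carries no other data; the worker only needs to observe the store eventually, not to order anything around it. A standalone sketch of the same stop-flag pattern using std::atomic and std::thread (the names and sleep intervals are arbitrary):

    #include <atomic>
    #include <chrono>
    #include <thread>

    std::atomic<int> sideline_exit{0};

    void runSideline() {
      // Poll the exit flag with relaxed loads between units of periodic work.
      while (sideline_exit.load(std::memory_order_relaxed) == 0) {
        std::this_thread::sleep_for(std::chrono::milliseconds(10));
      }
    }

    int main() {
      sideline_exit.store(0, std::memory_order_relaxed);
      std::thread worker(runSideline);
      std::this_thread::sleep_for(std::chrono::milliseconds(50));
      sideline_exit.store(1, std::memory_order_relaxed);  // ask the worker to wind down
      worker.join();
    }

If the controlling thread also handed buffers or results to the worker through this flag, the store would need memory_order_release and the load memory_order_acquire.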