/external/scudo/standalone/tests/atomic_test.cpp
    50  checkStoreLoad<atomic_u8, memory_order_relaxed, memory_order_relaxed>();   in TEST()
    51  checkStoreLoad<atomic_u8, memory_order_consume, memory_order_relaxed>();   in TEST()
    52  checkStoreLoad<atomic_u8, memory_order_acquire, memory_order_relaxed>();   in TEST()
    53  checkStoreLoad<atomic_u8, memory_order_relaxed, memory_order_release>();   in TEST()
    56  checkStoreLoad<atomic_u16, memory_order_relaxed, memory_order_relaxed>();   in TEST()
    57  checkStoreLoad<atomic_u16, memory_order_consume, memory_order_relaxed>();   in TEST()
    58  checkStoreLoad<atomic_u16, memory_order_acquire, memory_order_relaxed>();   in TEST()
    59  checkStoreLoad<atomic_u16, memory_order_relaxed, memory_order_release>();   in TEST()
    62  checkStoreLoad<atomic_u32, memory_order_relaxed, memory_order_relaxed>();   in TEST()
    63  checkStoreLoad<atomic_u32, memory_order_consume, memory_order_relaxed>();   in TEST()
    [all …]
/external/compiler-rt/lib/sanitizer_common/tests/sanitizer_atomic_test.cc
    55  CheckStoreLoad<atomic_uint8_t, memory_order_relaxed, memory_order_relaxed>();   in TEST()
    56  CheckStoreLoad<atomic_uint8_t, memory_order_consume, memory_order_relaxed>();   in TEST()
    57  CheckStoreLoad<atomic_uint8_t, memory_order_acquire, memory_order_relaxed>();   in TEST()
    58  CheckStoreLoad<atomic_uint8_t, memory_order_relaxed, memory_order_release>();   in TEST()
    61  CheckStoreLoad<atomic_uint16_t, memory_order_relaxed, memory_order_relaxed>();   in TEST()
    62  CheckStoreLoad<atomic_uint16_t, memory_order_consume, memory_order_relaxed>();   in TEST()
    63  CheckStoreLoad<atomic_uint16_t, memory_order_acquire, memory_order_relaxed>();   in TEST()
    64  CheckStoreLoad<atomic_uint16_t, memory_order_relaxed, memory_order_release>();   in TEST()
    67  CheckStoreLoad<atomic_uint32_t, memory_order_relaxed, memory_order_relaxed>();   in TEST()
    68  CheckStoreLoad<atomic_uint32_t, memory_order_consume, memory_order_relaxed>();   in TEST()
    [all …]
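Both test files above walk the same matrix of load/store ordering combinations over project-specific atomic wrappers. A minimal sketch of what such a check looks like when written against std::atomic — the template name and the <Type, LoadOrder, StoreOrder> parameter order mirror the matches; everything else (the loop bound, the use of assert, main) is assumed:

```cpp
#include <atomic>
#include <cassert>
#include <cstdint>

// Store a sequence of values with StoreOrder and read each one back with
// LoadOrder, checking that the value round-trips.
template <typename T, std::memory_order LoadOrder, std::memory_order StoreOrder>
void checkStoreLoad() {
  std::atomic<T> v{0};
  for (T i = 0; i < 100; ++i) {
    v.store(i, StoreOrder);
    assert(v.load(LoadOrder) == i);
  }
}

int main() {
  checkStoreLoad<std::uint8_t, std::memory_order_relaxed, std::memory_order_relaxed>();
  checkStoreLoad<std::uint8_t, std::memory_order_acquire, std::memory_order_relaxed>();
  checkStoreLoad<std::uint16_t, std::memory_order_relaxed, std::memory_order_release>();
  return 0;
}
```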
/external/clang/test/Sema/atomic-ops.c
    102  __c11_atomic_store(i, 0, memory_order_relaxed);   in f()
    103  …__c11_atomic_store(ci, 0, memory_order_relaxed); // expected-error {{address argument to atomic op…   in f()
    110  int load_n_1 = __atomic_load_n(I, memory_order_relaxed);   in f()
    111  int *load_n_2 = __atomic_load_n(P, memory_order_relaxed);   in f()
    112  …float load_n_3 = __atomic_load_n(D, memory_order_relaxed); // expected-error {{must be a pointer t…   in f()
    113  …__atomic_load_n(s1, memory_order_relaxed); // expected-error {{must be a pointer to integer or poi…   in f()
    114  load_n_1 = __atomic_load_n(CI, memory_order_relaxed);   in f()
    116  …__atomic_load(i, I, memory_order_relaxed); // expected-error {{must be a pointer to a trivially-co…   in f()
    117  __atomic_load(CI, I, memory_order_relaxed);   in f()
    119  …__atomic_load(I, i, memory_order_relaxed); // expected-warning {{passing '_Atomic(int) *' to param…   in f()
    [all …]
/external/v8/src/base/atomicops_internals_std.h
    35  std::memory_order_relaxed, std::memory_order_relaxed);   in Relaxed_CompareAndSwap()
    43  std::memory_order_relaxed, std::memory_order_relaxed);   in Relaxed_CompareAndSwap()
    51  std::memory_order_relaxed, std::memory_order_relaxed);   in Relaxed_CompareAndSwap()
    58  std::memory_order_relaxed);   in Relaxed_AtomicExchange()
    65  std::memory_order_relaxed);   in Relaxed_AtomicIncrement()
    80  std::memory_order_release, std::memory_order_relaxed);   in Release_CompareAndSwap()
    89  std::memory_order_release, std::memory_order_relaxed);   in Release_CompareAndSwap()
    104  std::memory_order_relaxed);   in Relaxed_Store()
    109  std::memory_order_relaxed);   in Relaxed_Store()
    114  std::memory_order_relaxed);   in Relaxed_Store()
    [all …]
/external/libchrome/base/atomicops_internals_portable.h
    71  std::memory_order_relaxed,   in NoBarrier_CompareAndSwap()
    72  std::memory_order_relaxed);   in NoBarrier_CompareAndSwap()
    79  ->exchange(new_value, std::memory_order_relaxed);   in NoBarrier_AtomicExchange()
    86  ->fetch_add(increment, std::memory_order_relaxed);   in NoBarrier_AtomicIncrement()
    112  std::memory_order_relaxed);   in Release_CompareAndSwap()
    117  ((AtomicLocation32)ptr)->store(value, std::memory_order_relaxed);   in NoBarrier_Store()
    121  ((AtomicLocation32)ptr)->store(value, std::memory_order_relaxed);   in Acquire_Store()
    130  return ((AtomicLocation32)ptr)->load(std::memory_order_relaxed);   in NoBarrier_Load()
    139  return ((AtomicLocation32)ptr)->load(std::memory_order_relaxed);   in Release_Load()
    154  std::memory_order_relaxed,   in NoBarrier_CompareAndSwap()
    [all …]
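The two atomicops shims above (V8's and libchrome's) implement the legacy Relaxed_/NoBarrier_ operations by forwarding to std::atomic with std::memory_order_relaxed. A sketch of that forwarding, written directly over std::atomic<Atomic32> rather than the raw-word reinterpretation the real shims perform, so the function names come from the matches but the signatures are an assumption:

```cpp
#include <atomic>
#include <cstdint>

using Atomic32 = std::int32_t;

// CAS with no ordering guarantees beyond atomicity; returns the value that
// was in *ptr when the CAS was attempted (the legacy API's contract).
inline Atomic32 Relaxed_CompareAndSwap(std::atomic<Atomic32>* ptr,
                                       Atomic32 old_value, Atomic32 new_value) {
  ptr->compare_exchange_strong(old_value, new_value,
                               std::memory_order_relaxed,
                               std::memory_order_relaxed);
  return old_value;  // updated to the observed value on failure; unchanged on success
}

inline Atomic32 Relaxed_AtomicExchange(std::atomic<Atomic32>* ptr,
                                       Atomic32 new_value) {
  return ptr->exchange(new_value, std::memory_order_relaxed);
}

// Returns the incremented value, as the legacy increment did.
inline Atomic32 Relaxed_AtomicIncrement(std::atomic<Atomic32>* ptr,
                                        Atomic32 increment) {
  return ptr->fetch_add(increment, std::memory_order_relaxed) + increment;
}

// Release semantics on success, relaxed on failure, as the matches show.
inline Atomic32 Release_CompareAndSwap(std::atomic<Atomic32>* ptr,
                                       Atomic32 old_value, Atomic32 new_value) {
  ptr->compare_exchange_strong(old_value, new_value,
                               std::memory_order_release,
                               std::memory_order_relaxed);
  return old_value;
}
```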
/external/eigen/unsupported/Eigen/CXX11/src/ThreadPool/RunQueue.h
    47  array_[i].state.store(kEmpty, std::memory_order_relaxed);   in RunQueue()
    55  unsigned front = front_.load(std::memory_order_relaxed);   in PushFront()
    57  uint8_t s = e->state.load(std::memory_order_relaxed);   in PushFront()
    61  front_.store(front + 1 + (kSize << 1), std::memory_order_relaxed);   in PushFront()
    70  unsigned front = front_.load(std::memory_order_relaxed);   in PopFront()
    72  uint8_t s = e->state.load(std::memory_order_relaxed);   in PopFront()
    79  front_.store(front, std::memory_order_relaxed);   in PopFront()
    87  unsigned back = back_.load(std::memory_order_relaxed);   in PushBack()
    89  uint8_t s = e->state.load(std::memory_order_relaxed);   in PushBack()
    94  back_.store(back, std::memory_order_relaxed);   in PushBack()
    [all …]
/external/eigen/unsupported/Eigen/CXX11/src/ThreadPool/EventCount.h
    68  w->epoch = state_.fetch_add(kWaiterInc, std::memory_order_relaxed);   in Prewait()
    95  w->next.store(nullptr, std::memory_order_relaxed);   in CommitWait()
    97  w->next.store(&waiters_[state & kStackMask], std::memory_order_relaxed);   in CommitWait()
    110  uint64_t state = state_.load(std::memory_order_relaxed);   in CancelWait()
    116  state = state_.load(std::memory_order_relaxed);   in CancelWait()
    124  std::memory_order_relaxed))   in CancelWait()
    149  Waiter* wnext = w->next.load(std::memory_order_relaxed);   in Notify()
    163  if (!all) w->next.store(nullptr, std::memory_order_relaxed);   in Notify()
    215  next = w->next.load(std::memory_order_relaxed);   in Unpark()
/external/v8/src/tracing/tracing-category-observer.cc
    35  std::memory_order_relaxed);   in OnTraceEnabled()
    41  std::memory_order_relaxed);   in OnTraceEnabled()
    46  i::TracingFlags::gc.fetch_or(ENABLED_BY_TRACING, std::memory_order_relaxed);   in OnTraceEnabled()
    52  std::memory_order_relaxed);   in OnTraceEnabled()
    58  std::memory_order_relaxed);   in OnTraceEnabled()
    65  std::memory_order_relaxed);   in OnTraceEnabled()
    71  ~(ENABLED_BY_TRACING | ENABLED_BY_SAMPLING), std::memory_order_relaxed);   in OnTraceDisabled()
    73  i::TracingFlags::gc.fetch_and(~ENABLED_BY_TRACING, std::memory_order_relaxed);   in OnTraceDisabled()
    76  std::memory_order_relaxed);   in OnTraceDisabled()
    79  std::memory_order_relaxed);   in OnTraceDisabled()
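The tracing observer above toggles bits in shared flag words with relaxed fetch_or/fetch_and, and tracing-flags.h further down reads them back with relaxed loads; only atomicity matters for these flags, not ordering. A sketch of that bit-flag pattern under made-up flag and function names:

```cpp
#include <atomic>
#include <cstdint>

constexpr std::uint32_t kEnabledByTracing  = 1u << 0;
constexpr std::uint32_t kEnabledBySampling = 1u << 1;

std::atomic<std::uint32_t> gc_flags{0};

void OnTraceEnabled() {
  // Set one bit; bits set concurrently by other threads are preserved.
  gc_flags.fetch_or(kEnabledByTracing, std::memory_order_relaxed);
}

void OnTraceDisabled() {
  // Clear both tracing-related bits in one atomic step.
  gc_flags.fetch_and(~(kEnabledByTracing | kEnabledBySampling),
                     std::memory_order_relaxed);
}

bool is_gc_tracing_enabled() {
  return (gc_flags.load(std::memory_order_relaxed) & kEnabledByTracing) != 0;
}
```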
/external/clang/test/Analysis/atomics.c
    9  memory_order_relaxed = __ATOMIC_RELAXED,   enumerator
    27  …ult = __c11_atomic_fetch_add((volatile _Atomic(uint32_t) *)&s->refCount,- 1, memory_order_relaxed);   in test_atomic_fetch_add()
    40  …t32_t result = __c11_atomic_load((volatile _Atomic(uint32_t) *)&s->refCount, memory_order_relaxed);   in test_atomic_load()
    52  __c11_atomic_store((volatile _Atomic(uint32_t) *)&s->refCount, 2, memory_order_relaxed);   in test_atomic_store()
    62  …esult = __c11_atomic_exchange((volatile _Atomic(uint32_t) *)&s->refCount, 2, memory_order_relaxed);   in test_atomic_exchange()
    77  … _Atomic(uint32_t) *)&s->refCount, &expected, desired, memory_order_relaxed, memory_order_relaxed);   in test_atomic_compare_exchange_strong()
    89  … _Atomic(uint32_t) *)&s->refCount, &expected, desired, memory_order_relaxed, memory_order_relaxed);   in test_atomic_compare_exchange_weak()
/external/swiftshader/third_party/llvm-7.0/llvm/include/llvm/ADT/Statistic.h
    56  unsigned getValue() const { return Value.load(std::memory_order_relaxed); }   in getValue()
    75  Value.store(Val, std::memory_order_relaxed);
    80  Value.fetch_add(1, std::memory_order_relaxed);
    86  return Value.fetch_add(1, std::memory_order_relaxed);
    90  Value.fetch_sub(1, std::memory_order_relaxed);
    96  return Value.fetch_sub(1, std::memory_order_relaxed);
    102  Value.fetch_add(V, std::memory_order_relaxed);
    109  Value.fetch_sub(V, std::memory_order_relaxed);
    114  unsigned PrevMax = Value.load(std::memory_order_relaxed);   in updateMax()
    118  PrevMax, V, std::memory_order_relaxed)) {   in updateMax()
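This Statistic.h and the two other copies further down (/external/llvm and /external/swiftshader/.../llvm-subzero) wrap an unsigned counter in relaxed fetch_add/fetch_sub, plus a compare-exchange loop for a running maximum. A sketch of that counter shape, not the actual LLVM class:

```cpp
#include <atomic>

class RelaxedCounter {
  std::atomic<unsigned> Value{0};

public:
  unsigned get() const { return Value.load(std::memory_order_relaxed); }

  RelaxedCounter& operator++() {
    Value.fetch_add(1, std::memory_order_relaxed);
    return *this;
  }

  RelaxedCounter& operator+=(unsigned V) {
    Value.fetch_add(V, std::memory_order_relaxed);
    return *this;
  }

  // Raise the stored value to V if V is larger, tolerating concurrent
  // writers: compare_exchange_weak reloads PrevMax on failure.
  void updateMax(unsigned V) {
    unsigned PrevMax = Value.load(std::memory_order_relaxed);
    while (V > PrevMax &&
           !Value.compare_exchange_weak(PrevMax, V, std::memory_order_relaxed)) {
    }
  }
};
```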
/external/skqp/include/private/SkWeakRefCnt.h
    66  fWeakCnt.store(0, std::memory_order_relaxed);   in ~SkWeakRefCnt()
    73  return fWeakCnt.load(std::memory_order_relaxed);   in getWeakCnt()
    82  int32_t prev = fRefCnt.load(std::memory_order_relaxed);   in atomic_conditional_acquire_strong_ref()
    88  std::memory_order_relaxed));   in atomic_conditional_acquire_strong_ref()
    116  (void)fWeakCnt.fetch_add(+1, std::memory_order_relaxed);   in weak_ref()
    132  fWeakCnt.store(1, std::memory_order_relaxed);   in weak_unref()
    142  return fRefCnt.load(std::memory_order_relaxed) == 0;   in weak_expired()
/external/skia/include/private/SkWeakRefCnt.h
    66  fWeakCnt.store(0, std::memory_order_relaxed);   in ~SkWeakRefCnt()
    73  return fWeakCnt.load(std::memory_order_relaxed);   in getWeakCnt()
    82  int32_t prev = fRefCnt.load(std::memory_order_relaxed);   in atomic_conditional_acquire_strong_ref()
    88  std::memory_order_relaxed));   in atomic_conditional_acquire_strong_ref()
    116  (void)fWeakCnt.fetch_add(+1, std::memory_order_relaxed);   in weak_ref()
    132  fWeakCnt.store(1, std::memory_order_relaxed);   in weak_unref()
    142  return fRefCnt.load(std::memory_order_relaxed) == 0;   in weak_expired()
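Both SkWeakRefCnt.h copies promote a weak reference to a strong one only if the strong count is still non-zero, using a relaxed load followed by a compare-exchange loop. A hedged sketch of that shape — not the actual Skia implementation; the class name is invented and the acquire ordering on success is an assumption, while the relaxed load and relaxed failure ordering come from the matches:

```cpp
#include <atomic>
#include <cstdint>

class WeakishRefCnt {
  mutable std::atomic<std::int32_t> fRefCnt{1};

public:
  // Try to take a strong reference, but only if at least one still exists.
  bool tryAcquireStrongRef() const {
    std::int32_t prev = fRefCnt.load(std::memory_order_relaxed);
    do {
      if (prev == 0) {
        return false;  // object already fully released; promotion fails
      }
      // On CAS failure, prev is refreshed with the current count and we retry.
    } while (!fRefCnt.compare_exchange_weak(prev, prev + 1,
                                            std::memory_order_acquire,
                                            std::memory_order_relaxed));
    return true;
  }
};
```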
/external/pthreadpool/test/pthreadpool.cc
    138  processed_indicators[i].store(true, std::memory_order_relaxed);   in SetTrue1D()
    155  EXPECT_TRUE(indicators[i].load(std::memory_order_relaxed))   in TEST()
    178  EXPECT_TRUE(indicators[i].load(std::memory_order_relaxed))   in TEST()
    184  processed_counters[i].fetch_add(1, std::memory_order_relaxed);   in Increment1D()
    201  EXPECT_EQ(counters[i].load(std::memory_order_relaxed), 1)   in TEST()
    202  …<< "Element " << i << " was processed " << counters[i].load(std::memory_order_relaxed) << " times …   in TEST()
    224  EXPECT_EQ(counters[i].load(std::memory_order_relaxed), 1)   in TEST()
    225  …<< "Element " << i << " was processed " << counters[i].load(std::memory_order_relaxed) << " times …   in TEST()
    245  EXPECT_EQ(counters[i].load(std::memory_order_relaxed), kIncrementIterations)   in TEST()
    246  …<< "Element " << i << " was processed " << counters[i].load(std::memory_order_relaxed) << " times "   in TEST()
    [all …]
/external/llvm/include/llvm/ADT/Statistic.h
    46  unsigned getValue() const { return Value.load(std::memory_order_relaxed); }   in getValue()
    65  Value.store(Val, std::memory_order_relaxed);
    70  Value.fetch_add(1, std::memory_order_relaxed);
    76  return Value.fetch_add(1, std::memory_order_relaxed);
    80  Value.fetch_sub(1, std::memory_order_relaxed);
    86  return Value.fetch_sub(1, std::memory_order_relaxed);
    92  Value.fetch_add(V, std::memory_order_relaxed);
    99  Value.fetch_sub(V, std::memory_order_relaxed);
/external/swiftshader/third_party/llvm-subzero/include/llvm/ADT/Statistic.h
    47  unsigned getValue() const { return Value.load(std::memory_order_relaxed); }   in getValue()
    66  Value.store(Val, std::memory_order_relaxed);
    71  Value.fetch_add(1, std::memory_order_relaxed);
    77  return Value.fetch_add(1, std::memory_order_relaxed);
    81  Value.fetch_sub(1, std::memory_order_relaxed);
    87  return Value.fetch_sub(1, std::memory_order_relaxed);
    93  Value.fetch_add(V, std::memory_order_relaxed);
    100  Value.fetch_sub(V, std::memory_order_relaxed);
/external/v8/src/logging/tracing-flags.h
    27  return runtime_stats.load(std::memory_order_relaxed) != 0;   in is_runtime_stats_enabled()
    31  return gc.load(std::memory_order_relaxed) != 0;   in is_gc_enabled()
    35  return gc_stats.load(std::memory_order_relaxed) != 0;   in is_gc_stats_enabled()
    39  return ic_stats.load(std::memory_order_relaxed) != 0;   in is_ic_stats_enabled()
    43  return zone_stats.load(std::memory_order_relaxed) != 0;   in is_zone_stats_enabled()
/external/libchrome/base/metrics/persistent_memory_allocator.cc
    63  uint32_t loaded_flags = flags->load(std::memory_order_relaxed);   in CheckFlag()
    68  uint32_t loaded_flags = flags->load(std::memory_order_relaxed);   in SetFlag()
    75  std::memory_order_relaxed,   in SetFlag()
    76  std::memory_order_relaxed)) {   in SetFlag()
    164  last_record_.store(kReferenceQueue, std::memory_order_relaxed);   in Reset()
    165  record_count_.store(0, std::memory_order_relaxed);   in Reset()
    174  last_record_.store(starting_after, std::memory_order_relaxed);   in Reset()
    175  record_count_.store(0, std::memory_order_relaxed);   in Reset()
    181  if (!block || block->next.load(std::memory_order_relaxed) == 0) {   in Reset()
    189  Reference last = last_record_.load(std::memory_order_relaxed);   in GetLast()
    [all …]
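The persistent-memory allocator reads flag words with relaxed loads and sets individual bits through a compare-exchange loop so bits written concurrently are not lost. A sketch of that check/set pair; the free-function signatures here are assumed, only the CheckFlag/SetFlag names and the relaxed orderings come from the matches:

```cpp
#include <atomic>
#include <cstdint>

bool CheckFlag(const std::atomic<std::uint32_t>* flags, std::uint32_t flag) {
  std::uint32_t loaded_flags = flags->load(std::memory_order_relaxed);
  return (loaded_flags & flag) != 0;
}

void SetFlag(std::atomic<std::uint32_t>* flags, std::uint32_t flag) {
  std::uint32_t loaded_flags = flags->load(std::memory_order_relaxed);
  // compare_exchange_weak refreshes loaded_flags on failure, so each retry
  // ORs the new bit into the value currently in memory.
  while (!flags->compare_exchange_weak(loaded_flags, loaded_flags | flag,
                                       std::memory_order_relaxed,
                                       std::memory_order_relaxed)) {
  }
}
```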
/external/compiler-rt/lib/sanitizer_common/sanitizer_addrhashmap.h
    184  if (atomic_load(&b->add, memory_order_relaxed)) {   in acquire()
    186  AddBucket *add = (AddBucket*)atomic_load(&b->add, memory_order_relaxed);   in acquire()
    189  uptr addr1 = atomic_load(&c->addr, memory_order_relaxed);   in acquire()
    205  uptr addr1 = atomic_load(&c->addr, memory_order_relaxed);   in acquire()
    217  AddBucket *add = (AddBucket*)atomic_load(&b->add, memory_order_relaxed);   in acquire()
    221  uptr addr1 = atomic_load(&c->addr, memory_order_relaxed);   in acquire()
    245  uptr addr1 = atomic_load(&c->addr, memory_order_relaxed);   in acquire()
    260  atomic_store(&b->add, (uptr)add, memory_order_relaxed);   in acquire()
    272  atomic_store(&b->add, (uptr)add1, memory_order_relaxed);   in acquire()
    278  CHECK_EQ(atomic_load(&c->addr, memory_order_relaxed), 0);   in acquire()
    [all …]
/external/compiler-rt/lib/sanitizer_common/sanitizer_mutex.h
    26  atomic_store(&state_, 0, memory_order_relaxed);   in Init()
    44  CHECK_EQ(atomic_load(&state_, memory_order_relaxed), 1);   in CheckLocked()
    56  if (atomic_load(&state_, memory_order_relaxed) == 0   in LockSlow()
    96  atomic_store(&state_, kUnlocked, memory_order_relaxed);   in RWMutex()
    100  CHECK_EQ(atomic_load(&state_, memory_order_relaxed), kUnlocked);   in ~RWMutex()
    132  CHECK_NE(atomic_load(&state_, memory_order_relaxed), kUnlocked);   in CheckLocked()
    150  u32 cmp = atomic_load(&state_, memory_order_relaxed);   in LockSlow()
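The LockSlow matches above spin on a relaxed load of the lock word before retrying the expensive atomic operation — the classic test-and-test-and-set refinement. A sketch of that slow path using std::atomic instead of the sanitizer's own atomic wrappers (class and member names are illustrative):

```cpp
#include <atomic>

class SpinMutex {
  std::atomic<unsigned> state_{0};

public:
  void Lock() {
    // Fast path: try to grab the lock in one shot.
    if (state_.exchange(1, std::memory_order_acquire) == 0) return;
    LockSlow();
  }

  void Unlock() { state_.store(0, std::memory_order_release); }

private:
  void LockSlow() {
    for (;;) {
      // Spin on a cheap relaxed load; only attempt the exchange (which takes
      // the cache line exclusive) once the lock looks free.
      if (state_.load(std::memory_order_relaxed) == 0 &&
          state_.exchange(1, std::memory_order_acquire) == 0) {
        return;
      }
    }
  }
};
```

The relaxed pre-check keeps waiters reading a shared cache line; only the acquire exchange on an apparently free lock causes write traffic.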
/external/libchrome/base/android/orderfile/orderfile_instrumentation.cc
    120  int index = g_data_index.load(std::memory_order_relaxed);   in RecordAddress()
    148  uint32_t value = element->load(std::memory_order_relaxed);   in RecordAddress()
    153  auto before = element->fetch_or(mask, std::memory_order_relaxed);   in RecordAddress()
    164  ordered_offsets_index.fetch_add(1, std::memory_order_relaxed);   in RecordAddress()
    169  ordered_offsets[insertion_index].store(offset, std::memory_order_relaxed);   in RecordAddress()
    192  auto offset = data.ordered_offsets[i].load(std::memory_order_relaxed);   in DumpToFile()
    231  auto old_phase = g_data_index.exchange(kPhases, std::memory_order_relaxed);   in Disable()
    245  int before = g_data_index.fetch_add(1, std::memory_order_relaxed);   in SwitchToNextPhaseOrDump()
    308  size_t max_index = g_data[0].index.load(std::memory_order_relaxed);   in GetOrderedOffsetsForTesting()
    310  auto value = g_data[0].ordered_offsets[i].load(std::memory_order_relaxed);   in GetOrderedOffsetsForTesting()
/external/pthreadpool/src/threadpool-pthreads.c
    222  if (atomic_fetch_sub_explicit(&threadpool->active_threads, 1, memory_order_relaxed) == 1) {   in checkin_worker_thread()
    228  if (atomic_fetch_sub_explicit(&threadpool->active_threads, 1, memory_order_relaxed) == 1) {   in checkin_worker_thread()
    238  …t has_active_threads = atomic_load_explicit(&threadpool->has_active_threads, memory_order_relaxed);   in wait_worker_threads()
    243  size_t active_threads = atomic_load_explicit(&threadpool->active_threads, memory_order_relaxed);   in wait_worker_threads()
    255  has_active_threads = atomic_load_explicit(&threadpool->has_active_threads, memory_order_relaxed);   in wait_worker_threads()
    260  active_threads = atomic_load_explicit(&threadpool->active_threads, memory_order_relaxed);   in wait_worker_threads()
    274  while (atomic_load_explicit(&threadpool->active_threads, memory_order_relaxed) != 0) {   in wait_worker_threads()
    282  size_t actual_value = atomic_load_explicit(value, memory_order_relaxed);   in atomic_decrement()
    287  value, &actual_value, actual_value - 1, memory_order_relaxed, memory_order_relaxed))   in atomic_decrement()
    306  …_1d_t task = (pthreadpool_task_1d_t) atomic_load_explicit(&threadpool->task, memory_order_relaxed);   in thread_parallelize_1d()
    [all …]
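In checkin_worker_thread above, each worker decrements active_threads and treats an old value of 1 (count now zero) as "this was the last worker to finish". A simplified C++ stand-in for that countdown — the original is C11 atomics; the struct layout and the boolean indicator here are assumptions, and the relaxed ordering simply mirrors what the matches show rather than being a recommendation:

```cpp
#include <atomic>
#include <cstddef>

struct ThreadPool {
  std::atomic<std::size_t> active_threads;
  std::atomic<bool> has_active_threads;
};

// Called by each worker when it finishes its slice of work.
void checkin_worker_thread(ThreadPool* threadpool) {
  // fetch_sub returns the value before the decrement, so 1 means the count
  // just reached zero and this thread was the last to check in.
  if (threadpool->active_threads.fetch_sub(1, std::memory_order_relaxed) == 1) {
    // Clear the indicator that the waiting thread is polling.
    threadpool->has_active_threads.store(false, std::memory_order_relaxed);
  }
}
```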
/external/protobuf/src/google/protobuf/arena.cc
    71  lifecycle_id_generator_.fetch_add(1, std::memory_order_relaxed);   in Init()
    72  hint_.store(nullptr, std::memory_order_relaxed);   in Init()
    73  threads_.store(nullptr, std::memory_order_relaxed);   in Init()
    83  threads_.store(serial, std::memory_order_relaxed);   in Init()
    85  std::memory_order_relaxed);   in Init()
    88  space_allocated_.store(0, std::memory_order_relaxed);   in Init()
    123  space_allocated_.fetch_add(size, std::memory_order_relaxed);   in NewBlock()
    240  return space_allocated_.load(std::memory_order_relaxed);   in SpaceAllocated()
    268  SerialArena* serial = threads_.load(std::memory_order_relaxed);   in FreeBlocks()
    314  SerialArena* serial = threads_.load(std::memory_order_relaxed);   in CleanupList()
    [all …]
/external/skqp/src/compute/skc/platforms/cl_12/atomic_cl.h
    53  … v,memory_order_relaxed,memory_scope_device)
    55  … v,memory_order_relaxed,memory_scope_sub_group)
    58  … v,memory_order_relaxed,memory_scope_device)
    60  … v,memory_order_relaxed,memory_scope_sub_group)
/external/v8/src/libplatform/default-job.cc
    43  if (is_canceled_.load(std::memory_order_relaxed)) return;   in NotifyConcurrencyIncrease()
    68  assigned_task_ids_.load(std::memory_order_relaxed);   in AcquireTaskId()
    83  std::memory_order_relaxed));   in AcquireTaskId()
    118  is_canceled_.store(true, std::memory_order_relaxed);   in CancelAndWait()
    127  is_canceled_.store(true, std::memory_order_relaxed);   in CancelAndDetach()
    139  if (is_canceled_.load(std::memory_order_relaxed)) return false;   in CanRunFirstTask()
    155  if (is_canceled_.load(std::memory_order_relaxed) ||   in DidRunTask()
    191  is_canceled_.store(true, std::memory_order_relaxed);   in WaitForParticipationOpportunityLockRequired()
/external/perfetto/include/perfetto/ext/base/metatrace.h
    91  auto enabled_tags = g_enabled_tags.load(std::memory_order_relaxed);   in IsEnabled()
    105  auto base_ns = g_enabled_timestamp.load(std::memory_order_relaxed);   in timestamp_ns()
    112  auto t_start = g_enabled_timestamp.load(std::memory_order_relaxed);   in set_timestamp()
    199  At(cur_)->type_and_id.store(0, std::memory_order_relaxed);
    243  auto wr_index = wr_index_.load(std::memory_order_relaxed);   in GetSizeForTesting()
    244  auto rd_index = rd_index_.load(std::memory_order_relaxed);   in GetSizeForTesting()
    267  auto enabled_tags = g_enabled_tags.load(std::memory_order_relaxed);   in TraceCounter()
    281  auto enabled_tags = g_enabled_tags.load(std::memory_order_relaxed);   in ScopedEvent()