/external/compiler-rt/lib/sanitizer_common/
sanitizer_addrhashmap.h
    176: uptr addr1 = atomic_load(&c->addr, memory_order_acquire);  // in acquire()
    184: if (atomic_load(&b->add, memory_order_relaxed)) {  // in acquire()
    186: AddBucket *add = (AddBucket*)atomic_load(&b->add, memory_order_relaxed);  // in acquire()
    189: uptr addr1 = atomic_load(&c->addr, memory_order_relaxed);  // in acquire()
    205: uptr addr1 = atomic_load(&c->addr, memory_order_relaxed);  // in acquire()
    217: AddBucket *add = (AddBucket*)atomic_load(&b->add, memory_order_relaxed);  // in acquire()
    221: uptr addr1 = atomic_load(&c->addr, memory_order_relaxed);  // in acquire()
    245: uptr addr1 = atomic_load(&c->addr, memory_order_relaxed);  // in acquire()
    278: CHECK_EQ(atomic_load(&c->addr, memory_order_relaxed), 0);  // in acquire()
    289: uptr addr1 = atomic_load(&c->addr, memory_order_relaxed);  // in release()
    [all …]

sanitizer_mutex.h
    44: CHECK_EQ(atomic_load(&state_, memory_order_relaxed), 1);  // in CheckLocked()
    56: if (atomic_load(&state_, memory_order_relaxed) == 0  // in LockSlow()
    100: CHECK_EQ(atomic_load(&state_, memory_order_relaxed), kUnlocked);  // in ~RWMutex()
    132: CHECK_NE(atomic_load(&state_, memory_order_relaxed), kUnlocked);  // in CheckLocked()
    150: u32 cmp = atomic_load(&state_, memory_order_relaxed);  // in LockSlow()
    164: u32 prev = atomic_load(&state_, memory_order_acquire);  // in ReadLockSlow()

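The sanitizer_mutex.h hits show the usual test-and-test-and-set shape: the slow path spins on a cheap relaxed load and only retries the acquisition once the state looks free. A minimal sketch of that pattern, using std::atomic in place of the sanitizer's own atomic wrappers (SpinMutex and its members are illustrative names, not the compiler-rt types):

    #include <atomic>

    // Hedged sketch of the LockSlow() pattern above: spin on a relaxed load,
    // grab the lock with an acquire exchange only once it appears free.
    class SpinMutex {
     public:
      void Lock() {
        if (state_.exchange(1, std::memory_order_acquire) == 0)
          return;                                   // fast path: lock was free
        LockSlow();
      }
      void Unlock() { state_.store(0, std::memory_order_release); }

     private:
      void LockSlow() {
        for (;;) {
          // "Test" with a relaxed load to avoid hammering the cache line with
          // read-modify-write operations while the lock is held.
          if (state_.load(std::memory_order_relaxed) == 0 &&
              state_.exchange(1, std::memory_order_acquire) == 0)
            return;                                 // "test-and-set" succeeded
        }
      }
      std::atomic<unsigned> state_{0};              // 0 = unlocked, 1 = locked
    };
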
sanitizer_lfstack.h
    33: return (atomic_load(&head_, memory_order_relaxed) & kPtrMask) == 0;  // in Empty()
    37: u64 cmp = atomic_load(&head_, memory_order_relaxed);  // in Push()
    49: u64 cmp = atomic_load(&head_, memory_order_acquire);  // in Pop()

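LFStack keeps the list head and a generation counter packed into one 64-bit word, which is why the loads above mask with kPtrMask. A rough sketch of the idea, assuming 48-bit pointers, nodes that are never deallocated, and std::atomic<uint64_t>; the mask width, node type, and counter handling are illustrative rather than the compiler-rt constants:

    #include <atomic>
    #include <cstdint>

    struct Node { Node *next; };

    class LockFreeStack {
      static constexpr uint64_t kPtrMask = (1ULL << 48) - 1;   // low 48 bits: pointer
      static constexpr uint64_t kCountInc = 1ULL << 48;        // high bits: ABA counter

     public:
      bool Empty() const {
        return (head_.load(std::memory_order_relaxed) & kPtrMask) == 0;
      }

      void Push(Node *p) {
        uint64_t cmp = head_.load(std::memory_order_relaxed);
        for (;;) {
          p->next = reinterpret_cast<Node *>(cmp & kPtrMask);
          uint64_t xch = reinterpret_cast<uint64_t>(p) | (cmp & ~kPtrMask);
          // Release so the node contents are visible to whoever pops it.
          if (head_.compare_exchange_weak(cmp, xch, std::memory_order_release,
                                          std::memory_order_relaxed))
            return;
        }
      }

      Node *Pop() {
        uint64_t cmp = head_.load(std::memory_order_acquire);
        for (;;) {
          Node *cur = reinterpret_cast<Node *>(cmp & kPtrMask);
          if (!cur) return nullptr;
          // Bump the counter so a concurrent pop/re-push of the same node is
          // not mistaken for an unchanged head (ABA). Nodes are assumed to
          // stay allocated, so reading cur->next here is safe.
          uint64_t xch = reinterpret_cast<uint64_t>(cur->next) |
                         ((cmp & ~kPtrMask) + kCountInc);
          if (head_.compare_exchange_weak(cmp, xch, std::memory_order_acquire,
                                          std::memory_order_acquire))
            return cur;
        }
      }

     private:
      std::atomic<uint64_t> head_{0};
    };
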
sanitizer_coverage_libcdep.cc
    275: uptr size = atomic_load(&pc_array_size, memory_order_relaxed);  // in ReInit()
    286: CHECK_EQ(atomic_load(&pc_array_index, memory_order_relaxed), 0);  // in ReInit()
    305: uptr size = atomic_load(&pc_array_size, memory_order_relaxed);  // in Extend()
    367: uptr range_end = atomic_load(&pc_array_index, memory_order_relaxed);  // in InitializeGuards()
    404: s32 guard_value = atomic_load(atomic_guard, memory_order_relaxed);  // in Add()
    411: if (idx >= atomic_load(&pc_array_index, memory_order_acquire))  // in Add()
    414: atomic_load(&pc_array_size, memory_order_acquire));  // in Add()
    436: atomic_load(&cc_array_size, memory_order_acquire));  // in IndirCall()
    440: CHECK_EQ(atomic_load(&atomic_callee_cache[0], memory_order_relaxed), caller);  // in IndirCall()
    516: return atomic_load(&pc_array_index, memory_order_relaxed);  // in size()
    [all …]

sanitizer_stackdepotbase.h
    78: uptr cmp = atomic_load(p, memory_order_relaxed);  // in lock()
    104: uptr v = atomic_load(p, memory_order_consume);  // in Put()
    149: uptr v = atomic_load(p, memory_order_consume);  // in Get()
    171: uptr s = atomic_load(p, memory_order_relaxed);  // in UnlockAll()

sanitizer_persistent_allocator.h
    38: uptr cmp = atomic_load(&region_pos, memory_order_acquire);  // in tryAlloc()
    39: uptr end = atomic_load(&region_end, memory_order_acquire);  // in tryAlloc()

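tryAlloc() is a lock-free bump allocator: read the current cursor and the region end with acquire loads, bail out if the request does not fit, otherwise CAS the cursor forward. A small sketch of that scheme, with std::atomic<uintptr_t> standing in for the sanitizer atomics and the region assumed to be mapped elsewhere (all names are illustrative):

    #include <atomic>
    #include <cstdint>

    std::atomic<uintptr_t> region_pos{0};  // current allocation cursor
    std::atomic<uintptr_t> region_end{0};  // one past the end of the region

    // Returns `size` bytes, or nullptr if the region is exhausted (the real
    // allocator then maps a fresh region under a lock and retries).
    void *TryAlloc(uintptr_t size) {
      for (;;) {
        uintptr_t cmp = region_pos.load(std::memory_order_acquire);
        uintptr_t end = region_end.load(std::memory_order_acquire);
        if (cmp == 0 || cmp + size > end) return nullptr;
        // Claim [cmp, cmp + size) by advancing the cursor; retry on contention.
        if (region_pos.compare_exchange_weak(cmp, cmp + size,
                                             std::memory_order_acq_rel))
          return reinterpret_cast<void *>(cmp);
      }
    }
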
sanitizer_quarantine.h
    58: uptr GetSize() const { return atomic_load(&max_size_, memory_order_acquire); }  // in GetSize()
    89: uptr min_size = atomic_load(&min_size_, memory_order_acquire);  // in Recycle()
    128: return atomic_load(&size_, memory_order_relaxed);  // in Size()

sanitizer_stackdepot.cc
    40: atomic_load(&hash_and_use_count, memory_order_relaxed) & kHashMask;  // in eq()
    92: return atomic_load(&node_->hash_and_use_count, memory_order_relaxed) &  // in use_count()
    142: uptr v = atomic_load(p, memory_order_consume);  // in StackDepotReverseMap()

sanitizer_allocator.cc
    60: if (atomic_load(&internal_allocator_initialized, memory_order_acquire) == 0) {
    62: if (atomic_load(&internal_allocator_initialized, memory_order_relaxed) ==

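These two lines are the classic double-checked initialization: an acquire load on the fast path, then a re-check under a lock with a relaxed load before doing the one-time work and publishing it with a release store. A condensed sketch of the same shape using std::atomic and std::mutex (names and the Init body are placeholders, not the compiler-rt code):

    #include <atomic>
    #include <mutex>

    std::atomic<int> internal_allocator_initialized{0};
    std::mutex internal_alloc_init_mu;

    static void InitInternalAllocator() { /* one-time setup would go here */ }

    void EnsureInitialized() {
      // Fast path: acquire pairs with the release store below, so a reader
      // that sees 1 also sees everything InitInternalAllocator() wrote.
      if (internal_allocator_initialized.load(std::memory_order_acquire) == 0) {
        std::lock_guard<std::mutex> lock(internal_alloc_init_mu);
        // Re-check under the lock; relaxed is enough because the mutex already
        // orders us after any earlier initializer.
        if (internal_allocator_initialized.load(std::memory_order_relaxed) == 0) {
          InitInternalAllocator();
          internal_allocator_initialized.store(1, std::memory_order_release);
        }
      }
    }
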
sanitizer_libignore.h
    73: const uptr n = atomic_load(&loaded_count_, memory_order_acquire);  // in IsIgnored()

sanitizer_atomic.h
    72: return atomic_load(a, memory_order_relaxed);  // in atomic_load_relaxed()

sanitizer_atomic_clang_other.h
    25: INLINE typename T::Type atomic_load(  // in atomic_load() function

sanitizer_atomic_clang_x86.h
    28: INLINE typename T::Type atomic_load(  // in atomic_load() function

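These two headers hold the per-architecture definitions behind every atomic_load call listed above (the x86 variant additionally handles 8-byte loads on 32-bit hosts). As a rough illustration of what such a wrapper boils down to on Clang/GCC, here is a sketch built on the __atomic_load_n builtin rather than the actual compiler-rt implementation; the wrapper struct and enum are stand-ins:

    #include <cstdint>

    enum memory_order {
      memory_order_relaxed = __ATOMIC_RELAXED,
      memory_order_consume = __ATOMIC_CONSUME,
      memory_order_acquire = __ATOMIC_ACQUIRE,
    };

    // Illustrative stand-in for the sanitizer's atomic_uint32_t wrapper type.
    struct atomic_uint32_t {
      typedef uint32_t Type;
      volatile Type val_dont_use;
    };

    // Sketch of an atomic_load wrapper: forward to the compiler builtin, which
    // emits a plain load plus whatever ordering the target requires.
    template <typename T>
    inline typename T::Type atomic_load(const volatile T *a, memory_order mo) {
      return __atomic_load_n(&a->val_dont_use, mo);
    }
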
/external/boringssl/src/crypto/
refcount_c11.c
    39: uint32_t expected = atomic_load(count);  // in CRYPTO_refcount_inc()
    51: uint32_t expected = atomic_load(count);  // in CRYPTO_refcount_dec_and_test_zero()

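refcount_c11.c builds saturating reference counts on top of C11 atomics: load the count, then retry a compare-and-swap until it wins, except that a count pinned at the maximum is left untouched so it can never overflow. A hedged C++ rendering of the increment side (the constant mirrors the idea of BoringSSL's CRYPTO_REFCOUNT_MAX, but the function itself is only a sketch):

    #include <atomic>
    #include <cstdint>

    static const uint32_t kRefcountMax = 0xffffffffu;  // saturated counts stay put

    // Sketch in the spirit of CRYPTO_refcount_inc: retry a compare-and-swap
    // until it wins, but never move a count that has already saturated.
    void RefcountInc(std::atomic<uint32_t> *count) {
      uint32_t expected = count->load();
      while (expected != kRefcountMax) {
        if (count->compare_exchange_weak(expected, expected + 1))
          return;
        // On failure, compare_exchange_weak reloads `expected`; loop and retry.
      }
    }
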
/external/libcxx/test/std/atomics/atomics.types.operations/atomics.types.operations.req/
atomic_load.pass.cpp
    34: assert(std::atomic_load(&t) == T(1));  // in test()
    37: assert(std::atomic_load(&vt) == T(2));  // in test()

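This libc++ test exercises the non-member std::atomic_load overloads, which read from a std::atomic<T> (or its volatile variant) with seq_cst ordering, equivalently to calling .load(). A minimal usage sketch:

    #include <atomic>
    #include <cassert>

    int main() {
      std::atomic<int> t(1);
      assert(std::atomic_load(&t) == 1);     // same as t.load()

      volatile std::atomic<int> vt(2);
      assert(std::atomic_load(&vt) == 2);    // volatile overload
      return 0;
    }
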
/external/compiler-rt/lib/tsan/rtl/
tsan_mutex.cc
    223: CHECK_EQ(atomic_load(&state_, memory_order_relaxed), kUnlocked);  // in ~Mutex()
    235: if (atomic_load(&state_, memory_order_relaxed) == kUnlocked) {  // in Lock()
    265: prev = atomic_load(&state_, memory_order_acquire);  // in ReadLock()
    286: CHECK_NE(atomic_load(&state_, memory_order_relaxed), 0);  // in CheckLocked()

tsan_fd.cc
    58: if (s && atomic_load(&s->rc, memory_order_relaxed) != (u64)-1)  // in ref()
    64: if (s && atomic_load(&s->rc, memory_order_relaxed) != (u64)-1) {  // in unref()
    78: uptr l1 = atomic_load(pl1, memory_order_consume);  // in fddesc()
    133: FdDesc *tab = (FdDesc*)atomic_load(&fdctx.tab[l1], memory_order_relaxed);  // in FdOnFork()
    145: FdDesc *tab = (FdDesc*)atomic_load(&fdctx.tab[l1], memory_order_relaxed);  // in FdLocation()

/external/compiler-rt/lib/dfsan/
dfsan.cc
    182: label = atomic_load(table_ent, memory_order_acquire);  // in __dfsan_union()
    321: atomic_load(&__dfsan_last_label, memory_order_relaxed);  // in dfsan_get_label_count()
    329: atomic_load(&__dfsan_last_label, memory_order_relaxed);  // in dfsan_dump_labels()

/external/libcxx/test/std/utilities/memory/util.smartptr/util.smartptr.shared.atomic/
atomic_load.pass.cpp
    33: std::shared_ptr<int> q = std::atomic_load(&p);  // in main()

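The smart-pointer counterpart: std::atomic_load also has an overload for std::shared_ptr, which takes a consistent snapshot of the pointer even while another thread replaces it with std::atomic_store; this free-function API was the pre-C++20 way to share a shared_ptr between threads. A short sketch:

    #include <atomic>
    #include <cassert>
    #include <memory>

    int main() {
      std::shared_ptr<int> p = std::make_shared<int>(3);
      // Atomically snapshot p; safe even if another thread concurrently
      // replaces it with std::atomic_store(&p, ...).
      std::shared_ptr<int> q = std::atomic_load(&p);
      assert(q != nullptr && *q == 3);
      return 0;
    }
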
/external/compiler-rt/lib/asan/
asan_stack.cc
    27: return atomic_load(&malloc_context_size, memory_order_acquire);  // in GetMallocContextSize()

asan_allocator.cc
    276: options->min_redzone = atomic_load(&min_redzone, memory_order_acquire);  // in GetOptions()
    277: options->max_redzone = atomic_load(&max_redzone, memory_order_acquire);  // in GetOptions()
    280: atomic_load(&alloc_dealloc_mismatch, memory_order_acquire);  // in GetOptions()
    293: u32 min_rz = atomic_load(&min_redzone, memory_order_acquire);  // in ComputeRZLog()
    294: u32 max_rz = atomic_load(&max_redzone, memory_order_acquire);  // in ComputeRZLog()
    477: if (atomic_load(&alloc_dealloc_mismatch, memory_order_acquire)) {  // in QuarantineChunk()

/external/llvm/test/CodeGen/PowerPC/
atomic-2.ll
    93: define i64 @atomic_load(i64* %mem) nounwind {
    95: ; CHECK: @atomic_load

/external/compiler-rt/lib/lsan/
lsan_interceptors.cc
    216: while ((tid = atomic_load(&p->tid, memory_order_acquire)) == 0)  // in __lsan_thread_start_func()
    245: while (atomic_load(&p.tid, memory_order_acquire) != 0)  // in INTERCEPTOR()

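Both lsan matches spin on an acquire load until another thread publishes a value with a release store; the same handshake appears in tsan_test_util_posix.cc below. A minimal sketch of that producer/consumer handoff with std::atomic and std::thread, with the thread-id plumbing simplified to a plain flag:

    #include <atomic>
    #include <cassert>
    #include <thread>

    std::atomic<unsigned long> published_tid{0};
    int payload = 0;                    // ordinary data published via the flag

    void Child() {
      // Wait until the parent has filled in the payload and announced the tid.
      unsigned long tid;
      while ((tid = published_tid.load(std::memory_order_acquire)) == 0)
        std::this_thread::yield();
      assert(payload == 42);            // acquire pairs with the release below
    }

    int main() {
      std::thread t(Child);
      payload = 42;
      published_tid.store(1, std::memory_order_release);  // publish
      t.join();
      return 0;
    }
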
/external/compiler-rt/lib/tsan/tests/rtl/
tsan_mutex.cc
    169: int *val = (int *)atomic_load(singleton, memory_order_acquire);  // in singleton_thread()
    215: uptr v = atomic_load(&flag, memory_order_acquire);  // in TEST()

tsan_test_util_posix.cc
    380: Event* ev = (Event*)atomic_load(&impl->event, memory_order_acquire);  // in ScopedThreadCallback()
    400: CHECK_EQ(atomic_load(&event, memory_order_relaxed), 0);  // in send()
    402: while (atomic_load(&event, memory_order_acquire) != 0)  // in send()
|