/external/scudo/standalone/
stack_depot.h
    143  u32 RingPos = atomic_load_relaxed(&Tab[Pos]);  in insert()
    144  u64 Entry = atomic_load_relaxed(&Ring[RingPos]);  in insert()
    169  u32 RingPos = atomic_load_relaxed(&Tab[Pos]);  in find()
    172  u64 Entry = atomic_load_relaxed(&Ring[RingPos]);  in find()
    184  B.add(u32(atomic_load_relaxed(&Ring[RingPos])) >> 2);  in find()
    191  return atomic_load_relaxed(&Ring[RingPos & RingMask]);  in at()

quarantine.h
    68   void init() { DCHECK_EQ(atomic_load_relaxed(&Size), 0U); }  in init()
    71   uptr getSize() const { return atomic_load_relaxed(&Size); }  in getSize()
    178  DCHECK_EQ(atomic_load_relaxed(&MaxSize), 0U);  in init()
    179  DCHECK_EQ(atomic_load_relaxed(&MinSize), 0U);  in init()
    180  DCHECK_EQ(atomic_load_relaxed(&MaxCacheSize), 0U);  in init()
    192  uptr getMaxSize() const { return atomic_load_relaxed(&MaxSize); }  in getMaxSize()
    193  uptr getCacheSize() const { return atomic_load_relaxed(&MaxCacheSize); }  in getCacheSize()
    216  recycle(atomic_load_relaxed(&MinSize), Cb);  in drain()

stats.h
    39   V += atomic_load_relaxed(&StatsArray[I]);  in add()
    44   V = atomic_load_relaxed(&StatsArray[I]) - V;  in sub()
    50   uptr get(StatType I) const { return atomic_load_relaxed(&StatsArray[I]); }  in get()

condition_variable_linux.cpp
    25   const u32 V = atomic_load_relaxed(&Counter);  in notifyAllImpl()
    40   const u32 V = atomic_load_relaxed(&Counter) + 1;  in waitImpl()

options.h
    49   Options load() const { return Options{atomic_load_relaxed(&Val)}; }  in load()
    61   u32 Opts = atomic_load_relaxed(&Val), NewOpts;  in setFillContentsMode()

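Lines 49 and 61 above show the two common shapes of these call sites: a plain relaxed snapshot of a flags word, and a relaxed load seeding a compare-exchange loop. A hedged sketch of the second pattern with standard atomics (illustrative names, not Scudo's exact code):

    #include <atomic>
    #include <cstdint>

    std::atomic<uint32_t> Val{0};

    // Rewrite a bitfield inside Val without clobbering concurrent updates
    // to the other bits.
    void setBits(uint32_t Mask, uint32_t Bits) {
      uint32_t Opts = Val.load(std::memory_order_relaxed);
      uint32_t NewOpts;
      do {
        NewOpts = (Opts & ~Mask) | Bits;
        // On failure, compare_exchange_weak reloads Opts with the current
        // value, so each retry works against the latest contents.
      } while (!Val.compare_exchange_weak(Opts, NewOpts,
                                          std::memory_order_relaxed));
    }
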
tsd.h
    43   if (atomic_load_relaxed(&Precedence) == 0)  in tryLock()
    54   inline uptr getPrecedence() { return atomic_load_relaxed(&Precedence); }  in getPrecedence()

chunk.h
    124  PackedHeader NewPackedHeader = atomic_load_relaxed(getConstAtomicHeader(Ptr));  in loadHeader()
    133  PackedHeader NewPackedHeader = atomic_load_relaxed(getConstAtomicHeader(Ptr));  in isValid()

secondary.h
    208  const s32 Interval = atomic_load_relaxed(&ReleaseToOsIntervalMs);  in getStats()
    212  LRUEntries.size(), atomic_load_relaxed(&MaxEntriesCount),  in getStats()
    213  atomic_load_relaxed(&MaxEntrySize), Interval >= 0 ? Interval : -1);  in getStats()
    260  const s32 Interval = atomic_load_relaxed(&ReleaseToOsIntervalMs);  in store()
    329  while (LRUEntries.size() >= atomic_load_relaxed(&MaxEntriesCount)) {  in store()
    463  return atomic_load_relaxed(&MaxEntriesCount) != 0U &&  in canCache()
    464  Size <= atomic_load_relaxed(&MaxEntrySize);  in canCache()

combined.h
    1571  uptr Pos = atomic_load_relaxed(&RingBuffer->Pos);  in getRingBufferErrorInfo()
    1577  uptr EntryPtr = atomic_load_relaxed(&Entry->Ptr);  in getRingBufferErrorInfo()
    1582  uptr EntrySize = atomic_load_relaxed(&Entry->AllocationSize);  in getRingBufferErrorInfo()
    1583  u32 AllocationTrace = atomic_load_relaxed(&Entry->AllocationTrace);  in getRingBufferErrorInfo()
    1584  u32 AllocationTid = atomic_load_relaxed(&Entry->AllocationTid);  in getRingBufferErrorInfo()
    1585  u32 DeallocationTrace = atomic_load_relaxed(&Entry->DeallocationTrace);  in getRingBufferErrorInfo()
    1586  u32 DeallocationTid = atomic_load_relaxed(&Entry->DeallocationTid);  in getRingBufferErrorInfo()

atomic_helpers.h
    126  inline typename T::Type atomic_load_relaxed(const volatile T *A) {  in atomic_load_relaxed() function

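Line 126 above is the definition site: in Scudo, atomic_load_relaxed is a thin template wrapper that performs a relaxed-ordering load on one of its internal atomic types. A minimal sketch of the same idea using standard <atomic> instead of Scudo's own atomic structs (names and types here are illustrative, not Scudo's):

    #include <atomic>
    #include <cstdint>

    // Illustrative stand-in for Scudo's atomic_u32/atomic_uptr wrappers:
    // the load is atomic, but implies no ordering relative to other
    // memory operations.
    template <typename T>
    inline T atomic_load_relaxed(const std::atomic<T> *A) {
      return A->load(std::memory_order_relaxed);
    }

    std::atomic<uint32_t> Counter{0};
    uint32_t snapshot() { return atomic_load_relaxed(&Counter); }
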
primary64.h
    372   const s32 IntervalMs = atomic_load_relaxed(&ReleaseToOsIntervalMs);  in getStats()
    1410  const s64 IntervalMs = atomic_load_relaxed(&ReleaseToOsIntervalMs);  in hasChanceToReleasePages()

primary32.h
    1046  const s32 IntervalMs = atomic_load_relaxed(&ReleaseToOsIntervalMs);  in hasChanceToReleasePages()

/external/compiler-rt/lib/tsan/rtl/ |
tsan_interface_ann.cc
    94   atomic_load_relaxed(&race->addcount) + 1);  in AddExpectRace()
    163  const uptr cnt = atomic_load_relaxed(&(race->*counter));  in CollectMatchedBenignRaces()
    198  atomic_load_relaxed(&hit_matched[i].hitcount),  in PrintMatchedBenignRaces()
    208  atomic_load_relaxed(&add_matched[i].addcount),  in PrintMatchedBenignRaces()
    308  if (atomic_load_relaxed(&race->hitcount) == 0) {  in AnnotateFlushExpectedRaces()

tsan_suppressions.cc
    160  hit_count += atomic_load_relaxed(&matched[i]->hit_count);  in PrintMatchedSuppressions()
    164  Printf("%d %s:%s\n", atomic_load_relaxed(&matched[i]->hit_count),  in PrintMatchedSuppressions()

tsan_platform_mac.cc
    48   void *val = (void *)atomic_load_relaxed(a);  in SignalSafeGetOrAllocate()

tsan_mman.cc
    136  if (atomic_load_relaxed(&thr->in_signal_handler) == 0 ||  in SignalUnsafeCall()

/external/compiler-rt/lib/sanitizer_common/ |
sanitizer_atomic.h
    71   INLINE typename T::Type atomic_load_relaxed(const volatile T *a) {  in atomic_load_relaxed() function

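Line 71 is the sanitizer_common twin of the Scudo helper above, and most of its callers in this listing use it for statistics counters (hit_count, hitcount, addcount) where only the final tally matters. A sketch of that counter pattern (illustrative, not the runtime's exact code):

    #include <atomic>
    #include <cstdio>

    // Counter in the style of Suppression::hit_count: bumped and read with
    // relaxed ordering because no other memory needs to be ordered with it.
    std::atomic<unsigned> hit_count{0};

    void OnSuppressionMatch() {
      hit_count.fetch_add(1, std::memory_order_relaxed);
    }

    void PrintMatched() {
      std::printf("%u hits\n", hit_count.load(std::memory_order_relaxed));
    }
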
sanitizer_suppressions.cc
    164  if (atomic_load_relaxed(&suppressions_[i].hit_count))  in GetMatched()

sanitizer_coverage_libcdep.cc
    246  uptr idx = atomic_load_relaxed(&pc_array_index);  in InitializeGuardArray()

/external/llvm/test/CodeGen/X86/ |
atomic128.ll
    233  define i128 @atomic_load_relaxed(i128* %p) {
    234  ; CHECK: atomic_load_relaxed:

/external/llvm/test/CodeGen/AArch64/ |
arm64-atomic-128.ll
    176  define i128 @atomic_load_relaxed(i64, i64, i128* %p) {
    177  ; CHECK-LABEL: atomic_load_relaxed:

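The two .ll hits are codegen tests rather than uses of a runtime helper: they check how a relaxed 128-bit atomic load lowers on x86 and AArch64. Roughly the C++ that front-ends into such IR (assuming __int128 support; the exact instruction sequence depends on target features such as cx16 or LSE):

    // Relaxed 16-byte atomic load. Depending on the target this may lower
    // to a cmpxchg16b loop (x86-64), an ldxp/stxp or ldp sequence
    // (AArch64), or a libatomic call.
    __int128 load128(__int128 *p) {
      __int128 v;
      __atomic_load(p, &v, __ATOMIC_RELAXED);
      return v;
    }
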
/external/compiler-rt/lib/lsan/ |
lsan_common.cc
    412  Printf("%7zu %10zu %s\n", static_cast<uptr>(atomic_load_relaxed(  in PrintMatchedSuppressions()
    638  atomic_store_relaxed(&s->hit_count, atomic_load_relaxed(&s->hit_count) +  in ApplySuppressions()

/external/rust/android-crates-io/crates/portable-atomic/src/imp/atomic128/ |
aarch64.rs
    650  macro_rules! atomic_load_relaxed {  in _atomic_load_ldp() macro
    670  Ordering::Relaxed => atomic_load_relaxed!(""),  in _atomic_load_ldp()
    672  Ordering::Acquire => atomic_load_relaxed!("dmb ishld"),  in _atomic_load_ldp()

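In the portable-atomic hits, atomic_load_relaxed! is a local asm macro rather than an ordering wrapper: the same LDP-based load is emitted for every ordering, with Relaxed getting no barrier and Acquire getting a trailing dmb ishld. The C++ analogue of that construction is a relaxed load followed by an acquire fence (a sketch of the idea, not the crate's code):

    #include <atomic>
    #include <cstdint>

    uint64_t load_then_acquire_fence(const std::atomic<uint64_t> *a) {
      uint64_t v = a->load(std::memory_order_relaxed);
      // On AArch64 this fence can compile to `dmb ishld`, giving the
      // preceding relaxed load acquire semantics.
      std::atomic_thread_fence(std::memory_order_acquire);
      return v;
    }
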
intrinsics.rs
    78   Relaxed => intrinsics::atomic_load_relaxed(src),  in atomic_load()

/external/compiler-rt/lib/asan/ |
asan_report.cc
    1040  uptr cmp = atomic_load_relaxed(&AsanBuggyPcPool[i]);  in SuppressErrorReport()