/external/llvm/test/CodeGen/ARM/ |
D | swift-atomics.ll |
      2  ; RUN: llc -mtriple=armv7-apple-ios6.0 < %s | FileCheck %s --check-prefix=CHECK-STRICT-ATOMIC
     11  ; CHECK-STRICT-ATOMIC-LABEL: test_store_release:
     12  ; CHECK-STRICT-ATOMIC: dmb {{ish$}}
     28  ; CHECK-STRICT-ATOMIC-LABEL: test_seq_cst:
     29  ; CHECK-STRICT-ATOMIC: dmb {{ish$}}
     30  ; CHECK-STRICT-ATOMIC: str
     31  ; CHECK-STRICT-ATOMIC: dmb {{ish$}}
     32  ; CHECK-STRICT-ATOMIC: ldr
     33  ; CHECK-STRICT-ATOMIC: dmb {{ish$}}
     47  ; CHECK-STRICT-ATOMIC-LABEL: test_acq:
          [all …]
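Note: the CHECK-STRICT-ATOMIC lines above describe the strict ARMv7 lowering checked by this test: a release store is preceded by a `dmb ish`, and a seq_cst store/load pair is bracketed by `dmb ish` barriers on every side. The test itself is written in LLVM IR; as a rough approximation only, C++ source along these lines would be expected to produce that barrier pattern when compiled for armv7-apple-ios6.0 without the Swift-specific atomic optimizations.

```cpp
// Sketch only: approximates the access patterns the FileCheck lines test for.
// With strict atomic lowering on ARMv7, the compiler is expected to surround
// the plain str/ldr instructions with "dmb ish" barriers.
#include <atomic>

std::atomic<int> shared_flag{0};

void store_release(int v) {
  // release store: dmb ish; str
  shared_flag.store(v, std::memory_order_release);
}

int seq_cst_store_then_load(int v) {
  // seq_cst store + load: dmb ish; str; dmb ish; ldr; dmb ish
  shared_flag.store(v, std::memory_order_seq_cst);
  return shared_flag.load(std::memory_order_seq_cst);
}
```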
|
/external/swiftshader/third_party/llvm-7.0/llvm/test/CodeGen/ARM/ |
D | swift-atomics.ll |
      2  ; RUN: llc -mtriple=armv7-apple-ios6.0 < %s | FileCheck %s --check-prefix=CHECK-STRICT-ATOMIC
     11  ; CHECK-STRICT-ATOMIC-LABEL: test_store_release:
     12  ; CHECK-STRICT-ATOMIC: dmb {{ish$}}
     28  ; CHECK-STRICT-ATOMIC-LABEL: test_seq_cst:
     29  ; CHECK-STRICT-ATOMIC: dmb {{ish$}}
     30  ; CHECK-STRICT-ATOMIC: str
     31  ; CHECK-STRICT-ATOMIC: dmb {{ish$}}
     32  ; CHECK-STRICT-ATOMIC: ldr
     33  ; CHECK-STRICT-ATOMIC: dmb {{ish$}}
     47  ; CHECK-STRICT-ATOMIC-LABEL: test_acq:
          [all …]
|
/external/v8/src/heap/ |
D | marking.cc |
     38  SetBitsInCell<AccessMode::ATOMIC>(start_cell_index,  in SetRange()
     46  SetBitsInCell<AccessMode::ATOMIC>(end_cell_index, (end_index_mask - 1));  in SetRange()
     48  SetBitsInCell<AccessMode::ATOMIC>(start_cell_index,  in SetRange()
     66  ClearBitsInCell<AccessMode::ATOMIC>(start_cell_index,  in ClearRange()
     74  ClearBitsInCell<AccessMode::ATOMIC>(end_cell_index, (end_index_mask - 1));  in ClearRange()
     76  ClearBitsInCell<AccessMode::ATOMIC>(start_cell_index,  in ClearRange()
|
D | slot-set.h |
     58  template <AccessMode access_mode = AccessMode::ATOMIC>
    309  template <AccessMode access_mode = AccessMode::ATOMIC>
    311  if (access_mode == AccessMode::ATOMIC)  in LoadBucket()
    316  template <AccessMode access_mode = AccessMode::ATOMIC>
    318  if (access_mode == AccessMode::ATOMIC) {  in StoreBucket()
    334  template <AccessMode access_mode = AccessMode::ATOMIC>
    336  if (access_mode == AccessMode::ATOMIC) {  in SwapInNewBucket()
    346  template <AccessMode access_mode = AccessMode::ATOMIC>
    348  if (access_mode == AccessMode::ATOMIC)  in LoadCell()
    361  template <AccessMode access_mode = AccessMode::ATOMIC>
          [all …]
|
D | marking.h |
     66  inline bool MarkBit::Set<AccessMode::ATOMIC>() {
     76  inline bool MarkBit::Get<AccessMode::ATOMIC>() {
     88  inline bool MarkBit::Clear<AccessMode::ATOMIC>() {
    185  inline void Bitmap::SetBitsInCell<AccessMode::ATOMIC>(uint32_t cell_index,
    197  inline void Bitmap::ClearBitsInCell<AccessMode::ATOMIC>(uint32_t cell_index,
|
D | mark-compact.h |
    311  : public MarkingStateBase<MinorMarkingState, AccessMode::ATOMIC> {
    353  : public MarkingStateBase<IncrementalMarkingState, AccessMode::ATOMIC> {
    374  : public MarkingStateBase<MajorAtomicMarkingState, AccessMode::ATOMIC> {
|
D | incremental-marking.h | 84 static const AccessMode kAtomicity = AccessMode::ATOMIC;
|
D | mark-compact-inl.h |
    398  if (target_page->IsEvacuationCandidate<AccessMode::ATOMIC>() &&  in RecordSlot()
    399  !source_page->ShouldSkipEvacuationSlotRecording<AccessMode::ATOMIC>()) {  in RecordSlot()
|
D | remembered-set.h | 25 template <AccessMode access_mode = AccessMode::ATOMIC>
|
D | spaces.h |
    490  template <RememberedSetType type, AccessMode access_mode = AccessMode::ATOMIC>
    492  if (access_mode == AccessMode::ATOMIC)  in slot_set()
    497  template <RememberedSetType type, AccessMode access_mode = AccessMode::ATOMIC>
    499  if (access_mode == AccessMode::ATOMIC)  in typed_slot_set()
|
D | concurrent-marking.cc | 31 : public MarkingStateBase<ConcurrentMarkingState, AccessMode::ATOMIC> {
|
D | mark-compact.cc |
   2318  DCHECK_NULL((p->slot_set<OLD_TO_OLD, AccessMode::ATOMIC>()));  in EvacuateEpilogue()
   2319  DCHECK_NULL((p->typed_slot_set<OLD_TO_OLD, AccessMode::ATOMIC>()));  in EvacuateEpilogue()
|
D | factory.cc |
    160  chunk->SetFlag<AccessMode::ATOMIC>(MemoryChunk::HAS_PROGRESS_BAR);  in AllocateRawArray()
    380  chunk->SetFlag<AccessMode::ATOMIC>(MemoryChunk::HAS_PROGRESS_BAR);  in TryNewFixedArray()
|
/external/swiftshader/third_party/llvm-7.0/llvm/lib/Target/AMDGPU/ |
D | SIMemoryLegalizer.cpp |
     94  ATOMIC = GLOBAL | LDS | SCRATCH | GDS,  enumerator
    133  SIAtomicAddrSpace OrderingAddrSpace = SIAtomicAddrSpace::ATOMIC,  in SIMemOpInfo()
    431  SIAtomicAddrSpace::ATOMIC & InstrScope,  in toSIAtomicScope()
    435  SIAtomicAddrSpace::ATOMIC & InstrScope,  in toSIAtomicScope()
    439  SIAtomicAddrSpace::ATOMIC & InstrScope,  in toSIAtomicScope()
    443  SIAtomicAddrSpace::ATOMIC & InstrScope,  in toSIAtomicScope()
    447  SIAtomicAddrSpace::ATOMIC & InstrScope,  in toSIAtomicScope()
    525  ((OrderingAddrSpace & SIAtomicAddrSpace::ATOMIC) != OrderingAddrSpace)) {  in constructFromMIWithMMO()
    573  auto ScopeOrNone = toSIAtomicScope(SSID, SIAtomicAddrSpace::ATOMIC);  in getAtomicFenceInfo()
    586  ((OrderingAddrSpace & SIAtomicAddrSpace::ATOMIC) != OrderingAddrSpace)) {  in getAtomicFenceInfo()
          [all …]
|
/external/mesa3d/src/compiler/nir/ |
D | nir_intrinsics.h |
    159  #define ATOMIC(name, flags) \  macro
    169  ATOMIC(atomic_counter_inc, 0)
    170  ATOMIC(atomic_counter_dec, 0)
    171  ATOMIC(atomic_counter_read, NIR_INTRINSIC_CAN_ELIMINATE)
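Note: the `ATOMIC(name, flags)` entries above are part of an X-macro style intrinsic list. The real macro in nir_intrinsics.h expands into NIR's intrinsic records; the sketch below only illustrates the general X-macro pattern with made-up names (`ATOMIC_LIST`, `CAN_ELIMINATE`, `op_*`), not Mesa's actual definitions.

```cpp
// Sketch of an X-macro list: one list of (name, flags) pairs expanded twice,
// once into an enum and once into a parallel flags table.
#include <cstdio>

#define CAN_ELIMINATE 0x1u

#define ATOMIC_LIST(X)                      \
  X(atomic_counter_inc, 0)                  \
  X(atomic_counter_dec, 0)                  \
  X(atomic_counter_read, CAN_ELIMINATE)

// First expansion: an enumerator per intrinsic.
enum AtomicOp {
#define ATOMIC(name, flags) op_##name,
  ATOMIC_LIST(ATOMIC)
#undef ATOMIC
  op_count
};

// Second expansion: the flag value for each intrinsic, in the same order.
static const unsigned atomic_flags[op_count] = {
#define ATOMIC(name, flags) flags,
  ATOMIC_LIST(ATOMIC)
#undef ATOMIC
};

int main() {
  std::printf("atomic_counter_read flags: %u\n",
              atomic_flags[op_atomic_counter_read]);
}
```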
|
/external/compiler-rt/lib/tsan/rtl/ |
D | tsan_interface_atomic.cc |
    852  #define ATOMIC(func, ...) \  macro
    885  ATOMIC(Store, *(a32**)a, *(a32*)(a+8), mo_release);  in __tsan_go_atomic32_store()
    890  ATOMIC(Store, *(a64**)a, *(a64*)(a+8), mo_release);  in __tsan_go_atomic64_store()
|
/external/libnl/lib/ |
D | msg.c | 685 PRINT_FLAG(ATOMIC); in nl_nlmsg_flags2str()
|
/external/v8/src/ |
D | globals.h | 573 enum class AccessMode { ATOMIC, NON_ATOMIC }; enumerator
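Note: this two-value enum is what all of the v8/src/heap hits above are templated on; the same function body selects atomic or plain operations at compile time via the `access_mode` template parameter. Below is a minimal sketch of that dispatch pattern with an illustrative `SetBitsInCell`; it is not V8's real implementation.

```cpp
// Minimal sketch of the AccessMode-templated dispatch seen in the hits above
// (SetBitsInCell, LoadBucket, SetFlag, ...). Names are illustrative only.
#include <atomic>
#include <cstdint>

enum class AccessMode { ATOMIC, NON_ATOMIC };

template <AccessMode access_mode = AccessMode::ATOMIC>
void SetBitsInCell(std::atomic<uint32_t>* cell, uint32_t mask) {
  if (access_mode == AccessMode::ATOMIC) {
    // Concurrent markers may race on the same cell: use an atomic RMW.
    cell->fetch_or(mask, std::memory_order_relaxed);
  } else {
    // Single-threaded (stop-the-world) phase: a plain read-modify-write is enough.
    uint32_t value = cell->load(std::memory_order_relaxed);
    cell->store(value | mask, std::memory_order_relaxed);
  }
}

// Callers pick the mode explicitly, e.g.:
//   SetBitsInCell<AccessMode::ATOMIC>(&cell, mask);      // concurrent marking
//   SetBitsInCell<AccessMode::NON_ATOMIC>(&cell, mask);  // stop-the-world phase
```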
|
/external/skqp/src/compute/skc/platforms/cl_12/kernels/ |
D | prefix.cl | 166 // GPU/SIMT -- IMPLIES SUPPORT FOR ATOMIC SCATTER-ADD
|
D | render.cl | 592 // GPU/SIMT -- IMPLIES SUPPORT FOR ATOMIC SCATTER-ADD
|
/external/skia/src/compute/skc/platforms/cl_12/kernels/ |
D | prefix.cl | 166 // GPU/SIMT -- IMPLIES SUPPORT FOR ATOMIC SCATTER-ADD
|
D | render.cl | 592 // GPU/SIMT -- IMPLIES SUPPORT FOR ATOMIC SCATTER-ADD
|
/external/pcre/dist2/doc/ |
D | pcre2.txt |
   7667  ATOMIC GROUPING AND POSSESSIVE QUANTIFIERS
  10205  ATOMIC GROUPS
|