/external/v8/src/heap/cppgc/
garbage-collector.h
   26  MarkingType::kAtomic, SweepingType::kAtomic};    in ConservativeAtomicConfig()
   31  MarkingType::kAtomic, SweepingType::kAtomic};    in PreciseAtomicConfig()
   36  MarkingType::kIncremental, SweepingType::kAtomic};    in ConservativeIncrementalConfig()
   41  MarkingType::kIncremental, SweepingType::kAtomic};    in PreciseIncrementalConfig()
   46  MarkingType::kAtomic, SweepingType::kAtomic};    in MinorPreciseAtomicConfig()
   51  MarkingType marking_type = MarkingType::kAtomic;
   52  SweepingType sweeping_type = SweepingType::kAtomic;
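The garbage-collector.h hits show the pattern: each collection config pairs a MarkingType with a SweepingType, the named presets spell out the combinations, and both fields default to kAtomic (stop-the-world). A minimal self-contained sketch of that pattern, not the actual cppgc header (only the enumerator and preset names follow the hits above; everything else is simplified):

#include <cstdint>

// Simplified illustration of the config layout suggested by the hits above.
struct Config {
  enum class MarkingType : uint8_t { kAtomic, kIncremental };
  enum class SweepingType : uint8_t { kAtomic, kIncrementalAndConcurrent };

  static constexpr Config PreciseAtomicConfig() {
    return {MarkingType::kAtomic, SweepingType::kAtomic};
  }
  static constexpr Config PreciseIncrementalConfig() {
    return {MarkingType::kIncremental, SweepingType::kAtomic};
  }

  // Mirrors garbage-collector.h:51-52: both modes default to atomic.
  MarkingType marking_type = MarkingType::kAtomic;
  SweepingType sweeping_type = SweepingType::kAtomic;
};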
marking-state.h
  186  if (header.IsInConstruction<AccessMode::kAtomic>()) {    in MarkAndPush()
  187  not_fully_constructed_worklist_.Push<AccessMode::kAtomic>(&header);    in MarkAndPush()
  198  DCHECK(!header.IsFree<AccessMode::kAtomic>());    in MarkNoPush()
  211  DCHECK(header.IsMarked<AccessMode::kAtomic>());    in PushMarked()
  212  DCHECK(!header.IsInConstruction<AccessMode::kAtomic>());    in PushMarked()
  226  .IsMarked<AccessMode::kAtomic>())    in RegisterWeakReferenceIfNeeded()
  238  weak_containers_worklist_.Push<AccessMode::kAtomic>(&header);    in RegisterWeakContainer()
  250  if (header.IsInConstruction<AccessMode::kAtomic>()) {    in ProcessWeakContainer()
  251  not_fully_constructed_worklist_.Push<AccessMode::kAtomic>(&header);    in ProcessWeakContainer()
  275  if (HeapObjectHeader::FromPayload(key).IsMarked<AccessMode::kAtomic>()) {    in ProcessEphemeron()
  [all …]
heap.cc
   45  internal::GarbageCollector::Config::MarkingType::kAtomic,    in ForceGarbageCollectionSlow()
   46  internal::GarbageCollector::Config::SweepingType::kAtomic});    in ForceGarbageCollectionSlow()
   93  DCHECK_EQ(Config::MarkingType::kAtomic, config.marking_type);    in CollectGarbage()
  108  DCHECK_NE(Config::MarkingType::kAtomic, config.marking_type);    in StartIncrementalGarbageCollection()
  123  DCHECK_NE(Config::MarkingType::kAtomic, config_.marking_type);    in FinalizeIncrementalGarbageCollectionIfRunning()
trace-trait.cc
   19  ->ObjectHeaderFromInnerAddress<AccessMode::kAtomic>(address);    in GetTraceDescriptor()
   21  header.GetGCInfoIndex<AccessMode::kAtomic>())    in GetTraceDescriptor()
concurrent-marker.cc
  108  DynamicallyTraceMarkedObject<AccessMode::kAtomic>(    in ProcessWorklists()
  124  DCHECK(!header.IsInConstruction<AccessMode::kAtomic>());    in ProcessWorklists()
  125  DCHECK(header.IsMarked<AccessMode::kAtomic>());    in ProcessWorklists()
  141  DynamicallyTraceMarkedObject<AccessMode::kAtomic>(    in ProcessWorklists()
marker.cc
  222  config_.marking_type = MarkingConfig::MarkingType::kAtomic;    in EnterAtomicPause()
  261  DCHECK_EQ(MarkingConfig::MarkingType::kAtomic, config_.marking_type);    in ProcessWeakness()
  285  if (config_.marking_type == MarkingConfig::MarkingType::kAtomic) {    in VisitRoots()
  344  DCHECK_NE(MarkingConfig::MarkingType::kAtomic, config_.marking_type);    in AdvanceMarkingWithDeadline()
  357  if ((config_.marking_type == MarkingConfig::MarkingType::kAtomic) ||    in ProcessWorklistsWithDeadline()
  453  DCHECK_EQ(MarkingConfig::MarkingType::kAtomic, config_.marking_type);    in NotifyCompactionCancelled()
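The marker.cc hits also show kAtomic acting as a state: incremental steps (AdvanceMarkingWithDeadline) assert the config is not yet atomic, EnterAtomicPause rewrites it to kAtomic, and weakness processing asserts the pause has been entered. A rough sketch of that transition, with every name other than MarkingType assumed purely for illustration:

#include <cassert>

enum class MarkingType { kAtomic, kIncremental };

// Placeholder class, not the real marker; it only models the config flips
// implied by marker.cc:222, :261 and :344.
class MarkerSketch {
 public:
  explicit MarkerSketch(MarkingType type) : marking_type_(type) {}

  void AdvanceMarkingWithDeadline() {
    assert(marking_type_ != MarkingType::kAtomic);  // incremental steps only
    // ... mark a bounded chunk of the worklists ...
  }

  void EnterAtomicPause() {
    marking_type_ = MarkingType::kAtomic;  // finish the rest synchronously
  }

  void ProcessWeakness() {
    assert(marking_type_ == MarkingType::kAtomic);  // only after the pause
    // ... run weak callbacks ...
  }

 private:
  MarkingType marking_type_;
};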
marker.h
   48  kAtomic,    enumerator
  219  .Push<AccessMode::kAtomic>(&header);    in WriteBarrierForInConstructionObject()
marking-state.cc
   14  not_fully_constructed_worklist_.Extract<AccessMode::kAtomic>();    in FlushNotFullyConstructedObjects()
sweeper.h
   25  enum class SweepingType : uint8_t { kAtomic, kIncrementalAndConcurrent };    enumerator
globals.h
   24  enum class AccessMode : uint8_t { kNonAtomic, kAtomic };    enumerator
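globals.h defines the AccessMode enum that the accessors above (marking-state.h, heap-page.h, object-allocator.h, sweeper.cc) take as a template parameter to choose between plain and atomic access to shared object-header bits. A minimal sketch of that dispatch, assuming a std::atomic-based implementation and a hypothetical one-bit mark layout (cppgc's actual HeapObjectHeader encodes more state and goes through its own atomic helpers):

#include <atomic>
#include <cstdint>

enum class AccessMode : uint8_t { kNonAtomic, kAtomic };

// Sketch: the same accessor is instantiated for concurrent callers (atomic
// loads and read-modify-writes) and single-threaded callers (plain access).
class HeaderSketch {
 public:
  template <AccessMode mode = AccessMode::kNonAtomic>
  bool IsMarked() const {
    return (Load<mode>() & kMarkedBit) != 0;
  }

  template <AccessMode mode = AccessMode::kNonAtomic>
  void Mark() {
    if constexpr (mode == AccessMode::kAtomic) {
      // Concurrent markers need an atomic RMW on the shared bits.
      reinterpret_cast<std::atomic<uint16_t>*>(&bits_)->fetch_or(
          kMarkedBit, std::memory_order_relaxed);
    } else {
      bits_ |= kMarkedBit;
    }
  }

 private:
  static constexpr uint16_t kMarkedBit = 1u << 0;

  template <AccessMode mode>
  uint16_t Load() const {
    if constexpr (mode == AccessMode::kAtomic) {
      return reinterpret_cast<const std::atomic<uint16_t>*>(&bits_)->load(
          std::memory_order_relaxed);
    } else {
      return bits_;
    }
  }

  uint16_t bits_ = 0;
};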
write-barrier.cc
   42  header.Unmark<AccessMode::kAtomic>();    in MarkValue()
sweeper.cc
  117  header->Unmark<AccessMode::kAtomic>();    in StickyUnmark()
  181  constexpr auto kAtomicAccess = AccessMode::kAtomic;    in SweepNormalPage()
  507  if (config.sweeping_type == SweepingConfig::SweepingType::kAtomic) {    in Start()
object-allocator.h
  134  .SetBit<AccessMode::kAtomic>(reinterpret_cast<ConstAddress>(header));    in AllocateObjectOnSpace()
marking-worklists.h
  139  AccessMode::kAtomic> {
heap-page.h
  249  header->GetSize<AccessMode::kAtomic>());    in ObjectHeaderFromInnerAddressImpl()
object-start-bitmap.h
  219  if (mode == AccessMode::kAtomic) {    in ShouldForceNonAtomic()
compactor.cc
  439  (marking_type == GarbageCollector::Config::MarkingType::kAtomic &&    in ShouldCompact()
/external/vixl/src/aarch64/
cpu-aarch64.cc
   51  const IDRegister::Field AA64ISAR0::kAtomic(20);    member in vixl::aarch64::AA64ISAR0
   99  if (Get(kAtomic) >= 1) f.Combine(CPUFeatures::kAtomics);    in GetCPUFeatures()
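In the vixl hits, kAtomic names the Atomic field of ID_AA64ISAR0_EL1 (a 4-bit field starting at bit 20, i.e. bits [23:20]); a value of 1 or more is reported as CPUFeatures::kAtomics, meaning the LSE atomic instructions are implemented. A standalone sketch of that decode without the vixl IDRegister wrappers (the register value below is a hard-coded sample, not read from hardware):

#include <cstdint>
#include <cstdio>

// Extract a narrow field from an AArch64 ID register value.
constexpr uint64_t ExtractField(uint64_t reg, unsigned lsb, unsigned width = 4) {
  return (reg >> lsb) & ((uint64_t{1} << width) - 1);
}

int main() {
  // Sample value for illustration only: Atomic == 0b0010, i.e. FEAT_LSE present.
  const uint64_t id_aa64isar0 = uint64_t{0x2} << 20;

  if (ExtractField(id_aa64isar0, /*lsb=*/20) >= 1) {
    std::printf("LSE atomics available\n");
  }
  return 0;
}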
cpu-aarch64.h
  120  static const Field kAtomic;    variable
/external/v8/src/heap/cppgc-js/
cpp-heap.cc
  205  UnifiedHeapMarker::MarkingConfig::MarkingType::kAtomic,    in EnterFinalPause()