/third_party/node/deps/v8/src/heap/
incremental-marking.cc
     55 | : heap_(heap), | in IncrementalMarking()
    117 | return FLAG_incremental_marking && heap_->gc_state() == Heap::NOT_IN_GC && | in CanBeActivated()
    118 | heap_->deserialization_complete() && | in CanBeActivated()
    119 | !heap_->isolate()->serializer_enabled() && !heap_->IsShared(); | in CanBeActivated()
    123 | return heap_->OldGenerationSizeOfObjects() <= kV8ActivationThreshold && | in IsBelowActivationThresholds()
    124 | heap_->EmbedderSizeOfObjects() <= kEmbedderActivationThreshold; | in IsBelowActivationThresholds()
    129 | DCHECK(!heap_->IsShared()); | in Start()
    152 | DCHECK(heap_->gc_state() == Heap::NOT_IN_GC); | in Start()
    153 | DCHECK(!heap_->isolate()->serializer_enabled()); | in Start()
    155 | Counters* counters = heap_->isolate()->counters(); | in Start()
    [all …]
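The CanBeActivated() and IsBelowActivationThresholds() matches above show how incremental marking is gated: a flag, the current GC phase, and old-generation/embedder size thresholds must all agree before marking may start. A minimal standalone sketch of that gating pattern follows; the Heap type, the flag, and the threshold values are simplified stand-ins, not V8's actual definitions.

    #include <cstddef>

    // Simplified stand-ins for the V8 types referenced in the matches above.
    struct Heap {
      enum GCState { NOT_IN_GC, MARKING, SWEEPING };
      GCState gc_state() const { return state_; }
      size_t OldGenerationSizeOfObjects() const { return old_gen_bytes_; }
      size_t EmbedderSizeOfObjects() const { return embedder_bytes_; }
      GCState state_ = NOT_IN_GC;
      size_t old_gen_bytes_ = 0;
      size_t embedder_bytes_ = 0;
    };

    constexpr bool kFlagIncrementalMarking = true;             // stand-in for FLAG_incremental_marking
    constexpr size_t kV8ActivationThreshold = 8u << 20;        // hypothetical threshold: 8 MB
    constexpr size_t kEmbedderActivationThreshold = 8u << 20;  // hypothetical threshold: 8 MB

    class IncrementalMarking {
     public:
      explicit IncrementalMarking(Heap* heap) : heap_(heap) {}

      // Same shape as the CanBeActivated() matches: a conjunction of a global
      // flag, the GC phase, and heap-size checks gates activation.
      bool CanBeActivated() const {
        return kFlagIncrementalMarking && heap_->gc_state() == Heap::NOT_IN_GC &&
               IsBelowActivationThresholds();
      }

     private:
      bool IsBelowActivationThresholds() const {
        return heap_->OldGenerationSizeOfObjects() <= kV8ActivationThreshold &&
               heap_->EmbedderSizeOfObjects() <= kEmbedderActivationThreshold;
      }

      Heap* heap_;  // back-pointer to the owning heap, as in the matches
    };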
marking-barrier.cc
     26 | : heap_(heap), | in MarkingBarrier()
     27 | collector_(heap_->mark_compact_collector()), | in MarkingBarrier()
     28 | incremental_marking_(heap_->incremental_marking()), | in MarkingBarrier()
     30 | marking_state_(heap_->isolate()), | in MarkingBarrier()
     32 | is_shared_heap_(heap_->IsShared()) {} | in MarkingBarrier()
     35 | : heap_(local_heap->heap()), | in MarkingBarrier()
     36 | collector_(heap_->mark_compact_collector()), | in MarkingBarrier()
     39 | marking_state_(heap_->isolate()), | in MarkingBarrier()
     41 | is_shared_heap_(heap_->IsShared()) {} | in MarkingBarrier()
     61 | heap_->AddRetainingRoot(Root::kWriteBarrier, value); | in WriteWithoutHost()
    [all …]
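Both MarkingBarrier constructors above resolve their collaborators (mark-compact collector, incremental marking, shared-heap bit) from heap_ once, in the initializer list, so that barrier hits do not repeat the lookups. A sketch of the caching idiom with stand-in types:

    // Stand-ins for the components the matches resolve from heap_.
    struct MarkCompactCollector {};
    struct IncrementalMarking {};
    struct Heap {
      MarkCompactCollector* mark_compact_collector() { return &collector_; }
      IncrementalMarking* incremental_marking() { return &marking_; }
      bool IsShared() const { return false; }
      MarkCompactCollector collector_;
      IncrementalMarking marking_;
    };

    // Hot-path collaborators are cached up front. Note that members are
    // initialized in declaration order, so heap_ must be declared first for
    // the later initializers to read it safely.
    class MarkingBarrier {
     public:
      explicit MarkingBarrier(Heap* heap)
          : heap_(heap),
            collector_(heap_->mark_compact_collector()),
            incremental_marking_(heap_->incremental_marking()),
            is_shared_heap_(heap_->IsShared()) {}

     private:
      Heap* heap_;
      MarkCompactCollector* collector_;
      IncrementalMarking* incremental_marking_;
      bool is_shared_heap_;
    };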
heap-allocator.cc
     19 | HeapAllocator::HeapAllocator(Heap* heap) : heap_(heap) {} | in HeapAllocator()
     23 | spaces_[i] = heap_->space(i); | in Setup()
     30 | shared_old_allocator_ = heap_->shared_old_allocator_.get(); | in Setup()
     31 | shared_map_allocator_ = heap_->shared_map_allocator_ | in Setup()
     32 | ? heap_->shared_map_allocator_.get() | in Setup()
     43 | DCHECK_GT(size_in_bytes, heap_->MaxRegularHeapObjectSize(allocation)); | in AllocateRawLargeInternal()
     90 | heap_->CollectSharedGarbage(GarbageCollectionReason::kAllocationFailure); | in AllocateRawWithLightRetrySlowPath()
     92 | heap_->CollectGarbage(AllocationTypeToGCSpace(allocation), | in AllocateRawWithLightRetrySlowPath()
    110 | heap_->isolate()->counters()->gc_last_resort_from_handles()->Increment(); | in AllocateRawWithRetryOrFailSlowPath()
    112 | heap_->CollectSharedGarbage(GarbageCollectionReason::kLastResort); | in AllocateRawWithRetryOrFailSlowPath()
    [all …]
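The two slow-path functions above encode the retry policy: on failure, collect garbage and retry a bounded number of times; the retry-or-fail variant escalates to a last-resort collection before giving up. A simulated sketch of that control flow; the allocator, the GC calls, and the freed-byte bookkeeping are all fakes for illustration.

    #include <optional>

    // Fake heap: "collecting garbage" just makes bytes available again.
    struct Heap {
      int available = 0;
      void CollectGarbage() { available += 64; }                // light collection
      void CollectAllAvailableGarbage() { available += 1024; }  // last resort
    };

    class HeapAllocator {
     public:
      explicit HeapAllocator(Heap* heap) : heap_(heap) {}

      // Bounded GC+retry rounds, as in AllocateRawWithLightRetrySlowPath().
      std::optional<int> AllocateWithLightRetry(int size) {
        for (int attempt = 0; attempt < 2; ++attempt) {
          if (auto result = TryAllocate(size)) return result;
          heap_->CollectGarbage();
        }
        return TryAllocate(size);
      }

      // Escalation before failing, as in AllocateRawWithRetryOrFailSlowPath();
      // in the real code a failure at this point is fatal.
      std::optional<int> AllocateWithRetryOrFail(int size) {
        if (auto result = AllocateWithLightRetry(size)) return result;
        heap_->CollectAllAvailableGarbage();
        return TryAllocate(size);
      }

     private:
      std::optional<int> TryAllocate(int size) {
        if (heap_->available < size) return std::nullopt;
        heap_->available -= size;
        return size;  // stand-in for the allocated object
      }

      Heap* heap_;
    };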
stress-scavenge-observer.cc
     18 | heap_(heap), | in StressScavengeObserver()
     24 | heap_->isolate()->PrintWithTimestamp( | in StressScavengeObserver()
     31 | if (has_requested_gc_ || heap_->new_space()->Capacity() == 0) { | in Step()
     36 | heap_->new_space()->Size() * 100.0 / heap_->new_space()->Capacity(); | in Step()
     39 | heap_->isolate()->PrintWithTimestamp( | in Step()
     52 | heap_->isolate()->PrintWithTimestamp("[Scavenge] GC requested\n"); | in Step()
     56 | heap_->isolate()->stack_guard()->RequestGC(); | in Step()
     66 | heap_->new_space()->Size() * 100.0 / heap_->new_space()->Capacity(); | in RequestedGCDone()
     70 | heap_->isolate()->PrintWithTimestamp( | in RequestedGCDone()
     73 | heap_->isolate()->PrintWithTimestamp("[Scavenge] %d%% is the new limit\n", | in RequestedGCDone()
    [all …]
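Step() above measures how full the new space is as a percentage (Size() * 100.0 / Capacity(), lines 36 and 66) and, past a limit, asks for a scavenge through the stack guard. A sketch of that arithmetic; the space, the limit, and the request mechanism are simplified stand-ins.

    #include <cstddef>
    #include <cstdio>

    struct NewSpace {
      size_t Size() const { return used_; }
      size_t Capacity() const { return capacity_; }
      size_t used_ = 0;
      size_t capacity_ = 0;
    };

    class StressScavengeObserver {
     public:
      StressScavengeObserver(NewSpace* space, int limit_percent)
          : space_(space), limit_percent_(limit_percent) {}

      // Mirrors the Step() matches: guard against an empty space, compute the
      // fill ratio as a percentage, request a GC once the limit is crossed.
      void Step() {
        if (has_requested_gc_ || space_->Capacity() == 0) return;
        double current = space_->Size() * 100.0 / space_->Capacity();
        if (current >= limit_percent_) {
          std::printf("[Scavenge] GC requested at %.1f%%\n", current);
          has_requested_gc_ = true;  // real code calls stack_guard()->RequestGC()
        }
      }

     private:
      NewSpace* space_;
      int limit_percent_;
      bool has_requested_gc_ = false;
    };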
local-heap.cc
     50 | : heap_(heap), | in LocalHeap()
     59 | DCHECK_IMPLIES(!is_main_thread(), heap_->deserialization_complete()); | in LocalHeap()
     62 | heap_->safepoint()->AddLocalHeap(this, [this] { | in LocalHeap()
     65 | if (heap_->incremental_marking()->IsMarking()) { | in LocalHeap()
     67 | heap_->incremental_marking()->IsCompacting()); | in LocalHeap()
     83 | heap_->safepoint()->RemoveLocalHeap(this, [this] { | in ~LocalHeap()
    110 | std::make_unique<ConcurrentAllocator>(this, heap_->old_space()); | in SetUp()
    114 | std::make_unique<ConcurrentAllocator>(this, heap_->code_space()); | in SetUp()
    117 | if (heap_->isolate()->shared_isolate()) { | in SetUp()
    119 | std::make_unique<ConcurrentAllocator>(this, heap_->shared_old_space()); | in SetUp()
    [all …]
scavenger.cc
    212 | TRACE_GC_EPOCH(outer_->heap_->tracer(), | in Run()
    239 | PrintIsolate(outer_->heap_->isolate(), | in ProcessItems()
    264 | : isolate_(heap->isolate()), heap_(heap) {} | in ScavengerCollector()
    292 | Sweeper* sweeper = heap_->mark_compact_collector()->sweeper(); | in CollectGarbage()
    308 | new Scavenger(this, heap_, is_logging, &empty_chunks, &copied_list, | in CollectGarbage()
    314 | heap_, [&memory_chunks](MemoryChunk* chunk) { | in CollectGarbage()
    323 | heap_->tracer(), | in CollectGarbage()
    330 | TRACE_GC(heap_->tracer(), GCTracer::Scope::SCAVENGER_SCAVENGE_ROOTS); | in CollectGarbage()
    340 | heap_->IterateRoots(&root_scavenge_visitor, options); | in CollectGarbage()
    347 | TRACE_GC(heap_->tracer(), GCTracer::Scope::SCAVENGER_SCAVENGE_PARALLEL); | in CollectGarbage()
    [all …]
gc-tracer.cc
     50 | Isolate* isolate = tracer_->heap_->isolate(); | in AssertMainThread()
    166 | : heap_(heap), | in GCTracer()
    243 | heap_->isolate()->counters()->young_generation_handling()->AddSample( | in NotifyYoungGenerationHandling()
    268 | current_.reduce_memory = heap_->ShouldReduceMemory(); | in UpdateCurrentEvent()
    315 | current_.reduce_memory = heap_->ShouldReduceMemory(); | in StartCycle()
    337 | SampleAllocation(current_.start_time, heap_->NewSpaceAllocationCounter(), | in StartInSafepoint()
    338 | heap_->OldGenerationAllocationCounter(), | in StartInSafepoint()
    339 | heap_->EmbedderAllocationCounter()); | in StartInSafepoint()
    341 | current_.start_object_size = heap_->SizeOfObjects(); | in StartInSafepoint()
    342 | current_.start_memory_size = heap_->memory_allocator()->Size(); | in StartInSafepoint()
    [all …]
heap-inl.h
    577 | : ObjectVisitorWithCageBases(heap), heap_(heap) {} | in VerifyPointersVisitor()
    579 | AlwaysAllocateScope::AlwaysAllocateScope(Heap* heap) : heap_(heap) { | in AlwaysAllocateScope()
    580 | heap_->always_allocate_scope_count_++; | in AlwaysAllocateScope()
    584 | heap_->always_allocate_scope_count_--; | in ~AlwaysAllocateScope()
    588 | : heap_(heap) { | in OptionalAlwaysAllocateScope()
    589 | if (heap_) heap_->always_allocate_scope_count_++; | in OptionalAlwaysAllocateScope()
    593 | if (heap_) heap_->always_allocate_scope_count_--; | in ~OptionalAlwaysAllocateScope()
    600 | : heap_(heap) { | in CodeSpaceMemoryModificationScope()
    601 | DCHECK_EQ(ThreadId::Current(), heap_->isolate()->thread_id()); | in CodeSpaceMemoryModificationScope()
    602 | heap_->safepoint()->AssertActive(); | in CodeSpaceMemoryModificationScope()
    [all …]
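AlwaysAllocateScope and OptionalAlwaysAllocateScope above are RAII counters: the constructor increments always_allocate_scope_count_, the destructor decrements it, and the optional variant tolerates a null heap. A self-contained sketch of the same idiom (the Heap here is a bare stand-in):

    #include <cassert>

    struct Heap {
      int always_allocate_scope_count_ = 0;  // nonzero: allocation must not fail
    };

    // RAII guarantees the counter is balanced on every exit path.
    class AlwaysAllocateScope {
     public:
      explicit AlwaysAllocateScope(Heap* heap) : heap_(heap) {
        heap_->always_allocate_scope_count_++;
      }
      ~AlwaysAllocateScope() { heap_->always_allocate_scope_count_--; }
      // Copying would unbalance the counter, so forbid it.
      AlwaysAllocateScope(const AlwaysAllocateScope&) = delete;
      AlwaysAllocateScope& operator=(const AlwaysAllocateScope&) = delete;

     private:
      Heap* heap_;
    };

    // The optional variant from the matches: a null heap makes it a no-op.
    class OptionalAlwaysAllocateScope {
     public:
      explicit OptionalAlwaysAllocateScope(Heap* heap) : heap_(heap) {
        if (heap_) heap_->always_allocate_scope_count_++;
      }
      ~OptionalAlwaysAllocateScope() {
        if (heap_) heap_->always_allocate_scope_count_--;
      }

     private:
      Heap* heap_;
    };

    int main() {
      Heap heap;
      {
        AlwaysAllocateScope scope(&heap);
        assert(heap.always_allocate_scope_count_ == 1);
      }
      assert(heap.always_allocate_scope_count_ == 0);
    }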
finalization-registry-cleanup-task.cc
     19 | : CancelableTask(heap->isolate()), heap_(heap) {} | in FinalizationRegistryCleanupTask()
     32 | Isolate* isolate = heap_->isolate(); | in SlowAssertNoActiveJavaScript()
     39 | Isolate* isolate = heap_->isolate(); | in RunInternal()
     50 | if (!heap_->DequeueDirtyJSFinalizationRegistry().ToHandle( | in RunInternal()
     93 | heap_->EnqueueDirtyJSFinalizationRegistry(*finalization_registry, nop); | in RunInternal()
     97 | heap_->set_is_finalization_registry_cleanup_task_posted(false); | in RunInternal()
     98 | heap_->PostFinalizationRegistryCleanupTaskIfNeeded(); | in RunInternal()
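RunInternal() above is a self-rescheduling task: dequeue one dirty registry, do a slice of cleanup, re-enqueue it if work remains, clear the "task posted" bit, and conditionally re-post. A sketch of that scheduling shape, with a plain queue standing in for the heap's dirty-registry list and a print standing in for posting a task:

    #include <cstdio>
    #include <deque>

    struct Registry {
      int dirty_cells = 0;  // pending cleanup work
    };

    class CleanupScheduler {
     public:
      void Enqueue(Registry* r) {
        queue_.push_back(r);
        PostTaskIfNeeded();
      }

      // One task run: take one registry, do a bounded slice of work,
      // re-enqueue if still dirty, then allow the next task to be posted.
      void RunOneTask() {
        if (queue_.empty()) return;
        Registry* r = queue_.front();
        queue_.pop_front();
        if (r->dirty_cells > 0) --r->dirty_cells;
        if (r->dirty_cells > 0) queue_.push_back(r);
        task_posted_ = false;  // set_is_..._task_posted(false) in the source
        PostTaskIfNeeded();    // PostFinalizationRegistryCleanupTaskIfNeeded()
      }

     private:
      void PostTaskIfNeeded() {
        if (task_posted_ || queue_.empty()) return;
        task_posted_ = true;  // the real code posts a CancelableTask here
        std::puts("cleanup task posted");
      }

      std::deque<Registry*> queue_;
      bool task_posted_ = false;
    };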
evacuation-allocator.h
     26 | : heap_(heap), | in EvacuationAllocator()
     35 | heap_->old_space()->MergeCompactionSpace(compaction_spaces_.Get(OLD_SPACE)); | in Finalize()
     36 | heap_->code_space()->MergeCompactionSpace( | in Finalize()
     38 | if (heap_->map_space()) { | in Finalize()
     39 | heap_->map_space()->MergeCompactionSpace( | in Finalize()
     66 | Heap* const heap_; | variable
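Finalize() above hands every page of the per-evacuation compaction spaces back to the corresponding main spaces in one merge step. A sketch of that merge-on-finalize pattern with a trivial page list:

    #include <vector>

    // Stand-in space: a compaction space's pages are absorbed wholesale.
    struct Space {
      std::vector<int> pages;
      void MergeCompactionSpace(Space* other) {
        pages.insert(pages.end(), other->pages.begin(), other->pages.end());
        other->pages.clear();
      }
    };

    // Mirrors Finalize(): the evacuation-local space is merged back exactly
    // once, when evacuation is done, instead of page by page during it.
    class EvacuationAllocatorSketch {
     public:
      explicit EvacuationAllocatorSketch(Space* old_space)
          : old_space_(old_space) {}
      Space* compaction_space() { return &compaction_space_; }
      void Finalize() { old_space_->MergeCompactionSpace(&compaction_space_); }

     private:
      Space* old_space_;
      Space compaction_space_;
    };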
array-buffer-sweeper.cc
    101 | ArrayBufferSweeper::ArrayBufferSweeper(Heap* heap) : heap_(heap) {} | in ArrayBufferSweeper()
    112 | TRACE_GC(heap_->tracer(), GCTracer::Scope::MC_COMPLETE_SWEEP_ARRAY_BUFFERS); | in EnsureFinished()
    114 | heap_->isolate()->cancelable_task_manager()->TryAbort(job_->id_); | in EnsureFinished()
    137 | DCHECK_LE(heap_->backing_store_bytes(), SIZE_MAX); | in EnsureFinished()
    157 | if (!heap_->IsTearingDown() && !heap_->ShouldReduceMemory() && | in RequestSweep()
    159 | auto task = MakeCancelableTask(heap_->isolate(), [this, type] { | in RequestSweep()
    164 | TRACE_GC_EPOCH(heap_->tracer(), scope_id, ThreadKind::kBackground); | in RequestSweep()
    259 | heap_->IncrementExternalBackingStoreBytes( | in IncrementExternalMemoryCounters()
    261 | reinterpret_cast<v8::Isolate*>(heap_->isolate()) | in IncrementExternalMemoryCounters()
    267 | heap_->DecrementExternalBackingStoreBytes( | in DecrementExternalMemoryCounters()
    [all …]
mark-compact.cc
     96 | : ObjectVisitorWithCageBases(heap), heap_(heap) {} | in MarkingVerifier()
    141 | Heap* heap_; | member in v8::internal::__anon9cdfb8530111::MarkingVerifier
    145 | heap_->IterateRootsIncludingClients(this, | in VerifyRoots()
    218 | VerifyMarking(heap_->new_space()); | in Run()
    219 | VerifyMarking(heap_->new_lo_space()); | in Run()
    220 | VerifyMarking(heap_->old_space()); | in Run()
    221 | VerifyMarking(heap_->code_space()); | in Run()
    222 | if (heap_->map_space()) VerifyMarking(heap_->map_space()); | in Run()
    223 | VerifyMarking(heap_->lo_space()); | in Run()
    224 | VerifyMarking(heap_->code_lo_space()); | in Run()
    [all …]
heap-allocator-inl.h
     58 | DCHECK_EQ(heap_->gc_state(), Heap::NOT_IN_GC); | in AllocateRaw()
     68 | if (!heap_->always_allocate() && allocation_timeout_-- <= 0) { | in AllocateRaw()
     78 | if (heap_->CanSafepoint()) { | in AllocateRaw()
     79 | heap_->main_thread_local_heap()->Safepoint(); | in AllocateRaw()
     82 | const size_t large_object_threshold = heap_->MaxRegularHeapObjectSize(type); | in AllocateRaw()
     90 | allocation = heap_->tp_heap_->Allocate(size_in_bytes, type, alignment); | in AllocateRaw()
    135 | heap_->UnprotectAndRegisterMemoryChunk( | in AllocateRaw()
    137 | heap_->ZapCodeObject(object.address(), size_in_bytes); | in AllocateRaw()
    153 | for (auto& tracker : heap_->allocation_trackers_) { | in AllocateRaw()
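In the AllocateRaw() matches above, the request size chooses the path: anything above MaxRegularHeapObjectSize() is routed to the large-object allocator. A sketch of that dispatch; the threshold value and return strings are illustrative only.

    #include <cstddef>

    enum class AllocationType { kYoung, kOld, kCode };

    struct Heap {
      size_t MaxRegularHeapObjectSize(AllocationType) const {
        return 128 * 1024;  // hypothetical threshold
      }
    };

    // Size decides between the regular linear-allocation path and the
    // large-object path, as on line 82 of the matches.
    const char* AllocateRaw(Heap* heap, size_t size_in_bytes,
                            AllocationType type) {
      const size_t large_object_threshold = heap->MaxRegularHeapObjectSize(type);
      if (size_in_bytes > large_object_threshold) {
        return "large object path";  // AllocateRawLargeInternal() in the source
      }
      return "regular path";
    }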
collection-barrier.cc
     40 | : CancelableTask(heap->isolate()), heap_(heap) {} | in BackgroundCollectionInterruptTask()
     50 | void RunInternal() override { heap_->CheckCollectionRequested(); } | in RunInternal()
     52 | Heap* heap_; | member in v8::internal::BackgroundCollectionInterruptTask
    113 | Isolate* isolate = heap_->isolate(); | in ActivateStackGuardAndPostTask()
    119 | ->PostTask(std::make_unique<BackgroundCollectionInterruptTask>(heap_)); | in ActivateStackGuardAndPostTask()
    134 | heap_->isolate() | in StopTimeToCollectionTimer()
concurrent-marking.cc
    422 | TRACE_GC_EPOCH(concurrent_marking_->heap_->tracer(), | in Run()
    444 | : heap_(heap), | in ConcurrentMarking()
    461 | auto* cpp_heap = CppHeap::From(heap_->cpp_heap()); | in Run()
    468 | task_id, &local_marking_worklists, &local_weak_objects, heap_, | in Run()
    470 | heap_->local_embedder_heap_tracer()->InUse(), should_keep_ages_unchanged, | in Run()
    477 | Isolate* isolate = heap_->isolate(); | in Run()
    513 | if (heap_->new_space()) { | in Run()
    515 | new_space_top = heap_->new_space()->original_top_acquire(); | in Run()
    516 | new_space_limit = heap_->new_space()->original_limit_relaxed(); | in Run()
    519 | if (heap_->new_lo_space()) { | in Run()
    [all …]
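Before marking, Run() above snapshots the new space's bump-pointer bounds via original_top_acquire() and original_limit_relaxed() instead of re-reading them while iterating. A sketch of that one-shot snapshot with std::atomic; the memory orders mirror the accessor names.

    #include <atomic>
    #include <cstdint>

    // Bounds published by the main thread, read by concurrent markers.
    struct NewSpace {
      std::atomic<uintptr_t> original_top{0};
      std::atomic<uintptr_t> original_limit{0};
    };

    struct NewSpaceSnapshot {
      uintptr_t top;
      uintptr_t limit;
    };

    // Take the snapshot once, up front, so the marking loop works against a
    // consistent [top, limit) range rather than racy re-reads.
    NewSpaceSnapshot TakeSnapshot(const NewSpace& space) {
      NewSpaceSnapshot s;
      s.top = space.original_top.load(std::memory_order_acquire);
      s.limit = space.original_limit.load(std::memory_order_relaxed);
      return s;
    }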
base-space.h
     28 | DCHECK_NOT_NULL(heap_); | in heap()
     29 | return heap_; | in heap()
     66 | : heap_(heap), id_(id), committed_(0), max_committed_(0) {} | in BaseSpace()
     71 | Heap* heap_;
object-stats.h
     97 | explicit ObjectStats(Heap* heap) : heap_(heap) { ClearObjectStats(true); } | in ObjectStats()
    135 | Heap* heap() { return heap_; } | in heap()
    155 | Heap* heap_; | variable
    180 | : heap_(heap), live_(live), dead_(dead) { | in ObjectStatsCollector()
    181 | DCHECK_NOT_NULL(heap_); | in ObjectStatsCollector()
    191 | Heap* const heap_;
sweeper.cc
     23 | : heap_(heap), | in Sweeper()
    151 | should_reduce_memory_ = heap_->ShouldReduceMemory(); | in StartSweeping()
    153 | heap_->mark_compact_collector()->non_atomic_marking_state(); | in StartSweeping()
    174 | !heap_->delay_sweeper_tasks_for_testing_) { | in StartSweeperTasks()
    177 | std::make_unique<SweeperJob>(heap_->isolate(), this)); | in StartSweeperTasks()
    389 | PtrComprCageBase cage_base(heap_->isolate()); | in RawSweep()
    476 | TRACE_GC_EPOCH(heap_->tracer(), GCTracer::Scope::MC_INCREMENTAL_SWEEPING, | in IncrementalSweepSpace()
    478 | const double start = heap_->MonotonicallyIncreasingTimeInMs(); | in IncrementalSweepSpace()
    482 | const double duration = heap_->MonotonicallyIncreasingTimeInMs() - start; | in IncrementalSweepSpace()
    483 | heap_->tracer()->AddIncrementalSweepingStep(duration); | in IncrementalSweepSpace()
    [all …]
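IncrementalSweepSpace() above brackets one sweeping slice with a monotonic clock and reports the elapsed time to the tracer, which can then size future slices. A sketch of the measure-and-report step; the tracer is a stand-in and the slice body is elided.

    #include <chrono>
    #include <cstdio>

    struct TracerStandIn {
      void AddIncrementalSweepingStep(double ms) {
        std::printf("incremental sweep step: %.3f ms\n", ms);
      }
    };

    // Monotonic milliseconds, like heap_->MonotonicallyIncreasingTimeInMs().
    double MonotonicallyIncreasingTimeInMs() {
      using namespace std::chrono;
      return duration<double, std::milli>(steady_clock::now().time_since_epoch())
          .count();
    }

    // Time one bounded slice of sweeping and feed the duration back.
    void IncrementalSweepStep(TracerStandIn* tracer) {
      const double start = MonotonicallyIncreasingTimeInMs();
      // ... sweep one page here ...
      const double duration = MonotonicallyIncreasingTimeInMs() - start;
      tracer->AddIncrementalSweepingStep(duration);
    }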
object-stats.cc
    392 | Isolate* isolate() { return heap_->isolate(); } | in isolate()
    451 | Heap* heap_; | member in v8::internal::ObjectStatsCollectorImpl
    461 | : heap_(heap), | in ObjectStatsCollectorImpl()
    466 | heap_, &stats->tagged_fields_count_, &stats->embedder_fields_count_, | in ObjectStatsCollectorImpl()
    478 | if (obj == ReadOnlyRoots(heap_).empty_property_array()) return false; | in ShouldRecordObject()
    595 | if (properties != ReadOnlyRoots(heap_).empty_property_array()) { | in RecordVirtualJSObjectDetails()
    620 | if (elements != ReadOnlyRoots(heap_).empty_fixed_array()) { | in RecordVirtualJSObjectDetails()
    710 | GetFeedbackSlotType(vector.Get(slot), it.kind(), heap_->isolate()), | in RecordVirtualFeedbackVectorDetails()
    800 | Object list = heap_->allocation_sites_list(); | in CollectGlobalStatistics()
    808 | RecordSimpleVirtualObjectStats(HeapObject(), heap_->serialized_objects(), | in CollectGlobalStatistics()
    [all …]
safepoint.cc
     30 | : heap_(heap), local_heaps_head_(nullptr), active_safepoint_scopes_(0) {} | in IsolateSafepoint()
     44 | TRACE_GC(heap_->tracer(), GCTracer::Scope::TIME_TO_SAFEPOINT); | in EnterLocalSafepointScope()
     91 | : CancelableTask(heap->isolate()), heap_(heap) {} | in GlobalSafepointInterruptTask()
    100 | void RunInternal() override { heap_->main_thread_local_heap()->Safepoint(); } | in RunInternal()
    102 | Heap* heap_; | member in v8::internal::GlobalSafepointInterruptTask
    119 | ->PostTask(std::make_unique<GlobalSafepointInterruptTask>(heap_)); | in InitiateGlobalSafepointScopeRaw()
    272 | DCHECK_EQ(local_heaps_head_, heap_->main_thread_local_heap()); | in AssertMainThreadIsOnlyThread()
    273 | DCHECK_NULL(heap_->main_thread_local_heap()->next_); | in AssertMainThreadIsOnlyThread()
    276 | Isolate* IsolateSafepoint::isolate() const { return heap_->isolate(); } | in isolate()
/third_party/node/deps/v8/src/utils/
identity-map.cc
     28 | heap_->UnregisterStrongRoots(strong_roots_entry_); | in Clear()
     52 | Address not_mapped = ReadOnlyRoots(heap_).not_mapped_symbol().ptr(); | in ScanKeysFor()
     66 | DCHECK_EQ(gc_counter_, heap_->gc_count()); | in InsertKey()
     73 | Address not_mapped = ReadOnlyRoots(heap_).not_mapped_symbol().ptr(); | in InsertKey()
     95 | Address not_mapped = ReadOnlyRoots(heap_).not_mapped_symbol().ptr(); | in DeleteIndex()
    136 | if (index < 0 && gc_counter_ != heap_->gc_count()) { | in Lookup()
    151 | if (gc_counter_ != heap_->gc_count()) Rehash(); | in LookupOrInsert()
    161 | CHECK_NE(address, ReadOnlyRoots(heap_).not_mapped_symbol().ptr()); | in Hash()
    200 | gc_counter_ = heap_->gc_count(); | in InsertEntry()
    203 | Address not_mapped = ReadOnlyRoots(heap_).not_mapped_symbol().ptr(); | in InsertEntry()
    [all …]
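Several matches above compare a cached gc_counter_ against heap_->gc_count(): the map hashes keys by address, a moving GC can relocate them, so any operation that notices an intervening GC must rehash first (line 151). A sketch of that invalidation check; the storage is simplified, so the rehash point is only marked rather than performed.

    #include <cstdint>
    #include <unordered_map>

    // Stand-in for the heap's monotonically increasing GC counter.
    struct Heap {
      uint64_t gc_count() const { return gc_count_; }
      uint64_t gc_count_ = 0;
    };

    class IdentityMapSketch {
     public:
      explicit IdentityMapSketch(Heap* heap) : heap_(heap) {}

      void Insert(void* key, int value) {
        RehashIfNeeded();
        map_[key] = value;
      }

      int* Lookup(void* key) {
        RehashIfNeeded();  // addresses may be stale after a moving GC
        auto it = map_.find(key);
        return it == map_.end() ? nullptr : &it->second;
      }

     private:
      // The cached counter detects GCs that happened since the last call.
      void RehashIfNeeded() {
        if (gc_counter_ != heap_->gc_count()) {
          gc_counter_ = heap_->gc_count();
          // The real map rebuilds its buckets because keys moved; nothing
          // moves in this sketch, so there is no work to redo here.
        }
      }

      Heap* heap_;
      uint64_t gc_counter_ = 0;
      std::unordered_map<void*, int> map_;
    };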
/third_party/node/deps/v8/src/heap/cppgc/
pointer-policies.cc
     47 | if (!heap_) { | in CheckPointerImpl()
     48 | heap_ = &base_page->heap(); | in CheckPointerImpl()
     49 | if (!heap_->page_backend()->Lookup(reinterpret_cast<Address>(this))) { | in CheckPointerImpl()
     59 | DCHECK_EQ(heap_, &base_page->heap()); | in CheckPointerImpl()
     61 | DCHECK_EQ(heap_->GetCreationThreadId(), v8::base::OS::GetCurrentThreadId()); | in CheckPointerImpl()
     80 | if (heap_->prefinalizer_handler()->IsInvokingPreFinalizers()) { | in CheckPointerImpl()
     82 | const auto* slot_page = BasePage::FromInnerAddress(heap_, this); | in CheckPointerImpl()
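CheckPointerImpl() above caches heap_ lazily from the first pointer it verifies, then asserts that every later pointer belongs to the same heap. A sketch of the lazy-cache-and-verify idiom, with the page-to-heap lookup reduced to a parameter:

    #include <cassert>

    struct HeapBase {};  // stand-in

    // The first successful check pins the owning heap; every subsequent
    // check must agree with it, as in the DCHECK_EQ on line 59.
    class CheckingPolicySketch {
     public:
      void CheckPointer(HeapBase* pointee_heap) {
        if (!heap_) {
          heap_ = pointee_heap;  // lazy initialization on first use
        }
        assert(heap_ == pointee_heap && "pointer crosses heap boundaries");
      }

     private:
      HeapBase* heap_ = nullptr;
    };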
process-heap.h
     30 | HeapBase& heap_;
     43 | HeapRegistry::Subscription::Subscription(HeapBase& heap) : heap_(heap) { | in Subscription()
     44 | HeapRegistry::RegisterHeap(heap_); | in Subscription()
     48 | HeapRegistry::UnregisterHeap(heap_); | in ~Subscription()
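HeapRegistry::Subscription above is RAII around registration: constructing it registers the heap, destroying it unregisters. A self-contained sketch of the same idiom with a vector-backed registry:

    #include <algorithm>
    #include <vector>

    struct HeapBase {};  // stand-in

    class HeapRegistry {
     public:
      // A heap stays registered exactly as long as its Subscription lives.
      class Subscription {
       public:
        explicit Subscription(HeapBase& heap) : heap_(heap) {
          HeapRegistry::RegisterHeap(heap_);
        }
        ~Subscription() { HeapRegistry::UnregisterHeap(heap_); }
        Subscription(const Subscription&) = delete;
        Subscription& operator=(const Subscription&) = delete;

       private:
        HeapBase& heap_;
      };

      static void RegisterHeap(HeapBase& heap) { heaps_.push_back(&heap); }
      static void UnregisterHeap(HeapBase& heap) {
        heaps_.erase(std::remove(heaps_.begin(), heaps_.end(), &heap),
                     heaps_.end());
      }

     private:
      static std::vector<HeapBase*> heaps_;
    };

    std::vector<HeapBase*> HeapRegistry::heaps_;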
compactor.cc
     42 | explicit MovableReferences(HeapBase& heap) : heap_(heap) {} | in MovableReferences()
     58 | HeapBase& heap_; | member in cppgc::internal::__anon07b4ead80111::MovableReferences
     83 | const BasePage* slot_page = BasePage::FromInnerAddress(&heap_, slot); | in AddOrFilter()
    100 | const BasePage* value_page = BasePage::FromInnerAddress(&heap_, value); | in AddOrFilter()
    438 | Compactor::Compactor(RawHeap& heap) : heap_(heap) { | in Compactor()
    439 | for (auto& space : heap_) { | in Compactor()
    498 | StatsCollector::EnabledScope stats_scope(heap_.heap()->stats_collector(), | in CompactSpacesIfEnabled()
    501 | MovableReferences movable_references(*heap_.heap()); | in CompactSpacesIfEnabled()
    521 | DCHECK_NULL(heap_.heap()->marker()); | in EnableForNextGCForTesting()
/third_party/node/deps/v8/src/heap/cppgc-js/
unified-heap-marking-state.cc
     15 | : heap_(heap), | in UnifiedHeapMarkingState()
     16 | marking_state_(heap_ ? heap_->mark_compact_collector()->marking_state() | in UnifiedHeapMarkingState()
     22 | DCHECK_IMPLIES(heap_, marking_state_); | in UnifiedHeapMarkingState()
     28 | DCHECK_NOT_NULL(heap_); | in Update()