// Copyright 2020 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/heap/cppgc-js/cpp-heap.h"

#include <cstdint>
#include <memory>
#include <numeric>

#include "include/cppgc/heap-consistency.h"
#include "include/cppgc/platform.h"
#include "include/v8-isolate.h"
#include "include/v8-local-handle.h"
#include "include/v8-platform.h"
#include "src/base/logging.h"
#include "src/base/macros.h"
#include "src/base/platform/platform.h"
#include "src/base/platform/time.h"
#include "src/execution/isolate-inl.h"
#include "src/flags/flags.h"
#include "src/handles/global-handles.h"
#include "src/handles/handles.h"
#include "src/heap/base/stack.h"
#include "src/heap/cppgc-js/cpp-marking-state.h"
#include "src/heap/cppgc-js/cpp-snapshot.h"
#include "src/heap/cppgc-js/unified-heap-marking-state.h"
#include "src/heap/cppgc-js/unified-heap-marking-verifier.h"
#include "src/heap/cppgc-js/unified-heap-marking-visitor.h"
#include "src/heap/cppgc/concurrent-marker.h"
#include "src/heap/cppgc/gc-info-table.h"
#include "src/heap/cppgc/heap-base.h"
#include "src/heap/cppgc/heap-object-header.h"
#include "src/heap/cppgc/marker.h"
#include "src/heap/cppgc/marking-state.h"
#include "src/heap/cppgc/marking-visitor.h"
#include "src/heap/cppgc/metric-recorder.h"
#include "src/heap/cppgc/object-allocator.h"
#include "src/heap/cppgc/prefinalizer-handler.h"
#include "src/heap/cppgc/raw-heap.h"
#include "src/heap/cppgc/stats-collector.h"
#include "src/heap/cppgc/sweeper.h"
#include "src/heap/cppgc/unmarker.h"
#include "src/heap/embedder-tracing-inl.h"
#include "src/heap/embedder-tracing.h"
#include "src/heap/gc-tracer.h"
#include "src/heap/marking-worklist.h"
#include "src/heap/sweeper.h"
#include "src/init/v8.h"
#include "src/profiler/heap-profiler.h"

namespace v8 {

namespace {

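// Visits all traced global handles and, for each V8 object that wraps a
// cppgc-managed (wrappable) object, marks the C++ side and pushes it onto the
// marking worklist. Within this file it is only used for minor collections
// (see EnterFinalPause), where V8 -> C++ references act as roots.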
class V8ToCppGCReferencesVisitor final
    : public v8::EmbedderHeapTracer::TracedGlobalHandleVisitor {
 public:
  V8ToCppGCReferencesVisitor(
      cppgc::internal::MutatorMarkingState& marking_state,
      v8::internal::Isolate* isolate,
      const v8::WrapperDescriptor& wrapper_descriptor)
      : marking_state_(marking_state),
        isolate_(isolate),
        wrapper_descriptor_(wrapper_descriptor) {}

  void VisitTracedReference(const v8::TracedReference<v8::Value>& value) final {
    VisitHandle(value, value.WrapperClassId());
  }

 private:
  void VisitHandle(const v8::TracedReference<v8::Value>& value,
                   uint16_t class_id) {
    DCHECK(!value.IsEmpty());

    const internal::JSObject js_object =
        *reinterpret_cast<const internal::JSObject* const&>(value);
    if (!js_object.ptr() || js_object.IsSmi() ||
        !js_object.MayHaveEmbedderFields())
      return;

    internal::LocalEmbedderHeapTracer::WrapperInfo info;
    if (!internal::LocalEmbedderHeapTracer::ExtractWrappableInfo(
            isolate_, js_object, wrapper_descriptor_, &info))
      return;

    marking_state_.MarkAndPush(
        cppgc::internal::HeapObjectHeader::FromObject(info.second));
  }

  cppgc::internal::MutatorMarkingState& marking_state_;
  v8::internal::Isolate* isolate_;
  const v8::WrapperDescriptor& wrapper_descriptor_;
};

void TraceV8ToCppGCReferences(
    v8::internal::Isolate* isolate,
    cppgc::internal::MutatorMarkingState& marking_state,
    const v8::WrapperDescriptor& wrapper_descriptor) {
  DCHECK(isolate);
  V8ToCppGCReferencesVisitor forwarding_visitor(marking_state, isolate,
                                                wrapper_descriptor);
  isolate->global_handles()->IterateTracedNodes(&forwarding_visitor);
}

}  // namespace

// static
constexpr uint16_t WrapperDescriptor::kUnknownEmbedderId;

// static
std::unique_ptr<CppHeap> CppHeap::Create(v8::Platform* platform,
                                         const CppHeapCreateParams& params) {
  return std::make_unique<internal::CppHeap>(platform, params.custom_spaces,
                                             params.wrapper_descriptor);
}

cppgc::AllocationHandle& CppHeap::GetAllocationHandle() {
  return internal::CppHeap::From(this)->object_allocator();
}

cppgc::HeapHandle& CppHeap::GetHeapHandle() {
  return *internal::CppHeap::From(this);
}

void CppHeap::Terminate() { internal::CppHeap::From(this)->Terminate(); }

cppgc::HeapStatistics CppHeap::CollectStatistics(
    cppgc::HeapStatistics::DetailLevel detail_level) {
  return internal::CppHeap::From(this)->AsBase().CollectStatistics(
      detail_level);
}

void CppHeap::CollectCustomSpaceStatisticsAtLastGC(
    std::vector<cppgc::CustomSpaceIndex> custom_spaces,
    std::unique_ptr<CustomSpaceStatisticsReceiver> receiver) {
  return internal::CppHeap::From(this)->CollectCustomSpaceStatisticsAtLastGC(
      std::move(custom_spaces), std::move(receiver));
}

void CppHeap::EnableDetachedGarbageCollectionsForTesting() {
  return internal::CppHeap::From(this)
      ->EnableDetachedGarbageCollectionsForTesting();
}

void CppHeap::CollectGarbageForTesting(cppgc::EmbedderStackState stack_state) {
  return internal::CppHeap::From(this)->CollectGarbageForTesting(
      internal::CppHeap::CollectionType::kMajor, stack_state);
}

void CppHeap::CollectGarbageInYoungGenerationForTesting(
    cppgc::EmbedderStackState stack_state) {
  return internal::CppHeap::From(this)->CollectGarbageForTesting(
      internal::CppHeap::CollectionType::kMinor, stack_state);
}

namespace internal {

namespace {

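// Adapts the embedder-provided v8::Platform to the cppgc::Platform interface.
// Foreground task runner retrieval requires an Isolate unless detached mode
// (testing only) is explicitly enabled.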
class CppgcPlatformAdapter final : public cppgc::Platform {
 public:
  explicit CppgcPlatformAdapter(v8::Platform* platform) : platform_(platform) {}

  CppgcPlatformAdapter(const CppgcPlatformAdapter&) = delete;
  CppgcPlatformAdapter& operator=(const CppgcPlatformAdapter&) = delete;

  PageAllocator* GetPageAllocator() final {
    return platform_->GetPageAllocator();
  }

  double MonotonicallyIncreasingTime() final {
    return platform_->MonotonicallyIncreasingTime();
  }

  std::shared_ptr<TaskRunner> GetForegroundTaskRunner() final {
    // If no Isolate has been set, there's no task runner to leverage for
    // foreground tasks. In detached mode the original platform handles the
    // task runner retrieval.
    if (!isolate_ && !is_in_detached_mode_) return nullptr;

    return platform_->GetForegroundTaskRunner(isolate_);
  }

  std::unique_ptr<JobHandle> PostJob(TaskPriority priority,
                                     std::unique_ptr<JobTask> job_task) final {
    return platform_->PostJob(priority, std::move(job_task));
  }

  TracingController* GetTracingController() override {
    return platform_->GetTracingController();
  }

  void SetIsolate(v8::Isolate* isolate) { isolate_ = isolate; }
  void EnableDetachedModeForTesting() { is_in_detached_mode_ = true; }

 private:
  v8::Platform* platform_;
  v8::Isolate* isolate_ = nullptr;
  bool is_in_detached_mode_ = false;
};

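// Concurrent marker that creates unified-heap-aware marking visitors, so that
// background marking threads can also follow C++ -> V8 references.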
class UnifiedHeapConcurrentMarker
    : public cppgc::internal::ConcurrentMarkerBase {
 public:
  UnifiedHeapConcurrentMarker(
      cppgc::internal::HeapBase& heap, Heap* v8_heap,
      cppgc::internal::MarkingWorklists& marking_worklists,
      cppgc::internal::IncrementalMarkingSchedule& incremental_marking_schedule,
      cppgc::Platform* platform,
      UnifiedHeapMarkingState& unified_heap_marking_state)
      : cppgc::internal::ConcurrentMarkerBase(
            heap, marking_worklists, incremental_marking_schedule, platform),
        v8_heap_(v8_heap) {}

  std::unique_ptr<cppgc::Visitor> CreateConcurrentMarkingVisitor(
      cppgc::internal::ConcurrentMarkingState&) const final;

 private:
  Heap* const v8_heap_;
};

std::unique_ptr<cppgc::Visitor>
UnifiedHeapConcurrentMarker::CreateConcurrentMarkingVisitor(
    cppgc::internal::ConcurrentMarkingState& marking_state) const {
  return std::make_unique<ConcurrentUnifiedHeapMarkingVisitor>(heap(), v8_heap_,
                                                               marking_state);
}

void FatalOutOfMemoryHandlerImpl(const std::string& reason,
                                 const SourceLocation&, HeapBase* heap) {
  FatalProcessOutOfMemory(static_cast<v8::internal::CppHeap*>(heap)->isolate(),
                          reason.c_str());
}

}  // namespace

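// Marker for unified (V8 + C++) heap collections. Picks a major- or minor-GC
// mutator marking visitor based on the collection type and wires up the
// concurrent marker above.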
class UnifiedHeapMarker final : public cppgc::internal::MarkerBase {
 public:
  UnifiedHeapMarker(Heap* v8_heap, cppgc::internal::HeapBase& cpp_heap,
                    cppgc::Platform* platform, MarkingConfig config);

  ~UnifiedHeapMarker() final = default;

  void AddObject(void*);

  cppgc::internal::MarkingWorklists& GetMarkingWorklists() {
    return marking_worklists_;
  }

  cppgc::internal::MutatorMarkingState& GetMutatorMarkingState() {
    return static_cast<cppgc::internal::MutatorMarkingState&>(
        marking_visitor_->marking_state_);
  }

  UnifiedHeapMarkingState& GetMutatorUnifiedHeapMarkingState() {
    return mutator_unified_heap_marking_state_;
  }

 protected:
  cppgc::Visitor& visitor() final { return *marking_visitor_; }
  cppgc::internal::ConservativeTracingVisitor& conservative_visitor() final {
    return conservative_marking_visitor_;
  }
  ::heap::base::StackVisitor& stack_visitor() final {
    return conservative_marking_visitor_;
  }

 private:
  UnifiedHeapMarkingState mutator_unified_heap_marking_state_;
  std::unique_ptr<MutatorUnifiedHeapMarkingVisitor> marking_visitor_;
  cppgc::internal::ConservativeMarkingVisitor conservative_marking_visitor_;
};

UnifiedHeapMarker::UnifiedHeapMarker(Heap* v8_heap,
                                     cppgc::internal::HeapBase& heap,
                                     cppgc::Platform* platform,
                                     MarkingConfig config)
    : cppgc::internal::MarkerBase(heap, platform, config),
      mutator_unified_heap_marking_state_(v8_heap, nullptr),
      marking_visitor_(config.collection_type == CppHeap::CollectionType::kMajor
                           ? std::make_unique<MutatorUnifiedHeapMarkingVisitor>(
                                 heap, mutator_marking_state_,
                                 mutator_unified_heap_marking_state_)
                           : std::make_unique<MutatorMinorGCMarkingVisitor>(
                                 heap, mutator_marking_state_,
                                 mutator_unified_heap_marking_state_)),
      conservative_marking_visitor_(heap, mutator_marking_state_,
                                    *marking_visitor_) {
  concurrent_marker_ = std::make_unique<UnifiedHeapConcurrentMarker>(
      heap_, v8_heap, marking_worklists_, schedule_, platform_,
      mutator_unified_heap_marking_state_);
}

void UnifiedHeapMarker::AddObject(void* object) {
  mutator_marking_state_.MarkAndPush(
      cppgc::internal::HeapObjectHeader::FromObject(object));
}

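// The metric recorder adapter forwards cppgc events to V8's GC tracer and
// metrics recorder. Full GC cycle events are stashed for V8 to extract and
// report; standalone incremental mark/sweep events are batched (up to
// kMaxBatchedEvents) before being flushed to the embedder's recorder.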
void CppHeap::MetricRecorderAdapter::AddMainThreadEvent(
    const GCCycle& cppgc_event) {
  auto* tracer = GetIsolate()->heap()->tracer();
  if (cppgc_event.type == MetricRecorder::GCCycle::Type::kMinor) {
    DCHECK(!last_young_gc_event_);
    last_young_gc_event_ = cppgc_event;
    tracer->NotifyYoungCppGCCompleted();
  } else {
    DCHECK(!last_full_gc_event_);
    last_full_gc_event_ = cppgc_event;
    tracer->NotifyFullCppGCCompleted();
  }
}

void CppHeap::MetricRecorderAdapter::AddMainThreadEvent(
    const MainThreadIncrementalMark& cppgc_event) {
  // Incremental marking steps might be nested in V8 marking steps. In such
  // cases, stash the relevant values and delegate to V8 to report them. For
  // non-nested steps, report to the Recorder directly.
  if (cpp_heap_.is_in_v8_marking_step_) {
    last_incremental_mark_event_ = cppgc_event;
    return;
  }
  // This is a standalone incremental marking step.
  const std::shared_ptr<metrics::Recorder>& recorder =
      GetIsolate()->metrics_recorder();
  DCHECK_NOT_NULL(recorder);
  if (!recorder->HasEmbedderRecorder()) return;
  incremental_mark_batched_events_.events.emplace_back();
  incremental_mark_batched_events_.events.back().cpp_wall_clock_duration_in_us =
      cppgc_event.duration_us;
  if (incremental_mark_batched_events_.events.size() == kMaxBatchedEvents) {
    recorder->AddMainThreadEvent(std::move(incremental_mark_batched_events_),
                                 GetContextId());
    incremental_mark_batched_events_ = {};
  }
}

void CppHeap::MetricRecorderAdapter::AddMainThreadEvent(
    const MainThreadIncrementalSweep& cppgc_event) {
  // Incremental sweeping steps are never nested inside V8 sweeping steps, so
  // report to the Recorder directly.
  const std::shared_ptr<metrics::Recorder>& recorder =
      GetIsolate()->metrics_recorder();
  DCHECK_NOT_NULL(recorder);
  if (!recorder->HasEmbedderRecorder()) return;
  incremental_sweep_batched_events_.events.emplace_back();
  incremental_sweep_batched_events_.events.back()
      .cpp_wall_clock_duration_in_us = cppgc_event.duration_us;
  if (incremental_sweep_batched_events_.events.size() == kMaxBatchedEvents) {
    recorder->AddMainThreadEvent(std::move(incremental_sweep_batched_events_),
                                 GetContextId());
    incremental_sweep_batched_events_ = {};
  }
}

void CppHeap::MetricRecorderAdapter::FlushBatchedIncrementalEvents() {
  const std::shared_ptr<metrics::Recorder>& recorder =
      GetIsolate()->metrics_recorder();
  DCHECK_NOT_NULL(recorder);
  if (!incremental_mark_batched_events_.events.empty()) {
    recorder->AddMainThreadEvent(std::move(incremental_mark_batched_events_),
                                 GetContextId());
    incremental_mark_batched_events_ = {};
  }
  if (!incremental_sweep_batched_events_.events.empty()) {
    recorder->AddMainThreadEvent(std::move(incremental_sweep_batched_events_),
                                 GetContextId());
    incremental_sweep_batched_events_ = {};
  }
}

bool CppHeap::MetricRecorderAdapter::FullGCMetricsReportPending() const {
  return last_full_gc_event_.has_value();
}

bool CppHeap::MetricRecorderAdapter::YoungGCMetricsReportPending() const {
  return last_young_gc_event_.has_value();
}

const base::Optional<cppgc::internal::MetricRecorder::GCCycle>
CppHeap::MetricRecorderAdapter::ExtractLastFullGcEvent() {
  auto res = std::move(last_full_gc_event_);
  last_full_gc_event_.reset();
  return res;
}

const base::Optional<cppgc::internal::MetricRecorder::GCCycle>
CppHeap::MetricRecorderAdapter::ExtractLastYoungGcEvent() {
  auto res = std::move(last_young_gc_event_);
  last_young_gc_event_.reset();
  return res;
}

const base::Optional<cppgc::internal::MetricRecorder::MainThreadIncrementalMark>
CppHeap::MetricRecorderAdapter::ExtractLastIncrementalMarkEvent() {
  auto res = std::move(last_incremental_mark_event_);
  last_incremental_mark_event_.reset();
  return res;
}

void CppHeap::MetricRecorderAdapter::ClearCachedEvents() {
  incremental_mark_batched_events_.events.clear();
  incremental_sweep_batched_events_.events.clear();
  last_incremental_mark_event_.reset();
  last_full_gc_event_.reset();
  last_young_gc_event_.reset();
}

Isolate* CppHeap::MetricRecorderAdapter::GetIsolate() const {
  DCHECK_NOT_NULL(cpp_heap_.isolate());
  return reinterpret_cast<Isolate*>(cpp_heap_.isolate());
}

v8::metrics::Recorder::ContextId CppHeap::MetricRecorderAdapter::GetContextId()
    const {
  DCHECK_NOT_NULL(GetIsolate());
  if (GetIsolate()->context().is_null())
    return v8::metrics::Recorder::ContextId::Empty();
  HandleScope scope(GetIsolate());
  return GetIsolate()->GetOrRegisterRecorderContextId(
      GetIsolate()->native_context());
}

CppHeap::CppHeap(
    v8::Platform* platform,
    const std::vector<std::unique_ptr<cppgc::CustomSpaceBase>>& custom_spaces,
    const v8::WrapperDescriptor& wrapper_descriptor)
    : cppgc::internal::HeapBase(
          std::make_shared<CppgcPlatformAdapter>(platform), custom_spaces,
          cppgc::internal::HeapBase::StackSupport::
              kSupportsConservativeStackScan,
          FLAG_single_threaded_gc ? MarkingType::kIncremental
                                  : MarkingType::kIncrementalAndConcurrent,
          FLAG_single_threaded_gc ? SweepingType::kIncremental
                                  : SweepingType::kIncrementalAndConcurrent),
      wrapper_descriptor_(wrapper_descriptor) {
  CHECK_NE(WrapperDescriptor::kUnknownEmbedderId,
           wrapper_descriptor_.embedder_id_for_garbage_collected);
  // Enter no GC scope. `AttachIsolate()` removes this and allows triggering
  // garbage collections.
  no_gc_scope_++;
  stats_collector()->RegisterObserver(this);
}

CppHeap::~CppHeap() {
  if (isolate_) {
    isolate_->heap()->DetachCppHeap();
  }
}

void CppHeap::Terminate() {
  // Must not be attached to a heap when invoking termination GCs.
  CHECK(!isolate_);
  // Gracefully terminate the C++ heap invoking destructors.
  HeapBase::Terminate();
}

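// Attaching binds the C++ heap to an Isolate: the platform adapter gains a
// task runner source, heap snapshots include C++ objects via the embedder
// graph callback, and the no-GC scope entered in the constructor is exited so
// collections may run.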
void CppHeap::AttachIsolate(Isolate* isolate) {
  CHECK(!in_detached_testing_mode_);
  CHECK_NULL(isolate_);
  isolate_ = isolate;
  static_cast<CppgcPlatformAdapter*>(platform())
      ->SetIsolate(reinterpret_cast<v8::Isolate*>(isolate_));
  if (isolate_->heap_profiler()) {
    isolate_->heap_profiler()->AddBuildEmbedderGraphCallback(
        &CppGraphBuilder::Run, this);
  }
  SetMetricRecorder(std::make_unique<MetricRecorderAdapter>(*this));
  isolate_->global_handles()->SetStackStart(base::Stack::GetStackStart());
  oom_handler().SetCustomHandler(&FatalOutOfMemoryHandlerImpl);
  no_gc_scope_--;
}

void CppHeap::DetachIsolate() {
  // TODO(chromium:1056170): Investigate whether this can be enforced with a
  // CHECK across all relevant embedders and setups.
  if (!isolate_) return;

  // Delegate to existing EmbedderHeapTracer API to finish any ongoing garbage
  // collection.
  if (isolate_->heap()->incremental_marking()->IsMarking()) {
    isolate_->heap()->FinalizeIncrementalMarkingAtomically(
        i::GarbageCollectionReason::kExternalFinalize);
  }
  sweeper_.FinishIfRunning();

  auto* heap_profiler = isolate_->heap_profiler();
  if (heap_profiler) {
    heap_profiler->RemoveBuildEmbedderGraphCallback(&CppGraphBuilder::Run,
                                                    this);
  }
  SetMetricRecorder(nullptr);
  isolate_ = nullptr;
  // Any future garbage collections will ignore the V8->C++ references.
  oom_handler().SetCustomHandler(nullptr);
  // Enter no GC scope.
  no_gc_scope_++;
}

namespace {

bool IsMemoryReducingGC(CppHeap::GarbageCollectionFlags flags) {
  return flags & CppHeap::GarbageCollectionFlagValues::kReduceMemory;
}

bool IsForceGC(CppHeap::GarbageCollectionFlags flags) {
  return flags & CppHeap::GarbageCollectionFlagValues::kForced;
}

bool ShouldReduceMemory(CppHeap::GarbageCollectionFlags flags) {
  return IsMemoryReducingGC(flags) || IsForceGC(flags);
}

}  // namespace

CppHeap::MarkingType CppHeap::SelectMarkingType() const {
  // For now, force atomic marking for minor collections.
  if (*collection_type_ == CollectionType::kMinor) return MarkingType::kAtomic;

  if (IsForceGC(current_gc_flags_) && !force_incremental_marking_for_testing_)
    return MarkingType::kAtomic;

  return marking_support();
}

CppHeap::SweepingType CppHeap::SelectSweepingType() const {
  if (IsForceGC(current_gc_flags_)) return SweepingType::kAtomic;

  return sweeping_support();
}

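// A garbage collection cycle proceeds through InitializeTracing(),
// StartTracing(), repeated AdvanceTracing() steps, EnterFinalPause(), and
// finally TraceEpilogue(), which hands remaining work off to the sweeper.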
void CppHeap::InitializeTracing(CollectionType collection_type,
                                GarbageCollectionFlags gc_flags) {
  CHECK(!sweeper_.IsSweepingInProgress());

  // Check that previous cycle metrics for the same collection type have been
  // reported.
  if (GetMetricRecorder()) {
    if (collection_type == CollectionType::kMajor)
      DCHECK(!GetMetricRecorder()->FullGCMetricsReportPending());
    else
      DCHECK(!GetMetricRecorder()->YoungGCMetricsReportPending());
  }

  DCHECK(!collection_type_);
  collection_type_ = collection_type;

#if defined(CPPGC_YOUNG_GENERATION)
  if (*collection_type_ == CollectionType::kMajor)
    cppgc::internal::SequentialUnmarker unmarker(raw_heap());
#endif  // defined(CPPGC_YOUNG_GENERATION)

  current_gc_flags_ = gc_flags;

  const UnifiedHeapMarker::MarkingConfig marking_config{
      *collection_type_, StackState::kNoHeapPointers, SelectMarkingType(),
      IsForceGC(current_gc_flags_)
          ? UnifiedHeapMarker::MarkingConfig::IsForcedGC::kForced
          : UnifiedHeapMarker::MarkingConfig::IsForcedGC::kNotForced};
  DCHECK_IMPLIES(!isolate_,
                 (MarkingType::kAtomic == marking_config.marking_type) ||
                     force_incremental_marking_for_testing_);
  if (ShouldReduceMemory(current_gc_flags_)) {
    // Only enable compaction when in a memory reduction garbage collection as
    // it may significantly increase the final garbage collection pause.
    compactor_.InitializeIfShouldCompact(marking_config.marking_type,
                                         marking_config.stack_state);
  }
  marker_ = std::make_unique<UnifiedHeapMarker>(
      isolate_ ? isolate()->heap() : nullptr, AsBase(), platform_.get(),
      marking_config);
}

void CppHeap::StartTracing() {
  if (isolate_) {
    // Reuse the same local worklist for the mutator marking state, which
    // results in directly processing the objects by the JS logic. Also avoids
    // publishing local objects.
    static_cast<UnifiedHeapMarker*>(marker_.get())
        ->GetMutatorUnifiedHeapMarkingState()
        .Update(isolate_->heap()
                    ->mark_compact_collector()
                    ->local_marking_worklists());
  }
  marker_->StartMarking();
  marking_done_ = false;
}

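// Advances marking on the mutator thread. Outside the atomic pause, marking
// is time-boxed to max_duration; inside the pause, the deadline and the
// marked-bytes limit are lifted so marking runs to completion (see the
// DCHECK below).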
bool CppHeap::AdvanceTracing(double max_duration) {
  is_in_v8_marking_step_ = true;
  cppgc::internal::StatsCollector::EnabledScope stats_scope(
      stats_collector(),
      in_atomic_pause_ ? cppgc::internal::StatsCollector::kAtomicMark
                       : cppgc::internal::StatsCollector::kIncrementalMark);
  const v8::base::TimeDelta deadline =
      in_atomic_pause_ ? v8::base::TimeDelta::Max()
                       : v8::base::TimeDelta::FromMillisecondsD(max_duration);
  const size_t marked_bytes_limit = in_atomic_pause_ ? SIZE_MAX : 0;
  DCHECK_NOT_NULL(marker_);
  // TODO(chromium:1056170): Replace when unified heap transitions to
  // bytes-based deadline.
  marking_done_ =
      marker_->AdvanceMarkingWithLimits(deadline, marked_bytes_limit);
  DCHECK_IMPLIES(in_atomic_pause_, marking_done_);
  is_in_v8_marking_step_ = false;
  return marking_done_;
}

bool CppHeap::IsTracingDone() { return marking_done_; }

void CppHeap::EnterFinalPause(cppgc::EmbedderStackState stack_state) {
  CHECK(!in_disallow_gc_scope());
  in_atomic_pause_ = true;
  marker_->EnterAtomicPause(stack_state);
  if (isolate_ && *collection_type_ == CollectionType::kMinor) {
    // Visit V8 -> cppgc references.
    TraceV8ToCppGCReferences(isolate_,
                             static_cast<UnifiedHeapMarker*>(marker_.get())
                                 ->GetMutatorMarkingState(),
                             wrapper_descriptor_);
  }
  compactor_.CancelIfShouldNotCompact(MarkingType::kAtomic, stack_state);
}

bool CppHeap::FinishConcurrentMarkingIfNeeded() {
  return marker_->JoinConcurrentMarkingIfNeeded();
}

void CppHeap::TraceEpilogue() {
  CHECK(in_atomic_pause_);
  CHECK(marking_done_);
  {
    cppgc::subtle::DisallowGarbageCollectionScope disallow_gc_scope(*this);
    marker_->LeaveAtomicPause();
  }
  marker_.reset();
  if (isolate_) {
    auto* tracer = isolate_->heap()->local_embedder_heap_tracer();
    DCHECK_NOT_NULL(tracer);
    tracer->UpdateRemoteStats(
        stats_collector_->marked_bytes(),
        stats_collector_->marking_time().InMillisecondsF());
  }
  // The allocated bytes counter in v8 was reset to the current marked bytes,
  // so any pending allocated bytes updates should be discarded.
  buffered_allocated_bytes_ = 0;
  const size_t bytes_allocated_in_prefinalizers = ExecutePreFinalizers();
#if CPPGC_VERIFY_HEAP
  UnifiedHeapMarkingVerifier verifier(*this, *collection_type_);
  verifier.Run(stack_state_of_prev_gc(), stack_end_of_current_gc(),
               stats_collector()->marked_bytes_on_current_cycle() +
                   bytes_allocated_in_prefinalizers);
#endif  // CPPGC_VERIFY_HEAP
  USE(bytes_allocated_in_prefinalizers);

#if defined(CPPGC_YOUNG_GENERATION)
  ResetRememberedSet();
#endif  // defined(CPPGC_YOUNG_GENERATION)

  {
    cppgc::subtle::NoGarbageCollectionScope no_gc(*this);
    cppgc::internal::Sweeper::SweepingConfig::CompactableSpaceHandling
        compactable_space_handling = compactor_.CompactSpacesIfEnabled();
    const cppgc::internal::Sweeper::SweepingConfig sweeping_config{
        SelectSweepingType(), compactable_space_handling,
        ShouldReduceMemory(current_gc_flags_)
            ? cppgc::internal::Sweeper::SweepingConfig::FreeMemoryHandling::
                  kDiscardWherePossible
            : cppgc::internal::Sweeper::SweepingConfig::FreeMemoryHandling::
                  kDoNotDiscard};
    DCHECK_IMPLIES(!isolate_,
                   SweepingType::kAtomic == sweeping_config.sweeping_type);
    sweeper().Start(sweeping_config);
  }
  in_atomic_pause_ = false;
  collection_type_.reset();
  sweeper().NotifyDoneIfNeeded();
}

void CppHeap::RunMinorGC(StackState stack_state) {
  DCHECK(!sweeper_.IsSweepingInProgress());

  if (in_no_gc_scope()) return;
  // Minor GC does not support nesting in full GCs.
  if (IsMarking()) return;
  // Minor GCs with the stack are currently not supported.
  if (stack_state == StackState::kMayContainHeapPointers) return;

  // Notify GC tracer that CppGC started young GC cycle.
  isolate_->heap()->tracer()->NotifyYoungCppGCRunning();

  SetStackEndOfCurrentGC(v8::base::Stack::GetCurrentStackPosition());

  // Perform an atomic GC, with starting incremental/concurrent marking and
  // immediately finalizing the garbage collection.
  InitializeTracing(CollectionType::kMinor,
                    GarbageCollectionFlagValues::kNoFlags);
  StartTracing();
  // TODO(chromium:1029379): Should be safe to run without stack.
  EnterFinalPause(cppgc::EmbedderStackState::kMayContainHeapPointers);
  CHECK(AdvanceTracing(std::numeric_limits<double>::infinity()));
  if (FinishConcurrentMarkingIfNeeded()) {
    CHECK(AdvanceTracing(std::numeric_limits<double>::infinity()));
  }
  TraceEpilogue();
}

void CppHeap::AllocatedObjectSizeIncreased(size_t bytes) {
  buffered_allocated_bytes_ += static_cast<int64_t>(bytes);
  ReportBufferedAllocationSizeIfPossible();
}

void CppHeap::AllocatedObjectSizeDecreased(size_t bytes) {
  buffered_allocated_bytes_ -= static_cast<int64_t>(bytes);
  ReportBufferedAllocationSizeIfPossible();
}

void CppHeap::ReportBufferedAllocationSizeIfPossible() {
  // Avoid reporting to V8 in the following conditions as that may trigger GC
  // finalizations where not allowed.
  // - Recursive sweeping.
  // - GC forbidden scope.
  if (sweeper().IsSweepingOnMutatorThread() || in_no_gc_scope() || !isolate_) {
    return;
  }

  // The calls below may trigger full GCs that are synchronous and also execute
  // epilogue callbacks. Since such callbacks may allocate, the counter must
  // already be zeroed by that time.
  const int64_t bytes_to_report = buffered_allocated_bytes_;
  buffered_allocated_bytes_ = 0;

  auto* const tracer = isolate_->heap()->local_embedder_heap_tracer();
  DCHECK_NOT_NULL(tracer);
  if (bytes_to_report < 0) {
    tracer->DecreaseAllocatedSize(static_cast<size_t>(-bytes_to_report));
  } else {
    tracer->IncreaseAllocatedSize(static_cast<size_t>(bytes_to_report));
  }
}

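// Testing-only collection entry point. When attached to an Isolate, the
// request is routed through a full V8 GC; when detached, a standalone atomic
// cppgc collection is run directly.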
void CppHeap::CollectGarbageForTesting(CollectionType collection_type,
                                       StackState stack_state) {
  if (in_no_gc_scope()) return;

  // Finish sweeping in case it is still running.
  sweeper().FinishIfRunning();

  SetStackEndOfCurrentGC(v8::base::Stack::GetCurrentStackPosition());

  if (isolate_) {
    reinterpret_cast<v8::Isolate*>(isolate_)
        ->RequestGarbageCollectionForTesting(
            v8::Isolate::kFullGarbageCollection, stack_state);
  } else {
    // Perform an atomic GC, with starting incremental/concurrent marking and
    // immediately finalizing the garbage collection.
    if (!IsMarking()) {
      InitializeTracing(collection_type, GarbageCollectionFlagValues::kForced);
      StartTracing();
    }
    EnterFinalPause(stack_state);
    CHECK(AdvanceTracing(std::numeric_limits<double>::infinity()));
    if (FinishConcurrentMarkingIfNeeded()) {
      CHECK(AdvanceTracing(std::numeric_limits<double>::infinity()));
    }
    TraceEpilogue();
  }
}

void CppHeap::EnableDetachedGarbageCollectionsForTesting() {
  CHECK(!in_detached_testing_mode_);
  CHECK_NULL(isolate_);
  no_gc_scope_--;
  in_detached_testing_mode_ = true;
  static_cast<CppgcPlatformAdapter*>(platform())
      ->EnableDetachedModeForTesting();
}

void CppHeap::StartIncrementalGarbageCollectionForTesting() {
  DCHECK(!in_no_gc_scope());
  DCHECK_NULL(isolate_);
  if (IsMarking()) return;
  force_incremental_marking_for_testing_ = true;
  InitializeTracing(CollectionType::kMajor,
                    GarbageCollectionFlagValues::kForced);
  StartTracing();
  force_incremental_marking_for_testing_ = false;
}

void CppHeap::FinalizeIncrementalGarbageCollectionForTesting(
    cppgc::EmbedderStackState stack_state) {
  DCHECK(!in_no_gc_scope());
  DCHECK_NULL(isolate_);
  DCHECK(IsMarking());
  if (IsMarking()) {
    CollectGarbageForTesting(CollectionType::kMajor, stack_state);
  }
  sweeper_.FinishIfRunning();
}

namespace {

void ReportCustomSpaceStatistics(
    cppgc::internal::RawHeap& raw_heap,
    std::vector<cppgc::CustomSpaceIndex> custom_spaces,
    std::unique_ptr<CustomSpaceStatisticsReceiver> receiver) {
  for (auto custom_space_index : custom_spaces) {
    const cppgc::internal::BaseSpace* space =
        raw_heap.CustomSpace(custom_space_index);
    size_t allocated_bytes = std::accumulate(
        space->begin(), space->end(), 0, [](size_t sum, auto* page) {
          return sum + page->AllocatedBytesAtLastGC();
        });
    receiver->AllocatedBytes(custom_space_index, allocated_bytes);
  }
}

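// Task that reports custom space statistics once sweeping has finished. Each
// run sweeps on the mutator thread for at most kStepSizeMs; if sweeping is
// still in progress afterwards, the task reposts itself with kTaskDelayMs.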
class CollectCustomSpaceStatisticsAtLastGCTask final : public v8::Task {
 public:
  static constexpr v8::base::TimeDelta kTaskDelayMs =
      v8::base::TimeDelta::FromMilliseconds(10);

  CollectCustomSpaceStatisticsAtLastGCTask(
      cppgc::internal::HeapBase& heap,
      std::vector<cppgc::CustomSpaceIndex> custom_spaces,
      std::unique_ptr<CustomSpaceStatisticsReceiver> receiver)
      : heap_(heap),
        custom_spaces_(std::move(custom_spaces)),
        receiver_(std::move(receiver)) {}

  void Run() final {
    cppgc::internal::Sweeper& sweeper = heap_.sweeper();
    if (sweeper.PerformSweepOnMutatorThread(
            heap_.platform()->MonotonicallyIncreasingTime() +
            kStepSizeMs.InSecondsF())) {
      // Sweeping is done.
      DCHECK(!sweeper.IsSweepingInProgress());
      ReportCustomSpaceStatistics(heap_.raw_heap(), std::move(custom_spaces_),
                                  std::move(receiver_));
    } else {
      heap_.platform()->GetForegroundTaskRunner()->PostDelayedTask(
          std::make_unique<CollectCustomSpaceStatisticsAtLastGCTask>(
              heap_, std::move(custom_spaces_), std::move(receiver_)),
          kTaskDelayMs.InSecondsF());
    }
  }

 private:
  static constexpr v8::base::TimeDelta kStepSizeMs =
      v8::base::TimeDelta::FromMilliseconds(5);

  cppgc::internal::HeapBase& heap_;
  std::vector<cppgc::CustomSpaceIndex> custom_spaces_;
  std::unique_ptr<CustomSpaceStatisticsReceiver> receiver_;
};

constexpr v8::base::TimeDelta
    CollectCustomSpaceStatisticsAtLastGCTask::kTaskDelayMs;
constexpr v8::base::TimeDelta
    CollectCustomSpaceStatisticsAtLastGCTask::kStepSizeMs;

}  // namespace

void CppHeap::CollectCustomSpaceStatisticsAtLastGC(
    std::vector<cppgc::CustomSpaceIndex> custom_spaces,
    std::unique_ptr<CustomSpaceStatisticsReceiver> receiver) {
  if (sweeper().IsSweepingInProgress()) {
    platform()->GetForegroundTaskRunner()->PostDelayedTask(
        std::make_unique<CollectCustomSpaceStatisticsAtLastGCTask>(
            AsBase(), std::move(custom_spaces), std::move(receiver)),
        CollectCustomSpaceStatisticsAtLastGCTask::kTaskDelayMs.InSecondsF());
    return;
  }
  ReportCustomSpaceStatistics(raw_heap(), std::move(custom_spaces),
                              std::move(receiver));
}

CppHeap::MetricRecorderAdapter* CppHeap::GetMetricRecorder() const {
  return static_cast<MetricRecorderAdapter*>(
      stats_collector_->GetMetricRecorder());
}

void CppHeap::FinishSweepingIfRunning() { sweeper_.FinishIfRunning(); }

void CppHeap::FinishSweepingIfOutOfWork() { sweeper_.FinishIfOutOfWork(); }

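// CppMarkingState factories: the first variant wraps a fresh MarkingStateBase
// over the marker's worklists, while the mutator-thread variant reuses the
// marker's existing mutator marking state directly.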
std::unique_ptr<CppMarkingState> CppHeap::CreateCppMarkingState() {
  DCHECK(IsMarking());
  return std::make_unique<CppMarkingState>(
      isolate(), wrapper_descriptor_,
      std::make_unique<cppgc::internal::MarkingStateBase>(
          AsBase(),
          static_cast<UnifiedHeapMarker*>(marker())->GetMarkingWorklists()));
}

std::unique_ptr<CppMarkingState>
CppHeap::CreateCppMarkingStateForMutatorThread() {
  DCHECK(IsMarking());
  return std::make_unique<CppMarkingState>(
      isolate(), wrapper_descriptor_,
      static_cast<UnifiedHeapMarker*>(marker())->GetMutatorMarkingState());
}

CppHeap::PauseConcurrentMarkingScope::PauseConcurrentMarkingScope(
    CppHeap* cpp_heap) {
  if (cpp_heap && cpp_heap->marker()) {
    pause_scope_.emplace(*cpp_heap->marker());
  }
}

}  // namespace internal
}  // namespace v8