// Copyright 2014 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/heap/gc-tracer.h"

#include <cstdarg>

#include "include/v8-metrics.h"
#include "src/base/atomic-utils.h"
#include "src/base/strings.h"
#include "src/common/globals.h"
#include "src/execution/thread-id.h"
#include "src/heap/cppgc-js/cpp-heap.h"
#include "src/heap/cppgc/metric-recorder.h"
#include "src/heap/gc-tracer-inl.h"
#include "src/heap/heap-inl.h"
#include "src/heap/heap.h"
#include "src/heap/incremental-marking.h"
#include "src/heap/spaces.h"
#include "src/logging/counters.h"
#include "src/logging/metrics.h"
#include "src/logging/tracing-flags.h"
#include "src/tracing/tracing-category-observer.h"

namespace v8 {
namespace internal {

static size_t CountTotalHolesSize(Heap* heap) {
  size_t holes_size = 0;
  PagedSpaceIterator spaces(heap);
  for (PagedSpace* space = spaces.Next(); space != nullptr;
       space = spaces.Next()) {
    DCHECK_GE(holes_size + space->Waste() + space->Available(), holes_size);
    holes_size += space->Waste() + space->Available();
  }
  return holes_size;
}

namespace {
std::atomic<CollectionEpoch> global_epoch{0};

CollectionEpoch next_epoch() {
  return global_epoch.fetch_add(1, std::memory_order_relaxed) + 1;
}
}  // namespace

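// The epoch counter above only needs to hand out unique, increasing IDs
// (stored in epoch_young_/epoch_full_ below), so the relaxed fetch_add is
// sufficient: no ordering with surrounding memory operations is required.
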
#if DEBUG
void GCTracer::Scope::AssertMainThread() {
  Isolate* isolate = tracer_->heap_->isolate();
  Isolate* shared_isolate = isolate->shared_isolate();
  ThreadId thread_id = ThreadId::Current();

  // Either run on the isolate's main thread or on the current main thread of
  // the shared isolate during shared GCs.
  DCHECK(isolate->thread_id() == thread_id ||
         (shared_isolate && shared_isolate->thread_id() == thread_id));
}
#endif  // DEBUG

const char* GCTracer::Scope::Name(ScopeId id) {
#define CASE(scope)  \
  case Scope::scope: \
    return "V8.GC_" #scope;
  switch (id) {
    TRACER_SCOPES(CASE)
    TRACER_BACKGROUND_SCOPES(CASE)
    case Scope::NUMBER_OF_SCOPES:
      break;
  }
#undef CASE
  UNREACHABLE();
}

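// As an example of the X-macro expansion above, CASE(MC_MARK) expands to
//   case Scope::MC_MARK:
//     return "V8.GC_MC_MARK";
// so every scope's trace-event name is derived from its enumerator.
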
bool GCTracer::Scope::NeedsYoungEpoch(ScopeId id) {
#define CASE(scope)  \
  case Scope::scope: \
    return true;
  switch (id) {
    TRACER_YOUNG_EPOCH_SCOPES(CASE)
    default:
      return false;
  }
#undef CASE
  UNREACHABLE();
}

GCTracer::Event::Event(Type type, State state,
                       GarbageCollectionReason gc_reason,
                       const char* collector_reason)
    : type(type),
      state(state),
      gc_reason(gc_reason),
      collector_reason(collector_reason),
      start_time(0.0),
      end_time(0.0),
      reduce_memory(false),
      start_object_size(0),
      end_object_size(0),
      start_memory_size(0),
      end_memory_size(0),
      start_holes_size(0),
      end_holes_size(0),
      young_object_size(0),
      survived_young_object_size(0),
      incremental_marking_bytes(0),
      incremental_marking_duration(0.0) {
  for (int i = 0; i < Scope::NUMBER_OF_SCOPES; i++) {
    scopes[i] = 0;
  }
}

const char* GCTracer::Event::TypeName(bool short_name) const {
  switch (type) {
    case SCAVENGER:
      return (short_name) ? "s" : "Scavenge";
    case MARK_COMPACTOR:
    case INCREMENTAL_MARK_COMPACTOR:
      return (short_name) ? "ms" : "Mark-sweep";
    case MINOR_MARK_COMPACTOR:
      return (short_name) ? "mmc" : "Minor Mark-Compact";
    case START:
      return (short_name) ? "st" : "Start";
  }
  return "Unknown Event Type";
}

GCTracer::RecordGCPhasesInfo::RecordGCPhasesInfo(Heap* heap,
                                                 GarbageCollector collector) {
  Counters* counters = heap->isolate()->counters();
  const bool in_background = heap->isolate()->IsIsolateInBackground();
  if (Heap::IsYoungGenerationCollector(collector)) {
    mode = Mode::Scavenger;
    type_timer = type_priority_timer = nullptr;
  } else {
    DCHECK_EQ(GarbageCollector::MARK_COMPACTOR, collector);
    if (heap->incremental_marking()->IsStopped()) {
      mode = Mode::None;
      type_timer = counters->gc_compactor();
      type_priority_timer = in_background ? counters->gc_compactor_background()
                                          : counters->gc_compactor_foreground();
    } else if (heap->ShouldReduceMemory()) {
      mode = Mode::None;
      type_timer = counters->gc_finalize_reduce_memory();
      type_priority_timer =
          in_background ? counters->gc_finalize_reduce_memory_background()
                        : counters->gc_finalize_reduce_memory_foreground();
    } else {
      if (heap->incremental_marking()->IsMarking() &&
          heap->incremental_marking()
              ->local_marking_worklists()
              ->IsPerContextMode()) {
        mode = Mode::None;
        type_timer = counters->gc_finalize_measure_memory();
      } else {
        mode = Mode::Finalize;
        type_timer = counters->gc_finalize();
      }
      type_priority_timer = in_background ? counters->gc_finalize_background()
                                          : counters->gc_finalize_foreground();
    }
  }
}

GCTracer::GCTracer(Heap* heap)
    : heap_(heap),
      current_(Event::START, Event::State::NOT_RUNNING,
               GarbageCollectionReason::kUnknown, nullptr),
      previous_(current_),
      incremental_marking_bytes_(0),
      incremental_marking_duration_(0.0),
      incremental_marking_start_time_(0.0),
      recorded_incremental_marking_speed_(0.0),
      allocation_time_ms_(0.0),
      new_space_allocation_counter_bytes_(0),
      old_generation_allocation_counter_bytes_(0),
      embedder_allocation_counter_bytes_(0),
      allocation_duration_since_gc_(0.0),
      new_space_allocation_in_bytes_since_gc_(0),
      old_generation_allocation_in_bytes_since_gc_(0),
      embedder_allocation_in_bytes_since_gc_(0),
      combined_mark_compact_speed_cache_(0.0),
      start_counter_(0),
      average_mutator_duration_(0),
      average_mark_compact_duration_(0),
      current_mark_compact_mutator_utilization_(1.0),
      previous_mark_compact_end_time_(0) {
  // All accesses to incremental_marking_scope assume that incremental marking
  // scopes come first.
  STATIC_ASSERT(0 == Scope::FIRST_INCREMENTAL_SCOPE);
  // We assume that MC_INCREMENTAL is the first scope so that we can properly
  // map it to RuntimeCallStats.
  STATIC_ASSERT(0 == Scope::MC_INCREMENTAL);
  current_.end_time = MonotonicallyIncreasingTimeInMs();
  for (int i = 0; i < Scope::NUMBER_OF_SCOPES; i++) {
    background_counter_[i].total_duration_ms = 0;
  }
}

void GCTracer::ResetForTesting() {
  current_ = Event(Event::START, Event::State::NOT_RUNNING,
                   GarbageCollectionReason::kTesting, nullptr);
  current_.end_time = MonotonicallyIncreasingTimeInMs();
  previous_ = current_;
  start_of_observable_pause_ = 0.0;
  notified_sweeping_completed_ = false;
  notified_full_cppgc_completed_ = false;
  notified_young_cppgc_completed_ = false;
  notified_young_cppgc_running_ = false;
  young_gc_while_full_gc_ = false;
  ResetIncrementalMarkingCounters();
  allocation_time_ms_ = 0.0;
  new_space_allocation_counter_bytes_ = 0.0;
  old_generation_allocation_counter_bytes_ = 0.0;
  allocation_duration_since_gc_ = 0.0;
  new_space_allocation_in_bytes_since_gc_ = 0.0;
  old_generation_allocation_in_bytes_since_gc_ = 0.0;
  combined_mark_compact_speed_cache_ = 0.0;
  recorded_minor_gcs_total_.Reset();
  recorded_minor_gcs_survived_.Reset();
  recorded_compactions_.Reset();
  recorded_mark_compacts_.Reset();
  recorded_incremental_mark_compacts_.Reset();
  recorded_new_generation_allocations_.Reset();
  recorded_old_generation_allocations_.Reset();
  recorded_embedder_generation_allocations_.Reset();
  recorded_survival_ratios_.Reset();
  start_counter_ = 0;
  average_mutator_duration_ = 0;
  average_mark_compact_duration_ = 0;
  current_mark_compact_mutator_utilization_ = 1.0;
  previous_mark_compact_end_time_ = 0;
  base::MutexGuard guard(&background_counter_mutex_);
  for (int i = 0; i < Scope::NUMBER_OF_SCOPES; i++) {
    background_counter_[i].total_duration_ms = 0;
  }
}

void GCTracer::NotifyYoungGenerationHandling(
    YoungGenerationHandling young_generation_handling) {
  DCHECK_GE(1, start_counter_);
  DCHECK_EQ(Event::SCAVENGER, current_.type);
  heap_->isolate()->counters()->young_generation_handling()->AddSample(
      static_cast<int>(young_generation_handling));
}

void GCTracer::StartObservablePause() {
  DCHECK_EQ(0, start_counter_);
  start_counter_++;

  DCHECK(!IsInObservablePause());
  start_of_observable_pause_ = MonotonicallyIncreasingTimeInMs();
}

void GCTracer::UpdateCurrentEvent(GarbageCollectionReason gc_reason,
                                  const char* collector_reason) {
  // For incremental marking, the event has already been created and we just
  // need to update a few fields.
  DCHECK_EQ(Event::INCREMENTAL_MARK_COMPACTOR, current_.type);
  DCHECK_EQ(Event::State::ATOMIC, current_.state);
  DCHECK(IsInObservablePause());
  current_.gc_reason = gc_reason;
  current_.collector_reason = collector_reason;
  // TODO(chromium:1154636): The start_time of the current event currently
  // contains the start time of the observable pause. This should be
  // reconsidered.
  current_.start_time = start_of_observable_pause_;
  current_.reduce_memory = heap_->ShouldReduceMemory();
}

void GCTracer::StartCycle(GarbageCollector collector,
                          GarbageCollectionReason gc_reason,
                          const char* collector_reason, MarkingType marking) {
  // We cannot start a new cycle while there's another one in its atomic pause.
  DCHECK_NE(Event::State::ATOMIC, current_.state);
  // We cannot start a new cycle while a young generation GC cycle has
  // already interrupted a full GC cycle.
  DCHECK(!young_gc_while_full_gc_);

  young_gc_while_full_gc_ = current_.state != Event::State::NOT_RUNNING;

  DCHECK_IMPLIES(young_gc_while_full_gc_,
                 Heap::IsYoungGenerationCollector(collector) &&
                     !Event::IsYoungGenerationEvent(current_.type));

  Event::Type type;
  switch (collector) {
    case GarbageCollector::SCAVENGER:
      type = Event::SCAVENGER;
      break;
    case GarbageCollector::MINOR_MARK_COMPACTOR:
      type = Event::MINOR_MARK_COMPACTOR;
      break;
    case GarbageCollector::MARK_COMPACTOR:
      type = marking == MarkingType::kIncremental
                 ? Event::INCREMENTAL_MARK_COMPACTOR
                 : Event::MARK_COMPACTOR;
      break;
  }

  DCHECK_IMPLIES(!young_gc_while_full_gc_,
                 current_.state == Event::State::NOT_RUNNING);
  DCHECK_EQ(Event::State::NOT_RUNNING, previous_.state);

  previous_ = current_;
  current_ = Event(type, Event::State::MARKING, gc_reason, collector_reason);

  switch (marking) {
    case MarkingType::kAtomic:
      DCHECK(IsInObservablePause());
      // TODO(chromium:1154636): The start_time of the current event currently
      // contains the start time of the observable pause. This should be
      // reconsidered.
      current_.start_time = start_of_observable_pause_;
      current_.reduce_memory = heap_->ShouldReduceMemory();
      break;
    case MarkingType::kIncremental:
      // The current event will be updated later.
      DCHECK(!Heap::IsYoungGenerationCollector(collector));
      DCHECK(!IsInObservablePause());
      break;
  }

  if (Heap::IsYoungGenerationCollector(collector)) {
    epoch_young_ = next_epoch();
  } else {
    epoch_full_ = next_epoch();
  }
}

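// If a young GC interrupted a running full GC cycle here
// (young_gc_while_full_gc_), the full GC event is parked in previous_ and
// swapped back in StopCycle() once the young cycle finishes.
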
void GCTracer::StartAtomicPause() {
  DCHECK_EQ(Event::State::MARKING, current_.state);
  current_.state = Event::State::ATOMIC;
}

void GCTracer::StartInSafepoint() {
  SampleAllocation(current_.start_time, heap_->NewSpaceAllocationCounter(),
                   heap_->OldGenerationAllocationCounter(),
                   heap_->EmbedderAllocationCounter());

  current_.start_object_size = heap_->SizeOfObjects();
  current_.start_memory_size = heap_->memory_allocator()->Size();
  current_.start_holes_size = CountTotalHolesSize(heap_);
  size_t new_space_size = (heap_->new_space() ? heap_->new_space()->Size() : 0);
  size_t new_lo_space_size =
      (heap_->new_lo_space() ? heap_->new_lo_space()->SizeOfObjects() : 0);
  current_.young_object_size = new_space_size + new_lo_space_size;
}

void GCTracer::ResetIncrementalMarkingCounters() {
  incremental_marking_bytes_ = 0;
  incremental_marking_duration_ = 0;
  for (int i = 0; i < Scope::NUMBER_OF_INCREMENTAL_SCOPES; i++) {
    incremental_scopes_[i].ResetCurrentCycle();
  }
}

void GCTracer::StopInSafepoint() {
  current_.end_object_size = heap_->SizeOfObjects();
  current_.end_memory_size = heap_->memory_allocator()->Size();
  current_.end_holes_size = CountTotalHolesSize(heap_);
  current_.survived_young_object_size = heap_->SurvivedYoungObjectSize();
}

void GCTracer::StopObservablePause() {
  start_counter_--;
  DCHECK_EQ(0, start_counter_);

  DCHECK(IsInObservablePause());
  start_of_observable_pause_ = 0.0;

  // TODO(chromium:1154636): The end_time of the current event currently
  // contains the end time of the observable pause. This should be
  // reconsidered.
  current_.end_time = MonotonicallyIncreasingTimeInMs();
}

void GCTracer::UpdateStatistics(GarbageCollector collector) {
  const bool is_young = Heap::IsYoungGenerationCollector(collector);
  DCHECK(IsConsistentWithCollector(collector));

  AddAllocation(current_.end_time);

  double duration = current_.end_time - current_.start_time;
  int64_t duration_us =
      static_cast<int64_t>(duration * base::Time::kMicrosecondsPerMillisecond);
  auto* long_task_stats = heap_->isolate()->GetCurrentLongTaskStats();

  if (is_young) {
    recorded_minor_gcs_total_.Push(
        MakeBytesAndDuration(current_.young_object_size, duration));
    recorded_minor_gcs_survived_.Push(
        MakeBytesAndDuration(current_.survived_young_object_size, duration));
    long_task_stats->gc_young_wall_clock_duration_us += duration_us;
  } else {
    if (current_.type == Event::INCREMENTAL_MARK_COMPACTOR) {
      RecordIncrementalMarkingSpeed(incremental_marking_bytes_,
                                    incremental_marking_duration_);
      recorded_incremental_mark_compacts_.Push(
          MakeBytesAndDuration(current_.end_object_size, duration));
    } else {
      recorded_mark_compacts_.Push(
          MakeBytesAndDuration(current_.end_object_size, duration));
    }
    RecordMutatorUtilization(current_.end_time,
                             duration + incremental_marking_duration_);
    RecordGCSumCounters();
    combined_mark_compact_speed_cache_ = 0.0;
    long_task_stats->gc_full_atomic_wall_clock_duration_us += duration_us;
  }

  heap_->UpdateTotalGCTime(duration);

  if (FLAG_trace_gc_ignore_scavenger && is_young) return;

  if (FLAG_trace_gc_nvp) {
    PrintNVP();
  } else {
    Print();
  }

  if (FLAG_trace_gc) {
    heap_->PrintShortHeapStatistics();
  }

  if (V8_UNLIKELY(TracingFlags::gc.load(std::memory_order_relaxed) &
                  v8::tracing::TracingCategoryObserver::ENABLED_BY_TRACING)) {
    std::stringstream heap_stats;
    heap_->DumpJSONHeapStatistics(heap_stats);

    TRACE_EVENT_INSTANT1(TRACE_DISABLED_BY_DEFAULT("v8.gc"), "V8.GC_Heap_Stats",
                         TRACE_EVENT_SCOPE_THREAD, "stats",
                         TRACE_STR_COPY(heap_stats.str().c_str()));
  }
}

void GCTracer::FinalizeCurrentEvent() {
  const bool is_young = Event::IsYoungGenerationEvent(current_.type);

  if (is_young) {
    FetchBackgroundMinorGCCounters();
  } else {
    if (current_.type == Event::INCREMENTAL_MARK_COMPACTOR) {
      current_.incremental_marking_bytes = incremental_marking_bytes_;
      current_.incremental_marking_duration = incremental_marking_duration_;
      for (int i = 0; i < Scope::NUMBER_OF_INCREMENTAL_SCOPES; i++) {
        current_.incremental_scopes[i] = incremental_scopes_[i];
        current_.scopes[i] = incremental_scopes_[i].duration;
      }
      ResetIncrementalMarkingCounters();
    } else {
      DCHECK_EQ(0u, incremental_marking_bytes_);
      DCHECK_EQ(0.0, incremental_marking_duration_);
      DCHECK_EQ(0u, current_.incremental_marking_bytes);
      DCHECK_EQ(0.0, current_.incremental_marking_duration);
    }
    FetchBackgroundMarkCompactCounters();
  }
  FetchBackgroundGeneralCounters();
}

void GCTracer::StopAtomicPause() {
  DCHECK_EQ(Event::State::ATOMIC, current_.state);
  current_.state = Event::State::SWEEPING;
}

void GCTracer::StopCycle(GarbageCollector collector) {
  DCHECK_EQ(Event::State::SWEEPING, current_.state);
  current_.state = Event::State::NOT_RUNNING;

  DCHECK(IsConsistentWithCollector(collector));
  FinalizeCurrentEvent();

  if (Heap::IsYoungGenerationCollector(collector)) {
    ReportYoungCycleToRecorder();

    // If a young generation GC interrupted an unfinished full GC cycle, restore
    // the event corresponding to the full GC cycle.
    if (young_gc_while_full_gc_) {
      std::swap(current_, previous_);
      young_gc_while_full_gc_ = false;
    }
  } else {
    ReportFullCycleToRecorder();

    heap_->isolate()->counters()->mark_compact_reason()->AddSample(
        static_cast<int>(current_.gc_reason));

    if (FLAG_trace_gc_freelists) {
      PrintIsolate(heap_->isolate(),
                   "FreeLists statistics before collection:\n");
      heap_->PrintFreeListsStats();
    }
  }
}

void GCTracer::StopFullCycleIfNeeded() {
  if (current_.state != Event::State::SWEEPING) return;
  if (!notified_sweeping_completed_) return;
  if (heap_->cpp_heap() && !notified_full_cppgc_completed_) return;
  StopCycle(GarbageCollector::MARK_COMPACTOR);
  notified_sweeping_completed_ = false;
  notified_full_cppgc_completed_ = false;
}

void GCTracer::StopYoungCycleIfNeeded() {
  // We rely here on the fact that young GCs in V8 are atomic and by the time
  // this is called, the Scavenger or Minor MC has already finished.
  DCHECK(Event::IsYoungGenerationEvent(current_.type));
  if (current_.state != Event::State::SWEEPING) return;
  // Check if young cppgc was scheduled but hasn't completed yet.
  if (heap_->cpp_heap() && notified_young_cppgc_running_ &&
      !notified_young_cppgc_completed_)
    return;
  StopCycle(current_.type == Event::SCAVENGER
                ? GarbageCollector::SCAVENGER
                : GarbageCollector::MINOR_MARK_COMPACTOR);
  notified_young_cppgc_running_ = false;
  notified_young_cppgc_completed_ = false;
}

void GCTracer::NotifySweepingCompleted() {
#ifdef VERIFY_HEAP
  // If heap verification is enabled, sweeping finalization can also be
  // triggered from inside a full GC cycle's atomic pause.
  DCHECK((current_.type == Event::MARK_COMPACTOR ||
          current_.type == Event::INCREMENTAL_MARK_COMPACTOR) &&
         (current_.state == Event::State::SWEEPING ||
          (FLAG_verify_heap && current_.state == Event::State::ATOMIC)));
#else
  DCHECK(IsSweepingInProgress());
#endif

  // Stop a full GC cycle only when both v8 and cppgc (if available) GCs have
  // finished sweeping. This method is invoked by v8.
  if (FLAG_trace_gc_freelists) {
    PrintIsolate(heap_->isolate(),
                 "FreeLists statistics after sweeping completed:\n");
    heap_->PrintFreeListsStats();
  }
  if (FLAG_trace_allocations_origins) {
    heap_->new_space()->PrintAllocationsOrigins();
    heap_->old_space()->PrintAllocationsOrigins();
    heap_->code_space()->PrintAllocationsOrigins();
    heap_->map_space()->PrintAllocationsOrigins();
  }
  DCHECK(!notified_sweeping_completed_);
  notified_sweeping_completed_ = true;
  StopFullCycleIfNeeded();
}

void GCTracer::NotifyFullCppGCCompleted() {
  // Stop a full GC cycle only when both v8 and cppgc (if available) GCs have
  // finished sweeping. This method is invoked by cppgc.
  DCHECK(heap_->cpp_heap());
  const auto* metric_recorder =
      CppHeap::From(heap_->cpp_heap())->GetMetricRecorder();
  USE(metric_recorder);
  DCHECK(metric_recorder->FullGCMetricsReportPending());
  DCHECK(!notified_full_cppgc_completed_);
  notified_full_cppgc_completed_ = true;
  StopFullCycleIfNeeded();
}

void GCTracer::NotifyYoungCppGCCompleted() {
  // Stop a young GC cycle only when both v8 and cppgc (if available) GCs have
  // finished sweeping. This method is invoked by cppgc.
  DCHECK(heap_->cpp_heap());
  DCHECK(notified_young_cppgc_running_);
  const auto* metric_recorder =
      CppHeap::From(heap_->cpp_heap())->GetMetricRecorder();
  USE(metric_recorder);
  DCHECK(metric_recorder->YoungGCMetricsReportPending());
  DCHECK(!notified_young_cppgc_completed_);
  notified_young_cppgc_completed_ = true;
  StopYoungCycleIfNeeded();
}

void GCTracer::NotifyYoungCppGCRunning() {
  DCHECK(!notified_young_cppgc_running_);
  notified_young_cppgc_running_ = true;
}

void GCTracer::SampleAllocation(double current_ms,
                                size_t new_space_counter_bytes,
                                size_t old_generation_counter_bytes,
                                size_t embedder_counter_bytes) {
  if (allocation_time_ms_ == 0) {
    // It is the first sample.
    allocation_time_ms_ = current_ms;
    new_space_allocation_counter_bytes_ = new_space_counter_bytes;
    old_generation_allocation_counter_bytes_ = old_generation_counter_bytes;
    embedder_allocation_counter_bytes_ = embedder_counter_bytes;
    return;
  }
  // This assumes that counters are unsigned integers so that the subtraction
  // below works even if the new counter is less than the old counter.
  size_t new_space_allocated_bytes =
      new_space_counter_bytes - new_space_allocation_counter_bytes_;
  size_t old_generation_allocated_bytes =
      old_generation_counter_bytes - old_generation_allocation_counter_bytes_;
  size_t embedder_allocated_bytes =
      embedder_counter_bytes - embedder_allocation_counter_bytes_;
  double duration = current_ms - allocation_time_ms_;
  allocation_time_ms_ = current_ms;
  new_space_allocation_counter_bytes_ = new_space_counter_bytes;
  old_generation_allocation_counter_bytes_ = old_generation_counter_bytes;
  embedder_allocation_counter_bytes_ = embedder_counter_bytes;
  allocation_duration_since_gc_ += duration;
  new_space_allocation_in_bytes_since_gc_ += new_space_allocated_bytes;
  old_generation_allocation_in_bytes_since_gc_ +=
      old_generation_allocated_bytes;
  embedder_allocation_in_bytes_since_gc_ += embedder_allocated_bytes;
}

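// Illustration of the unsigned wrap-around relied on above, using 8-bit
// counters for brevity:
//   uint8_t old_counter = 250, new_counter = 5;
//   uint8_t allocated = new_counter - old_counter;  // 11, i.e. modulo 2^8.
// The same holds for the size_t counters here, modulo 2^64.
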
void GCTracer::AddAllocation(double current_ms) {
  allocation_time_ms_ = current_ms;
  if (allocation_duration_since_gc_ > 0) {
    recorded_new_generation_allocations_.Push(
        MakeBytesAndDuration(new_space_allocation_in_bytes_since_gc_,
                             allocation_duration_since_gc_));
    recorded_old_generation_allocations_.Push(
        MakeBytesAndDuration(old_generation_allocation_in_bytes_since_gc_,
                             allocation_duration_since_gc_));
    recorded_embedder_generation_allocations_.Push(MakeBytesAndDuration(
        embedder_allocation_in_bytes_since_gc_, allocation_duration_since_gc_));
  }
  allocation_duration_since_gc_ = 0;
  new_space_allocation_in_bytes_since_gc_ = 0;
  old_generation_allocation_in_bytes_since_gc_ = 0;
  embedder_allocation_in_bytes_since_gc_ = 0;
}

void GCTracer::AddCompactionEvent(double duration,
                                  size_t live_bytes_compacted) {
  recorded_compactions_.Push(
      MakeBytesAndDuration(live_bytes_compacted, duration));
}

void GCTracer::AddSurvivalRatio(double promotion_ratio) {
  recorded_survival_ratios_.Push(promotion_ratio);
}

void GCTracer::AddIncrementalMarkingStep(double duration, size_t bytes) {
  if (bytes > 0) {
    incremental_marking_bytes_ += bytes;
    incremental_marking_duration_ += duration;
  }
  ReportIncrementalMarkingStepToRecorder(duration);
}

void GCTracer::AddIncrementalSweepingStep(double duration) {
  ReportIncrementalSweepingStepToRecorder(duration);
}

void GCTracer::Output(const char* format, ...) const {
  if (FLAG_trace_gc) {
    va_list arguments;
    va_start(arguments, format);
    base::OS::VPrint(format, arguments);
    va_end(arguments);
  }

  const int kBufferSize = 256;
  char raw_buffer[kBufferSize];
  base::Vector<char> buffer(raw_buffer, kBufferSize);
  va_list arguments2;
  va_start(arguments2, format);
  base::VSNPrintF(buffer, format, arguments2);
  va_end(arguments2);

  heap_->AddToRingBuffer(buffer.begin());
}

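// A va_list that has been consumed (here by VPrint) may not be traversed
// again, which is why Output() starts a second one (arguments2) for
// VSNPrintF instead of reusing the first.
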
void GCTracer::Print() const {
  double duration = current_.end_time - current_.start_time;
  const size_t kIncrementalStatsSize = 128;
  char incremental_buffer[kIncrementalStatsSize] = {0};

  if (current_.type == Event::INCREMENTAL_MARK_COMPACTOR) {
    base::OS::SNPrintF(
        incremental_buffer, kIncrementalStatsSize,
        " (+ %.1f ms in %d steps since start of marking, "
        "biggest step %.1f ms, walltime since start of marking %.f ms)",
        current_scope(Scope::MC_INCREMENTAL),
        incremental_scope(Scope::MC_INCREMENTAL).steps,
        incremental_scope(Scope::MC_INCREMENTAL).longest_step,
        current_.end_time - incremental_marking_start_time_);
  }

  const double total_external_time =
      current_scope(Scope::HEAP_EXTERNAL_WEAK_GLOBAL_HANDLES) +
      current_scope(Scope::HEAP_EXTERNAL_EPILOGUE) +
      current_scope(Scope::HEAP_EXTERNAL_PROLOGUE) +
      current_scope(Scope::MC_INCREMENTAL_EXTERNAL_EPILOGUE) +
      current_scope(Scope::MC_INCREMENTAL_EXTERNAL_PROLOGUE);

  // Avoid PrintF as Output also appends the string to the tracing ring buffer
  // that gets printed on OOM failures.
  Output(
      "[%d:%p] "
      "%8.0f ms: "
      "%s%s%s %.1f (%.1f) -> %.1f (%.1f) MB, "
      "%.1f / %.1f ms %s (average mu = %.3f, current mu = %.3f) %s; %s\n",
      base::OS::GetCurrentProcessId(),
      reinterpret_cast<void*>(heap_->isolate()),
      heap_->isolate()->time_millis_since_init(),
      heap_->IsShared() ? "Shared " : "", current_.TypeName(false),
      current_.reduce_memory ? " (reduce)" : "",
      static_cast<double>(current_.start_object_size) / MB,
      static_cast<double>(current_.start_memory_size) / MB,
      static_cast<double>(current_.end_object_size) / MB,
      static_cast<double>(current_.end_memory_size) / MB, duration,
      total_external_time, incremental_buffer,
      AverageMarkCompactMutatorUtilization(),
      CurrentMarkCompactMutatorUtilization(),
      Heap::GarbageCollectionReasonToString(current_.gc_reason),
      current_.collector_reason != nullptr ? current_.collector_reason : "");
}

void GCTracer::PrintNVP() const {
  double duration = current_.end_time - current_.start_time;
  double spent_in_mutator = current_.start_time - previous_.end_time;
  size_t allocated_since_last_gc =
      current_.start_object_size - previous_.end_object_size;

  double incremental_walltime_duration = 0;

  if (current_.type == Event::INCREMENTAL_MARK_COMPACTOR) {
    incremental_walltime_duration =
        current_.end_time - incremental_marking_start_time_;
  }

  // Avoid data races when printing the background scopes.
  base::MutexGuard guard(&background_counter_mutex_);

  switch (current_.type) {
    case Event::SCAVENGER:
      heap_->isolate()->PrintWithTimestamp(
          "pause=%.1f "
          "mutator=%.1f "
          "gc=%s "
          "reduce_memory=%d "
          "time_to_safepoint=%.2f "
          "heap.prologue=%.2f "
          "heap.epilogue=%.2f "
          "heap.epilogue.reduce_new_space=%.2f "
          "heap.external.prologue=%.2f "
          "heap.external.epilogue=%.2f "
          "heap.external_weak_global_handles=%.2f "
          "fast_promote=%.2f "
          "complete.sweep_array_buffers=%.2f "
          "scavenge=%.2f "
          "scavenge.free_remembered_set=%.2f "
          "scavenge.roots=%.2f "
          "scavenge.weak=%.2f "
          "scavenge.weak_global_handles.identify=%.2f "
          "scavenge.weak_global_handles.process=%.2f "
          "scavenge.parallel=%.2f "
          "scavenge.update_refs=%.2f "
          "scavenge.sweep_array_buffers=%.2f "
          "background.scavenge.parallel=%.2f "
          "background.unmapper=%.2f "
          "unmapper=%.2f "
          "incremental.steps_count=%d "
          "incremental.steps_took=%.1f "
          "scavenge_throughput=%.f "
          "total_size_before=%zu "
          "total_size_after=%zu "
          "holes_size_before=%zu "
          "holes_size_after=%zu "
          "allocated=%zu "
          "promoted=%zu "
          "semi_space_copied=%zu "
          "nodes_died_in_new=%d "
          "nodes_copied_in_new=%d "
          "nodes_promoted=%d "
          "promotion_ratio=%.1f%% "
          "average_survival_ratio=%.1f%% "
          "promotion_rate=%.1f%% "
          "semi_space_copy_rate=%.1f%% "
          "new_space_allocation_throughput=%.1f "
          "unmapper_chunks=%d\n",
          duration, spent_in_mutator, current_.TypeName(true),
          current_.reduce_memory, current_.scopes[Scope::TIME_TO_SAFEPOINT],
          current_scope(Scope::HEAP_PROLOGUE),
          current_scope(Scope::HEAP_EPILOGUE),
          current_scope(Scope::HEAP_EPILOGUE_REDUCE_NEW_SPACE),
          current_scope(Scope::HEAP_EXTERNAL_PROLOGUE),
          current_scope(Scope::HEAP_EXTERNAL_EPILOGUE),
          current_scope(Scope::HEAP_EXTERNAL_WEAK_GLOBAL_HANDLES),
          current_scope(Scope::SCAVENGER_FAST_PROMOTE),
          current_scope(Scope::SCAVENGER_COMPLETE_SWEEP_ARRAY_BUFFERS),
          current_scope(Scope::SCAVENGER_SCAVENGE),
          current_scope(Scope::SCAVENGER_FREE_REMEMBERED_SET),
          current_scope(Scope::SCAVENGER_SCAVENGE_ROOTS),
          current_scope(Scope::SCAVENGER_SCAVENGE_WEAK),
          current_scope(Scope::SCAVENGER_SCAVENGE_WEAK_GLOBAL_HANDLES_IDENTIFY),
          current_scope(Scope::SCAVENGER_SCAVENGE_WEAK_GLOBAL_HANDLES_PROCESS),
          current_scope(Scope::SCAVENGER_SCAVENGE_PARALLEL),
          current_scope(Scope::SCAVENGER_SCAVENGE_UPDATE_REFS),
          current_scope(Scope::SCAVENGER_SWEEP_ARRAY_BUFFERS),
          current_scope(Scope::SCAVENGER_BACKGROUND_SCAVENGE_PARALLEL),
          current_scope(Scope::BACKGROUND_UNMAPPER),
          current_scope(Scope::UNMAPPER),
          incremental_scope(GCTracer::Scope::MC_INCREMENTAL).steps,
          current_scope(Scope::MC_INCREMENTAL),
          ScavengeSpeedInBytesPerMillisecond(), current_.start_object_size,
          current_.end_object_size, current_.start_holes_size,
          current_.end_holes_size, allocated_since_last_gc,
          heap_->promoted_objects_size(),
          heap_->semi_space_copied_object_size(),
          heap_->nodes_died_in_new_space_, heap_->nodes_copied_in_new_space_,
          heap_->nodes_promoted_, heap_->promotion_ratio_,
          AverageSurvivalRatio(), heap_->promotion_rate_,
          heap_->semi_space_copied_rate_,
          NewSpaceAllocationThroughputInBytesPerMillisecond(),
          heap_->memory_allocator()->unmapper()->NumberOfChunks());
      break;
    case Event::MINOR_MARK_COMPACTOR:
      heap_->isolate()->PrintWithTimestamp(
          "pause=%.1f "
          "mutator=%.1f "
          "gc=%s "
          "reduce_memory=%d "
          "minor_mc=%.2f "
          "finish_sweeping=%.2f "
          "time_to_safepoint=%.2f "
          "mark=%.2f "
          "mark.seed=%.2f "
          "mark.roots=%.2f "
          "mark.weak=%.2f "
          "mark.global_handles=%.2f "
          "clear=%.2f "
          "clear.string_table=%.2f "
          "clear.weak_lists=%.2f "
          "evacuate=%.2f "
          "evacuate.copy=%.2f "
          "evacuate.update_pointers=%.2f "
          "evacuate.update_pointers.to_new_roots=%.2f "
          "evacuate.update_pointers.slots=%.2f "
          "background.mark=%.2f "
          "background.evacuate.copy=%.2f "
          "background.evacuate.update_pointers=%.2f "
          "background.unmapper=%.2f "
          "unmapper=%.2f "
          "update_marking_deque=%.2f "
          "reset_liveness=%.2f\n",
          duration, spent_in_mutator, "mmc", current_.reduce_memory,
          current_scope(Scope::MINOR_MC),
          current_scope(Scope::MINOR_MC_SWEEPING),
          current_scope(Scope::TIME_TO_SAFEPOINT),
          current_scope(Scope::MINOR_MC_MARK),
          current_scope(Scope::MINOR_MC_MARK_SEED),
          current_scope(Scope::MINOR_MC_MARK_ROOTS),
          current_scope(Scope::MINOR_MC_MARK_WEAK),
          current_scope(Scope::MINOR_MC_MARK_GLOBAL_HANDLES),
          current_scope(Scope::MINOR_MC_CLEAR),
          current_scope(Scope::MINOR_MC_CLEAR_STRING_TABLE),
          current_scope(Scope::MINOR_MC_CLEAR_WEAK_LISTS),
          current_scope(Scope::MINOR_MC_EVACUATE),
          current_scope(Scope::MINOR_MC_EVACUATE_COPY),
          current_scope(Scope::MINOR_MC_EVACUATE_UPDATE_POINTERS),
          current_scope(Scope::MINOR_MC_EVACUATE_UPDATE_POINTERS_TO_NEW_ROOTS),
          current_scope(Scope::MINOR_MC_EVACUATE_UPDATE_POINTERS_SLOTS),
          current_scope(Scope::MINOR_MC_BACKGROUND_MARKING),
          current_scope(Scope::MINOR_MC_BACKGROUND_EVACUATE_COPY),
          current_scope(Scope::MINOR_MC_BACKGROUND_EVACUATE_UPDATE_POINTERS),
          current_scope(Scope::BACKGROUND_UNMAPPER),
          current_scope(Scope::UNMAPPER),
          current_scope(Scope::MINOR_MC_MARKING_DEQUE),
          current_scope(Scope::MINOR_MC_RESET_LIVENESS));
      break;
    case Event::MARK_COMPACTOR:
    case Event::INCREMENTAL_MARK_COMPACTOR:
      heap_->isolate()->PrintWithTimestamp(
          "pause=%.1f "
          "mutator=%.1f "
          "gc=%s "
          "reduce_memory=%d "
          "time_to_safepoint=%.2f "
          "heap.prologue=%.2f "
          "heap.embedder_tracing_epilogue=%.2f "
          "heap.epilogue=%.2f "
          "heap.epilogue.reduce_new_space=%.2f "
          "heap.external.prologue=%.1f "
          "heap.external.epilogue=%.1f "
          "heap.external.weak_global_handles=%.1f "
          "clear=%.1f "
          "clear.dependent_code=%.1f "
          "clear.maps=%.1f "
          "clear.slots_buffer=%.1f "
          "clear.string_table=%.1f "
          "clear.weak_collections=%.1f "
          "clear.weak_lists=%.1f "
          "clear.weak_references=%.1f "
          "complete.sweep_array_buffers=%.1f "
          "epilogue=%.1f "
          "evacuate=%.1f "
          "evacuate.candidates=%.1f "
          "evacuate.clean_up=%.1f "
          "evacuate.copy=%.1f "
          "evacuate.prologue=%.1f "
          "evacuate.epilogue=%.1f "
          "evacuate.rebalance=%.1f "
          "evacuate.update_pointers=%.1f "
          "evacuate.update_pointers.to_new_roots=%.1f "
          "evacuate.update_pointers.slots.main=%.1f "
          "evacuate.update_pointers.weak=%.1f "
          "finish=%.1f "
          "finish.sweep_array_buffers=%.1f "
          "mark=%.1f "
          "mark.finish_incremental=%.1f "
          "mark.roots=%.1f "
          "mark.main=%.1f "
          "mark.weak_closure=%.1f "
          "mark.weak_closure.ephemeron=%.1f "
          "mark.weak_closure.ephemeron.marking=%.1f "
          "mark.weak_closure.ephemeron.linear=%.1f "
          "mark.weak_closure.weak_handles=%.1f "
          "mark.weak_closure.weak_roots=%.1f "
          "mark.weak_closure.harmony=%.1f "
          "mark.embedder_prologue=%.1f "
          "mark.embedder_tracing=%.1f "
          "prologue=%.1f "
          "sweep=%.1f "
          "sweep.code=%.1f "
          "sweep.map=%.1f "
          "sweep.old=%.1f "
          "incremental=%.1f "
          "incremental.finalize=%.1f "
          "incremental.finalize.body=%.1f "
          "incremental.finalize.external.prologue=%.1f "
          "incremental.finalize.external.epilogue=%.1f "
          "incremental.layout_change=%.1f "
          "incremental.sweep_array_buffers=%.1f "
          "incremental.sweeping=%.1f "
          "incremental.embedder_prologue=%.1f "
          "incremental.embedder_tracing=%.1f "
          "incremental_wrapper_tracing_longest_step=%.1f "
          "incremental_finalize_longest_step=%.1f "
          "incremental_finalize_steps_count=%d "
          "incremental_longest_step=%.1f "
          "incremental_steps_count=%d "
          "incremental_marking_throughput=%.f "
          "incremental_walltime_duration=%.f "
          "background.mark=%.1f "
          "background.sweep=%.1f "
          "background.evacuate.copy=%.1f "
          "background.evacuate.update_pointers=%.1f "
          "background.unmapper=%.1f "
          "unmapper=%.1f "
          "total_size_before=%zu "
          "total_size_after=%zu "
          "holes_size_before=%zu "
          "holes_size_after=%zu "
          "allocated=%zu "
          "promoted=%zu "
          "semi_space_copied=%zu "
          "nodes_died_in_new=%d "
          "nodes_copied_in_new=%d "
          "nodes_promoted=%d "
          "promotion_ratio=%.1f%% "
          "average_survival_ratio=%.1f%% "
          "promotion_rate=%.1f%% "
          "semi_space_copy_rate=%.1f%% "
          "new_space_allocation_throughput=%.1f "
          "unmapper_chunks=%d "
          "compaction_speed=%.f\n",
          duration, spent_in_mutator, current_.TypeName(true),
          current_.reduce_memory, current_scope(Scope::TIME_TO_SAFEPOINT),
          current_scope(Scope::HEAP_PROLOGUE),
          current_scope(Scope::HEAP_EMBEDDER_TRACING_EPILOGUE),
          current_scope(Scope::HEAP_EPILOGUE),
          current_scope(Scope::HEAP_EPILOGUE_REDUCE_NEW_SPACE),
          current_scope(Scope::HEAP_EXTERNAL_PROLOGUE),
          current_scope(Scope::HEAP_EXTERNAL_EPILOGUE),
          current_scope(Scope::HEAP_EXTERNAL_WEAK_GLOBAL_HANDLES),
          current_scope(Scope::MC_CLEAR),
          current_scope(Scope::MC_CLEAR_DEPENDENT_CODE),
          current_scope(Scope::MC_CLEAR_MAPS),
          current_scope(Scope::MC_CLEAR_SLOTS_BUFFER),
          current_scope(Scope::MC_CLEAR_STRING_TABLE),
          current_scope(Scope::MC_CLEAR_WEAK_COLLECTIONS),
          current_scope(Scope::MC_CLEAR_WEAK_LISTS),
          current_scope(Scope::MC_CLEAR_WEAK_REFERENCES),
          current_scope(Scope::MC_COMPLETE_SWEEP_ARRAY_BUFFERS),
          current_scope(Scope::MC_EPILOGUE), current_scope(Scope::MC_EVACUATE),
          current_scope(Scope::MC_EVACUATE_CANDIDATES),
          current_scope(Scope::MC_EVACUATE_CLEAN_UP),
          current_scope(Scope::MC_EVACUATE_COPY),
          current_scope(Scope::MC_EVACUATE_PROLOGUE),
          current_scope(Scope::MC_EVACUATE_EPILOGUE),
          current_scope(Scope::MC_EVACUATE_REBALANCE),
          current_scope(Scope::MC_EVACUATE_UPDATE_POINTERS),
          current_scope(Scope::MC_EVACUATE_UPDATE_POINTERS_TO_NEW_ROOTS),
          current_scope(Scope::MC_EVACUATE_UPDATE_POINTERS_SLOTS_MAIN),
          current_scope(Scope::MC_EVACUATE_UPDATE_POINTERS_WEAK),
          current_scope(Scope::MC_FINISH),
          current_scope(Scope::MC_FINISH_SWEEP_ARRAY_BUFFERS),
          current_scope(Scope::MC_MARK),
          current_scope(Scope::MC_MARK_FINISH_INCREMENTAL),
          current_scope(Scope::MC_MARK_ROOTS),
          current_scope(Scope::MC_MARK_MAIN),
          current_scope(Scope::MC_MARK_WEAK_CLOSURE),
          current_scope(Scope::MC_MARK_WEAK_CLOSURE_EPHEMERON),
          current_scope(Scope::MC_MARK_WEAK_CLOSURE_EPHEMERON_MARKING),
          current_scope(Scope::MC_MARK_WEAK_CLOSURE_EPHEMERON_LINEAR),
          current_scope(Scope::MC_MARK_WEAK_CLOSURE_WEAK_HANDLES),
          current_scope(Scope::MC_MARK_WEAK_CLOSURE_WEAK_ROOTS),
          current_scope(Scope::MC_MARK_WEAK_CLOSURE_HARMONY),
          current_scope(Scope::MC_MARK_EMBEDDER_PROLOGUE),
          current_scope(Scope::MC_MARK_EMBEDDER_TRACING),
          current_scope(Scope::MC_PROLOGUE), current_scope(Scope::MC_SWEEP),
          current_scope(Scope::MC_SWEEP_CODE),
          current_scope(Scope::MC_SWEEP_MAP),
          current_scope(Scope::MC_SWEEP_OLD),
          current_scope(Scope::MC_INCREMENTAL),
          current_scope(Scope::MC_INCREMENTAL_FINALIZE),
          current_scope(Scope::MC_INCREMENTAL_FINALIZE_BODY),
          current_scope(Scope::MC_INCREMENTAL_EXTERNAL_PROLOGUE),
          current_scope(Scope::MC_INCREMENTAL_EXTERNAL_EPILOGUE),
          current_scope(Scope::MC_INCREMENTAL_LAYOUT_CHANGE),
          current_scope(Scope::MC_INCREMENTAL_START),
          current_scope(Scope::MC_INCREMENTAL_SWEEPING),
          current_scope(Scope::MC_INCREMENTAL_EMBEDDER_PROLOGUE),
          current_scope(Scope::MC_INCREMENTAL_EMBEDDER_TRACING),
          incremental_scope(Scope::MC_INCREMENTAL_EMBEDDER_TRACING)
              .longest_step,
          incremental_scope(Scope::MC_INCREMENTAL_FINALIZE_BODY).longest_step,
          incremental_scope(Scope::MC_INCREMENTAL_FINALIZE_BODY).steps,
          incremental_scope(Scope::MC_INCREMENTAL).longest_step,
          incremental_scope(Scope::MC_INCREMENTAL).steps,
          IncrementalMarkingSpeedInBytesPerMillisecond(),
          incremental_walltime_duration,
          current_scope(Scope::MC_BACKGROUND_MARKING),
          current_scope(Scope::MC_BACKGROUND_SWEEPING),
          current_scope(Scope::MC_BACKGROUND_EVACUATE_COPY),
          current_scope(Scope::MC_BACKGROUND_EVACUATE_UPDATE_POINTERS),
          current_scope(Scope::BACKGROUND_UNMAPPER),
          current_scope(Scope::UNMAPPER), current_.start_object_size,
          current_.end_object_size, current_.start_holes_size,
          current_.end_holes_size, allocated_since_last_gc,
          heap_->promoted_objects_size(),
          heap_->semi_space_copied_object_size(),
          heap_->nodes_died_in_new_space_, heap_->nodes_copied_in_new_space_,
          heap_->nodes_promoted_, heap_->promotion_ratio_,
          AverageSurvivalRatio(), heap_->promotion_rate_,
          heap_->semi_space_copied_rate_,
          NewSpaceAllocationThroughputInBytesPerMillisecond(),
          heap_->memory_allocator()->unmapper()->NumberOfChunks(),
          CompactionSpeedInBytesPerMillisecond());
      break;
    case Event::START:
      break;
    default:
      UNREACHABLE();
  }
}

double GCTracer::AverageSpeed(const base::RingBuffer<BytesAndDuration>& buffer,
                              const BytesAndDuration& initial, double time_ms) {
  BytesAndDuration sum = buffer.Sum(
      [time_ms](BytesAndDuration a, BytesAndDuration b) {
        if (time_ms != 0 && a.second >= time_ms) return a;
        return std::make_pair(a.first + b.first, a.second + b.second);
      },
      initial);
  uint64_t bytes = sum.first;
  double durations = sum.second;
  if (durations == 0.0) return 0;
  double speed = bytes / durations;
  const int max_speed = 1024 * MB;
  const int min_speed = 1;
  if (speed >= max_speed) return max_speed;
  if (speed <= min_speed) return min_speed;
  return speed;
}

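// The lambda above stops accumulating once the summed durations already
// cover time_ms (when a time window is requested), so the average is
// effectively restricted to roughly the most recent time_ms of samples;
// the result is then clamped to [1, 1024 * MB] bytes/ms.
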
double GCTracer::AverageSpeed(
    const base::RingBuffer<BytesAndDuration>& buffer) {
  return AverageSpeed(buffer, MakeBytesAndDuration(0, 0), 0);
}

void GCTracer::RecordIncrementalMarkingSpeed(size_t bytes, double duration) {
  if (duration == 0 || bytes == 0) return;
  double current_speed = bytes / duration;
  if (recorded_incremental_marking_speed_ == 0) {
    recorded_incremental_marking_speed_ = current_speed;
  } else {
    recorded_incremental_marking_speed_ =
        (recorded_incremental_marking_speed_ + current_speed) / 2;
  }
}

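// Averaging as (old + new) / 2, used here and in the recorders below, is an
// exponentially weighted moving average with weight 1/2: each past sample's
// contribution halves with every new sample.
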
void GCTracer::RecordTimeToIncrementalMarkingTask(double time_to_task) {
  if (average_time_to_incremental_marking_task_ == 0.0) {
    average_time_to_incremental_marking_task_ = time_to_task;
  } else {
    average_time_to_incremental_marking_task_ =
        (average_time_to_incremental_marking_task_ + time_to_task) / 2;
  }
}

double GCTracer::AverageTimeToIncrementalMarkingTask() const {
  return average_time_to_incremental_marking_task_;
}

void GCTracer::RecordEmbedderSpeed(size_t bytes, double duration) {
  if (duration == 0 || bytes == 0) return;
  double current_speed = bytes / duration;
  if (recorded_embedder_speed_ == 0.0) {
    recorded_embedder_speed_ = current_speed;
  } else {
    recorded_embedder_speed_ = (recorded_embedder_speed_ + current_speed) / 2;
  }
}

void GCTracer::RecordMutatorUtilization(double mark_compact_end_time,
                                        double mark_compact_duration) {
  if (previous_mark_compact_end_time_ == 0) {
    // The first event only contributes to previous_mark_compact_end_time_,
    // because we cannot compute the mutator duration.
    previous_mark_compact_end_time_ = mark_compact_end_time;
  } else {
    double total_duration =
        mark_compact_end_time - previous_mark_compact_end_time_;
    double mutator_duration = total_duration - mark_compact_duration;
    if (average_mark_compact_duration_ == 0 && average_mutator_duration_ == 0) {
      // This is the first event with mutator and mark-compact durations.
      average_mark_compact_duration_ = mark_compact_duration;
      average_mutator_duration_ = mutator_duration;
    } else {
      average_mark_compact_duration_ =
          (average_mark_compact_duration_ + mark_compact_duration) / 2;
      average_mutator_duration_ =
          (average_mutator_duration_ + mutator_duration) / 2;
    }
    current_mark_compact_mutator_utilization_ =
        total_duration ? mutator_duration / total_duration : 0;
    previous_mark_compact_end_time_ = mark_compact_end_time;
  }
}

double GCTracer::AverageMarkCompactMutatorUtilization() const {
  double average_total_duration =
      average_mark_compact_duration_ + average_mutator_duration_;
  if (average_total_duration == 0) return 1.0;
  return average_mutator_duration_ / average_total_duration;
}

double GCTracer::CurrentMarkCompactMutatorUtilization() const {
  return current_mark_compact_mutator_utilization_;
}

double GCTracer::IncrementalMarkingSpeedInBytesPerMillisecond() const {
  if (recorded_incremental_marking_speed_ != 0) {
    return recorded_incremental_marking_speed_;
  }
  if (incremental_marking_duration_ != 0.0) {
    return incremental_marking_bytes_ / incremental_marking_duration_;
  }
  return kConservativeSpeedInBytesPerMillisecond;
}

double GCTracer::EmbedderSpeedInBytesPerMillisecond() const {
  // Note: Returning 0 is ok here as callers check for whether embedder speeds
  // have been recorded at all.
  return recorded_embedder_speed_;
}

double GCTracer::ScavengeSpeedInBytesPerMillisecond(
    ScavengeSpeedMode mode) const {
  if (mode == kForAllObjects) {
    return AverageSpeed(recorded_minor_gcs_total_);
  } else {
    return AverageSpeed(recorded_minor_gcs_survived_);
  }
}

double GCTracer::CompactionSpeedInBytesPerMillisecond() const {
  return AverageSpeed(recorded_compactions_);
}

double GCTracer::MarkCompactSpeedInBytesPerMillisecond() const {
  return AverageSpeed(recorded_mark_compacts_);
}

double GCTracer::FinalIncrementalMarkCompactSpeedInBytesPerMillisecond() const {
  return AverageSpeed(recorded_incremental_mark_compacts_);
}

double GCTracer::CombinedMarkCompactSpeedInBytesPerMillisecond() {
  const double kMinimumMarkingSpeed = 0.5;
  if (combined_mark_compact_speed_cache_ > 0)
    return combined_mark_compact_speed_cache_;
  // MarkCompact speed is more stable than incremental marking speed, because
  // with concurrent marking there might be only a few incremental marking
  // steps.
  combined_mark_compact_speed_cache_ = MarkCompactSpeedInBytesPerMillisecond();
  if (combined_mark_compact_speed_cache_ > 0)
    return combined_mark_compact_speed_cache_;
  double speed1 = IncrementalMarkingSpeedInBytesPerMillisecond();
  double speed2 = FinalIncrementalMarkCompactSpeedInBytesPerMillisecond();
  if (speed1 < kMinimumMarkingSpeed || speed2 < kMinimumMarkingSpeed) {
    // No data for the incremental marking speed.
    // Return the non-incremental mark-compact speed.
    combined_mark_compact_speed_cache_ =
        MarkCompactSpeedInBytesPerMillisecond();
  } else {
    // Combine the speed of incremental step and the speed of the final step.
    // 1 / (1 / speed1 + 1 / speed2) = speed1 * speed2 / (speed1 + speed2).
    combined_mark_compact_speed_cache_ = speed1 * speed2 / (speed1 + speed2);
  }
  return combined_mark_compact_speed_cache_;
}

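// The combination above treats incremental marking and the final step as
// sequential work over the same bytes: total time = B/speed1 + B/speed2,
// hence combined speed = speed1 * speed2 / (speed1 + speed2). For example,
// 4 MB/ms and 4 MB/ms combine to 2 MB/ms.
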
double GCTracer::CombineSpeedsInBytesPerMillisecond(double default_speed,
                                                    double optional_speed) {
  constexpr double kMinimumSpeed = 0.5;
  if (optional_speed < kMinimumSpeed) {
    return default_speed;
  }
  return default_speed * optional_speed / (default_speed + optional_speed);
}

double GCTracer::NewSpaceAllocationThroughputInBytesPerMillisecond(
    double time_ms) const {
  size_t bytes = new_space_allocation_in_bytes_since_gc_;
  double durations = allocation_duration_since_gc_;
  return AverageSpeed(recorded_new_generation_allocations_,
                      MakeBytesAndDuration(bytes, durations), time_ms);
}

double GCTracer::OldGenerationAllocationThroughputInBytesPerMillisecond(
    double time_ms) const {
  size_t bytes = old_generation_allocation_in_bytes_since_gc_;
  double durations = allocation_duration_since_gc_;
  return AverageSpeed(recorded_old_generation_allocations_,
                      MakeBytesAndDuration(bytes, durations), time_ms);
}

EmbedderAllocationThroughputInBytesPerMillisecond(double time_ms) const1243 double GCTracer::EmbedderAllocationThroughputInBytesPerMillisecond(
1244     double time_ms) const {
1245   size_t bytes = embedder_allocation_in_bytes_since_gc_;
1246   double durations = allocation_duration_since_gc_;
1247   return AverageSpeed(recorded_embedder_generation_allocations_,
1248                       MakeBytesAndDuration(bytes, durations), time_ms);
1249 }
1250 
AllocationThroughputInBytesPerMillisecond(double time_ms) const1251 double GCTracer::AllocationThroughputInBytesPerMillisecond(
1252     double time_ms) const {
1253   return NewSpaceAllocationThroughputInBytesPerMillisecond(time_ms) +
1254          OldGenerationAllocationThroughputInBytesPerMillisecond(time_ms);
1255 }
1256 
CurrentAllocationThroughputInBytesPerMillisecond() const1257 double GCTracer::CurrentAllocationThroughputInBytesPerMillisecond() const {
1258   return AllocationThroughputInBytesPerMillisecond(kThroughputTimeFrameMs);
1259 }
1260 
CurrentOldGenerationAllocationThroughputInBytesPerMillisecond() const1261 double GCTracer::CurrentOldGenerationAllocationThroughputInBytesPerMillisecond()
1262     const {
1263   return OldGenerationAllocationThroughputInBytesPerMillisecond(
1264       kThroughputTimeFrameMs);
1265 }
1266 
CurrentEmbedderAllocationThroughputInBytesPerMillisecond() const1267 double GCTracer::CurrentEmbedderAllocationThroughputInBytesPerMillisecond()
1268     const {
1269   return EmbedderAllocationThroughputInBytesPerMillisecond(
1270       kThroughputTimeFrameMs);
1271 }
1272 
double GCTracer::AverageSurvivalRatio() const {
  if (recorded_survival_ratios_.Count() == 0) return 0.0;
  double sum = recorded_survival_ratios_.Sum(
      [](double a, double b) { return a + b; }, 0.0);
  return sum / recorded_survival_ratios_.Count();
}
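
// A plain arithmetic mean of the recorded ratios: values of 40.0 and 60.0 for
// the last two young GCs average to 50.0.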

bool GCTracer::SurvivalEventsRecorded() const {
  return recorded_survival_ratios_.Count() > 0;
}

void GCTracer::ResetSurvivalEvents() { recorded_survival_ratios_.Reset(); }

void GCTracer::NotifyIncrementalMarkingStart() {
  incremental_marking_start_time_ = MonotonicallyIncreasingTimeInMs();
}

void GCTracer::FetchBackgroundMarkCompactCounters() {
  FetchBackgroundCounters(Scope::FIRST_MC_BACKGROUND_SCOPE,
                          Scope::LAST_MC_BACKGROUND_SCOPE);
  heap_->isolate()->counters()->background_marking()->AddSample(
      static_cast<int>(current_.scopes[Scope::MC_BACKGROUND_MARKING]));
  heap_->isolate()->counters()->background_sweeping()->AddSample(
      static_cast<int>(current_.scopes[Scope::MC_BACKGROUND_SWEEPING]));
}

void GCTracer::FetchBackgroundMinorGCCounters() {
  FetchBackgroundCounters(Scope::FIRST_MINOR_GC_BACKGROUND_SCOPE,
                          Scope::LAST_MINOR_GC_BACKGROUND_SCOPE);
}

void GCTracer::FetchBackgroundGeneralCounters() {
  FetchBackgroundCounters(Scope::FIRST_GENERAL_BACKGROUND_SCOPE,
                          Scope::LAST_GENERAL_BACKGROUND_SCOPE);
}

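// Drains the durations accumulated by background threads into the current
// event and resets the per-scope accumulators under the mutex, so each
// background millisecond is attributed to exactly one GC event.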
void GCTracer::FetchBackgroundCounters(int first_scope, int last_scope) {
  base::MutexGuard guard(&background_counter_mutex_);
  for (int i = first_scope; i <= last_scope; i++) {
    current_.scopes[i] += background_counter_[i].total_duration_ms;
    background_counter_[i].total_duration_ms = 0;
  }
}

void GCTracer::RecordGCPhasesHistograms(RecordGCPhasesInfo::Mode mode) {
  Counters* counters = heap_->isolate()->counters();
  if (mode == RecordGCPhasesInfo::Mode::Finalize) {
    DCHECK_EQ(Scope::FIRST_TOP_MC_SCOPE, Scope::MC_CLEAR);
    counters->gc_finalize_clear()->AddSample(
        static_cast<int>(current_.scopes[Scope::MC_CLEAR]));
    counters->gc_finalize_epilogue()->AddSample(
        static_cast<int>(current_.scopes[Scope::MC_EPILOGUE]));
    counters->gc_finalize_evacuate()->AddSample(
        static_cast<int>(current_.scopes[Scope::MC_EVACUATE]));
    counters->gc_finalize_finish()->AddSample(
        static_cast<int>(current_.scopes[Scope::MC_FINISH]));
    counters->gc_finalize_mark()->AddSample(
        static_cast<int>(current_.scopes[Scope::MC_MARK]));
    counters->gc_finalize_prologue()->AddSample(
        static_cast<int>(current_.scopes[Scope::MC_PROLOGUE]));
    counters->gc_finalize_sweep()->AddSample(
        static_cast<int>(current_.scopes[Scope::MC_SWEEP]));
    if (incremental_marking_duration_ > 0) {
      heap_->isolate()->counters()->incremental_marking_sum()->AddSample(
          static_cast<int>(incremental_marking_duration_));
    }
    const double overall_marking_time =
        incremental_marking_duration_ + current_.scopes[Scope::MC_MARK];
    heap_->isolate()->counters()->gc_marking_sum()->AddSample(
        static_cast<int>(overall_marking_time));

    // Filter out samples where
    // - we don't have high-resolution timers;
    // - the size of marked objects is very small;
    // - the marking time is rounded to 0.
    constexpr size_t kMinObjectSizeForReportingThroughput = 1024 * 1024;
    if (base::TimeTicks::IsHighResolution() &&
        heap_->SizeOfObjects() > kMinObjectSizeForReportingThroughput &&
        overall_marking_time > 0) {
      const double overall_v8_marking_time =
          overall_marking_time -
          current_.scopes[Scope::MC_MARK_EMBEDDER_TRACING];
      if (overall_v8_marking_time > 0) {
        const int main_thread_marking_throughput_mb_per_s =
            static_cast<int>(static_cast<double>(heap_->SizeOfObjects()) /
                             overall_v8_marking_time * 1000 / 1024 / 1024);
        heap_->isolate()
            ->counters()
            ->gc_main_thread_marking_throughput()
            ->AddSample(
                static_cast<int>(main_thread_marking_throughput_mb_per_s));
      }
    }
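
    // E.g. with hypothetical numbers: 512 MB of marked objects over 2000 ms
    // of V8 marking time is 536870912 / 2000 * 1000 / 1024 / 1024 = 256 MB/s.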

    DCHECK_EQ(Scope::LAST_TOP_MC_SCOPE, Scope::MC_SWEEP);
  } else if (mode == RecordGCPhasesInfo::Mode::Scavenger) {
    counters->gc_scavenger_scavenge_main()->AddSample(
        static_cast<int>(current_.scopes[Scope::SCAVENGER_SCAVENGE_PARALLEL]));
    counters->gc_scavenger_scavenge_roots()->AddSample(
        static_cast<int>(current_.scopes[Scope::SCAVENGER_SCAVENGE_ROOTS]));
  }
}

void GCTracer::RecordGCSumCounters() {
  base::MutexGuard guard(&background_counter_mutex_);

  const double atomic_pause_duration = current_.scopes[Scope::MARK_COMPACTOR];
  const double incremental_marking =
      incremental_scopes_[Scope::MC_INCREMENTAL_LAYOUT_CHANGE].duration +
      incremental_scopes_[Scope::MC_INCREMENTAL_START].duration +
      incremental_marking_duration_ +
      incremental_scopes_[Scope::MC_INCREMENTAL_FINALIZE].duration;
  const double incremental_sweeping =
      incremental_scopes_[Scope::MC_INCREMENTAL_SWEEPING].duration;
  const double overall_duration =
      atomic_pause_duration + incremental_marking + incremental_sweeping;
  const double background_duration =
      background_counter_[Scope::MC_BACKGROUND_EVACUATE_COPY]
          .total_duration_ms +
      background_counter_[Scope::MC_BACKGROUND_EVACUATE_UPDATE_POINTERS]
          .total_duration_ms +
      background_counter_[Scope::MC_BACKGROUND_MARKING].total_duration_ms +
      background_counter_[Scope::MC_BACKGROUND_SWEEPING].total_duration_ms;
  const double atomic_marking_duration =
      current_.scopes[Scope::MC_PROLOGUE] + current_.scopes[Scope::MC_MARK];
  const double marking_duration = atomic_marking_duration + incremental_marking;
  const double marking_background_duration =
      background_counter_[Scope::MC_BACKGROUND_MARKING].total_duration_ms;

  // Emit trace event counters.
  TRACE_EVENT_INSTANT2(TRACE_DISABLED_BY_DEFAULT("v8.gc"),
                       "V8.GCMarkCompactorSummary", TRACE_EVENT_SCOPE_THREAD,
                       "duration", overall_duration, "background_duration",
                       background_duration);
  TRACE_EVENT_INSTANT2(TRACE_DISABLED_BY_DEFAULT("v8.gc"),
                       "V8.GCMarkCompactorMarkingSummary",
                       TRACE_EVENT_SCOPE_THREAD, "duration", marking_duration,
                       "background_duration", marking_background_duration);
}
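
// "duration" is main-thread wall-clock time (atomic pause plus incremental
// steps); "background_duration" is time spent on background threads. Their
// sum approximates the total CPU time of the mark-compact cycle.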

namespace {

void CopyTimeMetrics(
    ::v8::metrics::GarbageCollectionPhases& metrics,
    const cppgc::internal::MetricRecorder::GCCycle::IncrementalPhases&
        cppgc_metrics) {
  DCHECK_NE(-1, cppgc_metrics.mark_duration_us);
  metrics.mark_wall_clock_duration_in_us = cppgc_metrics.mark_duration_us;
  DCHECK_NE(-1, cppgc_metrics.sweep_duration_us);
  metrics.sweep_wall_clock_duration_in_us = cppgc_metrics.sweep_duration_us;
  metrics.total_wall_clock_duration_in_us =
      metrics.mark_wall_clock_duration_in_us +
      metrics.sweep_wall_clock_duration_in_us;
}

void CopyTimeMetrics(
    ::v8::metrics::GarbageCollectionPhases& metrics,
    const cppgc::internal::MetricRecorder::GCCycle::Phases& cppgc_metrics) {
  DCHECK_NE(-1, cppgc_metrics.compact_duration_us);
  metrics.compact_wall_clock_duration_in_us = cppgc_metrics.compact_duration_us;
  DCHECK_NE(-1, cppgc_metrics.mark_duration_us);
  metrics.mark_wall_clock_duration_in_us = cppgc_metrics.mark_duration_us;
  DCHECK_NE(-1, cppgc_metrics.sweep_duration_us);
  metrics.sweep_wall_clock_duration_in_us = cppgc_metrics.sweep_duration_us;
  DCHECK_NE(-1, cppgc_metrics.weak_duration_us);
  metrics.weak_wall_clock_duration_in_us = cppgc_metrics.weak_duration_us;
  metrics.total_wall_clock_duration_in_us =
      metrics.compact_wall_clock_duration_in_us +
      metrics.mark_wall_clock_duration_in_us +
      metrics.sweep_wall_clock_duration_in_us +
      metrics.weak_wall_clock_duration_in_us;
}

void CopySizeMetrics(
    ::v8::metrics::GarbageCollectionSizes& metrics,
    const cppgc::internal::MetricRecorder::GCCycle::Sizes& cppgc_metrics) {
  DCHECK_NE(-1, cppgc_metrics.after_bytes);
  metrics.bytes_after = cppgc_metrics.after_bytes;
  DCHECK_NE(-1, cppgc_metrics.before_bytes);
  metrics.bytes_before = cppgc_metrics.before_bytes;
  DCHECK_NE(-1, cppgc_metrics.freed_bytes);
  metrics.bytes_freed = cppgc_metrics.freed_bytes;
}

::v8::metrics::Recorder::ContextId GetContextId(
    v8::internal::Isolate* isolate) {
  DCHECK_NOT_NULL(isolate);
  if (isolate->context().is_null())
    return v8::metrics::Recorder::ContextId::Empty();
  HandleScope scope(isolate);
  return isolate->GetOrRegisterRecorderContextId(isolate->native_context());
}

template <typename EventType>
void FlushBatchedEvents(
    v8::metrics::GarbageCollectionBatchedEvents<EventType>& batched_events,
    Isolate* isolate) {
  DCHECK_NOT_NULL(isolate->metrics_recorder());
  DCHECK(!batched_events.events.empty());
  isolate->metrics_recorder()->AddMainThreadEvent(std::move(batched_events),
                                                  GetContextId(isolate));
  batched_events = {};
}
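
// The batch is taken by reference, moved into the recorder, and then reset,
// so the caller can keep appending events to the same object after a flush.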

}  // namespace

void GCTracer::ReportFullCycleToRecorder() {
  DCHECK(!Event::IsYoungGenerationEvent(current_.type));
  DCHECK_EQ(Event::State::NOT_RUNNING, current_.state);
  auto* cpp_heap = v8::internal::CppHeap::From(heap_->cpp_heap());
  DCHECK_IMPLIES(cpp_heap,
                 cpp_heap->GetMetricRecorder()->FullGCMetricsReportPending());
  const std::shared_ptr<metrics::Recorder>& recorder =
      heap_->isolate()->metrics_recorder();
  DCHECK_NOT_NULL(recorder);
  if (!recorder->HasEmbedderRecorder()) {
    incremental_mark_batched_events_ = {};
    incremental_sweep_batched_events_ = {};
    if (cpp_heap) {
      cpp_heap->GetMetricRecorder()->ClearCachedEvents();
    }
    return;
  }
  if (!incremental_mark_batched_events_.events.empty()) {
    FlushBatchedEvents(incremental_mark_batched_events_, heap_->isolate());
  }
  if (!incremental_sweep_batched_events_.events.empty()) {
    FlushBatchedEvents(incremental_sweep_batched_events_, heap_->isolate());
  }

  v8::metrics::GarbageCollectionFullCycle event;
  event.reason = static_cast<int>(current_.gc_reason);

  // Managed C++ heap statistics:
  if (cpp_heap) {
    cpp_heap->GetMetricRecorder()->FlushBatchedIncrementalEvents();
    const base::Optional<cppgc::internal::MetricRecorder::GCCycle>
        optional_cppgc_event =
            cpp_heap->GetMetricRecorder()->ExtractLastFullGcEvent();
    DCHECK(optional_cppgc_event.has_value());
    DCHECK(!cpp_heap->GetMetricRecorder()->FullGCMetricsReportPending());
    const cppgc::internal::MetricRecorder::GCCycle& cppgc_event =
        optional_cppgc_event.value();
    DCHECK_EQ(cppgc_event.type,
              cppgc::internal::MetricRecorder::GCCycle::Type::kMajor);
    CopyTimeMetrics(event.total_cpp, cppgc_event.total);
    CopyTimeMetrics(event.main_thread_cpp, cppgc_event.main_thread);
    CopyTimeMetrics(event.main_thread_atomic_cpp,
                    cppgc_event.main_thread_atomic);
    CopyTimeMetrics(event.main_thread_incremental_cpp,
                    cppgc_event.main_thread_incremental);
    CopySizeMetrics(event.objects_cpp, cppgc_event.objects);
    CopySizeMetrics(event.memory_cpp, cppgc_event.memory);
    DCHECK_NE(-1, cppgc_event.collection_rate_in_percent);
    event.collection_rate_cpp_in_percent =
        cppgc_event.collection_rate_in_percent;
    DCHECK_NE(-1, cppgc_event.efficiency_in_bytes_per_us);
    event.efficiency_cpp_in_bytes_per_us =
        cppgc_event.efficiency_in_bytes_per_us;
    DCHECK_NE(-1, cppgc_event.main_thread_efficiency_in_bytes_per_us);
    event.main_thread_efficiency_cpp_in_bytes_per_us =
        cppgc_event.main_thread_efficiency_in_bytes_per_us;
  }

  // Unified heap statistics:
  const double atomic_pause_duration = current_.scopes[Scope::MARK_COMPACTOR];
  const double incremental_marking =
      current_.incremental_scopes[Scope::MC_INCREMENTAL_LAYOUT_CHANGE]
          .duration +
      current_.incremental_scopes[Scope::MC_INCREMENTAL_START].duration +
      current_.incremental_marking_duration +
      current_.incremental_scopes[Scope::MC_INCREMENTAL_FINALIZE].duration;
  const double incremental_sweeping =
      current_.incremental_scopes[Scope::MC_INCREMENTAL_SWEEPING].duration;
  const double overall_duration =
      atomic_pause_duration + incremental_marking + incremental_sweeping;
  const double marking_background_duration =
      current_.scopes[Scope::MC_BACKGROUND_MARKING];
  const double sweeping_background_duration =
      current_.scopes[Scope::MC_BACKGROUND_SWEEPING];
  const double compact_background_duration =
      current_.scopes[Scope::MC_BACKGROUND_EVACUATE_COPY] +
      current_.scopes[Scope::MC_BACKGROUND_EVACUATE_UPDATE_POINTERS];
  const double background_duration = marking_background_duration +
                                     sweeping_background_duration +
                                     compact_background_duration;
  const double atomic_marking_duration =
      current_.scopes[Scope::MC_PROLOGUE] + current_.scopes[Scope::MC_MARK];
  const double marking_duration = atomic_marking_duration + incremental_marking;
  const double weak_duration = current_.scopes[Scope::MC_CLEAR];
  const double compact_duration = current_.scopes[Scope::MC_EVACUATE] +
                                  current_.scopes[Scope::MC_FINISH] +
                                  current_.scopes[Scope::MC_EPILOGUE];
  const double atomic_sweeping_duration = current_.scopes[Scope::MC_SWEEP];
  const double sweeping_duration =
      atomic_sweeping_duration + incremental_sweeping;

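  // main_thread_atomic covers only the atomic pause, main_thread additionally
  // includes the incremental steps, and total further adds background-thread
  // time.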
  event.main_thread_atomic.total_wall_clock_duration_in_us =
      static_cast<int64_t>(atomic_pause_duration *
                           base::Time::kMicrosecondsPerMillisecond);
  event.main_thread.total_wall_clock_duration_in_us = static_cast<int64_t>(
      overall_duration * base::Time::kMicrosecondsPerMillisecond);
  event.total.total_wall_clock_duration_in_us =
      static_cast<int64_t>((overall_duration + background_duration) *
                           base::Time::kMicrosecondsPerMillisecond);
  event.main_thread_atomic.mark_wall_clock_duration_in_us =
      static_cast<int64_t>(atomic_marking_duration *
                           base::Time::kMicrosecondsPerMillisecond);
  event.main_thread.mark_wall_clock_duration_in_us = static_cast<int64_t>(
      marking_duration * base::Time::kMicrosecondsPerMillisecond);
  event.total.mark_wall_clock_duration_in_us =
      static_cast<int64_t>((marking_duration + marking_background_duration) *
                           base::Time::kMicrosecondsPerMillisecond);
  event.main_thread_atomic.weak_wall_clock_duration_in_us =
      event.main_thread.weak_wall_clock_duration_in_us =
          event.total.weak_wall_clock_duration_in_us = static_cast<int64_t>(
              weak_duration * base::Time::kMicrosecondsPerMillisecond);
  event.main_thread_atomic.compact_wall_clock_duration_in_us =
      event.main_thread.compact_wall_clock_duration_in_us =
          static_cast<int64_t>(compact_duration *
                               base::Time::kMicrosecondsPerMillisecond);
  event.total.compact_wall_clock_duration_in_us =
      static_cast<int64_t>((compact_duration + compact_background_duration) *
                           base::Time::kMicrosecondsPerMillisecond);
  event.main_thread_atomic.sweep_wall_clock_duration_in_us =
      static_cast<int64_t>(atomic_sweeping_duration *
                           base::Time::kMicrosecondsPerMillisecond);
  event.main_thread.sweep_wall_clock_duration_in_us = static_cast<int64_t>(
      sweeping_duration * base::Time::kMicrosecondsPerMillisecond);
  event.total.sweep_wall_clock_duration_in_us =
      static_cast<int64_t>((sweeping_duration + sweeping_background_duration) *
                           base::Time::kMicrosecondsPerMillisecond);
  // The incremental durations are tracked in milliseconds, so convert them to
  // microseconds like the fields above.
  event.main_thread_incremental.mark_wall_clock_duration_in_us =
      static_cast<int64_t>(incremental_marking *
                           base::Time::kMicrosecondsPerMillisecond);
  event.main_thread_incremental.sweep_wall_clock_duration_in_us =
      static_cast<int64_t>(incremental_sweeping *
                           base::Time::kMicrosecondsPerMillisecond);

  // TODO(chromium:1154636): Populate the following:
  // - event.objects
  // - event.memory
  // - event.collection_rate_in_percent
  // - event.efficiency_in_bytes_per_us
  // - event.main_thread_efficiency_in_bytes_per_us

  recorder->AddMainThreadEvent(event, GetContextId(heap_->isolate()));
}

void GCTracer::ReportIncrementalMarkingStepToRecorder(double v8_duration) {
  DCHECK_EQ(Event::Type::INCREMENTAL_MARK_COMPACTOR, current_.type);
  static constexpr int kMaxBatchedEvents =
      CppHeap::MetricRecorderAdapter::kMaxBatchedEvents;
  const std::shared_ptr<metrics::Recorder>& recorder =
      heap_->isolate()->metrics_recorder();
  DCHECK_NOT_NULL(recorder);
  if (!recorder->HasEmbedderRecorder()) return;
  incremental_mark_batched_events_.events.emplace_back();
  if (heap_->cpp_heap()) {
    const base::Optional<
        cppgc::internal::MetricRecorder::MainThreadIncrementalMark>
        cppgc_event = v8::internal::CppHeap::From(heap_->cpp_heap())
                          ->GetMetricRecorder()
                          ->ExtractLastIncrementalMarkEvent();
    if (cppgc_event.has_value()) {
      DCHECK_NE(-1, cppgc_event.value().duration_us);
      incremental_mark_batched_events_.events.back()
          .cpp_wall_clock_duration_in_us = cppgc_event.value().duration_us;
    }
  }
  incremental_mark_batched_events_.events.back().wall_clock_duration_in_us =
      static_cast<int64_t>(v8_duration *
                           base::Time::kMicrosecondsPerMillisecond);
  if (incremental_mark_batched_events_.events.size() == kMaxBatchedEvents) {
    FlushBatchedEvents(incremental_mark_batched_events_, heap_->isolate());
  }
}
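
// Incremental steps are batched up to kMaxBatchedEvents before being handed
// to the metrics recorder; any remainder is flushed when the full cycle is
// reported in ReportFullCycleToRecorder().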

void GCTracer::ReportIncrementalSweepingStepToRecorder(double v8_duration) {
  static constexpr int kMaxBatchedEvents =
      CppHeap::MetricRecorderAdapter::kMaxBatchedEvents;
  const std::shared_ptr<metrics::Recorder>& recorder =
      heap_->isolate()->metrics_recorder();
  DCHECK_NOT_NULL(recorder);
  if (!recorder->HasEmbedderRecorder()) return;
  incremental_sweep_batched_events_.events.emplace_back();
  incremental_sweep_batched_events_.events.back().wall_clock_duration_in_us =
      static_cast<int64_t>(v8_duration *
                           base::Time::kMicrosecondsPerMillisecond);
  if (incremental_sweep_batched_events_.events.size() == kMaxBatchedEvents) {
    FlushBatchedEvents(incremental_sweep_batched_events_, heap_->isolate());
  }
}

void GCTracer::ReportYoungCycleToRecorder() {
  DCHECK(Event::IsYoungGenerationEvent(current_.type));
  DCHECK_EQ(Event::State::NOT_RUNNING, current_.state);
  const std::shared_ptr<metrics::Recorder>& recorder =
      heap_->isolate()->metrics_recorder();
  DCHECK_NOT_NULL(recorder);
  if (!recorder->HasEmbedderRecorder()) return;

  v8::metrics::GarbageCollectionYoungCycle event;
  // Reason:
  event.reason = static_cast<int>(current_.gc_reason);
#if defined(CPPGC_YOUNG_GENERATION)
  // Managed C++ heap statistics:
  auto* cpp_heap = v8::internal::CppHeap::From(heap_->cpp_heap());
  if (cpp_heap) {
    auto* metric_recorder = cpp_heap->GetMetricRecorder();
    const base::Optional<cppgc::internal::MetricRecorder::GCCycle>
        optional_cppgc_event = metric_recorder->ExtractLastYoungGcEvent();
    // We bail out from Oilpan's young GC if the full GC is already in progress.
    // Check here if the young generation event was reported.
    if (optional_cppgc_event) {
      DCHECK(!metric_recorder->YoungGCMetricsReportPending());
      const cppgc::internal::MetricRecorder::GCCycle& cppgc_event =
          optional_cppgc_event.value();
      DCHECK_EQ(cppgc_event.type,
                cppgc::internal::MetricRecorder::GCCycle::Type::kMinor);
      CopyTimeMetrics(event.total_cpp, cppgc_event.total);
      CopySizeMetrics(event.objects_cpp, cppgc_event.objects);
      CopySizeMetrics(event.memory_cpp, cppgc_event.memory);
      DCHECK_NE(-1, cppgc_event.collection_rate_in_percent);
      event.collection_rate_cpp_in_percent =
          cppgc_event.collection_rate_in_percent;
      DCHECK_NE(-1, cppgc_event.efficiency_in_bytes_per_us);
      event.efficiency_cpp_in_bytes_per_us =
          cppgc_event.efficiency_in_bytes_per_us;
      DCHECK_NE(-1, cppgc_event.main_thread_efficiency_in_bytes_per_us);
      event.main_thread_efficiency_cpp_in_bytes_per_us =
          cppgc_event.main_thread_efficiency_in_bytes_per_us;
    }
  }
#endif  // defined(CPPGC_YOUNG_GENERATION)

  // Total:
  const double total_wall_clock_duration_in_us =
      (current_.scopes[Scope::SCAVENGER] +
       current_.scopes[Scope::MINOR_MARK_COMPACTOR] +
       current_.scopes[Scope::SCAVENGER_BACKGROUND_SCAVENGE_PARALLEL] +
       current_.scopes[Scope::MINOR_MC_BACKGROUND_EVACUATE_COPY] +
       current_.scopes[Scope::MINOR_MC_BACKGROUND_MARKING] +
       current_.scopes[Scope::MINOR_MC_BACKGROUND_EVACUATE_UPDATE_POINTERS]) *
      base::Time::kMicrosecondsPerMillisecond;
  // TODO(chromium:1154636): Consider adding BACKGROUND_YOUNG_ARRAY_BUFFER_SWEEP
  // (both for the case of the scavenger and the minor mark-compactor), and
  // BACKGROUND_UNMAPPER (for the case of the minor mark-compactor).
  event.total_wall_clock_duration_in_us =
      static_cast<int64_t>(total_wall_clock_duration_in_us);
  // MainThread:
  const double main_thread_wall_clock_duration_in_us =
      (current_.scopes[Scope::SCAVENGER] +
       current_.scopes[Scope::MINOR_MARK_COMPACTOR]) *
      base::Time::kMicrosecondsPerMillisecond;
  event.main_thread_wall_clock_duration_in_us =
      static_cast<int64_t>(main_thread_wall_clock_duration_in_us);
  // Collection Rate:
  event.collection_rate_in_percent =
      static_cast<double>(current_.survived_young_object_size) /
      current_.young_object_size;
  // Efficiency:
  auto freed_bytes =
      current_.young_object_size - current_.survived_young_object_size;
  event.efficiency_in_bytes_per_us =
      freed_bytes / total_wall_clock_duration_in_us;
  event.main_thread_efficiency_in_bytes_per_us =
      freed_bytes / main_thread_wall_clock_duration_in_us;
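
  // E.g. with hypothetical numbers: 8 MB of young objects of which 2 MB
  // survive gives a rate of 0.25 and freed_bytes of 6291456; over a total of
  // 3000 us that is an efficiency of about 2097 bytes/us.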

  recorder->AddMainThreadEvent(event, GetContextId(heap_->isolate()));
}

}  // namespace internal
}  // namespace v8