1 // Copyright 2014 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4
5 #include "src/heap/gc-tracer.h"
6
7 #include <cstdarg>
8
9 #include "src/base/atomic-utils.h"
10 #include "src/execution/isolate.h"
11 #include "src/heap/heap-inl.h"
12 #include "src/heap/incremental-marking.h"
13 #include "src/heap/spaces.h"
14 #include "src/logging/counters-inl.h"
15
16 namespace v8 {
17 namespace internal {
18
CountTotalHolesSize(Heap * heap)19 static size_t CountTotalHolesSize(Heap* heap) {
20 size_t holes_size = 0;
21 PagedSpaceIterator spaces(heap);
22 for (PagedSpace* space = spaces.Next(); space != nullptr;
23 space = spaces.Next()) {
24 DCHECK_GE(holes_size + space->Waste() + space->Available(), holes_size);
25 holes_size += space->Waste() + space->Available();
26 }
27 return holes_size;
28 }
worker_thread_runtime_call_stats()29 WorkerThreadRuntimeCallStats* GCTracer::worker_thread_runtime_call_stats() {
30 return heap_->isolate()->counters()->worker_thread_runtime_call_stats();
31 }
32
RCSCounterFromScope(Scope::ScopeId id)33 RuntimeCallCounterId GCTracer::RCSCounterFromScope(Scope::ScopeId id) {
34 STATIC_ASSERT(Scope::FIRST_SCOPE == Scope::MC_INCREMENTAL);
35 return static_cast<RuntimeCallCounterId>(
36 static_cast<int>(RuntimeCallCounterId::kGC_MC_INCREMENTAL) +
37 static_cast<int>(id));
38 }
39
RCSCounterFromBackgroundScope(BackgroundScope::ScopeId id)40 RuntimeCallCounterId GCTracer::RCSCounterFromBackgroundScope(
41 BackgroundScope::ScopeId id) {
42 STATIC_ASSERT(Scope::FIRST_BACKGROUND_SCOPE ==
43 Scope::BACKGROUND_ARRAY_BUFFER_FREE);
44 STATIC_ASSERT(
45 0 == static_cast<int>(BackgroundScope::BACKGROUND_ARRAY_BUFFER_FREE));
46 return static_cast<RuntimeCallCounterId>(
47 static_cast<int>(RCSCounterFromScope(Scope::FIRST_BACKGROUND_SCOPE)) +
48 static_cast<int>(id));
49 }
50
// Starts timing a tracer scope: records the wall-clock start time and, when
// runtime call stats are enabled, enters the RCS counter matching |scope|
// so time spent here is attributed to it.
GCTracer::Scope::Scope(GCTracer* tracer, ScopeId scope)
    : tracer_(tracer), scope_(scope) {
  start_time_ = tracer_->heap_->MonotonicallyIncreasingTimeInMs();
  // Runtime stats are off in the common case; runtime_stats_ stays null so
  // the destructor knows Enter() was never called.
  if (V8_LIKELY(!TracingFlags::is_runtime_stats_enabled())) return;
  runtime_stats_ = tracer_->heap_->isolate()->counters()->runtime_call_stats();
  runtime_stats_->Enter(&timer_, GCTracer::RCSCounterFromScope(scope));
}
58
// Stops timing: reports the elapsed wall-clock time for this scope to the
// tracer and leaves the RCS timer if (and only if) the constructor entered
// one (signalled by runtime_stats_ being non-null).
GCTracer::Scope::~Scope() {
  tracer_->AddScopeSample(
      scope_, tracer_->heap_->MonotonicallyIncreasingTimeInMs() - start_time_);
  if (V8_LIKELY(runtime_stats_ == nullptr)) return;
  runtime_stats_->Leave(&timer_);
}
65
BackgroundScope(GCTracer * tracer,ScopeId scope,RuntimeCallStats * runtime_stats)66 GCTracer::BackgroundScope::BackgroundScope(GCTracer* tracer, ScopeId scope,
67 RuntimeCallStats* runtime_stats)
68 : tracer_(tracer), scope_(scope), runtime_stats_(runtime_stats) {
69 start_time_ = tracer_->heap_->MonotonicallyIncreasingTimeInMs();
70 if (V8_LIKELY(!TracingFlags::is_runtime_stats_enabled())) return;
71 runtime_stats_->Enter(&timer_,
72 GCTracer::RCSCounterFromBackgroundScope(scope));
73 }
74
~BackgroundScope()75 GCTracer::BackgroundScope::~BackgroundScope() {
76 double duration_ms =
77 tracer_->heap_->MonotonicallyIncreasingTimeInMs() - start_time_;
78 tracer_->AddBackgroundScopeSample(scope_, duration_ms);
79 if (V8_LIKELY(runtime_stats_ == nullptr)) return;
80 runtime_stats_->Leave(&timer_);
81 }
82
// Returns the trace-event name ("V8.GC_<SCOPE>") for a scope id. The names
// are generated from the scope lists via the CASE macro, so they always
// stay in sync with the enum.
const char* GCTracer::Scope::Name(ScopeId id) {
#define CASE(scope) \
  case Scope::scope: \
    return "V8.GC_" #scope;
  switch (id) {
    TRACER_SCOPES(CASE)
    TRACER_BACKGROUND_SCOPES(CASE)
    case Scope::NUMBER_OF_SCOPES:
      break;
  }
#undef CASE
  // NUMBER_OF_SCOPES is a count, not a valid id.
  UNREACHABLE();
  return nullptr;
}
97
// Returns the trace-event name ("V8.GC_<SCOPE>") for a background scope
// id, generated from the background scope list via the CASE macro.
const char* GCTracer::BackgroundScope::Name(ScopeId id) {
#define CASE(scope) \
  case BackgroundScope::scope: \
    return "V8.GC_" #scope;
  switch (id) {
    TRACER_BACKGROUND_SCOPES(CASE)
    case BackgroundScope::NUMBER_OF_SCOPES:
      break;
  }
#undef CASE
  // NUMBER_OF_SCOPES is a count, not a valid id.
  UNREACHABLE();
  return nullptr;
}
111
// Creates an empty GC event of the given type. All statistics (times,
// sizes, incremental-marking totals, per-scope durations) start at zero
// and are filled in while the event is the tracer's current_ event.
GCTracer::Event::Event(Type type, GarbageCollectionReason gc_reason,
                       const char* collector_reason)
    : type(type),
      gc_reason(gc_reason),
      collector_reason(collector_reason),
      start_time(0.0),
      end_time(0.0),
      reduce_memory(false),
      start_object_size(0),
      end_object_size(0),
      start_memory_size(0),
      end_memory_size(0),
      start_holes_size(0),
      end_holes_size(0),
      young_object_size(0),
      survived_young_object_size(0),
      incremental_marking_bytes(0),
      incremental_marking_duration(0.0) {
  // The scopes array is a plain aggregate; zero it explicitly.
  for (int i = 0; i < Scope::NUMBER_OF_SCOPES; i++) {
    scopes[i] = 0;
  }
}
134
TypeName(bool short_name) const135 const char* GCTracer::Event::TypeName(bool short_name) const {
136 switch (type) {
137 case SCAVENGER:
138 return (short_name) ? "s" : "Scavenge";
139 case MARK_COMPACTOR:
140 case INCREMENTAL_MARK_COMPACTOR:
141 return (short_name) ? "ms" : "Mark-sweep";
142 case MINOR_MARK_COMPACTOR:
143 return (short_name) ? "mmc" : "Minor Mark-Compact";
144 case START:
145 return (short_name) ? "st" : "Start";
146 }
147 return "Unknown Event Type";
148 }
149
// Constructs a tracer bound to |heap|. Starts with a synthetic START event
// (so previous_/current_ are always valid) and zeroed statistics.
GCTracer::GCTracer(Heap* heap)
    : heap_(heap),
      current_(Event::START, GarbageCollectionReason::kUnknown, nullptr),
      previous_(current_),
      incremental_marking_bytes_(0),
      incremental_marking_duration_(0.0),
      incremental_marking_start_time_(0.0),
      recorded_incremental_marking_speed_(0.0),
      allocation_time_ms_(0.0),
      new_space_allocation_counter_bytes_(0),
      old_generation_allocation_counter_bytes_(0),
      embedder_allocation_counter_bytes_(0),
      allocation_duration_since_gc_(0.0),
      new_space_allocation_in_bytes_since_gc_(0),
      old_generation_allocation_in_bytes_since_gc_(0),
      embedder_allocation_in_bytes_since_gc_(0),
      combined_mark_compact_speed_cache_(0.0),
      start_counter_(0),
      average_mutator_duration_(0),
      average_mark_compact_duration_(0),
      current_mark_compact_mutator_utilization_(1.0),
      previous_mark_compact_end_time_(0) {
  // All accesses to incremental_marking_scope assume that incremental marking
  // scopes come first.
  STATIC_ASSERT(0 == Scope::FIRST_INCREMENTAL_SCOPE);
  // We assume that MC_INCREMENTAL is the first scope so that we can properly
  // map it to RuntimeCallStats.
  STATIC_ASSERT(0 == Scope::MC_INCREMENTAL);
  // Give the initial START event a valid end time so mutator-time
  // computations against it are well-defined.
  current_.end_time = heap_->MonotonicallyIncreasingTimeInMs();
  for (int i = 0; i < BackgroundScope::NUMBER_OF_SCOPES; i++) {
    background_counter_[i].total_duration_ms = 0;
  }
}
183
ResetForTesting()184 void GCTracer::ResetForTesting() {
185 current_ = Event(Event::START, GarbageCollectionReason::kTesting, nullptr);
186 current_.end_time = heap_->MonotonicallyIncreasingTimeInMs();
187 previous_ = current_;
188 ResetIncrementalMarkingCounters();
189 allocation_time_ms_ = 0.0;
190 new_space_allocation_counter_bytes_ = 0.0;
191 old_generation_allocation_counter_bytes_ = 0.0;
192 allocation_duration_since_gc_ = 0.0;
193 new_space_allocation_in_bytes_since_gc_ = 0.0;
194 old_generation_allocation_in_bytes_since_gc_ = 0.0;
195 combined_mark_compact_speed_cache_ = 0.0;
196 recorded_minor_gcs_total_.Reset();
197 recorded_minor_gcs_survived_.Reset();
198 recorded_compactions_.Reset();
199 recorded_mark_compacts_.Reset();
200 recorded_incremental_mark_compacts_.Reset();
201 recorded_new_generation_allocations_.Reset();
202 recorded_old_generation_allocations_.Reset();
203 recorded_embedder_generation_allocations_.Reset();
204 recorded_context_disposal_times_.Reset();
205 recorded_survival_ratios_.Reset();
206 start_counter_ = 0;
207 average_mutator_duration_ = 0;
208 average_mark_compact_duration_ = 0;
209 current_mark_compact_mutator_utilization_ = 1.0;
210 previous_mark_compact_end_time_ = 0;
211 base::MutexGuard guard(&background_counter_mutex_);
212 for (int i = 0; i < BackgroundScope::NUMBER_OF_SCOPES; i++) {
213 background_counter_[i].total_duration_ms = 0;
214 }
215 }
216
// Records how the young generation was handled (as a histogram sample).
// Expected to be called during a scavenge, or reentrantly from within an
// outer GC (start_counter_ > 1).
void GCTracer::NotifyYoungGenerationHandling(
    YoungGenerationHandling young_generation_handling) {
  DCHECK(current_.type == Event::SCAVENGER || start_counter_ > 1);
  heap_->isolate()->counters()->young_generation_handling()->AddSample(
      static_cast<int>(young_generation_handling));
}
223
// Opens a new top-level GC trace event. GCs can nest (a GC triggered while
// another is being traced); only the outermost Start() creates a new
// current_ event — inner calls just bump start_counter_.
void GCTracer::Start(GarbageCollector collector,
                     GarbageCollectionReason gc_reason,
                     const char* collector_reason) {
  start_counter_++;
  if (start_counter_ != 1) return;

  previous_ = current_;

  // Choose the event type from the collector. A full GC that had
  // incremental marking active is recorded as INCREMENTAL_MARK_COMPACTOR.
  switch (collector) {
    case SCAVENGER:
      current_ = Event(Event::SCAVENGER, gc_reason, collector_reason);
      break;
    case MINOR_MARK_COMPACTOR:
      current_ =
          Event(Event::MINOR_MARK_COMPACTOR, gc_reason, collector_reason);
      break;
    case MARK_COMPACTOR:
      if (heap_->incremental_marking()->WasActivated()) {
        current_ = Event(Event::INCREMENTAL_MARK_COMPACTOR, gc_reason,
                         collector_reason);
      } else {
        current_ = Event(Event::MARK_COMPACTOR, gc_reason, collector_reason);
      }
      break;
  }

  current_.reduce_memory = heap_->ShouldReduceMemory();
  current_.start_time = heap_->MonotonicallyIncreasingTimeInMs();
  // Size statistics are zeroed here and filled in later by
  // StartInSafepoint(), once the heap is stopped.
  current_.start_object_size = 0;
  current_.start_memory_size = 0;
  current_.start_holes_size = 0;
  current_.young_object_size = 0;

  current_.incremental_marking_bytes = 0;
  current_.incremental_marking_duration = 0;

  for (int i = 0; i < Scope::NUMBER_OF_SCOPES; i++) {
    current_.scopes[i] = 0;
  }

  Counters* counters = heap_->isolate()->counters();

  // Sample the GC reason into the matching histogram.
  if (Heap::IsYoungGenerationCollector(collector)) {
    counters->scavenge_reason()->AddSample(static_cast<int>(gc_reason));
  } else {
    counters->mark_compact_reason()->AddSample(static_cast<int>(gc_reason));

    if (FLAG_trace_gc_freelists) {
      PrintIsolate(heap_->isolate(),
                   "FreeLists statistics before collection:\n");
      heap_->PrintFreeListsStats();
    }
  }
}
278
// Called once the heap is in a safepoint: snapshots the "before" heap
// statistics for the current event (object/memory/hole sizes and the young
// generation size) and takes a final allocation sample.
void GCTracer::StartInSafepoint() {
  SampleAllocation(current_.start_time, heap_->NewSpaceAllocationCounter(),
                   heap_->OldGenerationAllocationCounter(),
                   heap_->EmbedderAllocationCounter());

  current_.start_object_size = heap_->SizeOfObjects();
  current_.start_memory_size = heap_->memory_allocator()->Size();
  current_.start_holes_size = CountTotalHolesSize(heap_);
  current_.young_object_size =
      heap_->new_space()->Size() + heap_->new_lo_space()->SizeOfObjects();
}
290
ResetIncrementalMarkingCounters()291 void GCTracer::ResetIncrementalMarkingCounters() {
292 incremental_marking_bytes_ = 0;
293 incremental_marking_duration_ = 0;
294 for (int i = 0; i < Scope::NUMBER_OF_INCREMENTAL_SCOPES; i++) {
295 incremental_marking_scopes_[i].ResetCurrentCycle();
296 }
297 }
298
// Called at the end of the safepoint: snapshots the "after" heap
// statistics for the current event, paired with StartInSafepoint().
void GCTracer::StopInSafepoint() {
  current_.end_object_size = heap_->SizeOfObjects();
  current_.end_memory_size = heap_->memory_allocator()->Size();
  current_.end_holes_size = CountTotalHolesSize(heap_);
  current_.survived_young_object_size = heap_->SurvivedYoungObjectSize();
}
305
// Closes the current GC trace event. Inner (reentrant) Stop() calls only
// decrement start_counter_; the outermost call finalizes statistics,
// records throughput samples, and prints/emits trace output.
void GCTracer::Stop(GarbageCollector collector) {
  start_counter_--;
  // Still inside an outer GC: nothing to finalize.
  if (start_counter_ != 0) {
    if (FLAG_trace_gc_verbose) {
      heap_->isolate()->PrintWithTimestamp(
          "[Finished reentrant %s during %s.]\n",
          Heap::CollectorName(collector), current_.TypeName(false));
    }
    return;
  }

  DCHECK_LE(0, start_counter_);
  // The collector passed to Stop() must match the event opened by Start().
  DCHECK((collector == SCAVENGER && current_.type == Event::SCAVENGER) ||
         (collector == MINOR_MARK_COMPACTOR &&
          current_.type == Event::MINOR_MARK_COMPACTOR) ||
         (collector == MARK_COMPACTOR &&
          (current_.type == Event::MARK_COMPACTOR ||
           current_.type == Event::INCREMENTAL_MARK_COMPACTOR)));

  current_.end_time = heap_->MonotonicallyIncreasingTimeInMs();

  // Flush the allocation counters accumulated since the last GC.
  AddAllocation(current_.end_time);

  double duration = current_.end_time - current_.start_time;

  switch (current_.type) {
    case Event::SCAVENGER:
    case Event::MINOR_MARK_COMPACTOR:
      // Young-generation GC: record total and survived throughput samples.
      recorded_minor_gcs_total_.Push(
          MakeBytesAndDuration(current_.young_object_size, duration));
      recorded_minor_gcs_survived_.Push(
          MakeBytesAndDuration(current_.survived_young_object_size, duration));
      FetchBackgroundMinorGCCounters();
      break;
    case Event::INCREMENTAL_MARK_COMPACTOR:
      // Fold the incremental-marking totals gathered since marking started
      // into the event before resetting them.
      current_.incremental_marking_bytes = incremental_marking_bytes_;
      current_.incremental_marking_duration = incremental_marking_duration_;
      for (int i = 0; i < Scope::NUMBER_OF_INCREMENTAL_SCOPES; i++) {
        current_.incremental_marking_scopes[i] = incremental_marking_scopes_[i];
        current_.scopes[i] = incremental_marking_scopes_[i].duration;
      }

      RecordMutatorUtilization(
          current_.end_time, duration + current_.incremental_marking_duration);
      RecordIncrementalMarkingSpeed(current_.incremental_marking_bytes,
                                    current_.incremental_marking_duration);
      recorded_incremental_mark_compacts_.Push(
          MakeBytesAndDuration(current_.end_object_size, duration));
      RecordGCSumCounters(duration);
      ResetIncrementalMarkingCounters();
      // Invalidate the cached combined speed; it depends on the samples
      // just pushed.
      combined_mark_compact_speed_cache_ = 0.0;
      FetchBackgroundMarkCompactCounters();
      break;
    case Event::MARK_COMPACTOR:
      // Non-incremental full GC: no incremental-marking totals may exist.
      DCHECK_EQ(0u, current_.incremental_marking_bytes);
      DCHECK_EQ(0, current_.incremental_marking_duration);
      RecordMutatorUtilization(
          current_.end_time, duration + current_.incremental_marking_duration);
      recorded_mark_compacts_.Push(
          MakeBytesAndDuration(current_.end_object_size, duration));
      RecordGCSumCounters(duration);
      ResetIncrementalMarkingCounters();
      combined_mark_compact_speed_cache_ = 0.0;
      FetchBackgroundMarkCompactCounters();
      break;
    case Event::START:
      // START is only used for the synthetic initial event; a real GC
      // never stops with it.
      UNREACHABLE();
  }
  FetchBackgroundGeneralCounters();

  heap_->UpdateTotalGCTime(duration);

  // Optionally suppress trace output for young-generation GCs.
  if ((current_.type == Event::SCAVENGER ||
       current_.type == Event::MINOR_MARK_COMPACTOR) &&
      FLAG_trace_gc_ignore_scavenger)
    return;

  if (FLAG_trace_gc_nvp) {
    PrintNVP();
  } else {
    Print();
  }

  if (FLAG_trace_gc) {
    heap_->PrintShortHeapStatistics();
  }

  // When the v8.gc tracing category is enabled, also emit a JSON snapshot
  // of the heap statistics as an instant trace event.
  if (V8_UNLIKELY(TracingFlags::gc.load(std::memory_order_relaxed) &
                  v8::tracing::TracingCategoryObserver::ENABLED_BY_TRACING)) {
    std::stringstream heap_stats;
    heap_->DumpJSONHeapStatistics(heap_stats);

    TRACE_EVENT_INSTANT1(TRACE_DISABLED_BY_DEFAULT("v8.gc"), "V8.GC_Heap_Stats",
                         TRACE_EVENT_SCOPE_THREAD, "stats",
                         TRACE_STR_COPY(heap_stats.str().c_str()));
  }
}
403
// Hook invoked when (concurrent) sweeping finishes; emits optional
// diagnostics controlled by tracing flags.
void GCTracer::NotifySweepingCompleted() {
  if (FLAG_trace_gc_freelists) {
    PrintIsolate(heap_->isolate(),
                 "FreeLists statistics after sweeping completed:\n");
    heap_->PrintFreeListsStats();
  }
  if (FLAG_trace_allocations_origins) {
    // Dump allocation-origin statistics for every paged space plus new
    // space.
    heap_->new_space()->PrintAllocationsOrigins();
    heap_->old_space()->PrintAllocationsOrigins();
    heap_->code_space()->PrintAllocationsOrigins();
    heap_->map_space()->PrintAllocationsOrigins();
  }
}
417
SampleAllocation(double current_ms,size_t new_space_counter_bytes,size_t old_generation_counter_bytes,size_t embedder_counter_bytes)418 void GCTracer::SampleAllocation(double current_ms,
419 size_t new_space_counter_bytes,
420 size_t old_generation_counter_bytes,
421 size_t embedder_counter_bytes) {
422 if (allocation_time_ms_ == 0) {
423 // It is the first sample.
424 allocation_time_ms_ = current_ms;
425 new_space_allocation_counter_bytes_ = new_space_counter_bytes;
426 old_generation_allocation_counter_bytes_ = old_generation_counter_bytes;
427 embedder_allocation_counter_bytes_ = embedder_counter_bytes;
428 return;
429 }
430 // This assumes that counters are unsigned integers so that the subtraction
431 // below works even if the new counter is less than the old counter.
432 size_t new_space_allocated_bytes =
433 new_space_counter_bytes - new_space_allocation_counter_bytes_;
434 size_t old_generation_allocated_bytes =
435 old_generation_counter_bytes - old_generation_allocation_counter_bytes_;
436 size_t embedder_allocated_bytes =
437 embedder_counter_bytes - embedder_allocation_counter_bytes_;
438 double duration = current_ms - allocation_time_ms_;
439 allocation_time_ms_ = current_ms;
440 new_space_allocation_counter_bytes_ = new_space_counter_bytes;
441 old_generation_allocation_counter_bytes_ = old_generation_counter_bytes;
442 embedder_allocation_counter_bytes_ = embedder_counter_bytes;
443 allocation_duration_since_gc_ += duration;
444 new_space_allocation_in_bytes_since_gc_ += new_space_allocated_bytes;
445 old_generation_allocation_in_bytes_since_gc_ +=
446 old_generation_allocated_bytes;
447 embedder_allocation_in_bytes_since_gc_ += embedder_allocated_bytes;
448 }
449
AddAllocation(double current_ms)450 void GCTracer::AddAllocation(double current_ms) {
451 allocation_time_ms_ = current_ms;
452 if (allocation_duration_since_gc_ > 0) {
453 recorded_new_generation_allocations_.Push(
454 MakeBytesAndDuration(new_space_allocation_in_bytes_since_gc_,
455 allocation_duration_since_gc_));
456 recorded_old_generation_allocations_.Push(
457 MakeBytesAndDuration(old_generation_allocation_in_bytes_since_gc_,
458 allocation_duration_since_gc_));
459 recorded_embedder_generation_allocations_.Push(MakeBytesAndDuration(
460 embedder_allocation_in_bytes_since_gc_, allocation_duration_since_gc_));
461 }
462 allocation_duration_since_gc_ = 0;
463 new_space_allocation_in_bytes_since_gc_ = 0;
464 old_generation_allocation_in_bytes_since_gc_ = 0;
465 embedder_allocation_in_bytes_since_gc_ = 0;
466 }
467
468
// Records the timestamp of a context disposal; used later to compute the
// context disposal rate.
void GCTracer::AddContextDisposalTime(double time) {
  recorded_context_disposal_times_.Push(time);
}
472
AddCompactionEvent(double duration,size_t live_bytes_compacted)473 void GCTracer::AddCompactionEvent(double duration,
474 size_t live_bytes_compacted) {
475 recorded_compactions_.Push(
476 MakeBytesAndDuration(live_bytes_compacted, duration));
477 }
478
479
// Records one survival-ratio sample (a promotion ratio in percent) for the
// running average used by heap sizing heuristics.
void GCTracer::AddSurvivalRatio(double promotion_ratio) {
  recorded_survival_ratios_.Push(promotion_ratio);
}
483
AddIncrementalMarkingStep(double duration,size_t bytes)484 void GCTracer::AddIncrementalMarkingStep(double duration, size_t bytes) {
485 if (bytes > 0) {
486 incremental_marking_bytes_ += bytes;
487 incremental_marking_duration_ += duration;
488 }
489 }
490
// printf-style output helper: prints to stdout when --trace-gc is on, and
// always appends the formatted message to the heap's ring buffer (dumped
// on OOM). Two independent va_lists are needed because each formatting
// pass consumes its list.
void GCTracer::Output(const char* format, ...) const {
  if (FLAG_trace_gc) {
    va_list arguments;
    va_start(arguments, format);
    base::OS::VPrint(format, arguments);
    va_end(arguments);
  }

  // Format again into a fixed-size buffer for the ring buffer; longer
  // messages are truncated by VSNPrintF.
  const int kBufferSize = 256;
  char raw_buffer[kBufferSize];
  Vector<char> buffer(raw_buffer, kBufferSize);
  va_list arguments2;
  va_start(arguments2, format);
  VSNPrintF(buffer, format, arguments2);
  va_end(arguments2);

  heap_->AddToRingBuffer(buffer.begin());
}
509
// Prints the human-readable one-line summary of the just-finished GC
// (sizes before/after, pause time, mutator utilization, reasons). For
// incremental mark-compact events, an extra fragment summarizing the
// incremental marking steps is appended.
void GCTracer::Print() const {
  double duration = current_.end_time - current_.start_time;
  const size_t kIncrementalStatsSize = 128;
  char incremental_buffer[kIncrementalStatsSize] = {0};

  if (current_.type == Event::INCREMENTAL_MARK_COMPACTOR) {
    base::OS::SNPrintF(
        incremental_buffer, kIncrementalStatsSize,
        " (+ %.1f ms in %d steps since start of marking, "
        "biggest step %.1f ms, walltime since start of marking %.f ms)",
        current_.scopes[Scope::MC_INCREMENTAL],
        current_.incremental_marking_scopes[Scope::MC_INCREMENTAL].steps,
        current_.incremental_marking_scopes[Scope::MC_INCREMENTAL].longest_step,
        current_.end_time - incremental_marking_start_time_);
  }

  // Avoid PrintF as Output also appends the string to the tracing ring buffer
  // that gets printed on OOM failures.
  Output(
      "[%d:%p] "
      "%8.0f ms: "
      "%s%s %.1f (%.1f) -> %.1f (%.1f) MB, "
      "%.1f / %.1f ms %s (average mu = %.3f, current mu = %.3f) %s %s\n",
      base::OS::GetCurrentProcessId(),
      reinterpret_cast<void*>(heap_->isolate()),
      heap_->isolate()->time_millis_since_init(), current_.TypeName(false),
      current_.reduce_memory ? " (reduce)" : "",
      static_cast<double>(current_.start_object_size) / MB,
      static_cast<double>(current_.start_memory_size) / MB,
      static_cast<double>(current_.end_object_size) / MB,
      static_cast<double>(current_.end_memory_size) / MB, duration,
      TotalExternalTime(), incremental_buffer,
      AverageMarkCompactMutatorUtilization(),
      CurrentMarkCompactMutatorUtilization(),
      Heap::GarbageCollectionReasonToString(current_.gc_reason),
      current_.collector_reason != nullptr ? current_.collector_reason : "");
}
547
548
PrintNVP() const549 void GCTracer::PrintNVP() const {
550 double duration = current_.end_time - current_.start_time;
551 double spent_in_mutator = current_.start_time - previous_.end_time;
552 size_t allocated_since_last_gc =
553 current_.start_object_size - previous_.end_object_size;
554
555 double incremental_walltime_duration = 0;
556
557 if (current_.type == Event::INCREMENTAL_MARK_COMPACTOR) {
558 incremental_walltime_duration =
559 current_.end_time - incremental_marking_start_time_;
560 }
561
562 switch (current_.type) {
563 case Event::SCAVENGER:
564 heap_->isolate()->PrintWithTimestamp(
565 "pause=%.1f "
566 "mutator=%.1f "
567 "gc=%s "
568 "reduce_memory=%d "
569 "stop_the_world=%.2f "
570 "heap.prologue=%.2f "
571 "heap.epilogue=%.2f "
572 "heap.epilogue.reduce_new_space=%.2f "
573 "heap.external.prologue=%.2f "
574 "heap.external.epilogue=%.2f "
575 "heap.external_weak_global_handles=%.2f "
576 "fast_promote=%.2f "
577 "complete.sweep_array_buffers=%.2f "
578 "scavenge=%.2f "
579 "scavenge.free_remembered_set=%.2f "
580 "scavenge.roots=%.2f "
581 "scavenge.weak=%.2f "
582 "scavenge.weak_global_handles.identify=%.2f "
583 "scavenge.weak_global_handles.process=%.2f "
584 "scavenge.parallel=%.2f "
585 "scavenge.update_refs=%.2f "
586 "scavenge.sweep_array_buffers=%.2f "
587 "background.scavenge.parallel=%.2f "
588 "background.array_buffer_free=%.2f "
589 "background.store_buffer=%.2f "
590 "background.unmapper=%.2f "
591 "incremental.steps_count=%d "
592 "incremental.steps_took=%.1f "
593 "scavenge_throughput=%.f "
594 "total_size_before=%zu "
595 "total_size_after=%zu "
596 "holes_size_before=%zu "
597 "holes_size_after=%zu "
598 "allocated=%zu "
599 "promoted=%zu "
600 "semi_space_copied=%zu "
601 "nodes_died_in_new=%d "
602 "nodes_copied_in_new=%d "
603 "nodes_promoted=%d "
604 "promotion_ratio=%.1f%% "
605 "average_survival_ratio=%.1f%% "
606 "promotion_rate=%.1f%% "
607 "semi_space_copy_rate=%.1f%% "
608 "new_space_allocation_throughput=%.1f "
609 "unmapper_chunks=%d "
610 "context_disposal_rate=%.1f\n",
611 duration, spent_in_mutator, current_.TypeName(true),
612 current_.reduce_memory, current_.scopes[Scope::STOP_THE_WORLD],
613 current_.scopes[Scope::HEAP_PROLOGUE],
614 current_.scopes[Scope::HEAP_EPILOGUE],
615 current_.scopes[Scope::HEAP_EPILOGUE_REDUCE_NEW_SPACE],
616 current_.scopes[Scope::HEAP_EXTERNAL_PROLOGUE],
617 current_.scopes[Scope::HEAP_EXTERNAL_EPILOGUE],
618 current_.scopes[Scope::HEAP_EXTERNAL_WEAK_GLOBAL_HANDLES],
619 current_.scopes[Scope::SCAVENGER_FAST_PROMOTE],
620 current_.scopes[Scope::SCAVENGER_COMPLETE_SWEEP_ARRAY_BUFFERS],
621 current_.scopes[Scope::SCAVENGER_SCAVENGE],
622 current_.scopes[Scope::SCAVENGER_FREE_REMEMBERED_SET],
623 current_.scopes[Scope::SCAVENGER_SCAVENGE_ROOTS],
624 current_.scopes[Scope::SCAVENGER_SCAVENGE_WEAK],
625 current_
626 .scopes[Scope::SCAVENGER_SCAVENGE_WEAK_GLOBAL_HANDLES_IDENTIFY],
627 current_
628 .scopes[Scope::SCAVENGER_SCAVENGE_WEAK_GLOBAL_HANDLES_PROCESS],
629 current_.scopes[Scope::SCAVENGER_SCAVENGE_PARALLEL],
630 current_.scopes[Scope::SCAVENGER_SCAVENGE_UPDATE_REFS],
631 current_.scopes[Scope::SCAVENGER_SWEEP_ARRAY_BUFFERS],
632 current_.scopes[Scope::SCAVENGER_BACKGROUND_SCAVENGE_PARALLEL],
633 current_.scopes[Scope::BACKGROUND_ARRAY_BUFFER_FREE],
634 current_.scopes[Scope::BACKGROUND_STORE_BUFFER],
635 current_.scopes[Scope::BACKGROUND_UNMAPPER],
636 current_.incremental_marking_scopes[GCTracer::Scope::MC_INCREMENTAL]
637 .steps,
638 current_.scopes[Scope::MC_INCREMENTAL],
639 ScavengeSpeedInBytesPerMillisecond(), current_.start_object_size,
640 current_.end_object_size, current_.start_holes_size,
641 current_.end_holes_size, allocated_since_last_gc,
642 heap_->promoted_objects_size(),
643 heap_->semi_space_copied_object_size(),
644 heap_->nodes_died_in_new_space_, heap_->nodes_copied_in_new_space_,
645 heap_->nodes_promoted_, heap_->promotion_ratio_,
646 AverageSurvivalRatio(), heap_->promotion_rate_,
647 heap_->semi_space_copied_rate_,
648 NewSpaceAllocationThroughputInBytesPerMillisecond(),
649 heap_->memory_allocator()->unmapper()->NumberOfChunks(),
650 ContextDisposalRateInMilliseconds());
651 break;
652 case Event::MINOR_MARK_COMPACTOR:
653 heap_->isolate()->PrintWithTimestamp(
654 "pause=%.1f "
655 "mutator=%.1f "
656 "gc=%s "
657 "reduce_memory=%d "
658 "minor_mc=%.2f "
659 "finish_sweeping=%.2f "
660 "stop_the_world=%.2f "
661 "mark=%.2f "
662 "mark.seed=%.2f "
663 "mark.roots=%.2f "
664 "mark.weak=%.2f "
665 "mark.global_handles=%.2f "
666 "clear=%.2f "
667 "clear.string_table=%.2f "
668 "clear.weak_lists=%.2f "
669 "evacuate=%.2f "
670 "evacuate.copy=%.2f "
671 "evacuate.update_pointers=%.2f "
672 "evacuate.update_pointers.to_new_roots=%.2f "
673 "evacuate.update_pointers.slots=%.2f "
674 "background.mark=%.2f "
675 "background.evacuate.copy=%.2f "
676 "background.evacuate.update_pointers=%.2f "
677 "background.array_buffer_free=%.2f "
678 "background.store_buffer=%.2f "
679 "background.unmapper=%.2f "
680 "update_marking_deque=%.2f "
681 "reset_liveness=%.2f\n",
682 duration, spent_in_mutator, "mmc", current_.reduce_memory,
683 current_.scopes[Scope::MINOR_MC],
684 current_.scopes[Scope::MINOR_MC_SWEEPING],
685 current_.scopes[Scope::STOP_THE_WORLD],
686 current_.scopes[Scope::MINOR_MC_MARK],
687 current_.scopes[Scope::MINOR_MC_MARK_SEED],
688 current_.scopes[Scope::MINOR_MC_MARK_ROOTS],
689 current_.scopes[Scope::MINOR_MC_MARK_WEAK],
690 current_.scopes[Scope::MINOR_MC_MARK_GLOBAL_HANDLES],
691 current_.scopes[Scope::MINOR_MC_CLEAR],
692 current_.scopes[Scope::MINOR_MC_CLEAR_STRING_TABLE],
693 current_.scopes[Scope::MINOR_MC_CLEAR_WEAK_LISTS],
694 current_.scopes[Scope::MINOR_MC_EVACUATE],
695 current_.scopes[Scope::MINOR_MC_EVACUATE_COPY],
696 current_.scopes[Scope::MINOR_MC_EVACUATE_UPDATE_POINTERS],
697 current_
698 .scopes[Scope::MINOR_MC_EVACUATE_UPDATE_POINTERS_TO_NEW_ROOTS],
699 current_.scopes[Scope::MINOR_MC_EVACUATE_UPDATE_POINTERS_SLOTS],
700 current_.scopes[Scope::MINOR_MC_BACKGROUND_MARKING],
701 current_.scopes[Scope::MINOR_MC_BACKGROUND_EVACUATE_COPY],
702 current_.scopes[Scope::MINOR_MC_BACKGROUND_EVACUATE_UPDATE_POINTERS],
703 current_.scopes[Scope::BACKGROUND_ARRAY_BUFFER_FREE],
704 current_.scopes[Scope::BACKGROUND_STORE_BUFFER],
705 current_.scopes[Scope::BACKGROUND_UNMAPPER],
706 current_.scopes[Scope::MINOR_MC_MARKING_DEQUE],
707 current_.scopes[Scope::MINOR_MC_RESET_LIVENESS]);
708 break;
709 case Event::MARK_COMPACTOR:
710 case Event::INCREMENTAL_MARK_COMPACTOR:
711 heap_->isolate()->PrintWithTimestamp(
712 "pause=%.1f "
713 "mutator=%.1f "
714 "gc=%s "
715 "reduce_memory=%d "
716 "stop_the_world=%.2f "
717 "heap.prologue=%.2f "
718 "heap.embedder_tracing_epilogue=%.2f "
719 "heap.epilogue=%.2f "
720 "heap.epilogue.reduce_new_space=%.2f "
721 "heap.external.prologue=%.1f "
722 "heap.external.epilogue=%.1f "
723 "heap.external.weak_global_handles=%.1f "
724 "clear=%1.f "
725 "clear.dependent_code=%.1f "
726 "clear.maps=%.1f "
727 "clear.slots_buffer=%.1f "
728 "clear.store_buffer=%.1f "
729 "clear.string_table=%.1f "
730 "clear.weak_collections=%.1f "
731 "clear.weak_lists=%.1f "
732 "clear.weak_references=%.1f "
733 "complete.sweep_array_buffers=%.1f "
734 "epilogue=%.1f "
735 "evacuate=%.1f "
736 "evacuate.candidates=%.1f "
737 "evacuate.clean_up=%.1f "
738 "evacuate.copy=%.1f "
739 "evacuate.prologue=%.1f "
740 "evacuate.epilogue=%.1f "
741 "evacuate.rebalance=%.1f "
742 "evacuate.update_pointers=%.1f "
743 "evacuate.update_pointers.to_new_roots=%.1f "
744 "evacuate.update_pointers.slots.main=%.1f "
745 "evacuate.update_pointers.slots.map_space=%.1f "
746 "evacuate.update_pointers.weak=%.1f "
747 "finish=%.1f "
748 "finish.sweep_array_buffers=%.1f "
749 "mark=%.1f "
750 "mark.finish_incremental=%.1f "
751 "mark.roots=%.1f "
752 "mark.main=%.1f "
753 "mark.weak_closure=%.1f "
754 "mark.weak_closure.ephemeron=%.1f "
755 "mark.weak_closure.ephemeron.marking=%.1f "
756 "mark.weak_closure.ephemeron.linear=%.1f "
757 "mark.weak_closure.weak_handles=%.1f "
758 "mark.weak_closure.weak_roots=%.1f "
759 "mark.weak_closure.harmony=%.1f "
760 "mark.embedder_prologue=%.1f "
761 "mark.embedder_tracing=%.1f "
762 "prologue=%.1f "
763 "sweep=%.1f "
764 "sweep.code=%.1f "
765 "sweep.map=%.1f "
766 "sweep.old=%.1f "
767 "incremental=%.1f "
768 "incremental.finalize=%.1f "
769 "incremental.finalize.body=%.1f "
770 "incremental.finalize.external.prologue=%.1f "
771 "incremental.finalize.external.epilogue=%.1f "
772 "incremental.layout_change=%.1f "
773 "incremental.start=%.1f "
774 "incremental.sweep_array_buffers=%.1f "
775 "incremental.sweeping=%.1f "
776 "incremental.embedder_prologue=%.1f "
777 "incremental.embedder_tracing=%.1f "
778 "incremental_wrapper_tracing_longest_step=%.1f "
779 "incremental_finalize_longest_step=%.1f "
780 "incremental_finalize_steps_count=%d "
781 "incremental_longest_step=%.1f "
782 "incremental_steps_count=%d "
783 "incremental_marking_throughput=%.f "
784 "incremental_walltime_duration=%.f "
785 "background.mark=%.1f "
786 "background.sweep=%.1f "
787 "background.evacuate.copy=%.1f "
788 "background.evacuate.update_pointers=%.1f "
789 "background.array_buffer_free=%.2f "
790 "background.store_buffer=%.2f "
791 "background.unmapper=%.1f "
792 "total_size_before=%zu "
793 "total_size_after=%zu "
794 "holes_size_before=%zu "
795 "holes_size_after=%zu "
796 "allocated=%zu "
797 "promoted=%zu "
798 "semi_space_copied=%zu "
799 "nodes_died_in_new=%d "
800 "nodes_copied_in_new=%d "
801 "nodes_promoted=%d "
802 "promotion_ratio=%.1f%% "
803 "average_survival_ratio=%.1f%% "
804 "promotion_rate=%.1f%% "
805 "semi_space_copy_rate=%.1f%% "
806 "new_space_allocation_throughput=%.1f "
807 "unmapper_chunks=%d "
808 "context_disposal_rate=%.1f "
809 "compaction_speed=%.f\n",
810 duration, spent_in_mutator, current_.TypeName(true),
811 current_.reduce_memory, current_.scopes[Scope::STOP_THE_WORLD],
812 current_.scopes[Scope::HEAP_PROLOGUE],
813 current_.scopes[Scope::HEAP_EMBEDDER_TRACING_EPILOGUE],
814 current_.scopes[Scope::HEAP_EPILOGUE],
815 current_.scopes[Scope::HEAP_EPILOGUE_REDUCE_NEW_SPACE],
816 current_.scopes[Scope::HEAP_EXTERNAL_PROLOGUE],
817 current_.scopes[Scope::HEAP_EXTERNAL_EPILOGUE],
818 current_.scopes[Scope::HEAP_EXTERNAL_WEAK_GLOBAL_HANDLES],
819 current_.scopes[Scope::MC_CLEAR],
820 current_.scopes[Scope::MC_CLEAR_DEPENDENT_CODE],
821 current_.scopes[Scope::MC_CLEAR_MAPS],
822 current_.scopes[Scope::MC_CLEAR_SLOTS_BUFFER],
823 current_.scopes[Scope::MC_CLEAR_STORE_BUFFER],
824 current_.scopes[Scope::MC_CLEAR_STRING_TABLE],
825 current_.scopes[Scope::MC_CLEAR_WEAK_COLLECTIONS],
826 current_.scopes[Scope::MC_CLEAR_WEAK_LISTS],
827 current_.scopes[Scope::MC_CLEAR_WEAK_REFERENCES],
828 current_.scopes[Scope::MC_COMPLETE_SWEEP_ARRAY_BUFFERS],
829 current_.scopes[Scope::MC_EPILOGUE],
830 current_.scopes[Scope::MC_EVACUATE],
831 current_.scopes[Scope::MC_EVACUATE_CANDIDATES],
832 current_.scopes[Scope::MC_EVACUATE_CLEAN_UP],
833 current_.scopes[Scope::MC_EVACUATE_COPY],
834 current_.scopes[Scope::MC_EVACUATE_PROLOGUE],
835 current_.scopes[Scope::MC_EVACUATE_EPILOGUE],
836 current_.scopes[Scope::MC_EVACUATE_REBALANCE],
837 current_.scopes[Scope::MC_EVACUATE_UPDATE_POINTERS],
838 current_.scopes[Scope::MC_EVACUATE_UPDATE_POINTERS_TO_NEW_ROOTS],
839 current_.scopes[Scope::MC_EVACUATE_UPDATE_POINTERS_SLOTS_MAIN],
840 current_.scopes[Scope::MC_EVACUATE_UPDATE_POINTERS_SLOTS_MAP_SPACE],
841 current_.scopes[Scope::MC_EVACUATE_UPDATE_POINTERS_WEAK],
842 current_.scopes[Scope::MC_FINISH],
843 current_.scopes[Scope::MC_FINISH_SWEEP_ARRAY_BUFFERS],
844 current_.scopes[Scope::MC_MARK],
845 current_.scopes[Scope::MC_MARK_FINISH_INCREMENTAL],
846 current_.scopes[Scope::MC_MARK_ROOTS],
847 current_.scopes[Scope::MC_MARK_MAIN],
848 current_.scopes[Scope::MC_MARK_WEAK_CLOSURE],
849 current_.scopes[Scope::MC_MARK_WEAK_CLOSURE_EPHEMERON],
850 current_.scopes[Scope::MC_MARK_WEAK_CLOSURE_EPHEMERON_MARKING],
851 current_.scopes[Scope::MC_MARK_WEAK_CLOSURE_EPHEMERON_LINEAR],
852 current_.scopes[Scope::MC_MARK_WEAK_CLOSURE_WEAK_HANDLES],
853 current_.scopes[Scope::MC_MARK_WEAK_CLOSURE_WEAK_ROOTS],
854 current_.scopes[Scope::MC_MARK_WEAK_CLOSURE_HARMONY],
855 current_.scopes[Scope::MC_MARK_EMBEDDER_PROLOGUE],
856 current_.scopes[Scope::MC_MARK_EMBEDDER_TRACING],
857 current_.scopes[Scope::MC_PROLOGUE], current_.scopes[Scope::MC_SWEEP],
858 current_.scopes[Scope::MC_SWEEP_CODE],
859 current_.scopes[Scope::MC_SWEEP_MAP],
860 current_.scopes[Scope::MC_SWEEP_OLD],
861 current_.scopes[Scope::MC_INCREMENTAL],
862 current_.scopes[Scope::MC_INCREMENTAL_FINALIZE],
863 current_.scopes[Scope::MC_INCREMENTAL_FINALIZE_BODY],
864 current_.scopes[Scope::MC_INCREMENTAL_EXTERNAL_PROLOGUE],
865 current_.scopes[Scope::MC_INCREMENTAL_EXTERNAL_EPILOGUE],
866 current_.scopes[Scope::MC_INCREMENTAL_LAYOUT_CHANGE],
867 current_.scopes[Scope::MC_INCREMENTAL_START],
868 current_.scopes[Scope::MC_INCREMENTAL_SWEEP_ARRAY_BUFFERS],
869 current_.scopes[Scope::MC_INCREMENTAL_SWEEPING],
870 current_.scopes[Scope::MC_INCREMENTAL_EMBEDDER_PROLOGUE],
871 current_.scopes[Scope::MC_INCREMENTAL_EMBEDDER_TRACING],
872 current_
873 .incremental_marking_scopes
874 [Scope::MC_INCREMENTAL_EMBEDDER_TRACING]
875 .longest_step,
876 current_
877 .incremental_marking_scopes[Scope::MC_INCREMENTAL_FINALIZE_BODY]
878 .longest_step,
879 current_
880 .incremental_marking_scopes[Scope::MC_INCREMENTAL_FINALIZE_BODY]
881 .steps,
882 current_.incremental_marking_scopes[Scope::MC_INCREMENTAL]
883 .longest_step,
884 current_.incremental_marking_scopes[Scope::MC_INCREMENTAL].steps,
885 IncrementalMarkingSpeedInBytesPerMillisecond(),
886 incremental_walltime_duration,
887 current_.scopes[Scope::MC_BACKGROUND_MARKING],
888 current_.scopes[Scope::MC_BACKGROUND_SWEEPING],
889 current_.scopes[Scope::MC_BACKGROUND_EVACUATE_COPY],
890 current_.scopes[Scope::MC_BACKGROUND_EVACUATE_UPDATE_POINTERS],
891 current_.scopes[Scope::BACKGROUND_ARRAY_BUFFER_FREE],
892 current_.scopes[Scope::BACKGROUND_STORE_BUFFER],
893 current_.scopes[Scope::BACKGROUND_UNMAPPER],
894 current_.start_object_size, current_.end_object_size,
895 current_.start_holes_size, current_.end_holes_size,
896 allocated_since_last_gc, heap_->promoted_objects_size(),
897 heap_->semi_space_copied_object_size(),
898 heap_->nodes_died_in_new_space_, heap_->nodes_copied_in_new_space_,
899 heap_->nodes_promoted_, heap_->promotion_ratio_,
900 AverageSurvivalRatio(), heap_->promotion_rate_,
901 heap_->semi_space_copied_rate_,
902 NewSpaceAllocationThroughputInBytesPerMillisecond(),
903 heap_->memory_allocator()->unmapper()->NumberOfChunks(),
904 ContextDisposalRateInMilliseconds(),
905 CompactionSpeedInBytesPerMillisecond());
906 break;
907 case Event::START:
908 break;
909 default:
910 UNREACHABLE();
911 }
912 }
913
AverageSpeed(const base::RingBuffer<BytesAndDuration> & buffer,const BytesAndDuration & initial,double time_ms)914 double GCTracer::AverageSpeed(const base::RingBuffer<BytesAndDuration>& buffer,
915 const BytesAndDuration& initial, double time_ms) {
916 BytesAndDuration sum = buffer.Sum(
917 [time_ms](BytesAndDuration a, BytesAndDuration b) {
918 if (time_ms != 0 && a.second >= time_ms) return a;
919 return std::make_pair(a.first + b.first, a.second + b.second);
920 },
921 initial);
922 uint64_t bytes = sum.first;
923 double durations = sum.second;
924 if (durations == 0.0) return 0;
925 double speed = bytes / durations;
926 const int max_speed = 1024 * MB;
927 const int min_speed = 1;
928 if (speed >= max_speed) return max_speed;
929 if (speed <= min_speed) return min_speed;
930 return speed;
931 }
932
AverageSpeed(const base::RingBuffer<BytesAndDuration> & buffer)933 double GCTracer::AverageSpeed(
934 const base::RingBuffer<BytesAndDuration>& buffer) {
935 return AverageSpeed(buffer, MakeBytesAndDuration(0, 0), 0);
936 }
937
RecordIncrementalMarkingSpeed(size_t bytes,double duration)938 void GCTracer::RecordIncrementalMarkingSpeed(size_t bytes, double duration) {
939 if (duration == 0 || bytes == 0) return;
940 double current_speed = bytes / duration;
941 if (recorded_incremental_marking_speed_ == 0) {
942 recorded_incremental_marking_speed_ = current_speed;
943 } else {
944 recorded_incremental_marking_speed_ =
945 (recorded_incremental_marking_speed_ + current_speed) / 2;
946 }
947 }
948
RecordTimeToIncrementalMarkingTask(double time_to_task)949 void GCTracer::RecordTimeToIncrementalMarkingTask(double time_to_task) {
950 if (average_time_to_incremental_marking_task_ == 0.0) {
951 average_time_to_incremental_marking_task_ = time_to_task;
952 } else {
953 average_time_to_incremental_marking_task_ =
954 (average_time_to_incremental_marking_task_ + time_to_task) / 2;
955 }
956 }
957
// Returns the running average delay (ms) recorded by
// RecordTimeToIncrementalMarkingTask(); 0.0 if no sample was recorded yet.
double GCTracer::AverageTimeToIncrementalMarkingTask() const {
  return average_time_to_incremental_marking_task_;
}
961
RecordEmbedderSpeed(size_t bytes,double duration)962 void GCTracer::RecordEmbedderSpeed(size_t bytes, double duration) {
963 if (duration == 0 || bytes == 0) return;
964 double current_speed = bytes / duration;
965 if (recorded_embedder_speed_ == 0.0) {
966 recorded_embedder_speed_ = current_speed;
967 } else {
968 recorded_embedder_speed_ = (recorded_embedder_speed_ + current_speed) / 2;
969 }
970 }
971
RecordMutatorUtilization(double mark_compact_end_time,double mark_compact_duration)972 void GCTracer::RecordMutatorUtilization(double mark_compact_end_time,
973 double mark_compact_duration) {
974 if (previous_mark_compact_end_time_ == 0) {
975 // The first event only contributes to previous_mark_compact_end_time_,
976 // because we cannot compute the mutator duration.
977 previous_mark_compact_end_time_ = mark_compact_end_time;
978 } else {
979 double total_duration =
980 mark_compact_end_time - previous_mark_compact_end_time_;
981 double mutator_duration = total_duration - mark_compact_duration;
982 if (average_mark_compact_duration_ == 0 && average_mutator_duration_ == 0) {
983 // This is the first event with mutator and mark-compact durations.
984 average_mark_compact_duration_ = mark_compact_duration;
985 average_mutator_duration_ = mutator_duration;
986 } else {
987 average_mark_compact_duration_ =
988 (average_mark_compact_duration_ + mark_compact_duration) / 2;
989 average_mutator_duration_ =
990 (average_mutator_duration_ + mutator_duration) / 2;
991 }
992 current_mark_compact_mutator_utilization_ =
993 total_duration ? mutator_duration / total_duration : 0;
994 previous_mark_compact_end_time_ = mark_compact_end_time;
995 }
996 }
997
AverageMarkCompactMutatorUtilization() const998 double GCTracer::AverageMarkCompactMutatorUtilization() const {
999 double average_total_duration =
1000 average_mark_compact_duration_ + average_mutator_duration_;
1001 if (average_total_duration == 0) return 1.0;
1002 return average_mutator_duration_ / average_total_duration;
1003 }
1004
// Mutator utilization of the most recent mark-compact interval, as
// computed by RecordMutatorUtilization().
double GCTracer::CurrentMarkCompactMutatorUtilization() const {
  return current_mark_compact_mutator_utilization_;
}
1008
IncrementalMarkingSpeedInBytesPerMillisecond() const1009 double GCTracer::IncrementalMarkingSpeedInBytesPerMillisecond() const {
1010 if (recorded_incremental_marking_speed_ != 0) {
1011 return recorded_incremental_marking_speed_;
1012 }
1013 if (incremental_marking_duration_ != 0.0) {
1014 return incremental_marking_bytes_ / incremental_marking_duration_;
1015 }
1016 return kConservativeSpeedInBytesPerMillisecond;
1017 }
1018
// Running average of embedder marking speed; see RecordEmbedderSpeed().
double GCTracer::EmbedderSpeedInBytesPerMillisecond() const {
  // Note: Returning 0 is ok here as callers check for whether embedder speeds
  // have been recorded at all.
  return recorded_embedder_speed_;
}
1024
ScavengeSpeedInBytesPerMillisecond(ScavengeSpeedMode mode) const1025 double GCTracer::ScavengeSpeedInBytesPerMillisecond(
1026 ScavengeSpeedMode mode) const {
1027 if (mode == kForAllObjects) {
1028 return AverageSpeed(recorded_minor_gcs_total_);
1029 } else {
1030 return AverageSpeed(recorded_minor_gcs_survived_);
1031 }
1032 }
1033
// Average speed (bytes/ms) over the recorded compaction events.
double GCTracer::CompactionSpeedInBytesPerMillisecond() const {
  return AverageSpeed(recorded_compactions_);
}
1037
// Average speed (bytes/ms) over recorded non-incremental mark-compact GCs.
double GCTracer::MarkCompactSpeedInBytesPerMillisecond() const {
  return AverageSpeed(recorded_mark_compacts_);
}
1041
// Average speed (bytes/ms) of the final (atomic) pause of incremental
// mark-compact GCs.
double GCTracer::FinalIncrementalMarkCompactSpeedInBytesPerMillisecond() const {
  return AverageSpeed(recorded_incremental_mark_compacts_);
}
1045
CombinedMarkCompactSpeedInBytesPerMillisecond()1046 double GCTracer::CombinedMarkCompactSpeedInBytesPerMillisecond() {
1047 const double kMinimumMarkingSpeed = 0.5;
1048 if (combined_mark_compact_speed_cache_ > 0)
1049 return combined_mark_compact_speed_cache_;
1050 // MarkCompact speed is more stable than incremental marking speed, because
1051 // there might not be many incremental marking steps because of concurrent
1052 // marking.
1053 combined_mark_compact_speed_cache_ = MarkCompactSpeedInBytesPerMillisecond();
1054 if (combined_mark_compact_speed_cache_ > 0)
1055 return combined_mark_compact_speed_cache_;
1056 double speed1 = IncrementalMarkingSpeedInBytesPerMillisecond();
1057 double speed2 = FinalIncrementalMarkCompactSpeedInBytesPerMillisecond();
1058 if (speed1 < kMinimumMarkingSpeed || speed2 < kMinimumMarkingSpeed) {
1059 // No data for the incremental marking speed.
1060 // Return the non-incremental mark-compact speed.
1061 combined_mark_compact_speed_cache_ =
1062 MarkCompactSpeedInBytesPerMillisecond();
1063 } else {
1064 // Combine the speed of incremental step and the speed of the final step.
1065 // 1 / (1 / speed1 + 1 / speed2) = speed1 * speed2 / (speed1 + speed2).
1066 combined_mark_compact_speed_cache_ = speed1 * speed2 / (speed1 + speed2);
1067 }
1068 return combined_mark_compact_speed_cache_;
1069 }
1070
CombineSpeedsInBytesPerMillisecond(double default_speed,double optional_speed)1071 double GCTracer::CombineSpeedsInBytesPerMillisecond(double default_speed,
1072 double optional_speed) {
1073 constexpr double kMinimumSpeed = 0.5;
1074 if (optional_speed < kMinimumSpeed) {
1075 return default_speed;
1076 }
1077 return default_speed * optional_speed / (default_speed + optional_speed);
1078 }
1079
NewSpaceAllocationThroughputInBytesPerMillisecond(double time_ms) const1080 double GCTracer::NewSpaceAllocationThroughputInBytesPerMillisecond(
1081 double time_ms) const {
1082 size_t bytes = new_space_allocation_in_bytes_since_gc_;
1083 double durations = allocation_duration_since_gc_;
1084 return AverageSpeed(recorded_new_generation_allocations_,
1085 MakeBytesAndDuration(bytes, durations), time_ms);
1086 }
1087
OldGenerationAllocationThroughputInBytesPerMillisecond(double time_ms) const1088 double GCTracer::OldGenerationAllocationThroughputInBytesPerMillisecond(
1089 double time_ms) const {
1090 size_t bytes = old_generation_allocation_in_bytes_since_gc_;
1091 double durations = allocation_duration_since_gc_;
1092 return AverageSpeed(recorded_old_generation_allocations_,
1093 MakeBytesAndDuration(bytes, durations), time_ms);
1094 }
1095
EmbedderAllocationThroughputInBytesPerMillisecond(double time_ms) const1096 double GCTracer::EmbedderAllocationThroughputInBytesPerMillisecond(
1097 double time_ms) const {
1098 size_t bytes = embedder_allocation_in_bytes_since_gc_;
1099 double durations = allocation_duration_since_gc_;
1100 return AverageSpeed(recorded_embedder_generation_allocations_,
1101 MakeBytesAndDuration(bytes, durations), time_ms);
1102 }
1103
AllocationThroughputInBytesPerMillisecond(double time_ms) const1104 double GCTracer::AllocationThroughputInBytesPerMillisecond(
1105 double time_ms) const {
1106 return NewSpaceAllocationThroughputInBytesPerMillisecond(time_ms) +
1107 OldGenerationAllocationThroughputInBytesPerMillisecond(time_ms);
1108 }
1109
// Combined allocation throughput over the standard recent time frame.
double GCTracer::CurrentAllocationThroughputInBytesPerMillisecond() const {
  return AllocationThroughputInBytesPerMillisecond(kThroughputTimeFrameMs);
}
1113
// Old-generation allocation throughput over the standard recent time frame.
double GCTracer::CurrentOldGenerationAllocationThroughputInBytesPerMillisecond()
    const {
  return OldGenerationAllocationThroughputInBytesPerMillisecond(
      kThroughputTimeFrameMs);
}
1119
// Embedder allocation throughput over the standard recent time frame.
double GCTracer::CurrentEmbedderAllocationThroughputInBytesPerMillisecond()
    const {
  return EmbedderAllocationThroughputInBytesPerMillisecond(
      kThroughputTimeFrameMs);
}
1125
ContextDisposalRateInMilliseconds() const1126 double GCTracer::ContextDisposalRateInMilliseconds() const {
1127 if (recorded_context_disposal_times_.Count() <
1128 recorded_context_disposal_times_.kSize)
1129 return 0.0;
1130 double begin = heap_->MonotonicallyIncreasingTimeInMs();
1131 double end = recorded_context_disposal_times_.Sum(
1132 [](double a, double b) { return b; }, 0.0);
1133 return (begin - end) / recorded_context_disposal_times_.Count();
1134 }
1135
AverageSurvivalRatio() const1136 double GCTracer::AverageSurvivalRatio() const {
1137 if (recorded_survival_ratios_.Count() == 0) return 0.0;
1138 double sum = recorded_survival_ratios_.Sum(
1139 [](double a, double b) { return a + b; }, 0.0);
1140 return sum / recorded_survival_ratios_.Count();
1141 }
1142
SurvivalEventsRecorded() const1143 bool GCTracer::SurvivalEventsRecorded() const {
1144 return recorded_survival_ratios_.Count() > 0;
1145 }
1146
// Discards all recorded survival ratio samples.
void GCTracer::ResetSurvivalEvents() { recorded_survival_ratios_.Reset(); }
1148
// Stamps the monotonic time at which incremental marking started.
void GCTracer::NotifyIncrementalMarkingStart() {
  incremental_marking_start_time_ = heap_->MonotonicallyIncreasingTimeInMs();
}
1152
FetchBackgroundMarkCompactCounters()1153 void GCTracer::FetchBackgroundMarkCompactCounters() {
1154 FetchBackgroundCounters(Scope::FIRST_MC_BACKGROUND_SCOPE,
1155 Scope::LAST_MC_BACKGROUND_SCOPE,
1156 BackgroundScope::FIRST_MC_BACKGROUND_SCOPE,
1157 BackgroundScope::LAST_MC_BACKGROUND_SCOPE);
1158 heap_->isolate()->counters()->background_marking()->AddSample(
1159 static_cast<int>(current_.scopes[Scope::MC_BACKGROUND_MARKING]));
1160 heap_->isolate()->counters()->background_sweeping()->AddSample(
1161 static_cast<int>(current_.scopes[Scope::MC_BACKGROUND_SWEEPING]));
1162 }
1163
FetchBackgroundMinorGCCounters()1164 void GCTracer::FetchBackgroundMinorGCCounters() {
1165 FetchBackgroundCounters(Scope::FIRST_MINOR_GC_BACKGROUND_SCOPE,
1166 Scope::LAST_MINOR_GC_BACKGROUND_SCOPE,
1167 BackgroundScope::FIRST_MINOR_GC_BACKGROUND_SCOPE,
1168 BackgroundScope::LAST_MINOR_GC_BACKGROUND_SCOPE);
1169 heap_->isolate()->counters()->background_scavenger()->AddSample(
1170 static_cast<int>(
1171 current_.scopes[Scope::SCAVENGER_BACKGROUND_SCAVENGE_PARALLEL]));
1172 }
1173
// Drains the general-purpose background scope totals (array buffer free,
// store buffer, unmapper) into current_.scopes.
void GCTracer::FetchBackgroundGeneralCounters() {
  FetchBackgroundCounters(Scope::FIRST_GENERAL_BACKGROUND_SCOPE,
                          Scope::LAST_GENERAL_BACKGROUND_SCOPE,
                          BackgroundScope::FIRST_GENERAL_BACKGROUND_SCOPE,
                          BackgroundScope::LAST_GENERAL_BACKGROUND_SCOPE);
}
1180
FetchBackgroundCounters(int first_global_scope,int last_global_scope,int first_background_scope,int last_background_scope)1181 void GCTracer::FetchBackgroundCounters(int first_global_scope,
1182 int last_global_scope,
1183 int first_background_scope,
1184 int last_background_scope) {
1185 DCHECK_EQ(last_global_scope - first_global_scope,
1186 last_background_scope - first_background_scope);
1187 base::MutexGuard guard(&background_counter_mutex_);
1188 int background_mc_scopes = last_background_scope - first_background_scope + 1;
1189 for (int i = 0; i < background_mc_scopes; i++) {
1190 current_.scopes[first_global_scope + i] +=
1191 background_counter_[first_background_scope + i].total_duration_ms;
1192 background_counter_[first_background_scope + i].total_duration_ms = 0;
1193 }
1194 }
1195
AddBackgroundScopeSample(BackgroundScope::ScopeId scope,double duration)1196 void GCTracer::AddBackgroundScopeSample(BackgroundScope::ScopeId scope,
1197 double duration) {
1198 base::MutexGuard guard(&background_counter_mutex_);
1199 BackgroundCounter& counter = background_counter_[scope];
1200 counter.total_duration_ms += duration;
1201 }
1202
// Emits per-phase histogram samples for the GC cycle that just finished.
// |gc_timer| identifies which overall cycle histogram was used and thereby
// selects the phase breakdown: mark-compact finalization or scavenge.
void GCTracer::RecordGCPhasesHistograms(TimedHistogram* gc_timer) {
  Counters* counters = heap_->isolate()->counters();
  if (gc_timer == counters->gc_finalize()) {
    DCHECK_EQ(Scope::FIRST_TOP_MC_SCOPE, Scope::MC_CLEAR);
    // One sample per top-level mark-compact phase (durations are in ms).
    counters->gc_finalize_clear()->AddSample(
        static_cast<int>(current_.scopes[Scope::MC_CLEAR]));
    counters->gc_finalize_epilogue()->AddSample(
        static_cast<int>(current_.scopes[Scope::MC_EPILOGUE]));
    counters->gc_finalize_evacuate()->AddSample(
        static_cast<int>(current_.scopes[Scope::MC_EVACUATE]));
    counters->gc_finalize_finish()->AddSample(
        static_cast<int>(current_.scopes[Scope::MC_FINISH]));
    counters->gc_finalize_mark()->AddSample(
        static_cast<int>(current_.scopes[Scope::MC_MARK]));
    counters->gc_finalize_prologue()->AddSample(
        static_cast<int>(current_.scopes[Scope::MC_PROLOGUE]));
    counters->gc_finalize_sweep()->AddSample(
        static_cast<int>(current_.scopes[Scope::MC_SWEEP]));
    if (incremental_marking_duration_ > 0) {
      heap_->isolate()->counters()->incremental_marking_sum()->AddSample(
          static_cast<int>(incremental_marking_duration_));
    }
    // Total marking time: incremental steps plus the atomic marking phase.
    const double overall_marking_time =
        incremental_marking_duration_ + current_.scopes[Scope::MC_MARK];
    heap_->isolate()->counters()->gc_marking_sum()->AddSample(
        static_cast<int>(overall_marking_time));

    // Filter out samples where
    // - we don't have high-resolution timers;
    // - size of marked objects is very small;
    // - marking time is rounded to 0;
    constexpr size_t kMinObjectSizeForReportingThroughput = 1024 * 1024;
    if (base::TimeTicks::IsHighResolution() &&
        heap_->SizeOfObjects() > kMinObjectSizeForReportingThroughput &&
        overall_marking_time > 0) {
      // Exclude time spent in embedder tracing to isolate V8's own marking.
      const double overall_v8_marking_time =
          overall_marking_time -
          current_.scopes[Scope::MC_MARK_EMBEDDER_TRACING];
      if (overall_v8_marking_time > 0) {
        // Throughput in MB/s: bytes / ms * 1000(ms/s) / 1024 / 1024.
        const int main_thread_marking_throughput_mb_per_s =
            static_cast<int>(static_cast<double>(heap_->SizeOfObjects()) /
                             overall_v8_marking_time * 1000 / 1024 / 1024);
        heap_->isolate()
            ->counters()
            ->gc_main_thread_marking_throughput()
            ->AddSample(
                static_cast<int>(main_thread_marking_throughput_mb_per_s));
      }
    }

    DCHECK_EQ(Scope::LAST_TOP_MC_SCOPE, Scope::MC_SWEEP);
  } else if (gc_timer == counters->gc_scavenger()) {
    counters->gc_scavenger_scavenge_main()->AddSample(
        static_cast<int>(current_.scopes[Scope::SCAVENGER_SCAVENGE_PARALLEL]));
    counters->gc_scavenger_scavenge_roots()->AddSample(
        static_cast<int>(current_.scopes[Scope::SCAVENGER_SCAVENGE_ROOTS]));
  }
}
1261
// Records summary UMA counters and trace-event counters for a full
// mark-compact cycle whose atomic pause lasted |atomic_pause_duration| ms.
// Holds background_counter_mutex_ while reading background totals, which
// background threads may still be updating.
void GCTracer::RecordGCSumCounters(double atomic_pause_duration) {
  base::MutexGuard guard(&background_counter_mutex_);

  // Main-thread time of the whole cycle: every incremental phase plus the
  // atomic pause.
  const double overall_duration =
      current_.incremental_marking_scopes[Scope::MC_INCREMENTAL_LAYOUT_CHANGE]
          .duration +
      current_.incremental_marking_scopes[Scope::MC_INCREMENTAL_START]
          .duration +
      current_.incremental_marking_scopes[Scope::MC_INCREMENTAL_SWEEPING]
          .duration +
      incremental_marking_duration_ +
      current_.incremental_marking_scopes[Scope::MC_INCREMENTAL_FINALIZE]
          .duration +
      atomic_pause_duration;
  // Time spent on background threads for this cycle (marking, sweeping,
  // evacuation copy and pointer updates).
  const double background_duration =
      background_counter_[BackgroundScope::MC_BACKGROUND_EVACUATE_COPY]
          .total_duration_ms +
      background_counter_
          [BackgroundScope::MC_BACKGROUND_EVACUATE_UPDATE_POINTERS]
              .total_duration_ms +
      background_counter_[BackgroundScope::MC_BACKGROUND_MARKING]
          .total_duration_ms +
      background_counter_[BackgroundScope::MC_BACKGROUND_SWEEPING]
          .total_duration_ms;

  // Main-thread marking-only time: incremental marking phases plus the
  // atomic MC_MARK phase (sweeping-related phases excluded).
  const double marking_duration =
      current_.incremental_marking_scopes[Scope::MC_INCREMENTAL_LAYOUT_CHANGE]
          .duration +
      current_.incremental_marking_scopes[Scope::MC_INCREMENTAL_START]
          .duration +
      incremental_marking_duration_ +
      current_.incremental_marking_scopes[Scope::MC_INCREMENTAL_FINALIZE]
          .duration +
      current_.scopes[Scope::MC_MARK];
  const double marking_background_duration =
      background_counter_[BackgroundScope::MC_BACKGROUND_MARKING]
          .total_duration_ms;

  // UMA.
  heap_->isolate()->counters()->gc_mark_compactor()->AddSample(
      static_cast<int>(overall_duration));

  // Emit trace event counters.
  TRACE_EVENT_INSTANT2(TRACE_DISABLED_BY_DEFAULT("v8.gc"),
                       "V8.GCMarkCompactorSummary", TRACE_EVENT_SCOPE_THREAD,
                       "duration", overall_duration, "background_duration",
                       background_duration);
  TRACE_EVENT_INSTANT2(TRACE_DISABLED_BY_DEFAULT("v8.gc"),
                       "V8.GCMarkCompactorMarkingSummary",
                       TRACE_EVENT_SCOPE_THREAD, "duration", marking_duration,
                       "background_duration", marking_background_duration);
}
1314
1315 } // namespace internal
1316 } // namespace v8
1317