// Copyright 2014 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/heap/gc-tracer.h"

#include "src/counters.h"
#include "src/heap/heap-inl.h"
#include "src/isolate.h"

namespace v8 {
namespace internal {

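// Returns the total size of "holes" across all old spaces: bytes wasted by
// fragmentation plus bytes still available on free lists. Reported as
// holes_size_before/holes_size_after in the --trace_gc_nvp output.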
static size_t CountTotalHolesSize(Heap* heap) {
  size_t holes_size = 0;
  OldSpaces spaces(heap);
  for (OldSpace* space = spaces.next(); space != NULL; space = spaces.next()) {
    DCHECK_GE(holes_size + space->Waste() + space->Available(), holes_size);
    holes_size += space->Waste() + space->Available();
  }
  return holes_size;
}

GCTracer::Scope::Scope(GCTracer* tracer, ScopeId scope)
    : tracer_(tracer), scope_(scope) {
  // All accesses to incremental_marking_scope assume that incremental marking
  // scopes come first.
  STATIC_ASSERT(FIRST_INCREMENTAL_SCOPE == 0);
  start_time_ = tracer_->heap_->MonotonicallyIncreasingTimeInMs();
  // TODO(cbruni): remove once we fully moved to a trace-based system.
  if (V8_UNLIKELY(FLAG_runtime_stats)) {
    RuntimeCallStats::Enter(
        tracer_->heap_->isolate()->counters()->runtime_call_stats(), &timer_,
        &RuntimeCallStats::GC);
  }
}

GCTracer::Scope::~Scope() {
  tracer_->AddScopeSample(
      scope_, tracer_->heap_->MonotonicallyIncreasingTimeInMs() - start_time_);
  // TODO(cbruni): remove once we fully moved to a trace-based system.
  if (V8_UNLIKELY(FLAG_runtime_stats)) {
    RuntimeCallStats::Leave(
        tracer_->heap_->isolate()->counters()->runtime_call_stats(), &timer_);
  }
}

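// Returns the trace-event name for a scope, built by prefixing the enum
// name with "V8.GC_", e.g. Scope::MC_MARK -> "V8.GC_MC_MARK".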
const char* GCTracer::Scope::Name(ScopeId id) {
#define CASE(scope)  \
  case Scope::scope: \
    return "V8.GC_" #scope;
  switch (id) {
    TRACER_SCOPES(CASE)
    case Scope::NUMBER_OF_SCOPES:
      break;
  }
#undef CASE
  return "(unknown)";
}

GCTracer::Event::Event(Type type, GarbageCollectionReason gc_reason,
                       const char* collector_reason)
    : type(type),
      gc_reason(gc_reason),
      collector_reason(collector_reason),
      start_time(0.0),
      end_time(0.0),
      reduce_memory(false),
      start_object_size(0),
      end_object_size(0),
      start_memory_size(0),
      end_memory_size(0),
      start_holes_size(0),
      end_holes_size(0),
      new_space_object_size(0),
      survived_new_space_object_size(0),
      incremental_marking_bytes(0),
      incremental_marking_duration(0.0) {
  for (int i = 0; i < Scope::NUMBER_OF_SCOPES; i++) {
    scopes[i] = 0;
  }
}

const char* GCTracer::Event::TypeName(bool short_name) const {
  switch (type) {
    case SCAVENGER:
      return (short_name) ? "s" : "Scavenge";
    case MARK_COMPACTOR:
    case INCREMENTAL_MARK_COMPACTOR:
      return (short_name) ? "ms" : "Mark-sweep";
    case MINOR_MARK_COMPACTOR:
      return (short_name) ? "mmc" : "Minor Mark-Compact";
    case START:
      return (short_name) ? "st" : "Start";
  }
  return "Unknown Event Type";
}

GCTracer::GCTracer(Heap* heap)
    : heap_(heap),
      current_(Event::START, GarbageCollectionReason::kUnknown, nullptr),
      previous_(current_),
      incremental_marking_bytes_(0),
      incremental_marking_duration_(0.0),
      incremental_marking_start_time_(0.0),
      recorded_incremental_marking_speed_(0.0),
      allocation_time_ms_(0.0),
      new_space_allocation_counter_bytes_(0),
      old_generation_allocation_counter_bytes_(0),
      allocation_duration_since_gc_(0.0),
      new_space_allocation_in_bytes_since_gc_(0),
      old_generation_allocation_in_bytes_since_gc_(0),
      combined_mark_compact_speed_cache_(0.0),
      start_counter_(0) {
  current_.end_time = heap_->MonotonicallyIncreasingTimeInMs();
}

void GCTracer::ResetForTesting() {
  current_ = Event(Event::START, GarbageCollectionReason::kTesting, nullptr);
  current_.end_time = heap_->MonotonicallyIncreasingTimeInMs();
  previous_ = current_;
  ResetIncrementalMarkingCounters();
  allocation_time_ms_ = 0.0;
  new_space_allocation_counter_bytes_ = 0;
  old_generation_allocation_counter_bytes_ = 0;
  allocation_duration_since_gc_ = 0.0;
  new_space_allocation_in_bytes_since_gc_ = 0;
  old_generation_allocation_in_bytes_since_gc_ = 0;
  combined_mark_compact_speed_cache_ = 0.0;
  recorded_minor_gcs_total_.Reset();
  recorded_minor_gcs_survived_.Reset();
  recorded_compactions_.Reset();
  recorded_mark_compacts_.Reset();
  recorded_incremental_mark_compacts_.Reset();
  recorded_new_generation_allocations_.Reset();
  recorded_old_generation_allocations_.Reset();
  recorded_context_disposal_times_.Reset();
  recorded_survival_ratios_.Reset();
  start_counter_ = 0;
}

void GCTracer::NotifyYoungGenerationHandling(
    YoungGenerationHandling young_generation_handling) {
  DCHECK(current_.type == Event::SCAVENGER || start_counter_ > 1);
  heap_->isolate()->counters()->young_generation_handling()->AddSample(
      static_cast<int>(young_generation_handling));
}

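// Start/Stop can nest when a GC is triggered while another one is already
// being traced; start_counter_ tracks the nesting depth, and only the
// outermost Start/Stop pair records an event.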
void GCTracer::Start(GarbageCollector collector,
                     GarbageCollectionReason gc_reason,
                     const char* collector_reason) {
  start_counter_++;
  if (start_counter_ != 1) return;

  previous_ = current_;
  double start_time = heap_->MonotonicallyIncreasingTimeInMs();
  SampleAllocation(start_time, heap_->NewSpaceAllocationCounter(),
                   heap_->OldGenerationAllocationCounter());

  switch (collector) {
    case SCAVENGER:
      current_ = Event(Event::SCAVENGER, gc_reason, collector_reason);
      break;
    case MINOR_MARK_COMPACTOR:
      current_ =
          Event(Event::MINOR_MARK_COMPACTOR, gc_reason, collector_reason);
      break;
    case MARK_COMPACTOR:
      if (heap_->incremental_marking()->WasActivated()) {
        current_ = Event(Event::INCREMENTAL_MARK_COMPACTOR, gc_reason,
                         collector_reason);
      } else {
        current_ = Event(Event::MARK_COMPACTOR, gc_reason, collector_reason);
      }
      break;
  }

  current_.reduce_memory = heap_->ShouldReduceMemory();
  current_.start_time = start_time;
  current_.start_object_size = heap_->SizeOfObjects();
  current_.start_memory_size = heap_->memory_allocator()->Size();
  current_.start_holes_size = CountTotalHolesSize(heap_);
  current_.new_space_object_size = heap_->new_space()->Size();

  current_.incremental_marking_bytes = 0;
  current_.incremental_marking_duration = 0;

  for (int i = 0; i < Scope::NUMBER_OF_SCOPES; i++) {
    current_.scopes[i] = 0;
  }

  size_t committed_memory = heap_->CommittedMemory() / KB;
  size_t used_memory = current_.start_object_size / KB;

  Counters* counters = heap_->isolate()->counters();

  if (Heap::IsYoungGenerationCollector(collector)) {
    counters->scavenge_reason()->AddSample(static_cast<int>(gc_reason));
  } else {
    counters->mark_compact_reason()->AddSample(static_cast<int>(gc_reason));
  }
  counters->aggregated_memory_heap_committed()->AddSample(start_time,
                                                          committed_memory);
  counters->aggregated_memory_heap_used()->AddSample(start_time, used_memory);
  // TODO(cbruni): remove once we fully moved to a trace-based system.
  if (V8_UNLIKELY(FLAG_runtime_stats)) {
    RuntimeCallStats::Enter(heap_->isolate()->counters()->runtime_call_stats(),
                            &timer_, &RuntimeCallStats::GC);
  }
}

void GCTracer::ResetIncrementalMarkingCounters() {
  incremental_marking_bytes_ = 0;
  incremental_marking_duration_ = 0;
  for (int i = 0; i < Scope::NUMBER_OF_INCREMENTAL_SCOPES; i++) {
    incremental_marking_scopes_[i].ResetCurrentCycle();
  }
}

void GCTracer::Stop(GarbageCollector collector) {
  start_counter_--;
  if (start_counter_ != 0) {
    heap_->isolate()->PrintWithTimestamp("[Finished reentrant %s during %s.]\n",
                                         Heap::CollectorName(collector),
                                         current_.TypeName(false));
    return;
  }

  DCHECK(start_counter_ >= 0);
  DCHECK((collector == SCAVENGER && current_.type == Event::SCAVENGER) ||
         (collector == MINOR_MARK_COMPACTOR &&
          current_.type == Event::MINOR_MARK_COMPACTOR) ||
         (collector == MARK_COMPACTOR &&
          (current_.type == Event::MARK_COMPACTOR ||
           current_.type == Event::INCREMENTAL_MARK_COMPACTOR)));

  current_.end_time = heap_->MonotonicallyIncreasingTimeInMs();
  current_.end_object_size = heap_->SizeOfObjects();
  current_.end_memory_size = heap_->memory_allocator()->Size();
  current_.end_holes_size = CountTotalHolesSize(heap_);
  current_.survived_new_space_object_size = heap_->SurvivedNewSpaceObjectSize();

  AddAllocation(current_.end_time);

  size_t committed_memory = heap_->CommittedMemory() / KB;
  size_t used_memory = current_.end_object_size / KB;
  heap_->isolate()->counters()->aggregated_memory_heap_committed()->AddSample(
      current_.end_time, committed_memory);
  heap_->isolate()->counters()->aggregated_memory_heap_used()->AddSample(
      current_.end_time, used_memory);

  double duration = current_.end_time - current_.start_time;

  switch (current_.type) {
    case Event::SCAVENGER:
    case Event::MINOR_MARK_COMPACTOR:
      recorded_minor_gcs_total_.Push(
          MakeBytesAndDuration(current_.new_space_object_size, duration));
      recorded_minor_gcs_survived_.Push(MakeBytesAndDuration(
          current_.survived_new_space_object_size, duration));
      break;
    case Event::INCREMENTAL_MARK_COMPACTOR:
      current_.incremental_marking_bytes = incremental_marking_bytes_;
      current_.incremental_marking_duration = incremental_marking_duration_;
      for (int i = 0; i < Scope::NUMBER_OF_INCREMENTAL_SCOPES; i++) {
        current_.incremental_marking_scopes[i] = incremental_marking_scopes_[i];
        current_.scopes[i] = incremental_marking_scopes_[i].duration;
      }
      RecordIncrementalMarkingSpeed(current_.incremental_marking_bytes,
                                    current_.incremental_marking_duration);
      recorded_incremental_mark_compacts_.Push(
          MakeBytesAndDuration(current_.start_object_size, duration));
      ResetIncrementalMarkingCounters();
      combined_mark_compact_speed_cache_ = 0.0;
      break;
    case Event::MARK_COMPACTOR:
      DCHECK_EQ(0u, current_.incremental_marking_bytes);
      DCHECK_EQ(0, current_.incremental_marking_duration);
      recorded_mark_compacts_.Push(
          MakeBytesAndDuration(current_.start_object_size, duration));
      ResetIncrementalMarkingCounters();
      combined_mark_compact_speed_cache_ = 0.0;
      break;
    case Event::START:
      UNREACHABLE();
  }

  heap_->UpdateTotalGCTime(duration);

  if ((current_.type == Event::SCAVENGER ||
       current_.type == Event::MINOR_MARK_COMPACTOR) &&
      FLAG_trace_gc_ignore_scavenger)
    return;

  if (FLAG_trace_gc_nvp) {
    PrintNVP();
  } else {
    Print();
  }

  if (FLAG_trace_gc) {
    heap_->PrintShortHeapStatistics();
  }

  // TODO(cbruni): remove once we fully moved to a trace-based system.
  if (V8_UNLIKELY(FLAG_runtime_stats)) {
    RuntimeCallStats::Leave(heap_->isolate()->counters()->runtime_call_stats(),
                            &timer_);
  }
}

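// Snapshots the monotonic allocation counters and accumulates the deltas
// since the previous sample. The accumulated totals are consumed (and reset)
// by AddAllocation() when a GC cycle ends.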
void GCTracer::SampleAllocation(double current_ms,
                                size_t new_space_counter_bytes,
                                size_t old_generation_counter_bytes) {
  if (allocation_time_ms_ == 0) {
    // It is the first sample.
    allocation_time_ms_ = current_ms;
    new_space_allocation_counter_bytes_ = new_space_counter_bytes;
    old_generation_allocation_counter_bytes_ = old_generation_counter_bytes;
    return;
  }
  // This assumes that counters are unsigned integers so that the subtraction
  // below works even if the new counter is less than the old counter.
  size_t new_space_allocated_bytes =
      new_space_counter_bytes - new_space_allocation_counter_bytes_;
  size_t old_generation_allocated_bytes =
      old_generation_counter_bytes - old_generation_allocation_counter_bytes_;
  double duration = current_ms - allocation_time_ms_;
  allocation_time_ms_ = current_ms;
  new_space_allocation_counter_bytes_ = new_space_counter_bytes;
  old_generation_allocation_counter_bytes_ = old_generation_counter_bytes;
  allocation_duration_since_gc_ += duration;
  new_space_allocation_in_bytes_since_gc_ += new_space_allocated_bytes;
  old_generation_allocation_in_bytes_since_gc_ +=
      old_generation_allocated_bytes;
}

void GCTracer::AddAllocation(double current_ms) {
  allocation_time_ms_ = current_ms;
  if (allocation_duration_since_gc_ > 0) {
    recorded_new_generation_allocations_.Push(
        MakeBytesAndDuration(new_space_allocation_in_bytes_since_gc_,
                             allocation_duration_since_gc_));
    recorded_old_generation_allocations_.Push(
        MakeBytesAndDuration(old_generation_allocation_in_bytes_since_gc_,
                             allocation_duration_since_gc_));
  }
  allocation_duration_since_gc_ = 0;
  new_space_allocation_in_bytes_since_gc_ = 0;
  old_generation_allocation_in_bytes_since_gc_ = 0;
}

void GCTracer::AddContextDisposalTime(double time) {
  recorded_context_disposal_times_.Push(time);
}

void GCTracer::AddCompactionEvent(double duration,
                                  size_t live_bytes_compacted) {
  recorded_compactions_.Push(
      MakeBytesAndDuration(live_bytes_compacted, duration));
}

void GCTracer::AddSurvivalRatio(double promotion_ratio) {
  recorded_survival_ratios_.Push(promotion_ratio);
}

void GCTracer::AddIncrementalMarkingStep(double duration, size_t bytes) {
  if (bytes > 0) {
    incremental_marking_bytes_ += bytes;
    incremental_marking_duration_ += duration;
  }
}

void GCTracer::Output(const char* format, ...) const {
  if (FLAG_trace_gc) {
    va_list arguments;
    va_start(arguments, format);
    base::OS::VPrint(format, arguments);
    va_end(arguments);
  }

  const int kBufferSize = 256;
  char raw_buffer[kBufferSize];
  Vector<char> buffer(raw_buffer, kBufferSize);
  va_list arguments2;
  va_start(arguments2, format);
  VSNPrintF(buffer, format, arguments2);
  va_end(arguments2);

  heap_->AddToRingBuffer(buffer.start());
}

void GCTracer::Print() const {
  double duration = current_.end_time - current_.start_time;
  const size_t kIncrementalStatsSize = 128;
  char incremental_buffer[kIncrementalStatsSize] = {0};

  if (current_.type == Event::INCREMENTAL_MARK_COMPACTOR) {
    base::OS::SNPrintF(
        incremental_buffer, kIncrementalStatsSize,
        " (+ %.1f ms in %d steps since start of marking, "
        "biggest step %.1f ms, walltime since start of marking %.f ms)",
        current_.scopes[Scope::MC_INCREMENTAL],
        current_.incremental_marking_scopes[Scope::MC_INCREMENTAL].steps,
        current_.incremental_marking_scopes[Scope::MC_INCREMENTAL].longest_step,
        current_.end_time - incremental_marking_start_time_);
  }

  // Avoid PrintF as Output also appends the string to the tracing ring buffer
  // that gets printed on OOM failures.
  Output(
      "[%d:%p] "
      "%8.0f ms: "
      "%s %.1f (%.1f) -> %.1f (%.1f) MB, "
      "%.1f / %.1f ms %s %s %s\n",
      base::OS::GetCurrentProcessId(),
      reinterpret_cast<void*>(heap_->isolate()),
      heap_->isolate()->time_millis_since_init(), current_.TypeName(false),
      static_cast<double>(current_.start_object_size) / MB,
      static_cast<double>(current_.start_memory_size) / MB,
      static_cast<double>(current_.end_object_size) / MB,
      static_cast<double>(current_.end_memory_size) / MB, duration,
      TotalExternalTime(), incremental_buffer,
      Heap::GarbageCollectionReasonToString(current_.gc_reason),
      current_.collector_reason != nullptr ? current_.collector_reason : "");
}

void GCTracer::PrintNVP() const {
  double duration = current_.end_time - current_.start_time;
  double spent_in_mutator = current_.start_time - previous_.end_time;
  size_t allocated_since_last_gc =
      current_.start_object_size - previous_.end_object_size;

  double incremental_walltime_duration = 0;

  if (current_.type == Event::INCREMENTAL_MARK_COMPACTOR) {
    incremental_walltime_duration =
        current_.end_time - incremental_marking_start_time_;
  }

  switch (current_.type) {
    case Event::SCAVENGER:
      heap_->isolate()->PrintWithTimestamp(
          "pause=%.1f "
          "mutator=%.1f "
          "gc=%s "
          "reduce_memory=%d "
          "scavenge=%.2f "
          "evacuate=%.2f "
          "old_new=%.2f "
          "weak=%.2f "
          "roots=%.2f "
          "code=%.2f "
          "semispace=%.2f "
          "external.prologue=%.2f "
          "external.epilogue=%.2f "
          "external_weak_global_handles=%.2f "
          "steps_count=%d "
          "steps_took=%.1f "
          "scavenge_throughput=%.f "
          "total_size_before=%" PRIuS
          " "
          "total_size_after=%" PRIuS
          " "
          "holes_size_before=%" PRIuS
          " "
          "holes_size_after=%" PRIuS
          " "
          "allocated=%" PRIuS
          " "
          "promoted=%" PRIuS
          " "
          "semi_space_copied=%" PRIuS
          " "
          "nodes_died_in_new=%d "
          "nodes_copied_in_new=%d "
          "nodes_promoted=%d "
          "promotion_ratio=%.1f%% "
          "average_survival_ratio=%.1f%% "
          "promotion_rate=%.1f%% "
          "semi_space_copy_rate=%.1f%% "
          "new_space_allocation_throughput=%.1f "
          "context_disposal_rate=%.1f\n",
          duration, spent_in_mutator, current_.TypeName(true),
          current_.reduce_memory, current_.scopes[Scope::SCAVENGER_SCAVENGE],
          current_.scopes[Scope::SCAVENGER_EVACUATE],
          current_.scopes[Scope::SCAVENGER_OLD_TO_NEW_POINTERS],
          current_.scopes[Scope::SCAVENGER_WEAK],
          current_.scopes[Scope::SCAVENGER_ROOTS],
          current_.scopes[Scope::SCAVENGER_CODE_FLUSH_CANDIDATES],
          current_.scopes[Scope::SCAVENGER_SEMISPACE],
          current_.scopes[Scope::EXTERNAL_PROLOGUE],
          current_.scopes[Scope::EXTERNAL_EPILOGUE],
          current_.scopes[Scope::EXTERNAL_WEAK_GLOBAL_HANDLES],
          current_.incremental_marking_scopes[GCTracer::Scope::MC_INCREMENTAL]
              .steps,
          current_.scopes[Scope::MC_INCREMENTAL],
          ScavengeSpeedInBytesPerMillisecond(), current_.start_object_size,
          current_.end_object_size, current_.start_holes_size,
          current_.end_holes_size, allocated_since_last_gc,
          heap_->promoted_objects_size(),
          heap_->semi_space_copied_object_size(),
          heap_->nodes_died_in_new_space_, heap_->nodes_copied_in_new_space_,
          heap_->nodes_promoted_, heap_->promotion_ratio_,
          AverageSurvivalRatio(), heap_->promotion_rate_,
          heap_->semi_space_copied_rate_,
          NewSpaceAllocationThroughputInBytesPerMillisecond(),
          ContextDisposalRateInMilliseconds());
      break;
    case Event::MINOR_MARK_COMPACTOR:
      heap_->isolate()->PrintWithTimestamp(
          "pause=%.1f "
          "mutator=%.1f "
          "gc=%s "
          "reduce_memory=%d "
          "mark=%.2f "
          "mark.roots=%.2f "
          "mark.old_to_new=%.2f\n",
          duration, spent_in_mutator, "mmc", current_.reduce_memory,
          current_.scopes[Scope::MINOR_MC_MARK],
          current_.scopes[Scope::MINOR_MC_MARK_ROOTS],
          current_.scopes[Scope::MINOR_MC_MARK_OLD_TO_NEW_POINTERS]);
      break;
    case Event::MARK_COMPACTOR:
    case Event::INCREMENTAL_MARK_COMPACTOR:
      heap_->isolate()->PrintWithTimestamp(
          "pause=%.1f "
          "mutator=%.1f "
          "gc=%s "
          "reduce_memory=%d "
          "clear=%.1f "
          "clear.code_flush=%.1f "
          "clear.dependent_code=%.1f "
          "clear.global_handles=%.1f "
          "clear.maps=%.1f "
          "clear.slots_buffer=%.1f "
          "clear.store_buffer=%.1f "
          "clear.string_table=%.1f "
          "clear.weak_cells=%.1f "
          "clear.weak_collections=%.1f "
          "clear.weak_lists=%.1f "
          "epilogue=%.1f "
          "evacuate=%.1f "
          "evacuate.candidates=%.1f "
          "evacuate.clean_up=%.1f "
          "evacuate.copy=%.1f "
          "evacuate.prologue=%.1f "
          "evacuate.epilogue=%.1f "
          "evacuate.rebalance=%.1f "
          "evacuate.update_pointers=%.1f "
          "evacuate.update_pointers.to_evacuated=%.1f "
          "evacuate.update_pointers.to_new=%.1f "
          "evacuate.update_pointers.weak=%.1f "
          "external.prologue=%.1f "
          "external.epilogue=%.1f "
          "external.weak_global_handles=%.1f "
          "finish=%.1f "
          "mark=%.1f "
          "mark.finish_incremental=%.1f "
          "mark.object_grouping=%.1f "
          "mark.prepare_code_flush=%.1f "
          "mark.roots=%.1f "
          "mark.weak_closure=%.1f "
          "mark.weak_closure.ephemeral=%.1f "
          "mark.weak_closure.weak_handles=%.1f "
          "mark.weak_closure.weak_roots=%.1f "
          "mark.weak_closure.harmony=%.1f "
          "mark.wrapper_prologue=%.1f "
          "mark.wrapper_epilogue=%.1f "
          "mark.wrapper_tracing=%.1f "
          "prologue=%.1f "
          "sweep=%.1f "
          "sweep.code=%.1f "
          "sweep.map=%.1f "
          "sweep.old=%.1f "
          "incremental=%.1f "
          "incremental.finalize=%.1f "
          "incremental.finalize.body=%.1f "
          "incremental.finalize.external.prologue=%.1f "
          "incremental.finalize.external.epilogue=%.1f "
          "incremental.finalize.object_grouping=%.1f "
          "incremental.sweeping=%.1f "
          "incremental.wrapper_prologue=%.1f "
          "incremental.wrapper_tracing=%.1f "
          "incremental_wrapper_tracing_longest_step=%.1f "
          "incremental_finalize_longest_step=%.1f "
          "incremental_finalize_steps_count=%d "
          "incremental_longest_step=%.1f "
          "incremental_steps_count=%d "
          "incremental_marking_throughput=%.f "
          "incremental_walltime_duration=%.f "
          "total_size_before=%" PRIuS
          " "
          "total_size_after=%" PRIuS
          " "
          "holes_size_before=%" PRIuS
          " "
          "holes_size_after=%" PRIuS
          " "
          "allocated=%" PRIuS
          " "
          "promoted=%" PRIuS
          " "
          "semi_space_copied=%" PRIuS
          " "
          "nodes_died_in_new=%d "
          "nodes_copied_in_new=%d "
          "nodes_promoted=%d "
          "promotion_ratio=%.1f%% "
          "average_survival_ratio=%.1f%% "
          "promotion_rate=%.1f%% "
          "semi_space_copy_rate=%.1f%% "
          "new_space_allocation_throughput=%.1f "
          "context_disposal_rate=%.1f "
          "compaction_speed=%.f\n",
          duration, spent_in_mutator, current_.TypeName(true),
          current_.reduce_memory, current_.scopes[Scope::MC_CLEAR],
          current_.scopes[Scope::MC_CLEAR_CODE_FLUSH],
          current_.scopes[Scope::MC_CLEAR_DEPENDENT_CODE],
          current_.scopes[Scope::MC_CLEAR_GLOBAL_HANDLES],
          current_.scopes[Scope::MC_CLEAR_MAPS],
          current_.scopes[Scope::MC_CLEAR_SLOTS_BUFFER],
          current_.scopes[Scope::MC_CLEAR_STORE_BUFFER],
          current_.scopes[Scope::MC_CLEAR_STRING_TABLE],
          current_.scopes[Scope::MC_CLEAR_WEAK_CELLS],
          current_.scopes[Scope::MC_CLEAR_WEAK_COLLECTIONS],
          current_.scopes[Scope::MC_CLEAR_WEAK_LISTS],
          current_.scopes[Scope::MC_EPILOGUE],
          current_.scopes[Scope::MC_EVACUATE],
          current_.scopes[Scope::MC_EVACUATE_CANDIDATES],
          current_.scopes[Scope::MC_EVACUATE_CLEAN_UP],
          current_.scopes[Scope::MC_EVACUATE_COPY],
          current_.scopes[Scope::MC_EVACUATE_PROLOGUE],
          current_.scopes[Scope::MC_EVACUATE_EPILOGUE],
          current_.scopes[Scope::MC_EVACUATE_REBALANCE],
          current_.scopes[Scope::MC_EVACUATE_UPDATE_POINTERS],
          current_.scopes[Scope::MC_EVACUATE_UPDATE_POINTERS_TO_EVACUATED],
          current_.scopes[Scope::MC_EVACUATE_UPDATE_POINTERS_TO_NEW],
          current_.scopes[Scope::MC_EVACUATE_UPDATE_POINTERS_WEAK],
          current_.scopes[Scope::EXTERNAL_PROLOGUE],
          current_.scopes[Scope::EXTERNAL_EPILOGUE],
          current_.scopes[Scope::EXTERNAL_WEAK_GLOBAL_HANDLES],
          current_.scopes[Scope::MC_FINISH], current_.scopes[Scope::MC_MARK],
          current_.scopes[Scope::MC_MARK_FINISH_INCREMENTAL],
          current_.scopes[Scope::MC_MARK_OBJECT_GROUPING],
          current_.scopes[Scope::MC_MARK_PREPARE_CODE_FLUSH],
          current_.scopes[Scope::MC_MARK_ROOTS],
          current_.scopes[Scope::MC_MARK_WEAK_CLOSURE],
          current_.scopes[Scope::MC_MARK_WEAK_CLOSURE_EPHEMERAL],
          current_.scopes[Scope::MC_MARK_WEAK_CLOSURE_WEAK_HANDLES],
          current_.scopes[Scope::MC_MARK_WEAK_CLOSURE_WEAK_ROOTS],
          current_.scopes[Scope::MC_MARK_WEAK_CLOSURE_HARMONY],
          current_.scopes[Scope::MC_MARK_WRAPPER_PROLOGUE],
          current_.scopes[Scope::MC_MARK_WRAPPER_EPILOGUE],
          current_.scopes[Scope::MC_MARK_WRAPPER_TRACING],
          current_.scopes[Scope::MC_PROLOGUE], current_.scopes[Scope::MC_SWEEP],
          current_.scopes[Scope::MC_SWEEP_CODE],
          current_.scopes[Scope::MC_SWEEP_MAP],
          current_.scopes[Scope::MC_SWEEP_OLD],
          current_.scopes[Scope::MC_INCREMENTAL],
          current_.scopes[Scope::MC_INCREMENTAL_FINALIZE],
          current_.scopes[Scope::MC_INCREMENTAL_FINALIZE_BODY],
          current_.scopes[Scope::MC_INCREMENTAL_EXTERNAL_PROLOGUE],
          current_.scopes[Scope::MC_INCREMENTAL_EXTERNAL_EPILOGUE],
          current_.scopes[Scope::MC_INCREMENTAL_FINALIZE_OBJECT_GROUPING],
          current_.scopes[Scope::MC_INCREMENTAL_SWEEPING],
          current_.scopes[Scope::MC_INCREMENTAL_WRAPPER_PROLOGUE],
          current_.scopes[Scope::MC_INCREMENTAL_WRAPPER_TRACING],
          current_
              .incremental_marking_scopes[Scope::MC_INCREMENTAL_WRAPPER_TRACING]
              .longest_step,
          current_
              .incremental_marking_scopes[Scope::MC_INCREMENTAL_FINALIZE_BODY]
              .longest_step,
          current_
              .incremental_marking_scopes[Scope::MC_INCREMENTAL_FINALIZE_BODY]
              .steps,
          current_.incremental_marking_scopes[Scope::MC_INCREMENTAL]
              .longest_step,
          current_.incremental_marking_scopes[Scope::MC_INCREMENTAL].steps,
          IncrementalMarkingSpeedInBytesPerMillisecond(),
          incremental_walltime_duration, current_.start_object_size,
          current_.end_object_size, current_.start_holes_size,
          current_.end_holes_size, allocated_since_last_gc,
          heap_->promoted_objects_size(),
          heap_->semi_space_copied_object_size(),
          heap_->nodes_died_in_new_space_, heap_->nodes_copied_in_new_space_,
          heap_->nodes_promoted_, heap_->promotion_ratio_,
          AverageSurvivalRatio(), heap_->promotion_rate_,
          heap_->semi_space_copied_rate_,
          NewSpaceAllocationThroughputInBytesPerMillisecond(),
          ContextDisposalRateInMilliseconds(),
          CompactionSpeedInBytesPerMillisecond());
      break;
    case Event::START:
      break;
    default:
      UNREACHABLE();
  }
}

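// Returns the average speed in bytes/ms over the recorded samples, clamped to
// [1, 1024 * MB]. For example, samples of (2 MB, 1 ms) and (4 MB, 2 ms) with
// no time window average to (2 + 4) MB / (1 + 2) ms = 2 MB/ms. When time_ms
// is non-zero the fold stops once the accumulated duration reaches time_ms;
// this windowing presumes the ring buffer folds newest samples first.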
double GCTracer::AverageSpeed(const base::RingBuffer<BytesAndDuration>& buffer,
                              const BytesAndDuration& initial,
                              double time_ms) {
  BytesAndDuration sum = buffer.Sum(
      [time_ms](BytesAndDuration a, BytesAndDuration b) {
        if (time_ms != 0 && a.second >= time_ms) return a;
        return std::make_pair(a.first + b.first, a.second + b.second);
      },
      initial);
  uint64_t bytes = sum.first;
  double durations = sum.second;
  if (durations == 0.0) return 0;
  double speed = bytes / durations;
  const int max_speed = 1024 * MB;
  const int min_speed = 1;
  if (speed >= max_speed) return max_speed;
  if (speed <= min_speed) return min_speed;
  return speed;
}

double GCTracer::AverageSpeed(
    const base::RingBuffer<BytesAndDuration>& buffer) {
  return AverageSpeed(buffer, MakeBytesAndDuration(0, 0), 0);
}
void GCTracer::RecordIncrementalMarkingSpeed(size_t bytes, double duration) {
  if (duration == 0 || bytes == 0) return;
  double current_speed = bytes / duration;
  if (recorded_incremental_marking_speed_ == 0) {
    recorded_incremental_marking_speed_ = current_speed;
  } else {
    recorded_incremental_marking_speed_ =
        (recorded_incremental_marking_speed_ + current_speed) / 2;
  }
}

double GCTracer::IncrementalMarkingSpeedInBytesPerMillisecond() const {
  const int kConservativeSpeedInBytesPerMillisecond = 128 * KB;
  if (recorded_incremental_marking_speed_ != 0) {
    return recorded_incremental_marking_speed_;
  }
  if (incremental_marking_duration_ != 0.0) {
    return incremental_marking_bytes_ / incremental_marking_duration_;
  }
  return kConservativeSpeedInBytesPerMillisecond;
}

double GCTracer::ScavengeSpeedInBytesPerMillisecond(
    ScavengeSpeedMode mode) const {
  if (mode == kForAllObjects) {
    return AverageSpeed(recorded_minor_gcs_total_);
  } else {
    return AverageSpeed(recorded_minor_gcs_survived_);
  }
}

double GCTracer::CompactionSpeedInBytesPerMillisecond() const {
  return AverageSpeed(recorded_compactions_);
}

double GCTracer::MarkCompactSpeedInBytesPerMillisecond() const {
  return AverageSpeed(recorded_mark_compacts_);
}

double GCTracer::FinalIncrementalMarkCompactSpeedInBytesPerMillisecond()
    const {
  return AverageSpeed(recorded_incremental_mark_compacts_);
}

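// The combined speed is the harmonic combination of the incremental and
// final-pause speeds: processing N bytes takes N/speed1 + N/speed2 ms, so the
// effective throughput is N / (N/speed1 + N/speed2)
// = speed1 * speed2 / (speed1 + speed2). Two phases at the same speed s
// combine to s/2, since every byte is handled twice.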
double GCTracer::CombinedMarkCompactSpeedInBytesPerMillisecond() {
  if (combined_mark_compact_speed_cache_ > 0)
    return combined_mark_compact_speed_cache_;
  const double kMinimumMarkingSpeed = 0.5;
  double speed1 = IncrementalMarkingSpeedInBytesPerMillisecond();
  double speed2 = FinalIncrementalMarkCompactSpeedInBytesPerMillisecond();
  if (speed1 < kMinimumMarkingSpeed || speed2 < kMinimumMarkingSpeed) {
    // No data for the incremental marking speed.
    // Return the non-incremental mark-compact speed.
    combined_mark_compact_speed_cache_ =
        MarkCompactSpeedInBytesPerMillisecond();
  } else {
    // Combine the speed of incremental step and the speed of the final step.
    // 1 / (1 / speed1 + 1 / speed2) = speed1 * speed2 / (speed1 + speed2).
    combined_mark_compact_speed_cache_ = speed1 * speed2 / (speed1 + speed2);
  }
  return combined_mark_compact_speed_cache_;
}

double GCTracer::NewSpaceAllocationThroughputInBytesPerMillisecond(
    double time_ms) const {
  size_t bytes = new_space_allocation_in_bytes_since_gc_;
  double durations = allocation_duration_since_gc_;
  return AverageSpeed(recorded_new_generation_allocations_,
                      MakeBytesAndDuration(bytes, durations), time_ms);
}

double GCTracer::OldGenerationAllocationThroughputInBytesPerMillisecond(
    double time_ms) const {
  size_t bytes = old_generation_allocation_in_bytes_since_gc_;
  double durations = allocation_duration_since_gc_;
  return AverageSpeed(recorded_old_generation_allocations_,
                      MakeBytesAndDuration(bytes, durations), time_ms);
}

double GCTracer::AllocationThroughputInBytesPerMillisecond(
    double time_ms) const {
  return NewSpaceAllocationThroughputInBytesPerMillisecond(time_ms) +
         OldGenerationAllocationThroughputInBytesPerMillisecond(time_ms);
}

double GCTracer::CurrentAllocationThroughputInBytesPerMillisecond() const {
  return AllocationThroughputInBytesPerMillisecond(kThroughputTimeFrameMs);
}

double GCTracer::CurrentOldGenerationAllocationThroughputInBytesPerMillisecond()
    const {
  return OldGenerationAllocationThroughputInBytesPerMillisecond(
      kThroughputTimeFrameMs);
}

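// Returns the average time in ms between the last kSize context disposals,
// or 0.0 until the ring buffer has filled. The fold below keeps the last
// element it visits, which is the oldest recorded disposal time if the ring
// buffer folds newest-first; (now - oldest) / count is then the mean spacing.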
double GCTracer::ContextDisposalRateInMilliseconds() const {
  if (recorded_context_disposal_times_.Count() <
      recorded_context_disposal_times_.kSize)
    return 0.0;
  double begin = heap_->MonotonicallyIncreasingTimeInMs();
  double end = recorded_context_disposal_times_.Sum(
      [](double a, double b) { return b; }, 0.0);
  return (begin - end) / recorded_context_disposal_times_.Count();
}

double GCTracer::AverageSurvivalRatio() const {
  if (recorded_survival_ratios_.Count() == 0) return 0.0;
  double sum = recorded_survival_ratios_.Sum(
      [](double a, double b) { return a + b; }, 0.0);
  return sum / recorded_survival_ratios_.Count();
}

bool GCTracer::SurvivalEventsRecorded() const {
  return recorded_survival_ratios_.Count() > 0;
}

void GCTracer::ResetSurvivalEvents() { recorded_survival_ratios_.Reset(); }

void GCTracer::NotifyIncrementalMarkingStart() {
  incremental_marking_start_time_ = heap_->MonotonicallyIncreasingTimeInMs();
}

}  // namespace internal
}  // namespace v8