// Copyright 2014 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef V8_HEAP_GC_TRACER_H_
#define V8_HEAP_GC_TRACER_H_

#include "src/base/compiler-specific.h"
#include "src/base/platform/platform.h"
#include "src/base/ring-buffer.h"
#include "src/counters.h"
#include "src/globals.h"
#include "src/heap-symbols.h"
#include "src/heap/heap.h"
#include "testing/gtest/include/gtest/gtest_prod.h"  // nogncheck

namespace v8 {
namespace internal {

typedef std::pair<uint64_t, double> BytesAndDuration;

inline BytesAndDuration MakeBytesAndDuration(uint64_t bytes, double duration) {
  return std::make_pair(bytes, duration);
}
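
// Illustrative example (not part of the original header): a sample recording
// that 1 MB of objects was processed in 2 ms could be built as
//   BytesAndDuration sample = MakeBytesAndDuration(1 * MB, 2.0);
// assuming the MB size constant from src/globals.h.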

enum ScavengeSpeedMode { kForAllObjects, kForSurvivedObjects };

#define TRACE_GC(tracer, scope_id)                             \
  GCTracer::Scope::ScopeId gc_tracer_scope_id(scope_id);       \
  GCTracer::Scope gc_tracer_scope(tracer, gc_tracer_scope_id); \
  TRACE_EVENT0(TRACE_DISABLED_BY_DEFAULT("v8.gc"),             \
               GCTracer::Scope::Name(gc_tracer_scope_id))

#define TRACE_BACKGROUND_GC(tracer, scope_id)                   \
  GCTracer::BackgroundScope background_scope(tracer, scope_id); \
  TRACE_EVENT0(TRACE_DISABLED_BY_DEFAULT("v8.gc"),              \
               GCTracer::BackgroundScope::Name(scope_id))
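
// Illustrative usage (not part of the original header): a main-thread GC phase
// is typically wrapped as
//   TRACE_GC(heap->tracer(), GCTracer::Scope::MC_MARK);
// and work running on a background task as
//   TRACE_BACKGROUND_GC(tracer, GCTracer::BackgroundScope::MC_BACKGROUND_MARKING);
// Each macro times the enclosing C++ scope and emits a matching trace event
// under the disabled-by-default "v8.gc" category.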

// GCTracer collects and prints ONE line after each garbage collector
// invocation IFF --trace_gc is used.
class V8_EXPORT_PRIVATE GCTracer {
 public:
  struct IncrementalMarkingInfos {
    IncrementalMarkingInfos() : duration(0), longest_step(0), steps(0) {}

    void Update(double duration) {
      steps++;
      this->duration += duration;
      if (duration > longest_step) {
        longest_step = duration;
      }
    }

    void ResetCurrentCycle() {
      duration = 0;
      longest_step = 0;
      steps = 0;
    }

    double duration;
    double longest_step;
    int steps;
  };
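
  // Illustrative behavior (not spelled out in the original header): after
  //   IncrementalMarkingInfos infos;
  //   infos.Update(1.5);
  //   infos.Update(3.0);
  // infos.duration is 4.5, infos.longest_step is 3.0 and infos.steps is 2;
  // ResetCurrentCycle() returns all three fields to zero.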

  class Scope {
   public:
    enum ScopeId {
#define DEFINE_SCOPE(scope) scope,
      TRACER_SCOPES(DEFINE_SCOPE) TRACER_BACKGROUND_SCOPES(DEFINE_SCOPE)
#undef DEFINE_SCOPE
      NUMBER_OF_SCOPES,

      FIRST_INCREMENTAL_SCOPE = MC_INCREMENTAL,
      LAST_INCREMENTAL_SCOPE = MC_INCREMENTAL_EXTERNAL_PROLOGUE,
      FIRST_SCOPE = MC_INCREMENTAL,
      NUMBER_OF_INCREMENTAL_SCOPES =
          LAST_INCREMENTAL_SCOPE - FIRST_INCREMENTAL_SCOPE + 1,
      FIRST_GENERAL_BACKGROUND_SCOPE = BACKGROUND_ARRAY_BUFFER_FREE,
      LAST_GENERAL_BACKGROUND_SCOPE = BACKGROUND_UNMAPPER,
      FIRST_MC_BACKGROUND_SCOPE = MC_BACKGROUND_EVACUATE_COPY,
      LAST_MC_BACKGROUND_SCOPE = MC_BACKGROUND_SWEEPING,
      FIRST_TOP_MC_SCOPE = MC_CLEAR,
      LAST_TOP_MC_SCOPE = MC_SWEEP,
      FIRST_MINOR_GC_BACKGROUND_SCOPE = MINOR_MC_BACKGROUND_EVACUATE_COPY,
      LAST_MINOR_GC_BACKGROUND_SCOPE = SCAVENGER_BACKGROUND_SCAVENGE_PARALLEL
    };

    Scope(GCTracer* tracer, ScopeId scope);
    ~Scope();
    static const char* Name(ScopeId id);

   private:
    GCTracer* tracer_;
    ScopeId scope_;
    double start_time_;
    RuntimeCallTimer timer_;
    RuntimeCallStats* runtime_stats_ = nullptr;

    DISALLOW_COPY_AND_ASSIGN(Scope);
  };

  class V8_EXPORT_PRIVATE BackgroundScope {
   public:
    enum ScopeId {
#define DEFINE_SCOPE(scope) scope,
      TRACER_BACKGROUND_SCOPES(DEFINE_SCOPE)
#undef DEFINE_SCOPE
      NUMBER_OF_SCOPES,
      FIRST_GENERAL_BACKGROUND_SCOPE = BACKGROUND_ARRAY_BUFFER_FREE,
      LAST_GENERAL_BACKGROUND_SCOPE = BACKGROUND_UNMAPPER,
      FIRST_MC_BACKGROUND_SCOPE = MC_BACKGROUND_EVACUATE_COPY,
      LAST_MC_BACKGROUND_SCOPE = MC_BACKGROUND_SWEEPING,
      FIRST_MINOR_GC_BACKGROUND_SCOPE = MINOR_MC_BACKGROUND_EVACUATE_COPY,
      LAST_MINOR_GC_BACKGROUND_SCOPE = SCAVENGER_BACKGROUND_SCAVENGE_PARALLEL
    };
    BackgroundScope(GCTracer* tracer, ScopeId scope);
    ~BackgroundScope();

    static const char* Name(ScopeId id);

   private:
    GCTracer* tracer_;
    ScopeId scope_;
    double start_time_;
    RuntimeCallTimer timer_;
    RuntimeCallCounter counter_;
    bool runtime_stats_enabled_;
    DISALLOW_COPY_AND_ASSIGN(BackgroundScope);
  };

  class Event {
   public:
    enum Type {
      SCAVENGER = 0,
      MARK_COMPACTOR = 1,
      INCREMENTAL_MARK_COMPACTOR = 2,
      MINOR_MARK_COMPACTOR = 3,
      START = 4
    };

    Event(Type type, GarbageCollectionReason gc_reason,
          const char* collector_reason);

    // Returns a string describing the event type.
    const char* TypeName(bool short_name) const;

    // Type of event
    Type type;

    GarbageCollectionReason gc_reason;
    const char* collector_reason;

    // Timestamp set in the constructor.
    double start_time;

    // Timestamp set in the destructor.
    double end_time;

    // Memory reduction flag set.
    bool reduce_memory;

    // Size of objects in heap set in constructor.
    size_t start_object_size;

    // Size of objects in heap set in destructor.
    size_t end_object_size;

    // Size of memory allocated from OS set in constructor.
    size_t start_memory_size;

    // Size of memory allocated from OS set in destructor.
    size_t end_memory_size;

    // Total amount of space either wasted or contained in one of free lists
    // before the current GC.
    size_t start_holes_size;

    // Total amount of space either wasted or contained in one of free lists
    // after the current GC.
    size_t end_holes_size;

    // Size of new space objects in constructor.
    size_t new_space_object_size;

    // Size of survived new space objects in destructor.
    size_t survived_new_space_object_size;

    // Bytes marked incrementally for INCREMENTAL_MARK_COMPACTOR.
    size_t incremental_marking_bytes;

    // Duration of incremental marking steps for INCREMENTAL_MARK_COMPACTOR.
    double incremental_marking_duration;

    // Amounts of time spent in different scopes during GC.
    double scopes[Scope::NUMBER_OF_SCOPES];

    // Holds details for incremental marking scopes.
    IncrementalMarkingInfos
        incremental_marking_scopes[Scope::NUMBER_OF_INCREMENTAL_SCOPES];
  };

  static const int kThroughputTimeFrameMs = 5000;

  static RuntimeCallCounterId RCSCounterFromScope(Scope::ScopeId id);

  explicit GCTracer(Heap* heap);

  // Start collecting data.
  void Start(GarbageCollector collector, GarbageCollectionReason gc_reason,
             const char* collector_reason);

  // Stop collecting data and print results.
  void Stop(GarbageCollector collector);
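
  // Illustrative call sequence (not part of the original header): the owning
  // Heap is expected to bracket each collection roughly as
  //   tracer->Start(MARK_COMPACTOR, gc_reason, collector_reason);
  //   /* perform the collection */
  //   tracer->Stop(MARK_COMPACTOR);
  // where gc_reason and collector_reason stand in for values the caller
  // already has at hand.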

  void NotifyYoungGenerationHandling(
      YoungGenerationHandling young_generation_handling);

  // Sample and accumulate bytes allocated since the last GC.
  void SampleAllocation(double current_ms, size_t new_space_counter_bytes,
                        size_t old_generation_counter_bytes);

  // Log the accumulated new space allocation bytes.
  void AddAllocation(double current_ms);

  void AddContextDisposalTime(double time);

  void AddCompactionEvent(double duration, size_t live_bytes_compacted);

  void AddSurvivalRatio(double survival_ratio);

  // Log an incremental marking step.
  void AddIncrementalMarkingStep(double duration, size_t bytes);

  // Compute the average incremental marking speed in bytes/millisecond.
  // Returns 0 if no events have been recorded.
  double IncrementalMarkingSpeedInBytesPerMillisecond() const;

  // Compute the average scavenge speed in bytes/millisecond.
  // Returns 0 if no events have been recorded.
  double ScavengeSpeedInBytesPerMillisecond(
      ScavengeSpeedMode mode = kForAllObjects) const;

  // Compute the average compaction speed in bytes/millisecond.
  // Returns 0 if not enough events have been recorded.
  double CompactionSpeedInBytesPerMillisecond() const;

  // Compute the average mark-sweep speed in bytes/millisecond.
  // Returns 0 if no events have been recorded.
  double MarkCompactSpeedInBytesPerMillisecond() const;

  // Compute the average incremental mark-sweep finalize speed in
  // bytes/millisecond.
  // Returns 0 if no events have been recorded.
  double FinalIncrementalMarkCompactSpeedInBytesPerMillisecond() const;

  // Compute the overall mark compact speed including incremental steps
  // and the final mark-compact step.
  double CombinedMarkCompactSpeedInBytesPerMillisecond();

  // Allocation throughput in the new space in bytes/millisecond.
  // Returns 0 if no allocation events have been recorded.
  double NewSpaceAllocationThroughputInBytesPerMillisecond(
      double time_ms = 0) const;

  // Allocation throughput in the old generation in bytes/millisecond in the
  // last time_ms milliseconds.
  // Returns 0 if no allocation events have been recorded.
  double OldGenerationAllocationThroughputInBytesPerMillisecond(
      double time_ms = 0) const;

  // Allocation throughput in heap in bytes/millisecond in the last time_ms
  // milliseconds.
  // Returns 0 if no allocation events have been recorded.
  double AllocationThroughputInBytesPerMillisecond(double time_ms) const;
  // Allocation throughput in heap in bytes/millisecond in the last
  // kThroughputTimeFrameMs milliseconds.
  // Returns 0 if no allocation events have been recorded.
  double CurrentAllocationThroughputInBytesPerMillisecond() const;

  // Allocation throughput in old generation in bytes/millisecond in the last
  // kThroughputTimeFrameMs milliseconds.
  // Returns 0 if no allocation events have been recorded.
  double CurrentOldGenerationAllocationThroughputInBytesPerMillisecond() const;

  // Computes the context disposal rate in milliseconds. It takes the time
  // frame from the first recorded context disposal to the current time and
  // divides it by the number of recorded events.
  // Returns 0 if no events have been recorded.
  double ContextDisposalRateInMilliseconds() const;
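
  // Worked example (illustrative): if the first context disposal was recorded
  // 1000 ms ago and four disposals have been recorded in total, the reported
  // rate is 1000 / 4 = 250 ms per disposal.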

  // Computes the average survival ratio based on the last recorded survival
  // events.
  // Returns 0 if no events have been recorded.
  double AverageSurvivalRatio() const;

  // Returns true if at least one survival event was recorded.
  bool SurvivalEventsRecorded() const;

  // Discard all recorded survival events.
  void ResetSurvivalEvents();

  void NotifyIncrementalMarkingStart();

  // Returns average mutator utilization with respect to mark-compact
  // garbage collections. This ignores the scavenger.
  double AverageMarkCompactMutatorUtilization() const;
  double CurrentMarkCompactMutatorUtilization() const;
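
  // Illustrative formula (an assumption, not spelled out in this header):
  // mutator utilization is understood here as
  //   mutator_time / (mutator_time + mark_compact_time)
  // so values near 1 mean mark-compact work barely disturbed the mutator and
  // values near 0 mean GC dominated the time window.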

  V8_INLINE void AddScopeSample(Scope::ScopeId scope, double duration) {
    DCHECK(scope < Scope::NUMBER_OF_SCOPES);
    if (scope >= Scope::FIRST_INCREMENTAL_SCOPE &&
        scope <= Scope::LAST_INCREMENTAL_SCOPE) {
      incremental_marking_scopes_[scope - Scope::FIRST_INCREMENTAL_SCOPE]
          .Update(duration);
    } else {
      current_.scopes[scope] += duration;
    }
  }

  void AddBackgroundScopeSample(BackgroundScope::ScopeId scope, double duration,
                                RuntimeCallCounter* runtime_call_counter);

  void RecordMarkCompactHistograms(HistogramTimer* gc_timer);

 private:
  FRIEND_TEST(GCTracer, AverageSpeed);
  FRIEND_TEST(GCTracerTest, AllocationThroughput);
  FRIEND_TEST(GCTracerTest, BackgroundScavengerScope);
  FRIEND_TEST(GCTracerTest, BackgroundMinorMCScope);
  FRIEND_TEST(GCTracerTest, BackgroundMajorMCScope);
  FRIEND_TEST(GCTracerTest, MultithreadedBackgroundScope);
  FRIEND_TEST(GCTracerTest, NewSpaceAllocationThroughput);
  FRIEND_TEST(GCTracerTest, NewSpaceAllocationThroughputWithProvidedTime);
  FRIEND_TEST(GCTracerTest, OldGenerationAllocationThroughputWithProvidedTime);
  FRIEND_TEST(GCTracerTest, RegularScope);
  FRIEND_TEST(GCTracerTest, IncrementalMarkingDetails);
  FRIEND_TEST(GCTracerTest, IncrementalScope);
  FRIEND_TEST(GCTracerTest, IncrementalMarkingSpeed);
  FRIEND_TEST(GCTracerTest, MutatorUtilization);
  FRIEND_TEST(GCTracerTest, RecordMarkCompactHistograms);

  struct BackgroundCounter {
    double total_duration_ms;
    RuntimeCallCounter runtime_call_counter;
  };

  // Returns the average speed of the events in the buffer.
  // If the buffer is empty, the result is 0.
  // Otherwise, the result is between 1 byte/ms and 1 GB/ms.
  static double AverageSpeed(const base::RingBuffer<BytesAndDuration>& buffer);
  static double AverageSpeed(const base::RingBuffer<BytesAndDuration>& buffer,
                             const BytesAndDuration& initial, double time_ms);
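
  // Illustrative computation (derived from the comment above): the speed is
  // the sum of recorded bytes divided by the sum of recorded durations, e.g.
  // entries (1000 bytes, 1 ms) and (3000 bytes, 1 ms) average to
  // 4000 / 2 = 2000 bytes/ms, clamped to the [1 byte/ms, 1 GB/ms] range.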

  void ResetForTesting();
  void ResetIncrementalMarkingCounters();
  void RecordIncrementalMarkingSpeed(size_t bytes, double duration);
  void RecordMutatorUtilization(double mark_compactor_end_time,
                                double mark_compactor_duration);

  // Print one detailed trace line in name=value format.
  // TODO(ernstm): Move to Heap.
  void PrintNVP() const;

  // Print one trace line.
  // TODO(ernstm): Move to Heap.
  void Print() const;

  // Prints a line and also adds it to the heap's ring buffer so that
  // it can be included in later crash dumps.
  void PRINTF_FORMAT(2, 3) Output(const char* format, ...) const;

  double TotalExternalTime() const {
    return current_.scopes[Scope::HEAP_EXTERNAL_WEAK_GLOBAL_HANDLES] +
           current_.scopes[Scope::HEAP_EXTERNAL_EPILOGUE] +
           current_.scopes[Scope::HEAP_EXTERNAL_PROLOGUE] +
           current_.scopes[Scope::MC_INCREMENTAL_EXTERNAL_EPILOGUE] +
           current_.scopes[Scope::MC_INCREMENTAL_EXTERNAL_PROLOGUE];
  }

  void FetchBackgroundCounters(int first_global_scope, int last_global_scope,
                               int first_background_scope,
                               int last_background_scope);
  void FetchBackgroundMinorGCCounters();
  void FetchBackgroundMarkCompactCounters();
  void FetchBackgroundGeneralCounters();

  // Pointer to the heap that owns this tracer.
  Heap* heap_;

  // Current tracer event. Populated during Start/Stop cycle. Valid after Stop()
  // has returned.
  Event current_;

  // Previous tracer event.
  Event previous_;

  // Size of incremental marking steps (in bytes) accumulated since the end of
  // the last mark compact GC.
  size_t incremental_marking_bytes_;

  // Duration of incremental marking steps since the end of the last mark-
  // compact event.
  double incremental_marking_duration_;

  double incremental_marking_start_time_;

  double recorded_incremental_marking_speed_;

  // Incremental scopes carry more information than just the duration. The infos
  // here are merged back upon starting/stopping the GC tracer.
  IncrementalMarkingInfos
      incremental_marking_scopes_[Scope::NUMBER_OF_INCREMENTAL_SCOPES];

  // Timestamp and allocation counter at the last sampled allocation event.
  double allocation_time_ms_;
  size_t new_space_allocation_counter_bytes_;
  size_t old_generation_allocation_counter_bytes_;

  // Accumulated duration and allocated bytes since the last GC.
  double allocation_duration_since_gc_;
  size_t new_space_allocation_in_bytes_since_gc_;
  size_t old_generation_allocation_in_bytes_since_gc_;

  double combined_mark_compact_speed_cache_;

  // Counts how many tracers were started without stopping.
  int start_counter_;

  // Used for computing average mutator utilization.
  double average_mutator_duration_;
  double average_mark_compact_duration_;
  double current_mark_compact_mutator_utilization_;
  double previous_mark_compact_end_time_;

  base::RingBuffer<BytesAndDuration> recorded_minor_gcs_total_;
  base::RingBuffer<BytesAndDuration> recorded_minor_gcs_survived_;
  base::RingBuffer<BytesAndDuration> recorded_compactions_;
  base::RingBuffer<BytesAndDuration> recorded_incremental_mark_compacts_;
  base::RingBuffer<BytesAndDuration> recorded_mark_compacts_;
  base::RingBuffer<BytesAndDuration> recorded_new_generation_allocations_;
  base::RingBuffer<BytesAndDuration> recorded_old_generation_allocations_;
  base::RingBuffer<double> recorded_context_disposal_times_;
  base::RingBuffer<double> recorded_survival_ratios_;

  base::Mutex background_counter_mutex_;
  BackgroundCounter background_counter_[BackgroundScope::NUMBER_OF_SCOPES];

  DISALLOW_COPY_AND_ASSIGN(GCTracer);
};

}  // namespace internal
}  // namespace v8

#endif  // V8_HEAP_GC_TRACER_H_