// Copyright 2022 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef V8_HEAP_GC_TRACER_INL_H_
#define V8_HEAP_GC_TRACER_INL_H_

#include "src/base/platform/platform.h"
#include "src/execution/isolate.h"
#include "src/heap/gc-tracer.h"

namespace v8 {
namespace internal {

GCTracer::IncrementalMarkingInfos::IncrementalMarkingInfos()
    : duration(0), longest_step(0), steps(0) {}

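// Records a single incremental marking step: counts it, accumulates its
// duration, and keeps track of the longest step observed so far.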
void GCTracer::IncrementalMarkingInfos::Update(double delta) {
  steps++;
  duration += delta;
  if (delta > longest_step) {
    longest_step = delta;
  }
}

void GCTracer::IncrementalMarkingInfos::ResetCurrentCycle() {
  duration = 0;
  longest_step = 0;
  steps = 0;
}

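// Starts timing a GC scope. If runtime call stats (RCS) are enabled, also
// enters the matching RCS counter: directly on the main thread, or through a
// worker-thread RCS scope on background threads.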
GCTracer::Scope::Scope(GCTracer* tracer, ScopeId scope, ThreadKind thread_kind)
    : tracer_(tracer),
      scope_(scope),
      thread_kind_(thread_kind),
      start_time_(tracer_->MonotonicallyIncreasingTimeInMs()) {
#ifdef V8_RUNTIME_CALL_STATS
  if (V8_LIKELY(!TracingFlags::is_runtime_stats_enabled())) return;
  if (thread_kind_ == ThreadKind::kMain) {
#if DEBUG
    AssertMainThread();
#endif  // DEBUG
    runtime_stats_ =
        tracer_->heap_->isolate_->counters()->runtime_call_stats();
    runtime_stats_->Enter(&timer_, GCTracer::RCSCounterFromScope(scope));
  } else {
    runtime_call_stats_scope_.emplace(
        tracer->worker_thread_runtime_call_stats());
    runtime_stats_ = runtime_call_stats_scope_->Get();
    runtime_stats_->Enter(&timer_, GCTracer::RCSCounterFromScope(scope));
  }
#endif  // defined(V8_RUNTIME_CALL_STATS)
}

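// Stops timing and reports the sampled duration to the tracer. Time spent in
// incremental full-GC scopes on the main thread is additionally attributed to
// the isolate's long task stats. Leaves the RCS timer if one was entered.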
GCTracer::Scope::~Scope() {
  double duration_ms = tracer_->MonotonicallyIncreasingTimeInMs() - start_time_;
  tracer_->AddScopeSample(scope_, duration_ms);

  if (thread_kind_ == ThreadKind::kMain) {
#if DEBUG
    AssertMainThread();
#endif  // DEBUG

    if (scope_ == ScopeId::MC_INCREMENTAL ||
        scope_ == ScopeId::MC_INCREMENTAL_START ||
        scope_ == ScopeId::MC_INCREMENTAL_FINALIZE) {
      auto* long_task_stats =
          tracer_->heap_->isolate_->GetCurrentLongTaskStats();
      long_task_stats->gc_full_incremental_wall_clock_duration_us +=
          static_cast<int64_t>(duration_ms *
                               base::Time::kMicrosecondsPerMillisecond);
    }
  }

#ifdef V8_RUNTIME_CALL_STATS
  if (V8_LIKELY(runtime_stats_ == nullptr)) return;
  runtime_stats_->Leave(&timer_);
#endif  // defined(V8_RUNTIME_CALL_STATS)
}

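// Translates an incremental scope id into its index within
// incremental_scopes_.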
constexpr int GCTracer::Scope::IncrementalOffset(ScopeId id) {
  DCHECK_LE(FIRST_INCREMENTAL_SCOPE, id);
  DCHECK_GE(LAST_INCREMENTAL_SCOPE, id);
  return id - FIRST_INCREMENTAL_SCOPE;
}

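// Classifies an event as young-generation work: scavenges and minor
// mark-compacts. START is not a valid input here (see the DCHECK).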
constexpr bool GCTracer::Event::IsYoungGenerationEvent(Type type) {
  DCHECK_NE(START, type);
  return type == SCAVENGER || type == MINOR_MARK_COMPACTOR;
}

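// Returns the epoch of the current young or full GC cycle, depending on
// whether the given scope tracks young- or full-generation work.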
CollectionEpoch GCTracer::CurrentEpoch(Scope::ScopeId id) const {
  return Scope::NeedsYoungEpoch(id) ? epoch_young_ : epoch_full_;
}

#ifdef DEBUG
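// Debug-only consistency checks: whether we are inside an observable GC
// pause, whether the current event matches the active collector, and whether
// sweeping for a full GC is still in progress.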
bool GCTracer::IsInObservablePause() const {
  return 0.0 < start_of_observable_pause_;
}

bool GCTracer::IsConsistentWithCollector(GarbageCollector collector) const {
  return (collector == GarbageCollector::SCAVENGER &&
          current_.type == Event::SCAVENGER) ||
         (collector == GarbageCollector::MINOR_MARK_COMPACTOR &&
          current_.type == Event::MINOR_MARK_COMPACTOR) ||
         (collector == GarbageCollector::MARK_COMPACTOR &&
          (current_.type == Event::MARK_COMPACTOR ||
           current_.type == Event::INCREMENTAL_MARK_COMPACTOR));
}

bool GCTracer::IsSweepingInProgress() const {
  return (current_.type == Event::MARK_COMPACTOR ||
          current_.type == Event::INCREMENTAL_MARK_COMPACTOR) &&
         current_.state == Event::State::SWEEPING;
}
#endif

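// Returns the accumulated duration for a scope: incremental scopes and
// background scopes keep their own counters; everything else is read from the
// current event's per-scope table.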
constexpr double GCTracer::current_scope(Scope::ScopeId id) const {
  if (Scope::FIRST_INCREMENTAL_SCOPE <= id &&
      id <= Scope::LAST_INCREMENTAL_SCOPE) {
    return incremental_scope(id).duration;
  } else if (Scope::FIRST_BACKGROUND_SCOPE <= id &&
             id <= Scope::LAST_BACKGROUND_SCOPE) {
    return background_counter_[id].total_duration_ms;
  } else {
    DCHECK_GT(Scope::NUMBER_OF_SCOPES, id);
    return current_.scopes[id];
  }
}

constexpr const GCTracer::IncrementalMarkingInfos& GCTracer::incremental_scope(
    Scope::ScopeId id) const {
  return incremental_scopes_[Scope::IncrementalOffset(id)];
}

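// Records a duration sample for a scope. Background scopes are guarded by a
// mutex because they are updated concurrently from worker threads.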
void GCTracer::AddScopeSample(Scope::ScopeId id, double duration) {
  if (Scope::FIRST_INCREMENTAL_SCOPE <= id &&
      id <= Scope::LAST_INCREMENTAL_SCOPE) {
    incremental_scopes_[Scope::IncrementalOffset(id)].Update(duration);
  } else if (Scope::FIRST_BACKGROUND_SCOPE <= id &&
             id <= Scope::LAST_BACKGROUND_SCOPE) {
    base::MutexGuard guard(&background_counter_mutex_);
    background_counter_[id].total_duration_ms += duration;
  } else {
    DCHECK_GT(Scope::NUMBER_OF_SCOPES, id);
    current_.scopes[id] += duration;
  }
}

#ifdef V8_RUNTIME_CALL_STATS
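// Returns the per-isolate table used to aggregate runtime call stats from
// worker threads.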
WorkerThreadRuntimeCallStats* GCTracer::worker_thread_runtime_call_stats() {
  return heap_->isolate_->counters()->worker_thread_runtime_call_stats();
}

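// Maps a tracer scope id to its runtime call counter. This relies on the GC
// counters being declared in the same order as the scope ids, starting at
// kGC_MC_INCREMENTAL; the STATIC_ASSERT anchors the first scope.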
RuntimeCallCounterId GCTracer::RCSCounterFromScope(Scope::ScopeId id) {
  STATIC_ASSERT(Scope::FIRST_SCOPE == Scope::MC_INCREMENTAL);
  return static_cast<RuntimeCallCounterId>(
      static_cast<int>(RuntimeCallCounterId::kGC_MC_INCREMENTAL) +
      static_cast<int>(id));
}
#endif  // defined(V8_RUNTIME_CALL_STATS)

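// Returns the current time in milliseconds. Under --predictable the heap's
// deterministic time source is used so that traces are reproducible;
// otherwise the real monotonic clock is queried.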
double GCTracer::MonotonicallyIncreasingTimeInMs() {
  if (V8_UNLIKELY(FLAG_predictable)) {
    return heap_->MonotonicallyIncreasingTimeInMs();
  } else {
    return base::TimeTicks::Now().ToInternalValue() /
           static_cast<double>(base::Time::kMicrosecondsPerMillisecond);
  }
}

}  // namespace internal
}  // namespace v8

#endif  // V8_HEAP_GC_TRACER_INL_H_