1 // Copyright 2009-2010 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4
#include "src/profiler/heap-profiler.h"

#include <memory>

#include "src/api/api-inl.h"
#include "src/debug/debug.h"
#include "src/heap/combined-heap.h"
#include "src/heap/heap-inl.h"
#include "src/profiler/allocation-tracker.h"
#include "src/profiler/heap-snapshot-generator-inl.h"
#include "src/profiler/sampling-heap-profiler.h"
14
15 namespace v8 {
16 namespace internal {
17
HeapProfiler(Heap * heap)18 HeapProfiler::HeapProfiler(Heap* heap)
19 : ids_(new HeapObjectsMap(heap)),
20 names_(new StringsStorage()),
21 is_tracking_object_moves_(false),
22 is_taking_snapshot_(false) {}
23
24 HeapProfiler::~HeapProfiler() = default;
25
// Destroys every snapshot owned by the profiler, then releases the interned
// strings if nothing else still references them.
void HeapProfiler::DeleteAllSnapshots() {
  snapshots_.clear();
  MaybeClearStringsStorage();
}
30
MaybeClearStringsStorage()31 void HeapProfiler::MaybeClearStringsStorage() {
32 if (snapshots_.empty() && !sampling_heap_profiler_ && !allocation_tracker_ &&
33 !is_taking_snapshot_) {
34 names_.reset(new StringsStorage());
35 }
36 }
37
RemoveSnapshot(HeapSnapshot * snapshot)38 void HeapProfiler::RemoveSnapshot(HeapSnapshot* snapshot) {
39 snapshots_.erase(
40 std::find_if(snapshots_.begin(), snapshots_.end(),
41 [&](const std::unique_ptr<HeapSnapshot>& entry) {
42 return entry.get() == snapshot;
43 }));
44 }
45
AddBuildEmbedderGraphCallback(v8::HeapProfiler::BuildEmbedderGraphCallback callback,void * data)46 void HeapProfiler::AddBuildEmbedderGraphCallback(
47 v8::HeapProfiler::BuildEmbedderGraphCallback callback, void* data) {
48 build_embedder_graph_callbacks_.push_back({callback, data});
49 }
50
RemoveBuildEmbedderGraphCallback(v8::HeapProfiler::BuildEmbedderGraphCallback callback,void * data)51 void HeapProfiler::RemoveBuildEmbedderGraphCallback(
52 v8::HeapProfiler::BuildEmbedderGraphCallback callback, void* data) {
53 auto it = std::find(build_embedder_graph_callbacks_.begin(),
54 build_embedder_graph_callbacks_.end(),
55 std::make_pair(callback, data));
56 if (it != build_embedder_graph_callbacks_.end())
57 build_embedder_graph_callbacks_.erase(it);
58 }
59
BuildEmbedderGraph(Isolate * isolate,v8::EmbedderGraph * graph)60 void HeapProfiler::BuildEmbedderGraph(Isolate* isolate,
61 v8::EmbedderGraph* graph) {
62 for (const auto& cb : build_embedder_graph_callbacks_) {
63 cb.first(reinterpret_cast<v8::Isolate*>(isolate), graph, cb.second);
64 }
65 }
66
SetGetDetachednessCallback(v8::HeapProfiler::GetDetachednessCallback callback,void * data)67 void HeapProfiler::SetGetDetachednessCallback(
68 v8::HeapProfiler::GetDetachednessCallback callback, void* data) {
69 get_detachedness_callback_ = {callback, data};
70 }
71
// Asks the embedder-provided callback for the detachedness state of
// |v8_value|. Must only be called when a callback has been installed
// (enforced by the DCHECK).
v8::EmbedderGraph::Node::Detachedness HeapProfiler::GetDetachedness(
    const v8::Local<v8::Value> v8_value, uint16_t class_id) {
  DCHECK(HasGetDetachednessCallback());
  return get_detachedness_callback_.first(
      reinterpret_cast<v8::Isolate*>(heap()->isolate()), v8_value, class_id,
      get_detachedness_callback_.second);
}
79
TakeSnapshot(v8::ActivityControl * control,v8::HeapProfiler::ObjectNameResolver * resolver,bool treat_global_objects_as_roots)80 HeapSnapshot* HeapProfiler::TakeSnapshot(
81 v8::ActivityControl* control,
82 v8::HeapProfiler::ObjectNameResolver* resolver,
83 bool treat_global_objects_as_roots) {
84 is_taking_snapshot_ = true;
85 HeapSnapshot* result = new HeapSnapshot(this, treat_global_objects_as_roots);
86 {
87 HeapSnapshotGenerator generator(result, control, resolver, heap());
88 if (!generator.GenerateSnapshot()) {
89 delete result;
90 result = nullptr;
91 } else {
92 snapshots_.emplace_back(result);
93 }
94 }
95 ids_->RemoveDeadEntries();
96 is_tracking_object_moves_ = true;
97 is_taking_snapshot_ = false;
98
99 heap()->isolate()->debug()->feature_tracker()->Track(
100 DebugFeatureTracker::kHeapSnapshot);
101
102 return result;
103 }
104
StartSamplingHeapProfiler(uint64_t sample_interval,int stack_depth,v8::HeapProfiler::SamplingFlags flags)105 bool HeapProfiler::StartSamplingHeapProfiler(
106 uint64_t sample_interval, int stack_depth,
107 v8::HeapProfiler::SamplingFlags flags) {
108 if (sampling_heap_profiler_.get()) {
109 return false;
110 }
111 sampling_heap_profiler_.reset(new SamplingHeapProfiler(
112 heap(), names_.get(), sample_interval, stack_depth, flags));
113 return true;
114 }
115
116
StopSamplingHeapProfiler()117 void HeapProfiler::StopSamplingHeapProfiler() {
118 sampling_heap_profiler_.reset();
119 MaybeClearStringsStorage();
120 }
121
122
GetAllocationProfile()123 v8::AllocationProfile* HeapProfiler::GetAllocationProfile() {
124 if (sampling_heap_profiler_.get()) {
125 return sampling_heap_profiler_->GetAllocationProfile();
126 } else {
127 return nullptr;
128 }
129 }
130
131
StartHeapObjectsTracking(bool track_allocations)132 void HeapProfiler::StartHeapObjectsTracking(bool track_allocations) {
133 ids_->UpdateHeapObjectsMap();
134 is_tracking_object_moves_ = true;
135 DCHECK(!allocation_tracker_);
136 if (track_allocations) {
137 allocation_tracker_.reset(new AllocationTracker(ids_.get(), names_.get()));
138 heap()->AddHeapObjectAllocationTracker(this);
139 heap()->isolate()->debug()->feature_tracker()->Track(
140 DebugFeatureTracker::kAllocationTracking);
141 }
142 }
143
// Forwards to HeapObjectsMap: streams heap-object stats updates to |stream|
// and reports the timestamp of the update via |timestamp_us| (may be null).
SnapshotObjectId HeapProfiler::PushHeapObjectsStats(OutputStream* stream,
                                                    int64_t* timestamp_us) {
  return ids_->PushHeapObjectsStats(stream, timestamp_us);
}
148
StopHeapObjectsTracking()149 void HeapProfiler::StopHeapObjectsTracking() {
150 ids_->StopHeapObjectsTracking();
151 if (allocation_tracker_) {
152 allocation_tracker_.reset();
153 MaybeClearStringsStorage();
154 heap()->RemoveHeapObjectAllocationTracker(this);
155 }
156 }
157
// Number of snapshots currently owned by the profiler (int for API reasons).
int HeapProfiler::GetSnapshotsCount() const {
  return static_cast<int>(snapshots_.size());
}
161
IsTakingSnapshot() const162 bool HeapProfiler::IsTakingSnapshot() const { return is_taking_snapshot_; }
163
// Returns the snapshot at |index|; |index| must be in
// [0, GetSnapshotsCount()) — .at() bounds-checks it. Ownership stays with
// the profiler.
HeapSnapshot* HeapProfiler::GetSnapshot(int index) {
  return snapshots_.at(index).get();
}
167
GetSnapshotObjectId(Handle<Object> obj)168 SnapshotObjectId HeapProfiler::GetSnapshotObjectId(Handle<Object> obj) {
169 if (!obj->IsHeapObject())
170 return v8::HeapProfiler::kUnknownObjectId;
171 return ids_->FindEntry(HeapObject::cast(*obj).address());
172 }
173
GetSnapshotObjectId(NativeObject obj)174 SnapshotObjectId HeapProfiler::GetSnapshotObjectId(NativeObject obj) {
175 // Try to find id of regular native node first.
176 SnapshotObjectId id = ids_->FindEntry(reinterpret_cast<Address>(obj));
177 // In case no id has been found, check whether there exists an entry where the
178 // native objects has been merged into a V8 entry.
179 if (id == v8::HeapProfiler::kUnknownObjectId) {
180 id = ids_->FindMergedNativeEntry(obj);
181 }
182 return id;
183 }
184
// GC notification that an object of |size| bytes moved from |from| to |to|.
// Guarded by profiler_mutex_ — NOTE(review): presumably move events can be
// reported concurrently; confirm against the GC callers.
void HeapProfiler::ObjectMoveEvent(Address from, Address to, int size) {
  base::MutexGuard guard(&profiler_mutex_);
  bool known_object = ids_->MoveObject(from, to, size);
  // Objects unknown to the id map may still carry recorded allocation traces.
  if (!known_object && allocation_tracker_) {
    allocation_tracker_->address_to_trace()->MoveObject(from, to, size);
  }
}
192
// Heap notification that |size| bytes were allocated at |addr|. Forwarded to
// the allocation tracker when allocation tracking is active. Allocation is
// disallowed in this scope since we are inside an allocation callback.
void HeapProfiler::AllocationEvent(Address addr, int size) {
  DisallowHeapAllocation no_allocation;
  if (allocation_tracker_) {
    allocation_tracker_->AllocationEvent(addr, size);
  }
}
199
200
// Heap notification that the object at |addr| changed size (e.g. right-
// trimming); keeps the id map's size bookkeeping in sync.
void HeapProfiler::UpdateObjectSizeEvent(Address addr, int size) {
  ids_->UpdateObjectSize(addr, size);
}
204
// Returns a handle to the live heap object with the given snapshot id, or an
// empty handle if no reachable object has that id.
Handle<HeapObject> HeapProfiler::FindHeapObjectById(SnapshotObjectId id) {
  HeapObject object;
  CombinedHeapObjectIterator iterator(heap(),
                                      HeapObjectIterator::kFilterUnreachable);
  // Make sure that object with the given id is still reachable.
  for (HeapObject obj = iterator.Next(); !obj.is_null();
       obj = iterator.Next()) {
    if (ids_->FindEntry(obj.address()) == id) {
      // At most one live object may carry a given id.
      DCHECK(object.is_null());
      object = obj;
      // Can't break -- kFilterUnreachable requires full heap traversal.
    }
  }

  return !object.is_null() ? Handle<HeapObject>(object, isolate())
                           : Handle<HeapObject>();
}
222
223
ClearHeapObjectMap()224 void HeapProfiler::ClearHeapObjectMap() {
225 ids_.reset(new HeapObjectsMap(heap()));
226 if (!allocation_tracker_) is_tracking_object_moves_ = false;
227 }
228
229
heap() const230 Heap* HeapProfiler::heap() const { return ids_->heap(); }
231
isolate() const232 Isolate* HeapProfiler::isolate() const { return heap()->isolate(); }
233
// Collects all live JSObjects accepted by |predicate| into |objects|.
// First normalizes the heap (clears feedback slots, converts on-heap typed
// arrays) so a full GC can reclaim everything dead, then iterates the
// remaining live objects.
void HeapProfiler::QueryObjects(Handle<Context> context,
                                debug::QueryObjectPredicate* predicate,
                                PersistentValueVector<v8::Object>* objects) {
  {
    HandleScope handle_scope(isolate());
    std::vector<Handle<JSTypedArray>> on_heap_typed_arrays;
    CombinedHeapObjectIterator heap_iterator(
        heap(), HeapObjectIterator::kFilterUnreachable);
    for (HeapObject heap_obj = heap_iterator.Next(); !heap_obj.is_null();
         heap_obj = heap_iterator.Next()) {
      if (heap_obj.IsFeedbackVector()) {
        // Feedback slots may keep otherwise-dead objects alive; clear them so
        // the GC below reflects true liveness.
        FeedbackVector::cast(heap_obj).ClearSlots(isolate());
      } else if (heap_obj.IsJSTypedArray() &&
                 JSTypedArray::cast(heap_obj).is_on_heap()) {
        // Cannot call typed_array->GetBuffer() here directly because it may
        // trigger GC. Defer that call by collecting the object in a vector.
        on_heap_typed_arrays.push_back(
            handle(JSTypedArray::cast(heap_obj), isolate()));
      }
    }
    for (auto& typed_array : on_heap_typed_arrays) {
      // Convert the on-heap typed array into off-heap typed array, so that
      // its ArrayBuffer becomes valid and can be returned in the result.
      typed_array->GetBuffer();
    }
  }
  // We should return accurate information about live objects, so we need to
  // collect all garbage first.
  heap()->CollectAllAvailableGarbage(GarbageCollectionReason::kHeapProfiler);
  CombinedHeapObjectIterator heap_iterator(
      heap(), HeapObjectIterator::kFilterUnreachable);
  for (HeapObject heap_obj = heap_iterator.Next(); !heap_obj.is_null();
       heap_obj = heap_iterator.Next()) {
    // Only JS objects are reported; external objects are skipped.
    if (!heap_obj.IsJSObject() || heap_obj.IsExternal(isolate())) continue;
    v8::Local<v8::Object> v8_obj(
        Utils::ToLocal(handle(JSObject::cast(heap_obj), isolate())));
    if (!predicate->Filter(v8_obj)) continue;
    objects->Append(v8_obj);
  }
}
274
275 } // namespace internal
276 } // namespace v8
277