1 // Copyright 2013 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4 
5 #include "src/profiler/heap-snapshot-generator.h"
6 
7 #include <utility>
8 
9 #include "src/api/api-inl.h"
10 #include "src/base/optional.h"
11 #include "src/base/vector.h"
12 #include "src/codegen/assembler-inl.h"
13 #include "src/common/globals.h"
14 #include "src/debug/debug.h"
15 #include "src/handles/global-handles.h"
16 #include "src/heap/combined-heap.h"
17 #include "src/heap/safepoint.h"
18 #include "src/numbers/conversions.h"
19 #include "src/objects/allocation-site-inl.h"
20 #include "src/objects/api-callbacks.h"
21 #include "src/objects/cell-inl.h"
22 #include "src/objects/feedback-cell-inl.h"
23 #include "src/objects/hash-table-inl.h"
24 #include "src/objects/js-array-buffer-inl.h"
25 #include "src/objects/js-array-inl.h"
26 #include "src/objects/js-collection-inl.h"
27 #include "src/objects/js-generator-inl.h"
28 #include "src/objects/js-promise-inl.h"
29 #include "src/objects/js-regexp-inl.h"
30 #include "src/objects/js-weak-refs-inl.h"
31 #include "src/objects/literal-objects-inl.h"
32 #include "src/objects/objects-inl.h"
33 #include "src/objects/prototype.h"
34 #include "src/objects/slots-inl.h"
35 #include "src/objects/struct-inl.h"
36 #include "src/objects/transitions-inl.h"
37 #include "src/objects/visitors.h"
38 #include "src/profiler/allocation-tracker.h"
39 #include "src/profiler/heap-profiler.h"
40 #include "src/profiler/heap-snapshot-generator-inl.h"
41 
42 namespace v8 {
43 namespace internal {
44 
45 #ifdef V8_ENABLE_HEAP_SNAPSHOT_VERIFY
46 class HeapEntryVerifier {
47  public:
48   HeapEntryVerifier(HeapSnapshotGenerator* generator, HeapObject obj)
49       : generator_(generator),
50         primary_object_(obj),
51         reference_summary_(
52             ReferenceSummary::SummarizeReferencesFrom(generator->heap(), obj)) {
53     generator->set_verifier(this);
54   }
55   ~HeapEntryVerifier() {
56     CheckAllReferencesWereChecked();
57     generator_->set_verifier(nullptr);
58   }
59 
60   // Checks that `host` retains `target`, according to the marking visitor. This
61   // allows us to verify, when adding edges to the snapshot, that they
62   // correspond to real retaining relationships.
63   void CheckStrongReference(HeapObject host, HeapObject target) {
64     // All references should be from the current primary object.
65     CHECK_EQ(host, primary_object_);
66 
67     checked_objects_.insert(target);
68 
69     // Check whether there is a direct strong reference from host to target.
70     if (reference_summary_.strong_references().find(target) !=
71         reference_summary_.strong_references().end()) {
72       return;
73     }
74 
75     // There is no direct reference from host to target, but sometimes heap
76     // snapshots include references that skip one, two, or three objects, such
77     // as __proto__ on a JSObject referring to its Map's prototype, or a
78     // property getter that bypasses the property array and accessor info. At
79     // this point, we must check for those indirect references.
80     for (size_t level = 0; level < 3; ++level) {
81       const std::unordered_set<HeapObject, Object::Hasher>& indirect =
82           GetIndirectStrongReferences(level);
83       if (indirect.find(target) != indirect.end()) {
84         return;
85       }
86     }
87 
88     FATAL("Could not find any matching reference");
89   }
90 
91   // Checks that `host` has a weak reference to `target`, according to the
92   // marking visitor.
93   void CheckWeakReference(HeapObject host, HeapObject target) {
94     // All references should be from the current primary object.
95     CHECK_EQ(host, primary_object_);
96 
97     checked_objects_.insert(target);
98     CHECK_NE(reference_summary_.weak_references().find(target),
99              reference_summary_.weak_references().end());
100   }
101 
102   // Marks the relationship between `host` and `target` as checked, even if the
103   // marking visitor found no such relationship. This is necessary for
104   // ephemerons, where a pair of objects is required to retain the target.
105   // Use this function with care, since it bypasses verification.
106   void MarkReferenceCheckedWithoutChecking(HeapObject host, HeapObject target) {
107     if (host == primary_object_) {
108       checked_objects_.insert(target);
109     }
110   }
111 
112   // Verifies that all of the references found by the marking visitor were
113   // checked via a call to CheckStrongReference or CheckWeakReference, or
114   // deliberately skipped via a call to MarkReferenceCheckedWithoutChecking.
115   // This ensures that there aren't retaining relationships found by the marking
116   // visitor which were omitted from the heap snapshot.
117   void CheckAllReferencesWereChecked() {
118     // Both loops below skip pointers to read-only objects, because the heap
119     // snapshot deliberately omits many of those (see IsEssentialObject).
120     // Read-only objects can't ever retain normal read-write objects, so these
121     // are fine to skip.
122     for (HeapObject obj : reference_summary_.strong_references()) {
123       if (!BasicMemoryChunk::FromHeapObject(obj)->InReadOnlySpace()) {
124         CHECK_NE(checked_objects_.find(obj), checked_objects_.end());
125       }
126     }
127     for (HeapObject obj : reference_summary_.weak_references()) {
128       if (!BasicMemoryChunk::FromHeapObject(obj)->InReadOnlySpace()) {
129         CHECK_NE(checked_objects_.find(obj), checked_objects_.end());
130       }
131     }
132   }
133 
134  private:
135   const std::unordered_set<HeapObject, Object::Hasher>&
136   GetIndirectStrongReferences(size_t level) {
137     CHECK_GE(indirect_strong_references_.size(), level);
138 
139     if (indirect_strong_references_.size() == level) {
140       // Expansion is needed.
141       indirect_strong_references_.resize(level + 1);
142       const std::unordered_set<HeapObject, Object::Hasher>& previous =
143           level == 0 ? reference_summary_.strong_references()
144                      : indirect_strong_references_[level - 1];
145       for (HeapObject obj : previous) {
146         if (BasicMemoryChunk::FromHeapObject(obj)->InReadOnlySpace()) {
147           // Marking visitors don't expect to visit objects in read-only space,
148           // and will fail DCHECKs if they are used on those objects. Read-only
149           // objects can never retain anything outside read-only space, so
150           // skipping those objects doesn't weaken verification.
151           continue;
152         }
153 
154         // Indirect references should only bypass internal structures, not
155         // user-visible objects or contexts.
156         if (obj.IsJSReceiver() || obj.IsString() || obj.IsContext()) {
157           continue;
158         }
159 
160         ReferenceSummary summary =
161             ReferenceSummary::SummarizeReferencesFrom(generator_->heap(), obj);
162         indirect_strong_references_[level].insert(
163             summary.strong_references().begin(),
164             summary.strong_references().end());
165       }
166     }
167 
168     return indirect_strong_references_[level];
169   }
170 
171   DISALLOW_GARBAGE_COLLECTION(no_gc)
172   HeapSnapshotGenerator* generator_;
173   HeapObject primary_object_;
174 
175   // All objects referred to by primary_object_, according to a marking visitor.
176   ReferenceSummary reference_summary_;
177 
178   // Objects that have been checked via a call to CheckStrongReference or
179   // CheckWeakReference, or deliberately skipped via a call to
180   // MarkReferenceCheckedWithoutChecking.
181   std::unordered_set<HeapObject, Object::Hasher> checked_objects_;
182 
183   // Objects transitively retained by the primary object. The objects in the set
184   // at index i are retained by the primary object via a chain of i+1
185   // intermediate objects.
186   std::vector<std::unordered_set<HeapObject, Object::Hasher>>
187       indirect_strong_references_;
188 };
189 #endif
190 
191 HeapGraphEdge::HeapGraphEdge(Type type, const char* name, HeapEntry* from,
192                              HeapEntry* to)
193     : bit_field_(TypeField::encode(type) |
194                  FromIndexField::encode(from->index())),
195       to_entry_(to),
196       name_(name) {
197   DCHECK(type == kContextVariable
198       || type == kProperty
199       || type == kInternal
200       || type == kShortcut
201       || type == kWeak);
202 }
203 
204 HeapGraphEdge::HeapGraphEdge(Type type, int index, HeapEntry* from,
205                              HeapEntry* to)
206     : bit_field_(TypeField::encode(type) |
207                  FromIndexField::encode(from->index())),
208       to_entry_(to),
209       index_(index) {
210   DCHECK(type == kElement || type == kHidden);
211 }
212 
213 HeapEntry::HeapEntry(HeapSnapshot* snapshot, int index, Type type,
214                      const char* name, SnapshotObjectId id, size_t self_size,
215                      unsigned trace_node_id)
216     : type_(type),
217       index_(index),
218       children_count_(0),
219       self_size_(self_size),
220       snapshot_(snapshot),
221       name_(name),
222       id_(id),
223       trace_node_id_(trace_node_id) {
224   DCHECK_GE(index, 0);
225 }
226 
227 void HeapEntry::VerifyReference(HeapGraphEdge::Type type, HeapEntry* entry,
228                                 HeapSnapshotGenerator* generator,
229                                 ReferenceVerification verification) {
230 #ifdef V8_ENABLE_HEAP_SNAPSHOT_VERIFY
231   if (verification == kOffHeapPointer || generator->verifier() == nullptr) {
232     // Off-heap pointers are outside the scope of this verification; we just
233     // trust the embedder to provide accurate data. If the verifier is null,
234     // then verification is disabled.
235     return;
236   }
237   if (verification == kCustomWeakPointer) {
238     // The caller declared that this is a weak pointer ignored by the marking
239     // visitor. All we can verify at this point is that the edge type declares
240     // it to be weak.
241     CHECK_EQ(type, HeapGraphEdge::kWeak);
242     return;
243   }
244   Address from_address =
245       reinterpret_cast<Address>(generator->FindHeapThingForHeapEntry(this));
246   Address to_address =
247       reinterpret_cast<Address>(generator->FindHeapThingForHeapEntry(entry));
248   if (from_address == kNullAddress || to_address == kNullAddress) {
249     // One of these entries doesn't correspond to a real heap object.
250     // Verification is not possible.
251     return;
252   }
253   HeapObject from_obj = HeapObject::cast(Object(from_address));
254   HeapObject to_obj = HeapObject::cast(Object(to_address));
255   if (BasicMemoryChunk::FromHeapObject(to_obj)->InReadOnlySpace()) {
256     // We can't verify pointers into read-only space, because marking visitors
257     // might not mark those. For example, every Map has a pointer to the
258     // MetaMap, but marking visitors don't bother with following that link.
259     // Read-only objects are immortal and can never point to things outside of
260     // read-only space, so ignoring these objects is safe from the perspective
261     // of ensuring accurate retaining paths for normal read-write objects.
262     // Therefore, do nothing.
263   } else if (verification == kEphemeron) {
264     // Ephemerons can't be verified because they aren't marked directly by the
265     // marking visitor.
266     generator->verifier()->MarkReferenceCheckedWithoutChecking(from_obj,
267                                                                to_obj);
268   } else if (type == HeapGraphEdge::kWeak) {
269     generator->verifier()->CheckWeakReference(from_obj, to_obj);
270   } else {
271     generator->verifier()->CheckStrongReference(from_obj, to_obj);
272   }
273 #endif
274 }
275 
276 void HeapEntry::SetNamedReference(HeapGraphEdge::Type type, const char* name,
277                                   HeapEntry* entry,
278                                   HeapSnapshotGenerator* generator,
279                                   ReferenceVerification verification) {
280   ++children_count_;
281   snapshot_->edges().emplace_back(type, name, this, entry);
282   VerifyReference(type, entry, generator, verification);
283 }
284 
285 void HeapEntry::SetIndexedReference(HeapGraphEdge::Type type, int index,
286                                     HeapEntry* entry,
287                                     HeapSnapshotGenerator* generator,
288                                     ReferenceVerification verification) {
289   ++children_count_;
290   snapshot_->edges().emplace_back(type, index, this, entry);
291   VerifyReference(type, entry, generator, verification);
292 }
293 
294 void HeapEntry::SetNamedAutoIndexReference(HeapGraphEdge::Type type,
295                                            const char* description,
296                                            HeapEntry* child,
297                                            StringsStorage* names,
298                                            HeapSnapshotGenerator* generator,
299                                            ReferenceVerification verification) {
300   int index = children_count_ + 1;
301   const char* name = description
302                          ? names->GetFormatted("%d / %s", index, description)
303                          : names->GetName(index);
304   SetNamedReference(type, name, child, generator, verification);
305 }
306 
307 void HeapEntry::Print(const char* prefix, const char* edge_name, int max_depth,
308                       int indent) const {
309   STATIC_ASSERT(sizeof(unsigned) == sizeof(id()));
310   base::OS::Print("%6zu @%6u %*c %s%s: ", self_size(), id(), indent, ' ',
311                   prefix, edge_name);
312   if (type() != kString) {
313     base::OS::Print("%s %.40s\n", TypeAsString(), name_);
314   } else {
315     base::OS::Print("\"");
316     const char* c = name_;
317     while (*c && (c - name_) <= 40) {
318       if (*c != '\n')
319         base::OS::Print("%c", *c);
320       else
321         base::OS::Print("\\n");
322       ++c;
323     }
324     base::OS::Print("\"\n");
325   }
326   if (--max_depth == 0) return;
327   for (auto i = children_begin(); i != children_end(); ++i) {
328     HeapGraphEdge& edge = **i;
329     const char* edge_prefix = "";
330     base::EmbeddedVector<char, 64> index;
331     edge_name = index.begin();
332     switch (edge.type()) {
333       case HeapGraphEdge::kContextVariable:
334         edge_prefix = "#";
335         edge_name = edge.name();
336         break;
337       case HeapGraphEdge::kElement:
338         SNPrintF(index, "%d", edge.index());
339         break;
340       case HeapGraphEdge::kInternal:
341         edge_prefix = "$";
342         edge_name = edge.name();
343         break;
344       case HeapGraphEdge::kProperty:
345         edge_name = edge.name();
346         break;
347       case HeapGraphEdge::kHidden:
348         edge_prefix = "$";
349         SNPrintF(index, "%d", edge.index());
350         break;
351       case HeapGraphEdge::kShortcut:
352         edge_prefix = "^";
353         edge_name = edge.name();
354         break;
355       case HeapGraphEdge::kWeak:
356         edge_prefix = "w";
357         edge_name = edge.name();
358         break;
359       default:
360         SNPrintF(index, "!!! unknown edge type: %d ", edge.type());
361     }
362     edge.to()->Print(edge_prefix, edge_name, max_depth, indent + 2);
363   }
364 }
365 
366 const char* HeapEntry::TypeAsString() const {
367   switch (type()) {
368     case kHidden: return "/hidden/";
369     case kObject: return "/object/";
370     case kClosure: return "/closure/";
371     case kString: return "/string/";
372     case kCode: return "/code/";
373     case kArray: return "/array/";
374     case kRegExp: return "/regexp/";
375     case kHeapNumber: return "/number/";
376     case kNative: return "/native/";
377     case kSynthetic: return "/synthetic/";
378     case kConsString: return "/concatenated string/";
379     case kSlicedString: return "/sliced string/";
380     case kSymbol: return "/symbol/";
381     case kBigInt:
382       return "/bigint/";
383     default: return "???";
384   }
385 }
386 
387 HeapSnapshot::HeapSnapshot(HeapProfiler* profiler, bool global_objects_as_roots,
388                            bool capture_numeric_value)
389     : profiler_(profiler),
390       treat_global_objects_as_roots_(global_objects_as_roots),
391       capture_numeric_value_(capture_numeric_value) {
392   // It is very important to keep objects that form a heap snapshot
393   // as small as possible. Check assumptions about data structure sizes.
394   STATIC_ASSERT(kSystemPointerSize != 4 || sizeof(HeapGraphEdge) == 12);
395   STATIC_ASSERT(kSystemPointerSize != 8 || sizeof(HeapGraphEdge) == 24);
396   STATIC_ASSERT(kSystemPointerSize != 4 || sizeof(HeapEntry) == 32);
397 #if V8_CC_MSVC
398   STATIC_ASSERT(kSystemPointerSize != 8 || sizeof(HeapEntry) == 48);
399 #else   // !V8_CC_MSVC
400   STATIC_ASSERT(kSystemPointerSize != 8 || sizeof(HeapEntry) == 40);
401 #endif  // !V8_CC_MSVC
402   memset(&gc_subroot_entries_, 0, sizeof(gc_subroot_entries_));
403 }
404 
405 void HeapSnapshot::Delete() {
406   profiler_->RemoveSnapshot(this);
407 }
408 
409 void HeapSnapshot::RememberLastJSObjectId() {
410   max_snapshot_js_object_id_ = profiler_->heap_object_map()->last_assigned_id();
411 }
412 
413 void HeapSnapshot::AddSyntheticRootEntries() {
414   AddRootEntry();
415   AddGcRootsEntry();
416   SnapshotObjectId id = HeapObjectsMap::kGcRootsFirstSubrootId;
417   for (int root = 0; root < static_cast<int>(Root::kNumberOfRoots); root++) {
418     AddGcSubrootEntry(static_cast<Root>(root), id);
419     id += HeapObjectsMap::kObjectIdStep;
420   }
421   DCHECK_EQ(HeapObjectsMap::kFirstAvailableObjectId, id);
422 }
423 
424 void HeapSnapshot::AddRootEntry() {
425   DCHECK_NULL(root_entry_);
426   DCHECK(entries_.empty());  // Root entry must be the first one.
427   root_entry_ = AddEntry(HeapEntry::kSynthetic, "",
428                          HeapObjectsMap::kInternalRootObjectId, 0, 0);
429   DCHECK_EQ(1u, entries_.size());
430   DCHECK_EQ(root_entry_, &entries_.front());
431 }
432 
433 void HeapSnapshot::AddGcRootsEntry() {
434   DCHECK_NULL(gc_roots_entry_);
435   gc_roots_entry_ = AddEntry(HeapEntry::kSynthetic, "(GC roots)",
436                              HeapObjectsMap::kGcRootsObjectId, 0, 0);
437 }
438 
439 void HeapSnapshot::AddGcSubrootEntry(Root root, SnapshotObjectId id) {
440   DCHECK_NULL(gc_subroot_entries_[static_cast<int>(root)]);
441   gc_subroot_entries_[static_cast<int>(root)] =
442       AddEntry(HeapEntry::kSynthetic, RootVisitor::RootName(root), id, 0, 0);
443 }
444 
445 void HeapSnapshot::AddLocation(HeapEntry* entry, int scriptId, int line,
446                                int col) {
447   locations_.emplace_back(entry->index(), scriptId, line, col);
448 }
449 
450 HeapEntry* HeapSnapshot::AddEntry(HeapEntry::Type type,
451                                   const char* name,
452                                   SnapshotObjectId id,
453                                   size_t size,
454                                   unsigned trace_node_id) {
455   DCHECK(!is_complete());
456   entries_.emplace_back(this, static_cast<int>(entries_.size()), type, name, id,
457                         size, trace_node_id);
458   return &entries_.back();
459 }
460 
461 void HeapSnapshot::FillChildren() {
462   DCHECK(children().empty());
463   int children_index = 0;
464   for (HeapEntry& entry : entries()) {
465     children_index = entry.set_children_index(children_index);
466   }
467   DCHECK_EQ(edges().size(), static_cast<size_t>(children_index));
468   children().resize(edges().size());
469   for (HeapGraphEdge& edge : edges()) {
470     edge.from()->add_child(&edge);
471   }
472 }
473 
474 HeapEntry* HeapSnapshot::GetEntryById(SnapshotObjectId id) {
475   if (entries_by_id_cache_.empty()) {
476     CHECK(is_complete());
477     entries_by_id_cache_.reserve(entries_.size());
478     for (HeapEntry& entry : entries_) {
479       entries_by_id_cache_.emplace(entry.id(), &entry);
480     }
481   }
482   auto it = entries_by_id_cache_.find(id);
483   return it != entries_by_id_cache_.end() ? it->second : nullptr;
484 }
485 
486 void HeapSnapshot::Print(int max_depth) {
487   root()->Print("", "", max_depth, 0);
488 }
489 
490 // We split IDs on evens for embedder objects (see
491 // HeapObjectsMap::GenerateId) and odds for native objects.
492 const SnapshotObjectId HeapObjectsMap::kInternalRootObjectId = 1;
493 const SnapshotObjectId HeapObjectsMap::kGcRootsObjectId =
494     HeapObjectsMap::kInternalRootObjectId + HeapObjectsMap::kObjectIdStep;
495 const SnapshotObjectId HeapObjectsMap::kGcRootsFirstSubrootId =
496     HeapObjectsMap::kGcRootsObjectId + HeapObjectsMap::kObjectIdStep;
497 const SnapshotObjectId HeapObjectsMap::kFirstAvailableObjectId =
498     HeapObjectsMap::kGcRootsFirstSubrootId +
499     static_cast<int>(Root::kNumberOfRoots) * HeapObjectsMap::kObjectIdStep;
500 
501 HeapObjectsMap::HeapObjectsMap(Heap* heap)
502     : next_id_(kFirstAvailableObjectId), heap_(heap) {
503   // The dummy element at zero index is needed as entries_map_ cannot hold
504   // an entry with zero value. Otherwise it's impossible to tell if
505   // LookupOrInsert has added a new item or just returned an existing one
506   // having the value of zero.
507   entries_.emplace_back(0, kNullAddress, 0, true);
508 }
509 
510 bool HeapObjectsMap::MoveObject(Address from, Address to, int object_size) {
511   DCHECK_NE(kNullAddress, to);
512   DCHECK_NE(kNullAddress, from);
513   if (from == to) return false;
514   void* from_value = entries_map_.Remove(reinterpret_cast<void*>(from),
515                                          ComputeAddressHash(from));
516   if (from_value == nullptr) {
517     // It may occur that some untracked object moves to an address X and there
518     // is a tracked object at that address. In this case we should remove the
519     // entry as we know that the object has died.
520     void* to_value = entries_map_.Remove(reinterpret_cast<void*>(to),
521                                          ComputeAddressHash(to));
522     if (to_value != nullptr) {
523       int to_entry_info_index =
524           static_cast<int>(reinterpret_cast<intptr_t>(to_value));
525       entries_.at(to_entry_info_index).addr = kNullAddress;
526     }
527   } else {
528     base::HashMap::Entry* to_entry = entries_map_.LookupOrInsert(
529         reinterpret_cast<void*>(to), ComputeAddressHash(to));
530     if (to_entry->value != nullptr) {
531       // We found an existing entry at the `to` address for an old object.
532       // Without this operation we would end up with two EntryInfo records
533       // whose addr fields hold the same value. That is bad because later, in
534       // RemoveDeadEntries, one of these entries would be removed together
535       // with the corresponding entries_map_ entry.
536       int to_entry_info_index =
537           static_cast<int>(reinterpret_cast<intptr_t>(to_entry->value));
538       entries_.at(to_entry_info_index).addr = kNullAddress;
539     }
540     int from_entry_info_index =
541         static_cast<int>(reinterpret_cast<intptr_t>(from_value));
542     entries_.at(from_entry_info_index).addr = to;
543     // The size of an object can change during its life, so to keep information
544     // about the object in entries_ consistent, we have to adjust size when the
545     // object is migrated.
546     if (FLAG_heap_profiler_trace_objects) {
547       PrintF("Move object from %p to %p old size %6d new size %6d\n",
548              reinterpret_cast<void*>(from), reinterpret_cast<void*>(to),
549              entries_.at(from_entry_info_index).size, object_size);
550     }
551     entries_.at(from_entry_info_index).size = object_size;
552     to_entry->value = from_value;
553   }
554   return from_value != nullptr;
555 }
556 
557 
558 void HeapObjectsMap::UpdateObjectSize(Address addr, int size) {
559   FindOrAddEntry(addr, size, false);
560 }
561 
562 
563 SnapshotObjectId HeapObjectsMap::FindEntry(Address addr) {
564   base::HashMap::Entry* entry = entries_map_.Lookup(
565       reinterpret_cast<void*>(addr), ComputeAddressHash(addr));
566   if (entry == nullptr) return v8::HeapProfiler::kUnknownObjectId;
567   int entry_index = static_cast<int>(reinterpret_cast<intptr_t>(entry->value));
568   EntryInfo& entry_info = entries_.at(entry_index);
569   DCHECK(static_cast<uint32_t>(entries_.size()) > entries_map_.occupancy());
570   return entry_info.id;
571 }
572 
573 
574 SnapshotObjectId HeapObjectsMap::FindOrAddEntry(Address addr,
575                                                 unsigned int size,
576                                                 bool accessed) {
577   DCHECK(static_cast<uint32_t>(entries_.size()) > entries_map_.occupancy());
578   base::HashMap::Entry* entry = entries_map_.LookupOrInsert(
579       reinterpret_cast<void*>(addr), ComputeAddressHash(addr));
580   if (entry->value != nullptr) {
581     int entry_index =
582         static_cast<int>(reinterpret_cast<intptr_t>(entry->value));
583     EntryInfo& entry_info = entries_.at(entry_index);
584     entry_info.accessed = accessed;
585     if (FLAG_heap_profiler_trace_objects) {
586       PrintF("Update object size : %p with old size %d and new size %d\n",
587              reinterpret_cast<void*>(addr), entry_info.size, size);
588     }
589     entry_info.size = size;
590     return entry_info.id;
591   }
592   entry->value = reinterpret_cast<void*>(entries_.size());
593   SnapshotObjectId id = get_next_id();
594   entries_.push_back(EntryInfo(id, addr, size, accessed));
595   DCHECK(static_cast<uint32_t>(entries_.size()) > entries_map_.occupancy());
596   return id;
597 }
598 
599 SnapshotObjectId HeapObjectsMap::FindMergedNativeEntry(NativeObject addr) {
600   auto it = merged_native_entries_map_.find(addr);
601   if (it == merged_native_entries_map_.end())
602     return v8::HeapProfiler::kUnknownObjectId;
603   return entries_[it->second].id;
604 }
605 
606 void HeapObjectsMap::AddMergedNativeEntry(NativeObject addr,
607                                           Address canonical_addr) {
608   base::HashMap::Entry* entry =
609       entries_map_.Lookup(reinterpret_cast<void*>(canonical_addr),
610                           ComputeAddressHash(canonical_addr));
611   auto result = merged_native_entries_map_.insert(
612       {addr, reinterpret_cast<size_t>(entry->value)});
613   if (!result.second) {
614     result.first->second = reinterpret_cast<size_t>(entry->value);
615   }
616 }
617 
618 void HeapObjectsMap::StopHeapObjectsTracking() { time_intervals_.clear(); }
619 
620 void HeapObjectsMap::UpdateHeapObjectsMap() {
621   if (FLAG_heap_profiler_trace_objects) {
622     PrintF("Begin HeapObjectsMap::UpdateHeapObjectsMap. map has %d entries.\n",
623            entries_map_.occupancy());
624   }
625   heap_->PreciseCollectAllGarbage(Heap::kNoGCFlags,
626                                   GarbageCollectionReason::kHeapProfiler);
627   PtrComprCageBase cage_base(heap_->isolate());
628   CombinedHeapObjectIterator iterator(heap_);
629   for (HeapObject obj = iterator.Next(); !obj.is_null();
630        obj = iterator.Next()) {
631     int object_size = obj.Size(cage_base);
632     FindOrAddEntry(obj.address(), object_size);
633     if (FLAG_heap_profiler_trace_objects) {
634       PrintF("Update object      : %p %6d. Next address is %p\n",
635              reinterpret_cast<void*>(obj.address()), object_size,
636              reinterpret_cast<void*>(obj.address() + object_size));
637     }
638   }
639   RemoveDeadEntries();
640   if (FLAG_heap_profiler_trace_objects) {
641     PrintF("End HeapObjectsMap::UpdateHeapObjectsMap. map has %d entries.\n",
642            entries_map_.occupancy());
643   }
644 }
645 
646 SnapshotObjectId HeapObjectsMap::PushHeapObjectsStats(OutputStream* stream,
647                                                       int64_t* timestamp_us) {
648   UpdateHeapObjectsMap();
649   time_intervals_.emplace_back(next_id_);
650   int prefered_chunk_size = stream->GetChunkSize();
651   std::vector<v8::HeapStatsUpdate> stats_buffer;
652   DCHECK(!entries_.empty());
653   EntryInfo* entry_info = &entries_.front();
654   EntryInfo* end_entry_info = &entries_.back() + 1;
655   for (size_t time_interval_index = 0;
656        time_interval_index < time_intervals_.size(); ++time_interval_index) {
657     TimeInterval& time_interval = time_intervals_[time_interval_index];
658     SnapshotObjectId time_interval_id = time_interval.id;
659     uint32_t entries_size = 0;
660     EntryInfo* start_entry_info = entry_info;
661     while (entry_info < end_entry_info && entry_info->id < time_interval_id) {
662       entries_size += entry_info->size;
663       ++entry_info;
664     }
665     uint32_t entries_count =
666         static_cast<uint32_t>(entry_info - start_entry_info);
667     if (time_interval.count != entries_count ||
668         time_interval.size != entries_size) {
669       stats_buffer.emplace_back(static_cast<uint32_t>(time_interval_index),
670                                 time_interval.count = entries_count,
671                                 time_interval.size = entries_size);
672       if (static_cast<int>(stats_buffer.size()) >= prefered_chunk_size) {
673         OutputStream::WriteResult result = stream->WriteHeapStatsChunk(
674             &stats_buffer.front(), static_cast<int>(stats_buffer.size()));
675         if (result == OutputStream::kAbort) return last_assigned_id();
676         stats_buffer.clear();
677       }
678     }
679   }
680   DCHECK(entry_info == end_entry_info);
681   if (!stats_buffer.empty()) {
682     OutputStream::WriteResult result = stream->WriteHeapStatsChunk(
683         &stats_buffer.front(), static_cast<int>(stats_buffer.size()));
684     if (result == OutputStream::kAbort) return last_assigned_id();
685   }
686   stream->EndOfStream();
687   if (timestamp_us) {
688     *timestamp_us =
689         (time_intervals_.back().timestamp - time_intervals_.front().timestamp)
690             .InMicroseconds();
691   }
692   return last_assigned_id();
693 }
694 
695 
696 void HeapObjectsMap::RemoveDeadEntries() {
697   DCHECK(entries_.size() > 0 && entries_.at(0).id == 0 &&
698          entries_.at(0).addr == kNullAddress);
699 
700   // Build up temporary reverse map.
701   std::unordered_map<size_t, NativeObject> reverse_merged_native_entries_map;
702   for (const auto& it : merged_native_entries_map_) {
703     auto result =
704         reverse_merged_native_entries_map.emplace(it.second, it.first);
705     DCHECK(result.second);
706     USE(result);
707   }
708 
709   size_t first_free_entry = 1;
710   for (size_t i = 1; i < entries_.size(); ++i) {
711     EntryInfo& entry_info = entries_.at(i);
712     auto merged_reverse_it = reverse_merged_native_entries_map.find(i);
713     if (entry_info.accessed) {
714       if (first_free_entry != i) {
715         entries_.at(first_free_entry) = entry_info;
716       }
717       entries_.at(first_free_entry).accessed = false;
718       base::HashMap::Entry* entry =
719           entries_map_.Lookup(reinterpret_cast<void*>(entry_info.addr),
720                               ComputeAddressHash(entry_info.addr));
721       DCHECK(entry);
722       entry->value = reinterpret_cast<void*>(first_free_entry);
723       if (merged_reverse_it != reverse_merged_native_entries_map.end()) {
724         auto it = merged_native_entries_map_.find(merged_reverse_it->second);
725         DCHECK_NE(merged_native_entries_map_.end(), it);
726         it->second = first_free_entry;
727       }
728       ++first_free_entry;
729     } else {
730       if (entry_info.addr) {
731         entries_map_.Remove(reinterpret_cast<void*>(entry_info.addr),
732                             ComputeAddressHash(entry_info.addr));
733         if (merged_reverse_it != reverse_merged_native_entries_map.end()) {
734           merged_native_entries_map_.erase(merged_reverse_it->second);
735         }
736       }
737     }
738   }
739   entries_.erase(entries_.begin() + first_free_entry, entries_.end());
740 
741   DCHECK(static_cast<uint32_t>(entries_.size()) - 1 ==
742          entries_map_.occupancy());
743 }
744 
745 V8HeapExplorer::V8HeapExplorer(HeapSnapshot* snapshot,
746                                SnapshottingProgressReportingInterface* progress,
747                                v8::HeapProfiler::ObjectNameResolver* resolver)
748     : heap_(snapshot->profiler()->heap_object_map()->heap()),
749       snapshot_(snapshot),
750       names_(snapshot_->profiler()->names()),
751       heap_object_map_(snapshot_->profiler()->heap_object_map()),
752       progress_(progress),
753       generator_(nullptr),
754       global_object_name_resolver_(resolver) {}
755 
756 HeapEntry* V8HeapExplorer::AllocateEntry(HeapThing ptr) {
757   return AddEntry(HeapObject::cast(Object(reinterpret_cast<Address>(ptr))));
758 }
759 
760 HeapEntry* V8HeapExplorer::AllocateEntry(Smi smi) {
761   SnapshotObjectId id = heap_object_map_->get_next_id();
762   HeapEntry* entry =
763       snapshot_->AddEntry(HeapEntry::kHeapNumber, "smi number", id, 0, 0);
764   // XXX: Smis do not appear in CombinedHeapObjectIterator, so we need to
765   // extract the references here
766   ExtractNumberReference(entry, smi);
767   return entry;
768 }
769 
770 void V8HeapExplorer::ExtractLocation(HeapEntry* entry, HeapObject object) {
771   if (object.IsJSFunction()) {
772     JSFunction func = JSFunction::cast(object);
773     ExtractLocationForJSFunction(entry, func);
774 
775   } else if (object.IsJSGeneratorObject()) {
776     JSGeneratorObject gen = JSGeneratorObject::cast(object);
777     ExtractLocationForJSFunction(entry, gen.function());
778 
779   } else if (object.IsJSObject()) {
780     JSObject obj = JSObject::cast(object);
781     JSFunction maybe_constructor = GetConstructor(heap_->isolate(), obj);
782 
783     if (!maybe_constructor.is_null()) {
784       ExtractLocationForJSFunction(entry, maybe_constructor);
785     }
786   }
787 }
788 
789 void V8HeapExplorer::ExtractLocationForJSFunction(HeapEntry* entry,
790                                                   JSFunction func) {
791   if (!func.shared().script().IsScript()) return;
792   Script script = Script::cast(func.shared().script());
793   int scriptId = script.id();
794   int start = func.shared().StartPosition();
795   Script::PositionInfo info;
796   script.GetPositionInfo(start, &info, Script::WITH_OFFSET);
797   snapshot_->AddLocation(entry, scriptId, info.line, info.column);
798 }
799 
800 HeapEntry* V8HeapExplorer::AddEntry(HeapObject object) {
801   if (object.IsJSFunction()) {
802     JSFunction func = JSFunction::cast(object);
803     SharedFunctionInfo shared = func.shared();
804     const char* name = names_->GetName(shared.Name());
805     return AddEntry(object, HeapEntry::kClosure, name);
806   } else if (object.IsJSBoundFunction()) {
807     return AddEntry(object, HeapEntry::kClosure, "native_bind");
808   } else if (object.IsJSRegExp()) {
809     JSRegExp re = JSRegExp::cast(object);
810     return AddEntry(object, HeapEntry::kRegExp, names_->GetName(re.source()));
811   } else if (object.IsJSObject()) {
812     // TODO(v8:12674) Fix and run full gcmole.
813     DisableGCMole no_gcmole;
814     const char* name = names_->GetName(
815         GetConstructorName(heap_->isolate(), JSObject::cast(object)));
816     if (object.IsJSGlobalObject()) {
817       auto it = global_object_tag_map_.find(JSGlobalObject::cast(object));
818       if (it != global_object_tag_map_.end()) {
819         name = names_->GetFormatted("%s / %s", name, it->second);
820       }
821     }
822     return AddEntry(object, HeapEntry::kObject, name);
823   } else if (object.IsString()) {
824     String string = String::cast(object);
825     if (string.IsConsString()) {
826       return AddEntry(object, HeapEntry::kConsString, "(concatenated string)");
827     } else if (string.IsSlicedString()) {
828       return AddEntry(object, HeapEntry::kSlicedString, "(sliced string)");
829     } else {
830       return AddEntry(object, HeapEntry::kString,
831                       names_->GetName(String::cast(object)));
832     }
833   } else if (object.IsSymbol()) {
834     if (Symbol::cast(object).is_private())
835       return AddEntry(object, HeapEntry::kHidden, "private symbol");
836     else
837       return AddEntry(object, HeapEntry::kSymbol, "symbol");
838   } else if (object.IsBigInt()) {
839     return AddEntry(object, HeapEntry::kBigInt, "bigint");
840   } else if (object.IsCode()) {
841     return AddEntry(object, HeapEntry::kCode, "");
842   } else if (object.IsSharedFunctionInfo()) {
843     String name = SharedFunctionInfo::cast(object).Name();
844     return AddEntry(object, HeapEntry::kCode, names_->GetName(name));
845   } else if (object.IsScript()) {
846     Object name = Script::cast(object).name();
847     return AddEntry(object, HeapEntry::kCode,
848                     name.IsString() ? names_->GetName(String::cast(name)) : "");
849   } else if (object.IsNativeContext()) {
850     return AddEntry(object, HeapEntry::kHidden, "system / NativeContext");
851   } else if (object.IsContext()) {
852     return AddEntry(object, HeapEntry::kObject, "system / Context");
853   } else if (object.IsHeapNumber()) {
854     return AddEntry(object, HeapEntry::kHeapNumber, "heap number");
855   }
856   return AddEntry(object, GetSystemEntryType(object),
857                   GetSystemEntryName(object));
858 }
859 
860 HeapEntry* V8HeapExplorer::AddEntry(HeapObject object, HeapEntry::Type type,
861                                     const char* name) {
862   if (FLAG_heap_profiler_show_hidden_objects && type == HeapEntry::kHidden) {
863     type = HeapEntry::kNative;
864   }
865   PtrComprCageBase cage_base(isolate());
866   return AddEntry(object.address(), type, name, object.Size(cage_base));
867 }
868 
869 HeapEntry* V8HeapExplorer::AddEntry(Address address,
870                                     HeapEntry::Type type,
871                                     const char* name,
872                                     size_t size) {
873   SnapshotObjectId object_id = heap_object_map_->FindOrAddEntry(
874       address, static_cast<unsigned int>(size));
875   unsigned trace_node_id = 0;
876   if (AllocationTracker* allocation_tracker =
877       snapshot_->profiler()->allocation_tracker()) {
878     trace_node_id =
879         allocation_tracker->address_to_trace()->GetTraceNodeId(address);
880   }
881   return snapshot_->AddEntry(type, name, object_id, size, trace_node_id);
882 }
883 
884 const char* V8HeapExplorer::GetSystemEntryName(HeapObject object) {
885   if (object.IsMap()) {
886     switch (Map::cast(object).instance_type()) {
887 #define MAKE_STRING_MAP_CASE(instance_type, size, name, Name) \
888         case instance_type: return "system / Map (" #Name ")";
889       STRING_TYPE_LIST(MAKE_STRING_MAP_CASE)
890 #undef MAKE_STRING_MAP_CASE
891         default: return "system / Map";
892     }
893   }
894 
895   InstanceType type = object.map().instance_type();
896 
897   // Empty string names are special: TagObject can overwrite them, and devtools
898   // will report them as "(internal array)".
899   if (InstanceTypeChecker::IsFixedArray(type) ||
900       InstanceTypeChecker::IsFixedDoubleArray(type) ||
901       InstanceTypeChecker::IsByteArray(type)) {
902     return "";
903   }
904 
905   switch (type) {
906 #define MAKE_TORQUE_CASE(Name, TYPE) \
907   case TYPE:                         \
908     return "system / " #Name;
909     // The following lists include every non-String instance type.
910     // This includes a few types that already have non-"system" names assigned
911     // by AddEntry, but this is a convenient way to avoid manual upkeep here.
912     TORQUE_INSTANCE_CHECKERS_SINGLE_FULLY_DEFINED(MAKE_TORQUE_CASE)
913     TORQUE_INSTANCE_CHECKERS_MULTIPLE_FULLY_DEFINED(MAKE_TORQUE_CASE)
914     TORQUE_INSTANCE_CHECKERS_SINGLE_ONLY_DECLARED(MAKE_TORQUE_CASE)
915     TORQUE_INSTANCE_CHECKERS_MULTIPLE_ONLY_DECLARED(MAKE_TORQUE_CASE)
916 #undef MAKE_TORQUE_CASE
917 
918     // Strings were already handled by AddEntry.
919 #define MAKE_STRING_CASE(instance_type, size, name, Name) \
920   case instance_type:                                     \
921     UNREACHABLE();
922     STRING_TYPE_LIST(MAKE_STRING_CASE)
923 #undef MAKE_STRING_CASE
924   }
925 }
926 
927 HeapEntry::Type V8HeapExplorer::GetSystemEntryType(HeapObject object) {
928   InstanceType type = object.map().instance_type();
929   if (InstanceTypeChecker::IsAllocationSite(type) ||
930       InstanceTypeChecker::IsArrayBoilerplateDescription(type) ||
931       InstanceTypeChecker::IsBytecodeArray(type) ||
932       InstanceTypeChecker::IsClosureFeedbackCellArray(type) ||
933       InstanceTypeChecker::IsCodeDataContainer(type) ||
934       InstanceTypeChecker::IsFeedbackCell(type) ||
935       InstanceTypeChecker::IsFeedbackMetadata(type) ||
936       InstanceTypeChecker::IsFeedbackVector(type) ||
937       InstanceTypeChecker::IsInterpreterData(type) ||
938       InstanceTypeChecker::IsLoadHandler(type) ||
939       InstanceTypeChecker::IsObjectBoilerplateDescription(type) ||
940       InstanceTypeChecker::IsPreparseData(type) ||
941       InstanceTypeChecker::IsRegExpBoilerplateDescription(type) ||
942       InstanceTypeChecker::IsScopeInfo(type) ||
943       InstanceTypeChecker::IsStoreHandler(type) ||
944       InstanceTypeChecker::IsTemplateObjectDescription(type) ||
945       InstanceTypeChecker::IsTurbofanType(type) ||
946       InstanceTypeChecker::IsUncompiledData(type)) {
947     return HeapEntry::kCode;
948   }
949 
950   // This check must come second, because some subtypes of FixedArray are
951   // determined above to represent code content.
952   if (InstanceTypeChecker::IsFixedArray(type) ||
953       InstanceTypeChecker::IsFixedDoubleArray(type) ||
954       InstanceTypeChecker::IsByteArray(type)) {
955     return HeapEntry::kArray;
956   }
957 
958   return HeapEntry::kHidden;
959 }
960 
961 uint32_t V8HeapExplorer::EstimateObjectsCount() {
962   CombinedHeapObjectIterator it(heap_, HeapObjectIterator::kFilterUnreachable);
963   uint32_t objects_count = 0;
964   // Avoid overflowing the objects count. In the worst case, we will show the same
965   // progress for a longer period of time, but we do not expect to have that
966   // many objects.
967   while (!it.Next().is_null() &&
968          objects_count != std::numeric_limits<uint32_t>::max())
969     ++objects_count;
970   return objects_count;
971 }
972 
973 class IndexedReferencesExtractor : public ObjectVisitorWithCageBases {
974  public:
975   IndexedReferencesExtractor(V8HeapExplorer* generator, HeapObject parent_obj,
976                              HeapEntry* parent)
977       : ObjectVisitorWithCageBases(generator->isolate()),
978         generator_(generator),
979         parent_obj_(parent_obj),
980         parent_start_(parent_obj_.RawMaybeWeakField(0)),
981         parent_end_(
982             parent_obj_.RawMaybeWeakField(parent_obj_.Size(cage_base()))),
983         parent_(parent),
984         next_index_(0) {}
985   void VisitPointers(HeapObject host, ObjectSlot start,
986                      ObjectSlot end) override {
987     VisitPointers(host, MaybeObjectSlot(start), MaybeObjectSlot(end));
988   }
989   void VisitMapPointer(HeapObject object) override {
990     VisitSlotImpl(cage_base(), object.map_slot());
991   }
992   void VisitPointers(HeapObject host, MaybeObjectSlot start,
993                      MaybeObjectSlot end) override {
994     // [start,end) must be a sub-region of [parent_start_, parent_end_), i.e.
995     // all the slots must point inside the object.
996     CHECK_LE(parent_start_, start);
997     CHECK_LE(end, parent_end_);
998     for (MaybeObjectSlot slot = start; slot < end; ++slot) {
999       VisitSlotImpl(cage_base(), slot);
1000     }
1001   }
1002 
1003   void VisitCodePointer(HeapObject host, CodeObjectSlot slot) override {
1004     CHECK(V8_EXTERNAL_CODE_SPACE_BOOL);
1005     VisitSlotImpl(code_cage_base(), slot);
1006   }
1007 
1008   void VisitCodeTarget(Code host, RelocInfo* rinfo) override {
1009     Code target = Code::GetCodeFromTargetAddress(rinfo->target_address());
1010     VisitHeapObjectImpl(target, -1);
1011   }
1012 
1013   void VisitEmbeddedPointer(Code host, RelocInfo* rinfo) override {
1014     HeapObject object = rinfo->target_object(cage_base());
1015     if (host.IsWeakObject(object)) {
1016       generator_->SetWeakReference(parent_, next_index_++, object, {});
1017     } else {
1018       VisitHeapObjectImpl(object, -1);
1019     }
1020   }
1021 
1022  private:
1023   template <typename TSlot>
1024   V8_INLINE void VisitSlotImpl(PtrComprCageBase cage_base, TSlot slot) {
1025     int field_index =
1026         static_cast<int>(MaybeObjectSlot(slot.address()) - parent_start_);
1027     if (generator_->visited_fields_[field_index]) {
1028       generator_->visited_fields_[field_index] = false;
1029     } else {
1030       HeapObject heap_object;
1031       auto loaded_value = slot.load(cage_base);
1032       if (loaded_value.GetHeapObjectIfStrong(&heap_object)) {
1033         VisitHeapObjectImpl(heap_object, field_index);
1034       } else if (loaded_value.GetHeapObjectIfWeak(&heap_object)) {
1035         generator_->SetWeakReference(parent_, next_index_++, heap_object, {});
1036       }
1037     }
1038   }
1039 
1040   V8_INLINE void VisitHeapObjectImpl(HeapObject heap_object, int field_index) {
1041     DCHECK_LE(-1, field_index);
1042     // The last parameter {field_offset} is only used to check some well-known
1043     // skipped references, so passing -1 * kTaggedSize for objects embedded
1044     // into code is fine.
1045     generator_->SetHiddenReference(parent_obj_, parent_, next_index_++,
1046                                    heap_object, field_index * kTaggedSize);
1047   }
1048 
1049   V8HeapExplorer* generator_;
1050   HeapObject parent_obj_;
1051   MaybeObjectSlot parent_start_;
1052   MaybeObjectSlot parent_end_;
1053   HeapEntry* parent_;
1054   int next_index_;
1055 };
1056 
1057 void V8HeapExplorer::ExtractReferences(HeapEntry* entry, HeapObject obj) {
1058   if (obj.IsJSGlobalProxy()) {
1059     ExtractJSGlobalProxyReferences(entry, JSGlobalProxy::cast(obj));
1060   } else if (obj.IsJSArrayBuffer()) {
1061     ExtractJSArrayBufferReferences(entry, JSArrayBuffer::cast(obj));
1062   } else if (obj.IsJSObject()) {
1063     if (obj.IsJSWeakSet()) {
1064       ExtractJSWeakCollectionReferences(entry, JSWeakSet::cast(obj));
1065     } else if (obj.IsJSWeakMap()) {
1066       ExtractJSWeakCollectionReferences(entry, JSWeakMap::cast(obj));
1067     } else if (obj.IsJSSet()) {
1068       ExtractJSCollectionReferences(entry, JSSet::cast(obj));
1069     } else if (obj.IsJSMap()) {
1070       ExtractJSCollectionReferences(entry, JSMap::cast(obj));
1071     } else if (obj.IsJSPromise()) {
1072       ExtractJSPromiseReferences(entry, JSPromise::cast(obj));
1073     } else if (obj.IsJSGeneratorObject()) {
1074       ExtractJSGeneratorObjectReferences(entry, JSGeneratorObject::cast(obj));
1075     } else if (obj.IsJSWeakRef()) {
1076       ExtractJSWeakRefReferences(entry, JSWeakRef::cast(obj));
1077     }
1078     ExtractJSObjectReferences(entry, JSObject::cast(obj));
1079   } else if (obj.IsString()) {
1080     ExtractStringReferences(entry, String::cast(obj));
1081   } else if (obj.IsSymbol()) {
1082     ExtractSymbolReferences(entry, Symbol::cast(obj));
1083   } else if (obj.IsMap()) {
1084     ExtractMapReferences(entry, Map::cast(obj));
1085   } else if (obj.IsSharedFunctionInfo()) {
1086     ExtractSharedFunctionInfoReferences(entry, SharedFunctionInfo::cast(obj));
1087   } else if (obj.IsScript()) {
1088     ExtractScriptReferences(entry, Script::cast(obj));
1089   } else if (obj.IsAccessorInfo()) {
1090     ExtractAccessorInfoReferences(entry, AccessorInfo::cast(obj));
1091   } else if (obj.IsAccessorPair()) {
1092     ExtractAccessorPairReferences(entry, AccessorPair::cast(obj));
1093   } else if (obj.IsCode()) {
1094     ExtractCodeReferences(entry, Code::cast(obj));
1095   } else if (obj.IsCell()) {
1096     ExtractCellReferences(entry, Cell::cast(obj));
1097   } else if (obj.IsFeedbackCell()) {
1098     ExtractFeedbackCellReferences(entry, FeedbackCell::cast(obj));
1099   } else if (obj.IsPropertyCell()) {
1100     ExtractPropertyCellReferences(entry, PropertyCell::cast(obj));
1101   } else if (obj.IsAllocationSite()) {
1102     ExtractAllocationSiteReferences(entry, AllocationSite::cast(obj));
1103   } else if (obj.IsArrayBoilerplateDescription()) {
1104     ExtractArrayBoilerplateDescriptionReferences(
1105         entry, ArrayBoilerplateDescription::cast(obj));
1106   } else if (obj.IsRegExpBoilerplateDescription()) {
1107     ExtractRegExpBoilerplateDescriptionReferences(
1108         entry, RegExpBoilerplateDescription::cast(obj));
1109   } else if (obj.IsFeedbackVector()) {
1110     ExtractFeedbackVectorReferences(entry, FeedbackVector::cast(obj));
1111   } else if (obj.IsDescriptorArray()) {
1112     ExtractDescriptorArrayReferences(entry, DescriptorArray::cast(obj));
1113   } else if (obj.IsWeakFixedArray()) {
1114     ExtractWeakArrayReferences(WeakFixedArray::kHeaderSize, entry,
1115                                WeakFixedArray::cast(obj));
1116   } else if (obj.IsWeakArrayList()) {
1117     ExtractWeakArrayReferences(WeakArrayList::kHeaderSize, entry,
1118                                WeakArrayList::cast(obj));
1119   } else if (obj.IsContext()) {
1120     ExtractContextReferences(entry, Context::cast(obj));
1121   } else if (obj.IsEphemeronHashTable()) {
1122     ExtractEphemeronHashTableReferences(entry, EphemeronHashTable::cast(obj));
1123   } else if (obj.IsFixedArray()) {
1124     ExtractFixedArrayReferences(entry, FixedArray::cast(obj));
1125   } else if (obj.IsWeakCell()) {
1126     ExtractWeakCellReferences(entry, WeakCell::cast(obj));
1127   } else if (obj.IsHeapNumber()) {
1128     if (snapshot_->capture_numeric_value()) {
1129       ExtractNumberReference(entry, obj);
1130     }
1131   } else if (obj.IsBytecodeArray()) {
1132     ExtractBytecodeArrayReferences(entry, BytecodeArray::cast(obj));
1133   } else if (obj.IsScopeInfo()) {
1134     ExtractScopeInfoReferences(entry, ScopeInfo::cast(obj));
1135   }
1136 }
1137 
1138 void V8HeapExplorer::ExtractJSGlobalProxyReferences(HeapEntry* entry,
1139                                                     JSGlobalProxy proxy) {
1140   SetInternalReference(entry, "native_context", proxy.native_context(),
1141                        JSGlobalProxy::kNativeContextOffset);
1142 }
1143 
1144 void V8HeapExplorer::ExtractJSObjectReferences(HeapEntry* entry,
1145                                                JSObject js_obj) {
1146   HeapObject obj = js_obj;
1147   ExtractPropertyReferences(js_obj, entry);
1148   ExtractElementReferences(js_obj, entry);
1149   ExtractInternalReferences(js_obj, entry);
1150   Isolate* isolate = Isolate::FromHeap(heap_);
1151   PrototypeIterator iter(isolate, js_obj);
1152   ReadOnlyRoots roots(isolate);
1153   SetPropertyReference(entry, roots.proto_string(), iter.GetCurrent());
1154   if (obj.IsJSBoundFunction()) {
1155     JSBoundFunction js_fun = JSBoundFunction::cast(obj);
1156     TagObject(js_fun.bound_arguments(), "(bound arguments)");
1157     SetInternalReference(entry, "bindings", js_fun.bound_arguments(),
1158                          JSBoundFunction::kBoundArgumentsOffset);
1159     SetInternalReference(entry, "bound_this", js_fun.bound_this(),
1160                          JSBoundFunction::kBoundThisOffset);
1161     SetInternalReference(entry, "bound_function",
1162                          js_fun.bound_target_function(),
1163                          JSBoundFunction::kBoundTargetFunctionOffset);
1164     FixedArray bindings = js_fun.bound_arguments();
1165     for (int i = 0; i < bindings.length(); i++) {
1166       const char* reference_name = names_->GetFormatted("bound_argument_%d", i);
1167       SetNativeBindReference(entry, reference_name, bindings.get(i));
1168     }
1169   } else if (obj.IsJSFunction()) {
1170     JSFunction js_fun = JSFunction::cast(js_obj);
1171     if (js_fun.has_prototype_slot()) {
1172       Object proto_or_map = js_fun.prototype_or_initial_map(kAcquireLoad);
1173       if (!proto_or_map.IsTheHole(isolate)) {
1174         if (!proto_or_map.IsMap()) {
1175           SetPropertyReference(entry, roots.prototype_string(), proto_or_map,
1176                                nullptr,
1177                                JSFunction::kPrototypeOrInitialMapOffset);
1178         } else {
1179           SetPropertyReference(entry, roots.prototype_string(),
1180                                js_fun.prototype());
1181           SetInternalReference(entry, "initial_map", proto_or_map,
1182                                JSFunction::kPrototypeOrInitialMapOffset);
1183         }
1184       }
1185     }
1186     SharedFunctionInfo shared_info = js_fun.shared();
1187     TagObject(js_fun.raw_feedback_cell(), "(function feedback cell)");
1188     SetInternalReference(entry, "feedback_cell", js_fun.raw_feedback_cell(),
1189                          JSFunction::kFeedbackCellOffset);
1190     TagObject(shared_info, "(shared function info)");
1191     SetInternalReference(entry, "shared", shared_info,
1192                          JSFunction::kSharedFunctionInfoOffset);
1193     TagObject(js_fun.context(), "(context)");
1194     SetInternalReference(entry, "context", js_fun.context(),
1195                          JSFunction::kContextOffset);
1196     SetInternalReference(entry, "code", js_fun.code(), JSFunction::kCodeOffset);
1197   } else if (obj.IsJSGlobalObject()) {
1198     JSGlobalObject global_obj = JSGlobalObject::cast(obj);
1199     SetInternalReference(entry, "native_context", global_obj.native_context(),
1200                          JSGlobalObject::kNativeContextOffset);
1201     SetInternalReference(entry, "global_proxy", global_obj.global_proxy(),
1202                          JSGlobalObject::kGlobalProxyOffset);
1203     STATIC_ASSERT(JSGlobalObject::kHeaderSize - JSObject::kHeaderSize ==
1204                   2 * kTaggedSize);
1205   } else if (obj.IsJSArrayBufferView()) {
1206     JSArrayBufferView view = JSArrayBufferView::cast(obj);
1207     SetInternalReference(entry, "buffer", view.buffer(),
1208                          JSArrayBufferView::kBufferOffset);
1209   }
1210 
1211   TagObject(js_obj.raw_properties_or_hash(), "(object properties)");
1212   SetInternalReference(entry, "properties", js_obj.raw_properties_or_hash(),
1213                        JSObject::kPropertiesOrHashOffset);
1214 
1215   TagObject(js_obj.elements(), "(object elements)");
1216   SetInternalReference(entry, "elements", js_obj.elements(),
1217                        JSObject::kElementsOffset);
1218 }
1219 
1220 void V8HeapExplorer::ExtractStringReferences(HeapEntry* entry, String string) {
1221   if (string.IsConsString()) {
1222     ConsString cs = ConsString::cast(string);
1223     SetInternalReference(entry, "first", cs.first(), ConsString::kFirstOffset);
1224     SetInternalReference(entry, "second", cs.second(),
1225                          ConsString::kSecondOffset);
1226   } else if (string.IsSlicedString()) {
1227     SlicedString ss = SlicedString::cast(string);
1228     SetInternalReference(entry, "parent", ss.parent(),
1229                          SlicedString::kParentOffset);
1230   } else if (string.IsThinString()) {
1231     ThinString ts = ThinString::cast(string);
1232     SetInternalReference(entry, "actual", ts.actual(),
1233                          ThinString::kActualOffset);
1234   }
1235 }
1236 
1237 void V8HeapExplorer::ExtractSymbolReferences(HeapEntry* entry, Symbol symbol) {
1238   SetInternalReference(entry, "name", symbol.description(),
1239                        Symbol::kDescriptionOffset);
1240 }
1241 
1242 void V8HeapExplorer::ExtractJSCollectionReferences(HeapEntry* entry,
1243                                                    JSCollection collection) {
1244   SetInternalReference(entry, "table", collection.table(),
1245                        JSCollection::kTableOffset);
1246 }
1247 
1248 void V8HeapExplorer::ExtractJSWeakCollectionReferences(HeapEntry* entry,
1249                                                        JSWeakCollection obj) {
1250   SetInternalReference(entry, "table", obj.table(),
1251                        JSWeakCollection::kTableOffset);
1252 }
1253 
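// For ephemeron hash tables (the backing store of WeakMap/WeakSet), the key
// and value slots are recorded as weak edges; in addition, a named ephemeron
// edge from the key entry (and from the table entry) to the value entry makes
// the key -> value retention visible in the snapshot.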
1254 void V8HeapExplorer::ExtractEphemeronHashTableReferences(
1255     HeapEntry* entry, EphemeronHashTable table) {
1256   for (InternalIndex i : table.IterateEntries()) {
1257     int key_index = EphemeronHashTable::EntryToIndex(i) +
1258                     EphemeronHashTable::kEntryKeyIndex;
1259     int value_index = EphemeronHashTable::EntryToValueIndex(i);
1260     Object key = table.get(key_index);
1261     Object value = table.get(value_index);
1262     SetWeakReference(entry, key_index, key, table.OffsetOfElementAt(key_index));
1263     SetWeakReference(entry, value_index, value,
1264                      table.OffsetOfElementAt(value_index));
1265     HeapEntry* key_entry = GetEntry(key);
1266     HeapEntry* value_entry = GetEntry(value);
1267     HeapEntry* table_entry = GetEntry(table);
1268     if (key_entry && value_entry && !key.IsUndefined()) {
1269       const char* edge_name = names_->GetFormatted(
1270           "part of key (%s @%u) -> value (%s @%u) pair in WeakMap (table @%u)",
1271           key_entry->name(), key_entry->id(), value_entry->name(),
1272           value_entry->id(), table_entry->id());
1273       key_entry->SetNamedAutoIndexReference(HeapGraphEdge::kInternal, edge_name,
1274                                             value_entry, names_, generator_,
1275                                             HeapEntry::kEphemeron);
1276       table_entry->SetNamedAutoIndexReference(
1277           HeapGraphEdge::kInternal, edge_name, value_entry, names_, generator_,
1278           HeapEntry::kEphemeron);
1279     }
1280   }
1281 }
1282 
1283 // This static array is used to prevent excessive code size in
1284 // ExtractContextReferences below, which would result if we called
1285 // SetInternalReference for every native context field via a macro.
1286 static const struct {
1287   int index;
1288   const char* name;
1289 } native_context_names[] = {
1290 #define CONTEXT_FIELD_INDEX_NAME(index, _, name) {Context::index, #name},
1291     NATIVE_CONTEXT_FIELDS(CONTEXT_FIELD_INDEX_NAME)
1292 #undef CONTEXT_FIELD_INDEX_NAME
1293 };
1294 
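// Emits context-variable edges for the locals of declaration contexts, plus
// internal edges for scope_info, previous and extension; native contexts also
// get one edge per NATIVE_CONTEXT_FIELDS slot and weak edges for the
// optimized/deoptimized code lists.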
1295 void V8HeapExplorer::ExtractContextReferences(HeapEntry* entry,
1296                                               Context context) {
1297   DisallowGarbageCollection no_gc;
1298   if (!context.IsNativeContext() && context.is_declaration_context()) {
1299     ScopeInfo scope_info = context.scope_info();
1300     // Add context-allocated locals.
1301     for (auto it : ScopeInfo::IterateLocalNames(&scope_info, no_gc)) {
1302       int idx = scope_info.ContextHeaderLength() + it->index();
1303       SetContextReference(entry, it->name(), context.get(idx),
1304                           Context::OffsetOfElementAt(idx));
1305     }
1306     if (scope_info.HasContextAllocatedFunctionName()) {
1307       String name = String::cast(scope_info.FunctionName());
1308       int idx = scope_info.FunctionContextSlotIndex(name);
1309       if (idx >= 0) {
1310         SetContextReference(entry, name, context.get(idx),
1311                             Context::OffsetOfElementAt(idx));
1312       }
1313     }
1314   }
1315 
1316   SetInternalReference(
1317       entry, "scope_info", context.get(Context::SCOPE_INFO_INDEX),
1318       FixedArray::OffsetOfElementAt(Context::SCOPE_INFO_INDEX));
1319   SetInternalReference(entry, "previous", context.get(Context::PREVIOUS_INDEX),
1320                        FixedArray::OffsetOfElementAt(Context::PREVIOUS_INDEX));
1321   if (context.has_extension()) {
1322     SetInternalReference(
1323         entry, "extension", context.get(Context::EXTENSION_INDEX),
1324         FixedArray::OffsetOfElementAt(Context::EXTENSION_INDEX));
1325   }
1326 
1327   if (context.IsNativeContext()) {
1328     TagObject(context.normalized_map_cache(), "(context norm. map cache)");
1329     TagObject(context.embedder_data(), "(context data)");
1330     for (size_t i = 0; i < arraysize(native_context_names); i++) {
1331       int index = native_context_names[i].index;
1332       const char* name = native_context_names[i].name;
1333       SetInternalReference(entry, name, context.get(index),
1334                            FixedArray::OffsetOfElementAt(index));
1335     }
1336 
1337     SetWeakReference(entry, "optimized_code_list",
1338                      context.get(Context::OPTIMIZED_CODE_LIST),
1339                      Context::OffsetOfElementAt(Context::OPTIMIZED_CODE_LIST),
1340                      HeapEntry::kCustomWeakPointer);
1341     SetWeakReference(entry, "deoptimized_code_list",
1342                      context.get(Context::DEOPTIMIZED_CODE_LIST),
1343                      Context::OffsetOfElementAt(Context::DEOPTIMIZED_CODE_LIST),
1344                      HeapEntry::kCustomWeakPointer);
1345     STATIC_ASSERT(Context::OPTIMIZED_CODE_LIST == Context::FIRST_WEAK_SLOT);
1346     STATIC_ASSERT(Context::NEXT_CONTEXT_LINK + 1 ==
1347                   Context::NATIVE_CONTEXT_SLOTS);
1348     STATIC_ASSERT(Context::FIRST_WEAK_SLOT + 3 ==
1349                   Context::NATIVE_CONTEXT_SLOTS);
1350   }
1351 }
1352 
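// Map::raw_transitions() is overloaded: a weak pointer is a single transition
// target Map, while a strong pointer holds a TransitionArray, a FixedArray
// transition, or (for prototype maps) the PrototypeInfo.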
1353 void V8HeapExplorer::ExtractMapReferences(HeapEntry* entry, Map map) {
1354   MaybeObject maybe_raw_transitions_or_prototype_info = map.raw_transitions();
1355   HeapObject raw_transitions_or_prototype_info;
1356   if (maybe_raw_transitions_or_prototype_info->GetHeapObjectIfWeak(
1357           &raw_transitions_or_prototype_info)) {
1358     DCHECK(raw_transitions_or_prototype_info.IsMap());
1359     SetWeakReference(entry, "transition", raw_transitions_or_prototype_info,
1360                      Map::kTransitionsOrPrototypeInfoOffset);
1361   } else if (maybe_raw_transitions_or_prototype_info->GetHeapObjectIfStrong(
1362                  &raw_transitions_or_prototype_info)) {
1363     if (raw_transitions_or_prototype_info.IsTransitionArray()) {
1364       TransitionArray transitions =
1365           TransitionArray::cast(raw_transitions_or_prototype_info);
1366       if (map.CanTransition() && transitions.HasPrototypeTransitions()) {
1367         TagObject(transitions.GetPrototypeTransitions(),
1368                   "(prototype transitions)");
1369       }
1370       TagObject(transitions, "(transition array)");
1371       SetInternalReference(entry, "transitions", transitions,
1372                            Map::kTransitionsOrPrototypeInfoOffset);
1373     } else if (raw_transitions_or_prototype_info.IsFixedArray()) {
1374       TagObject(raw_transitions_or_prototype_info, "(transition)");
1375       SetInternalReference(entry, "transition",
1376                            raw_transitions_or_prototype_info,
1377                            Map::kTransitionsOrPrototypeInfoOffset);
1378     } else if (map.is_prototype_map()) {
1379       TagObject(raw_transitions_or_prototype_info, "prototype_info");
1380       SetInternalReference(entry, "prototype_info",
1381                            raw_transitions_or_prototype_info,
1382                            Map::kTransitionsOrPrototypeInfoOffset);
1383     }
1384   }
1385   DescriptorArray descriptors = map.instance_descriptors();
1386   TagObject(descriptors, "(map descriptors)");
1387   SetInternalReference(entry, "descriptors", descriptors,
1388                        Map::kInstanceDescriptorsOffset);
1389   SetInternalReference(entry, "prototype", map.prototype(),
1390                        Map::kPrototypeOffset);
1391   if (map.IsContextMap()) {
1392     Object native_context = map.native_context();
1393     TagObject(native_context, "(native context)");
1394     SetInternalReference(entry, "native_context", native_context,
1395                          Map::kConstructorOrBackPointerOrNativeContextOffset);
1396   } else {
1397     Object constructor_or_back_pointer = map.constructor_or_back_pointer();
1398     if (constructor_or_back_pointer.IsMap()) {
1399       TagObject(constructor_or_back_pointer, "(back pointer)");
1400       SetInternalReference(entry, "back_pointer", constructor_or_back_pointer,
1401                            Map::kConstructorOrBackPointerOrNativeContextOffset);
1402     } else if (constructor_or_back_pointer.IsFunctionTemplateInfo()) {
1403       TagObject(constructor_or_back_pointer, "(constructor function data)");
1404       SetInternalReference(entry, "constructor_function_data",
1405                            constructor_or_back_pointer,
1406                            Map::kConstructorOrBackPointerOrNativeContextOffset);
1407     } else {
1408       SetInternalReference(entry, "constructor", constructor_or_back_pointer,
1409                            Map::kConstructorOrBackPointerOrNativeContextOffset);
1410     }
1411   }
1412   TagObject(map.dependent_code(), "(dependent code)");
1413   SetInternalReference(entry, "dependent_code", map.dependent_code(),
1414                        Map::kDependentCodeOffset);
1415 }
1416 
1417 void V8HeapExplorer::ExtractSharedFunctionInfoReferences(
1418     HeapEntry* entry, SharedFunctionInfo shared) {
1419   std::unique_ptr<char[]> name = shared.DebugNameCStr();
1420   if (name[0] != '\0') {
1421     TagObject(FromCodeT(shared.GetCode()),
1422               names_->GetFormatted("(code for %s)", name.get()));
1423   } else {
1424     TagObject(FromCodeT(shared.GetCode()),
1425               names_->GetFormatted("(%s code)",
1426                                    CodeKindToString(shared.GetCode().kind())));
1427   }
1428 
1429   Object name_or_scope_info = shared.name_or_scope_info(kAcquireLoad);
1430   if (name_or_scope_info.IsScopeInfo()) {
1431     TagObject(name_or_scope_info, "(function scope info)");
1432   }
1433   SetInternalReference(entry, "name_or_scope_info", name_or_scope_info,
1434                        SharedFunctionInfo::kNameOrScopeInfoOffset);
1435   SetInternalReference(entry, "script_or_debug_info",
1436                        shared.script_or_debug_info(kAcquireLoad),
1437                        SharedFunctionInfo::kScriptOrDebugInfoOffset);
1438   SetInternalReference(entry, "function_data",
1439                        shared.function_data(kAcquireLoad),
1440                        SharedFunctionInfo::kFunctionDataOffset);
1441   SetInternalReference(
1442       entry, "raw_outer_scope_info_or_feedback_metadata",
1443       shared.raw_outer_scope_info_or_feedback_metadata(),
1444       SharedFunctionInfo::kOuterScopeInfoOrFeedbackMetadataOffset);
1445 }
1446 
1447 void V8HeapExplorer::ExtractScriptReferences(HeapEntry* entry, Script script) {
1448   SetInternalReference(entry, "source", script.source(), Script::kSourceOffset);
1449   SetInternalReference(entry, "name", script.name(), Script::kNameOffset);
1450   SetInternalReference(entry, "context_data", script.context_data(),
1451                        Script::kContextDataOffset);
1452   TagObject(script.line_ends(), "(script line ends)", HeapEntry::kCode);
1453   SetInternalReference(entry, "line_ends", script.line_ends(),
1454                        Script::kLineEndsOffset);
1455   TagObject(script.shared_function_infos(), "(shared function infos)",
1456             HeapEntry::kCode);
1457   TagObject(script.host_defined_options(), "(host-defined options)",
1458             HeapEntry::kCode);
1459 }
1460 
1461 void V8HeapExplorer::ExtractAccessorInfoReferences(HeapEntry* entry,
1462                                                    AccessorInfo accessor_info) {
1463   SetInternalReference(entry, "name", accessor_info.name(),
1464                        AccessorInfo::kNameOffset);
1465   SetInternalReference(entry, "expected_receiver_type",
1466                        accessor_info.expected_receiver_type(),
1467                        AccessorInfo::kExpectedReceiverTypeOffset);
1468   SetInternalReference(entry, "getter", accessor_info.getter(),
1469                        AccessorInfo::kGetterOffset);
1470   SetInternalReference(entry, "setter", accessor_info.setter(),
1471                        AccessorInfo::kSetterOffset);
1472   SetInternalReference(entry, "data", accessor_info.data(),
1473                        AccessorInfo::kDataOffset);
1474 }
1475 
1476 void V8HeapExplorer::ExtractAccessorPairReferences(HeapEntry* entry,
1477                                                    AccessorPair accessors) {
1478   SetInternalReference(entry, "getter", accessors.getter(),
1479                        AccessorPair::kGetterOffset);
1480   SetInternalReference(entry, "setter", accessors.setter(),
1481                        AccessorPair::kSetterOffset);
1482 }
1483 
1484 void V8HeapExplorer::ExtractJSWeakRefReferences(HeapEntry* entry,
1485                                                 JSWeakRef js_weak_ref) {
1486   SetWeakReference(entry, "target", js_weak_ref.target(),
1487                    JSWeakRef::kTargetOffset);
1488 }
1489 
1490 void V8HeapExplorer::ExtractWeakCellReferences(HeapEntry* entry,
1491                                                WeakCell weak_cell) {
1492   SetWeakReference(entry, "target", weak_cell.target(),
1493                    WeakCell::kTargetOffset);
1494   SetWeakReference(entry, "unregister_token", weak_cell.unregister_token(),
1495                    WeakCell::kUnregisterTokenOffset);
1496 }
1497 
1498 void V8HeapExplorer::TagBuiltinCodeObject(CodeT code, const char* name) {
1499   TagObject(FromCodeT(code), names_->GetFormatted("(%s builtin)", name));
1500 }
1501 
1502 void V8HeapExplorer::ExtractCodeReferences(HeapEntry* entry, Code code) {
1503   TagObject(code.relocation_info(), "(code relocation info)", HeapEntry::kCode);
1504   SetInternalReference(entry, "relocation_info", code.relocation_info(),
1505                        Code::kRelocationInfoOffset);
1506 
1507   if (code.kind() == CodeKind::BASELINE) {
1508     TagObject(code.bytecode_or_interpreter_data(), "(interpreter data)");
1509     SetInternalReference(entry, "interpreter_data",
1510                          code.bytecode_or_interpreter_data(),
1511                          Code::kDeoptimizationDataOrInterpreterDataOffset);
1512     TagObject(code.bytecode_offset_table(), "(bytecode offset table)",
1513               HeapEntry::kCode);
1514     SetInternalReference(entry, "bytecode_offset_table",
1515                          code.bytecode_offset_table(),
1516                          Code::kPositionTableOffset);
1517   } else {
1518     DeoptimizationData deoptimization_data =
1519         DeoptimizationData::cast(code.deoptimization_data());
1520     TagObject(deoptimization_data, "(code deopt data)", HeapEntry::kCode);
1521     SetInternalReference(entry, "deoptimization_data", deoptimization_data,
1522                          Code::kDeoptimizationDataOrInterpreterDataOffset);
1523     if (deoptimization_data.length() > 0) {
1524       TagObject(deoptimization_data.TranslationByteArray(), "(code deopt data)",
1525                 HeapEntry::kCode);
1526       TagObject(deoptimization_data.LiteralArray(), "(code deopt data)",
1527                 HeapEntry::kCode);
1528       TagObject(deoptimization_data.InliningPositions(), "(code deopt data)",
1529                 HeapEntry::kCode);
1530     }
1531     TagObject(code.source_position_table(), "(source position table)",
1532               HeapEntry::kCode);
1533     SetInternalReference(entry, "source_position_table",
1534                          code.source_position_table(),
1535                          Code::kPositionTableOffset);
1536   }
1537 }
1538 
1539 void V8HeapExplorer::ExtractCellReferences(HeapEntry* entry, Cell cell) {
1540   SetInternalReference(entry, "value", cell.value(), Cell::kValueOffset);
1541 }
1542 
1543 void V8HeapExplorer::ExtractFeedbackCellReferences(HeapEntry* entry,
1544                                                    FeedbackCell feedback_cell) {
1545   TagObject(feedback_cell, "(feedback cell)");
1546   SetInternalReference(entry, "value", feedback_cell.value(),
1547                        FeedbackCell::kValueOffset);
1548 }
1549 
1550 void V8HeapExplorer::ExtractPropertyCellReferences(HeapEntry* entry,
1551                                                    PropertyCell cell) {
1552   SetInternalReference(entry, "value", cell.value(),
1553                        PropertyCell::kValueOffset);
1554   TagObject(cell.dependent_code(), "(dependent code)");
1555   SetInternalReference(entry, "dependent_code", cell.dependent_code(),
1556                        PropertyCell::kDependentCodeOffset);
1557 }
1558 
1559 void V8HeapExplorer::ExtractAllocationSiteReferences(HeapEntry* entry,
1560                                                      AllocationSite site) {
1561   SetInternalReference(entry, "transition_info",
1562                        site.transition_info_or_boilerplate(),
1563                        AllocationSite::kTransitionInfoOrBoilerplateOffset);
1564   SetInternalReference(entry, "nested_site", site.nested_site(),
1565                        AllocationSite::kNestedSiteOffset);
1566   TagObject(site.dependent_code(), "(dependent code)", HeapEntry::kCode);
1567   SetInternalReference(entry, "dependent_code", site.dependent_code(),
1568                        AllocationSite::kDependentCodeOffset);
1569 }
1570 
1571 void V8HeapExplorer::ExtractArrayBoilerplateDescriptionReferences(
1572     HeapEntry* entry, ArrayBoilerplateDescription value) {
1573   FixedArrayBase constant_elements = value.constant_elements();
1574   SetInternalReference(entry, "constant_elements", constant_elements,
1575                        ArrayBoilerplateDescription::kConstantElementsOffset);
1576   TagObject(constant_elements, "(constant elements)", HeapEntry::kCode);
1577 }
1578 
1579 void V8HeapExplorer::ExtractRegExpBoilerplateDescriptionReferences(
1580     HeapEntry* entry, RegExpBoilerplateDescription value) {
1581   TagObject(value.data(), "(RegExp data)", HeapEntry::kCode);
1582 }
1583 
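// Allocates a synthetic "native" entry for an ArrayBuffer's off-heap backing
// store, sized to the buffer's byte length.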
1584 class JSArrayBufferDataEntryAllocator : public HeapEntriesAllocator {
1585  public:
1586   JSArrayBufferDataEntryAllocator(size_t size, V8HeapExplorer* explorer)
1587       : size_(size)
1588       , explorer_(explorer) {
1589   }
1590   HeapEntry* AllocateEntry(HeapThing ptr) override {
1591     return explorer_->AddEntry(reinterpret_cast<Address>(ptr),
1592                                HeapEntry::kNative, "system / JSArrayBufferData",
1593                                size_);
1594   }
1595   HeapEntry* AllocateEntry(Smi smi) override {
1596     DCHECK(false);
1597     return nullptr;
1598   }
1599 
1600  private:
1601   size_t size_;
1602   V8HeapExplorer* explorer_;
1603 };
1604 
1605 void V8HeapExplorer::ExtractJSArrayBufferReferences(HeapEntry* entry,
1606                                                     JSArrayBuffer buffer) {
1607   // Set up a reference to the native memory backing_store object.
1608   if (!buffer.backing_store()) return;
1609   size_t data_size = buffer.byte_length();
1610   JSArrayBufferDataEntryAllocator allocator(data_size, this);
1611   HeapEntry* data_entry =
1612       generator_->FindOrAddEntry(buffer.backing_store(), &allocator);
1613   entry->SetNamedReference(HeapGraphEdge::kInternal, "backing_store",
1614                            data_entry, generator_, HeapEntry::kOffHeapPointer);
1615 }
1616 
1617 void V8HeapExplorer::ExtractJSPromiseReferences(HeapEntry* entry,
1618                                                 JSPromise promise) {
1619   SetInternalReference(entry, "reactions_or_result",
1620                        promise.reactions_or_result(),
1621                        JSPromise::kReactionsOrResultOffset);
1622 }
1623 
1624 void V8HeapExplorer::ExtractJSGeneratorObjectReferences(
1625     HeapEntry* entry, JSGeneratorObject generator) {
1626   SetInternalReference(entry, "function", generator.function(),
1627                        JSGeneratorObject::kFunctionOffset);
1628   SetInternalReference(entry, "context", generator.context(),
1629                        JSGeneratorObject::kContextOffset);
1630   SetInternalReference(entry, "receiver", generator.receiver(),
1631                        JSGeneratorObject::kReceiverOffset);
1632   SetInternalReference(entry, "parameters_and_registers",
1633                        generator.parameters_and_registers(),
1634                        JSGeneratorObject::kParametersAndRegistersOffset);
1635 }
1636 
1637 void V8HeapExplorer::ExtractFixedArrayReferences(HeapEntry* entry,
1638                                                  FixedArray array) {
1639   for (int i = 0, l = array.length(); i < l; ++i) {
1640     DCHECK(!HasWeakHeapObjectTag(array.get(i)));
1641     SetInternalReference(entry, i, array.get(i), array.OffsetOfElementAt(i));
1642   }
1643 }
1644 
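// Numbers are materialized in the snapshot as synthetic string entries holding
// the printed value; each gets a fresh id and a self-size of zero.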
1645 void V8HeapExplorer::ExtractNumberReference(HeapEntry* entry, Object number) {
1646   DCHECK(number.IsNumber());
1647 
1648   // Must be large enough to fit any double, int, or size_t.
1649   char arr[32];
1650   base::Vector<char> buffer(arr, arraysize(arr));
1651 
1652   const char* string;
1653   if (number.IsSmi()) {
1654     int int_value = Smi::ToInt(number);
1655     string = IntToCString(int_value, buffer);
1656   } else {
1657     double double_value = HeapNumber::cast(number).value();
1658     string = DoubleToCString(double_value, buffer);
1659   }
1660 
1661   const char* name = names_->GetCopy(string);
1662 
1663   SnapshotObjectId id = heap_object_map_->get_next_id();
1664   HeapEntry* child_entry =
1665       snapshot_->AddEntry(HeapEntry::kString, name, id, 0, 0);
1666   entry->SetNamedReference(HeapGraphEdge::kInternal, "value", child_entry,
1667                            generator_);
1668 }
1669 
1670 void V8HeapExplorer::ExtractBytecodeArrayReferences(HeapEntry* entry,
1671                                                     BytecodeArray bytecode) {
1672   RecursivelyTagConstantPool(bytecode.constant_pool(), "(constant pool)",
1673                              HeapEntry::kCode, 3);
1674   TagObject(bytecode.handler_table(), "(handler table)", HeapEntry::kCode);
1675   TagObject(bytecode.source_position_table(kAcquireLoad),
1676             "(source position table)", HeapEntry::kCode);
1677 }
1678 
1679 void V8HeapExplorer::ExtractScopeInfoReferences(HeapEntry* entry,
1680                                                 ScopeInfo info) {
1681   if (!info.HasInlinedLocalNames()) {
1682     TagObject(info.context_local_names_hashtable(), "(context local names)",
1683               HeapEntry::kCode);
1684   }
1685 }
1686 
1687 void V8HeapExplorer::ExtractFeedbackVectorReferences(
1688     HeapEntry* entry, FeedbackVector feedback_vector) {
1689   MaybeObject code = feedback_vector.maybe_optimized_code();
1690   HeapObject code_heap_object;
1691   if (code->GetHeapObjectIfWeak(&code_heap_object)) {
1692     SetWeakReference(entry, "optimized code", code_heap_object,
1693                      FeedbackVector::kMaybeOptimizedCodeOffset);
1694   }
1695   for (int i = 0; i < feedback_vector.length(); ++i) {
1696     MaybeObject maybe_entry = *(feedback_vector.slots_start() + i);
1697     HeapObject entry;
1698     if (maybe_entry.GetHeapObjectIfStrong(&entry) &&
1699         (entry.map(isolate()).instance_type() == WEAK_FIXED_ARRAY_TYPE ||
1700          entry.IsFixedArrayExact())) {
1701       TagObject(entry, "(feedback)", HeapEntry::kCode);
1702     }
1703   }
1704 }
1705 
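// Scans the raw descriptor slots: weak slots become weak edges and strong
// slots become internal edges, both keyed by slot index.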
1706 void V8HeapExplorer::ExtractDescriptorArrayReferences(HeapEntry* entry,
1707                                                       DescriptorArray array) {
1708   SetInternalReference(entry, "enum_cache", array.enum_cache(),
1709                        DescriptorArray::kEnumCacheOffset);
1710   MaybeObjectSlot start = MaybeObjectSlot(array.GetDescriptorSlot(0));
1711   MaybeObjectSlot end = MaybeObjectSlot(
1712       array.GetDescriptorSlot(array.number_of_all_descriptors()));
1713   for (int i = 0; start + i < end; ++i) {
1714     MaybeObjectSlot slot = start + i;
1715     int offset = static_cast<int>(slot.address() - array.address());
1716     MaybeObject object = *slot;
1717     HeapObject heap_object;
1718     if (object->GetHeapObjectIfWeak(&heap_object)) {
1719       SetWeakReference(entry, i, heap_object, offset);
1720     } else if (object->GetHeapObjectIfStrong(&heap_object)) {
1721       SetInternalReference(entry, i, heap_object, offset);
1722     }
1723   }
1724 }
1725 
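// Shared by weak array types such as WeakArrayList: strong slots become
// internal edges, weak slots become weak edges, and cleared slots are skipped.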
1726 template <typename T>
1727 void V8HeapExplorer::ExtractWeakArrayReferences(int header_size,
1728                                                 HeapEntry* entry, T array) {
1729   for (int i = 0; i < array.length(); ++i) {
1730     MaybeObject object = array.Get(i);
1731     HeapObject heap_object;
1732     if (object->GetHeapObjectIfWeak(&heap_object)) {
1733       SetWeakReference(entry, i, heap_object, header_size + i * kTaggedSize);
1734     } else if (object->GetHeapObjectIfStrong(&heap_object)) {
1735       SetInternalReference(entry, i, heap_object,
1736                            header_size + i * kTaggedSize);
1737     }
1738   }
1739 }
1740 
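// Walks own properties in all storage modes: fast properties via the
// descriptor array, global objects via their property-cell dictionary, and
// everything else via the (Swiss or legacy) name dictionary.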
1741 void V8HeapExplorer::ExtractPropertyReferences(JSObject js_obj,
1742                                                HeapEntry* entry) {
1743   Isolate* isolate = js_obj.GetIsolate();
1744   if (js_obj.HasFastProperties()) {
1745     DescriptorArray descs = js_obj.map().instance_descriptors(isolate);
1746     for (InternalIndex i : js_obj.map().IterateOwnDescriptors()) {
1747       PropertyDetails details = descs.GetDetails(i);
1748       switch (details.location()) {
1749         case PropertyLocation::kField: {
1750           if (!snapshot_->capture_numeric_value()) {
1751             Representation r = details.representation();
1752             if (r.IsSmi() || r.IsDouble()) break;
1753           }
1754 
1755           Name k = descs.GetKey(i);
1756           FieldIndex field_index = FieldIndex::ForDescriptor(js_obj.map(), i);
1757           Object value = js_obj.RawFastPropertyAt(field_index);
1758           int field_offset =
1759               field_index.is_inobject() ? field_index.offset() : -1;
1760 
1761           SetDataOrAccessorPropertyReference(details.kind(), entry, k, value,
1762                                              nullptr, field_offset);
1763           break;
1764         }
1765         case PropertyLocation::kDescriptor:
1766           SetDataOrAccessorPropertyReference(
1767               details.kind(), entry, descs.GetKey(i), descs.GetStrongValue(i));
1768           break;
1769       }
1770     }
1771   } else if (js_obj.IsJSGlobalObject()) {
1772     // We assume that global objects can only have slow properties.
1773     GlobalDictionary dictionary =
1774         JSGlobalObject::cast(js_obj).global_dictionary(kAcquireLoad);
1775     ReadOnlyRoots roots(isolate);
1776     for (InternalIndex i : dictionary.IterateEntries()) {
1777       if (!dictionary.IsKey(roots, dictionary.KeyAt(i))) continue;
1778       PropertyCell cell = dictionary.CellAt(i);
1779       Name name = cell.name();
1780       Object value = cell.value();
1781       PropertyDetails details = cell.property_details();
1782       SetDataOrAccessorPropertyReference(details.kind(), entry, name, value);
1783     }
1784   } else if (V8_ENABLE_SWISS_NAME_DICTIONARY_BOOL) {
1785     // SwissNameDictionary::IterateEntries creates a Handle, which should not
1786     // leak out of here.
1787     HandleScope scope(isolate);
1788 
1789     SwissNameDictionary dictionary = js_obj.property_dictionary_swiss();
1790     ReadOnlyRoots roots(isolate);
1791     for (InternalIndex i : dictionary.IterateEntries()) {
1792       Object k = dictionary.KeyAt(i);
1793       if (!dictionary.IsKey(roots, k)) continue;
1794       Object value = dictionary.ValueAt(i);
1795       PropertyDetails details = dictionary.DetailsAt(i);
1796       SetDataOrAccessorPropertyReference(details.kind(), entry, Name::cast(k),
1797                                          value);
1798     }
1799   } else {
1800     NameDictionary dictionary = js_obj.property_dictionary();
1801     ReadOnlyRoots roots(isolate);
1802     for (InternalIndex i : dictionary.IterateEntries()) {
1803       Object k = dictionary.KeyAt(i);
1804       if (!dictionary.IsKey(roots, k)) continue;
1805       Object value = dictionary.ValueAt(i);
1806       PropertyDetails details = dictionary.DetailsAt(i);
1807       SetDataOrAccessorPropertyReference(details.kind(), entry, Name::cast(k),
1808                                          value);
1809     }
1810   }
1811 }
1812 
1813 void V8HeapExplorer::ExtractAccessorPairProperty(HeapEntry* entry, Name key,
1814                                                  Object callback_obj,
1815                                                  int field_offset) {
1816   if (!callback_obj.IsAccessorPair()) return;
1817   AccessorPair accessors = AccessorPair::cast(callback_obj);
1818   SetPropertyReference(entry, key, accessors, nullptr, field_offset);
1819   Object getter = accessors.getter();
1820   if (!getter.IsOddball()) {
1821     SetPropertyReference(entry, key, getter, "get %s");
1822   }
1823   Object setter = accessors.setter();
1824   if (!setter.IsOddball()) {
1825     SetPropertyReference(entry, key, setter, "set %s");
1826   }
1827 }
1828 
1829 void V8HeapExplorer::ExtractElementReferences(JSObject js_obj,
1830                                               HeapEntry* entry) {
1831   ReadOnlyRoots roots = js_obj.GetReadOnlyRoots();
1832   if (js_obj.HasObjectElements()) {
1833     FixedArray elements = FixedArray::cast(js_obj.elements());
1834     int length = js_obj.IsJSArray() ? Smi::ToInt(JSArray::cast(js_obj).length())
1835                                     : elements.length();
1836     for (int i = 0; i < length; ++i) {
1837       if (!elements.get(i).IsTheHole(roots)) {
1838         SetElementReference(entry, i, elements.get(i));
1839       }
1840     }
1841   } else if (js_obj.HasDictionaryElements()) {
1842     NumberDictionary dictionary = js_obj.element_dictionary();
1843     for (InternalIndex i : dictionary.IterateEntries()) {
1844       Object k = dictionary.KeyAt(i);
1845       if (!dictionary.IsKey(roots, k)) continue;
1846       DCHECK(k.IsNumber());
1847       uint32_t index = static_cast<uint32_t>(k.Number());
1848       SetElementReference(entry, index, dictionary.ValueAt(i));
1849     }
1850   }
1851 }
1852 
1853 void V8HeapExplorer::ExtractInternalReferences(JSObject js_obj,
1854                                                HeapEntry* entry) {
1855   int length = js_obj.GetEmbedderFieldCount();
1856   for (int i = 0; i < length; ++i) {
1857     Object o = js_obj.GetEmbedderField(i);
1858     SetInternalReference(entry, i, o, js_obj.GetEmbedderFieldOffset(i));
1859   }
1860 }
1861 
1862 JSFunction V8HeapExplorer::GetConstructor(Isolate* isolate,
1863                                           JSReceiver receiver) {
1864   DisallowGarbageCollection no_gc;
1865   HandleScope scope(isolate);
1866   MaybeHandle<JSFunction> maybe_constructor =
1867       JSReceiver::GetConstructor(isolate, handle(receiver, isolate));
1868 
1869   if (maybe_constructor.is_null()) return JSFunction();
1870 
1871   return *maybe_constructor.ToHandleChecked();
1872 }
1873 
1874 String V8HeapExplorer::GetConstructorName(Isolate* isolate, JSObject object) {
1875   if (object.IsJSFunction()) return ReadOnlyRoots(isolate).closure_string();
1876   DisallowGarbageCollection no_gc;
1877   HandleScope scope(isolate);
1878   return *JSReceiver::GetConstructorName(isolate, handle(object, isolate));
1879 }
1880 
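// Returns the snapshot entry for `obj`, creating it on demand; Smis only get
// entries when numeric-value capture is enabled.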
1881 HeapEntry* V8HeapExplorer::GetEntry(Object obj) {
1882   if (obj.IsHeapObject()) {
1883     return generator_->FindOrAddEntry(reinterpret_cast<void*>(obj.ptr()), this);
1884   }
1885 
1886   DCHECK(obj.IsSmi());
1887   if (!snapshot_->capture_numeric_value()) {
1888     return nullptr;
1889   }
1890   return generator_->FindOrAddEntry(Smi::cast(obj), this);
1891 }
1892 
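// Visits VM roots and reports each one as a gc-subroot edge; builtin roots are
// additionally tagged with their builtin name.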
1893 class RootsReferencesExtractor : public RootVisitor {
1894  public:
1895   explicit RootsReferencesExtractor(V8HeapExplorer* explorer)
1896       : explorer_(explorer), visiting_weak_roots_(false) {}
1897 
1898   void SetVisitingWeakRoots() { visiting_weak_roots_ = true; }
1899 
1900   void VisitRootPointer(Root root, const char* description,
1901                         FullObjectSlot object) override {
1902     if (root == Root::kBuiltins) {
1903       explorer_->TagBuiltinCodeObject(CodeT::cast(*object), description);
1904     }
1905     explorer_->SetGcSubrootReference(root, description, visiting_weak_roots_,
1906                                      *object);
1907   }
1908 
1909   void VisitRootPointers(Root root, const char* description,
1910                          FullObjectSlot start, FullObjectSlot end) override {
1911     for (FullObjectSlot p = start; p < end; ++p) {
1912       DCHECK(!MapWord::IsPacked(p.Relaxed_Load().ptr()));
1913       VisitRootPointer(root, description, p);
1914     }
1915   }
1916 
1917   void VisitRootPointers(Root root, const char* description,
1918                          OffHeapObjectSlot start,
1919                          OffHeapObjectSlot end) override {
1920     DCHECK_EQ(root, Root::kStringTable);
1921     PtrComprCageBase cage_base(explorer_->heap_->isolate());
1922     for (OffHeapObjectSlot p = start; p < end; ++p) {
1923       explorer_->SetGcSubrootReference(root, description, visiting_weak_roots_,
1924                                        p.load(cage_base));
1925     }
1926   }
1927 
1928   void VisitRunningCode(FullObjectSlot p) override {
1929     // Must match behavior in
1930     // MarkCompactCollector::RootMarkingVisitor::VisitRunningCode, which treats
1931     // deoptimization literals in running code as stack roots.
1932     Code code = Code::cast(*p);
1933     if (code.kind() != CodeKind::BASELINE) {
1934       DeoptimizationData deopt_data =
1935           DeoptimizationData::cast(code.deoptimization_data());
1936       if (deopt_data.length() > 0) {
1937         DeoptimizationLiteralArray literals = deopt_data.LiteralArray();
1938         int literals_length = literals.length();
1939         for (int i = 0; i < literals_length; ++i) {
1940           MaybeObject maybe_literal = literals.Get(i);
1941           HeapObject heap_literal;
1942           if (maybe_literal.GetHeapObject(&heap_literal)) {
1943             VisitRootPointer(Root::kStackRoots, nullptr,
1944                              FullObjectSlot(&heap_literal));
1945           }
1946         }
1947       }
1948     }
1949 
1950     // Finally visit the Code itself.
1951     VisitRootPointer(Root::kStackRoots, nullptr, p);
1952   }
1953 
1954  private:
1955   V8HeapExplorer* explorer_;
1956   bool visiting_weak_roots_;
1957 };
1958 
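// Drives the extraction pass: wires up the synthetic root entries, visits the
// GC roots, then walks every reachable heap object and records its references.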
1959 bool V8HeapExplorer::IterateAndExtractReferences(
1960     HeapSnapshotGenerator* generator) {
1961   generator_ = generator;
1962 
1963   // Create references to the synthetic roots.
1964   SetRootGcRootsReference();
1965   for (int root = 0; root < static_cast<int>(Root::kNumberOfRoots); root++) {
1966     SetGcRootsReference(static_cast<Root>(root));
1967   }
1968 
1969   // Make sure builtin code objects get their builtin tags
1970   // first. Otherwise a particular JSFunction object could attach
1971   // its custom name to a generic builtin.
1972   RootsReferencesExtractor extractor(this);
1973   ReadOnlyRoots(heap_).Iterate(&extractor);
1974   heap_->IterateRoots(&extractor, base::EnumSet<SkipRoot>{SkipRoot::kWeak});
1975   // TODO(v8:11800): The heap snapshot generator incorrectly considers the weak
1976   // string tables as strong retainers. Move IterateWeakRoots after
1977   // SetVisitingWeakRoots.
1978   heap_->IterateWeakRoots(&extractor, {});
1979   extractor.SetVisitingWeakRoots();
1980   heap_->IterateWeakGlobalHandles(&extractor);
1981 
1982   bool interrupted = false;
1983 
1984   CombinedHeapObjectIterator iterator(heap_,
1985                                       HeapObjectIterator::kFilterUnreachable);
1986   PtrComprCageBase cage_base(heap_->isolate());
1987   // Heap iteration with filtering must run to completion even if interrupted.
1988   for (HeapObject obj = iterator.Next(); !obj.is_null();
1989        obj = iterator.Next(), progress_->ProgressStep()) {
1990     if (interrupted) continue;
1991 
1992     size_t max_pointer = obj.Size(cage_base) / kTaggedSize;
1993     if (max_pointer > visited_fields_.size()) {
1994       // Clear the current bits.
1995       std::vector<bool>().swap(visited_fields_);
1996       // Reallocate to the right size.
1997       visited_fields_.resize(max_pointer, false);
1998     }
1999 
2000 #ifdef V8_ENABLE_HEAP_SNAPSHOT_VERIFY
2001     std::unique_ptr<HeapEntryVerifier> verifier;
2002     // MarkingVisitorBase doesn't expect that we will ever visit read-only
2003     // objects, and fails DCHECKs if we attempt to. Read-only objects can
2004     // never retain read-write objects, so there is no risk in skipping
2005     // verification for them.
2006     if (FLAG_heap_snapshot_verify &&
2007         !BasicMemoryChunk::FromHeapObject(obj)->InReadOnlySpace()) {
2008       verifier = std::make_unique<HeapEntryVerifier>(generator, obj);
2009     }
2010 #endif
2011 
2012     HeapEntry* entry = GetEntry(obj);
2013     ExtractReferences(entry, obj);
2014     SetInternalReference(entry, "map", obj.map(cage_base),
2015                          HeapObject::kMapOffset);
2016     // Extract unvisited fields as hidden references and restore tags
2017     // of visited fields.
2018     IndexedReferencesExtractor refs_extractor(this, obj, entry);
2019     obj.Iterate(cage_base, &refs_extractor);
2020 
2021     // Ensure visited_fields_ doesn't leak to the next object.
2022     for (size_t i = 0; i < max_pointer; ++i) {
2023       DCHECK(!visited_fields_[i]);
2024     }
2025 
2026     // Extract the location for specific object types.
2027     ExtractLocation(entry, obj);
2028 
2029     if (!progress_->ProgressReport(false)) interrupted = true;
2030   }
2031 
2032   generator_ = nullptr;
2033   return interrupted ? false : progress_->ProgressReport(true);
2034 }
2035 
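// Filters out Smis, oddballs and a few read-only singletons (empty arrays,
// common maps) that would otherwise clutter every snapshot.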
2036 bool V8HeapExplorer::IsEssentialObject(Object object) {
2037   Isolate* isolate = heap_->isolate();
2038   ReadOnlyRoots roots(isolate);
2039   return object.IsHeapObject() && !object.IsOddball(isolate) &&
2040          object != roots.empty_byte_array() &&
2041          object != roots.empty_fixed_array() &&
2042          object != roots.empty_weak_fixed_array() &&
2043          object != roots.empty_descriptor_array() &&
2044          object != roots.fixed_array_map() && object != roots.cell_map() &&
2045          object != roots.global_property_cell_map() &&
2046          object != roots.shared_function_info_map() &&
2047          object != roots.free_space_map() &&
2048          object != roots.one_pointer_filler_map() &&
2049          object != roots.two_pointer_filler_map();
2050 }
2051 
2052 bool V8HeapExplorer::IsEssentialHiddenReference(Object parent,
2053                                                 int field_offset) {
2054   if (parent.IsAllocationSite() &&
2055       field_offset == AllocationSite::kWeakNextOffset)
2056     return false;
2057   if (parent.IsCodeDataContainer() &&
2058       field_offset == CodeDataContainer::kNextCodeLinkOffset)
2059     return false;
2060   if (parent.IsContext() &&
2061       field_offset == Context::OffsetOfElementAt(Context::NEXT_CONTEXT_LINK))
2062     return false;
2063   if (parent.IsJSFinalizationRegistry() &&
2064       field_offset == JSFinalizationRegistry::kNextDirtyOffset)
2065     return false;
2066   return true;
2067 }
2068 
2069 void V8HeapExplorer::SetContextReference(HeapEntry* parent_entry,
2070                                          String reference_name,
2071                                          Object child_obj, int field_offset) {
2072   HeapEntry* child_entry = GetEntry(child_obj);
2073   if (child_entry == nullptr) return;
2074   parent_entry->SetNamedReference(HeapGraphEdge::kContextVariable,
2075                                   names_->GetName(reference_name), child_entry,
2076                                   generator_);
2077   MarkVisitedField(field_offset);
2078 }
2079 
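// Records that the field at `offset` was already reported as a named edge so
// that IndexedReferencesExtractor does not also emit a hidden edge for it.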
2080 void V8HeapExplorer::MarkVisitedField(int offset) {
2081   if (offset < 0) return;
2082   int index = offset / kTaggedSize;
2083   DCHECK(!visited_fields_[index]);
2084   visited_fields_[index] = true;
2085 }
2086 
2087 void V8HeapExplorer::SetNativeBindReference(HeapEntry* parent_entry,
2088                                             const char* reference_name,
2089                                             Object child_obj) {
2090   HeapEntry* child_entry = GetEntry(child_obj);
2091   if (child_entry == nullptr) return;
2092   parent_entry->SetNamedReference(HeapGraphEdge::kShortcut, reference_name,
2093                                   child_entry, generator_);
2094 }
2095 
2096 void V8HeapExplorer::SetElementReference(HeapEntry* parent_entry, int index,
2097                                          Object child_obj) {
2098   HeapEntry* child_entry = GetEntry(child_obj);
2099   if (child_entry == nullptr) return;
2100   parent_entry->SetIndexedReference(HeapGraphEdge::kElement, index, child_entry,
2101                                     generator_);
2102 }
2103 
2104 void V8HeapExplorer::SetInternalReference(HeapEntry* parent_entry,
2105                                           const char* reference_name,
2106                                           Object child_obj, int field_offset) {
2107   if (!IsEssentialObject(child_obj)) {
2108     return;
2109   }
2110   HeapEntry* child_entry = GetEntry(child_obj);
2111   DCHECK_NOT_NULL(child_entry);
2112   parent_entry->SetNamedReference(HeapGraphEdge::kInternal, reference_name,
2113                                   child_entry, generator_);
2114   MarkVisitedField(field_offset);
2115 }
2116 
2117 void V8HeapExplorer::SetInternalReference(HeapEntry* parent_entry, int index,
2118                                           Object child_obj, int field_offset) {
2119   if (!IsEssentialObject(child_obj)) {
2120     return;
2121   }
2122   HeapEntry* child_entry = GetEntry(child_obj);
2123   DCHECK_NOT_NULL(child_entry);
2124   parent_entry->SetNamedReference(HeapGraphEdge::kInternal,
2125                                   names_->GetName(index), child_entry,
2126                                   generator_);
2127   MarkVisitedField(field_offset);
2128 }
2129 
2130 void V8HeapExplorer::SetHiddenReference(HeapObject parent_obj,
2131                                         HeapEntry* parent_entry, int index,
2132                                         Object child_obj, int field_offset) {
2133   DCHECK_EQ(parent_entry, GetEntry(parent_obj));
2134   DCHECK(!MapWord::IsPacked(child_obj.ptr()));
2135   if (!IsEssentialObject(child_obj)) {
2136     return;
2137   }
2138   HeapEntry* child_entry = GetEntry(child_obj);
2139   DCHECK_NOT_NULL(child_entry);
2140   if (IsEssentialHiddenReference(parent_obj, field_offset)) {
2141     parent_entry->SetIndexedReference(HeapGraphEdge::kHidden, index,
2142                                       child_entry, generator_);
2143   }
2144 }
2145 
2146 void V8HeapExplorer::SetWeakReference(
2147     HeapEntry* parent_entry, const char* reference_name, Object child_obj,
2148     int field_offset, HeapEntry::ReferenceVerification verification) {
2149   if (!IsEssentialObject(child_obj)) {
2150     return;
2151   }
2152   HeapEntry* child_entry = GetEntry(child_obj);
2153   DCHECK_NOT_NULL(child_entry);
2154   parent_entry->SetNamedReference(HeapGraphEdge::kWeak, reference_name,
2155                                   child_entry, generator_, verification);
2156   MarkVisitedField(field_offset);
2157 }
2158 
2159 void V8HeapExplorer::SetWeakReference(HeapEntry* parent_entry, int index,
2160                                       Object child_obj,
2161                                       base::Optional<int> field_offset) {
2162   if (!IsEssentialObject(child_obj)) {
2163     return;
2164   }
2165   HeapEntry* child_entry = GetEntry(child_obj);
2166   DCHECK_NOT_NULL(child_entry);
2167   parent_entry->SetNamedReference(HeapGraphEdge::kWeak,
2168                                   names_->GetFormatted("%d", index),
2169                                   child_entry, generator_);
2170   if (field_offset.has_value()) {
2171     MarkVisitedField(*field_offset);
2172   }
2173 }
2174 
2175 void V8HeapExplorer::SetDataOrAccessorPropertyReference(
2176     PropertyKind kind, HeapEntry* parent_entry, Name reference_name,
2177     Object child_obj, const char* name_format_string, int field_offset) {
2178   if (kind == PropertyKind::kAccessor) {
2179     ExtractAccessorPairProperty(parent_entry, reference_name, child_obj,
2180                                 field_offset);
2181   } else {
2182     SetPropertyReference(parent_entry, reference_name, child_obj,
2183                          name_format_string, field_offset);
2184   }
2185 }
2186 
2187 void V8HeapExplorer::SetPropertyReference(HeapEntry* parent_entry,
2188                                           Name reference_name, Object child_obj,
2189                                           const char* name_format_string,
2190                                           int field_offset) {
2191   HeapEntry* child_entry = GetEntry(child_obj);
2192   if (child_entry == nullptr) return;
2193   HeapGraphEdge::Type type =
2194       reference_name.IsSymbol() || String::cast(reference_name).length() > 0
2195           ? HeapGraphEdge::kProperty
2196           : HeapGraphEdge::kInternal;
2197   const char* name =
2198       name_format_string != nullptr && reference_name.IsString()
2199           ? names_->GetFormatted(
2200                 name_format_string,
2201                 String::cast(reference_name)
2202                     .ToCString(DISALLOW_NULLS, ROBUST_STRING_TRAVERSAL)
2203                     .get())
2204           : names_->GetName(reference_name);
2205 
2206   parent_entry->SetNamedReference(type, name, child_entry, generator_);
2207   MarkVisitedField(field_offset);
2208 }
2209 
2210 void V8HeapExplorer::SetRootGcRootsReference() {
2211   snapshot_->root()->SetIndexedAutoIndexReference(
2212       HeapGraphEdge::kElement, snapshot_->gc_roots(), generator_);
2213 }
2214 
2215 void V8HeapExplorer::SetUserGlobalReference(Object child_obj) {
2216   HeapEntry* child_entry = GetEntry(child_obj);
2217   DCHECK_NOT_NULL(child_entry);
2218   snapshot_->root()->SetNamedAutoIndexReference(
2219       HeapGraphEdge::kShortcut, nullptr, child_entry, names_, generator_);
2220 }
2221 
2222 void V8HeapExplorer::SetGcRootsReference(Root root) {
2223   snapshot_->gc_roots()->SetIndexedAutoIndexReference(
2224       HeapGraphEdge::kElement, snapshot_->gc_subroot(root), generator_);
2225 }
2226 
2227 void V8HeapExplorer::SetGcSubrootReference(Root root, const char* description,
2228                                            bool is_weak, Object child_obj) {
2229   if (child_obj.IsSmi()) {
2230     // TODO(arenevier): if we handle smis here, the snapshot gets 2 to 3 times
2231     // slower on large heaps. According to perf, the bulk of the extra work
2232     // happens in the TemplateHashMapImpl::Probe method when trying to get
2233     // names->GetFormatted("%d / %s", index, description)
2234     return;
2235   }
2236   HeapEntry* child_entry = GetEntry(child_obj);
2237   if (child_entry == nullptr) return;
2238   const char* name = GetStrongGcSubrootName(child_obj);
2239   HeapGraphEdge::Type edge_type =
2240       is_weak ? HeapGraphEdge::kWeak : HeapGraphEdge::kInternal;
2241   if (name != nullptr) {
2242     snapshot_->gc_subroot(root)->SetNamedReference(edge_type, name, child_entry,
2243                                                    generator_);
2244   } else {
2245     snapshot_->gc_subroot(root)->SetNamedAutoIndexReference(
2246         edge_type, description, child_entry, names_, generator_);
2247   }
2248 
2249   // For full heap snapshots we do not emit user roots but rather rely on
2250   // regular GC roots to retain objects.
2251   if (!snapshot_->treat_global_objects_as_roots()) return;
2252 
2253   // Add a shortcut to JS global object reference at snapshot root.
2254   // That allows the user to easily find global objects. They are
2255   // also used as starting points in distance calculations.
2256   if (is_weak || !child_obj.IsNativeContext()) return;
2257 
2258   JSGlobalObject global = Context::cast(child_obj).global_object();
2259   if (!global.IsJSGlobalObject()) return;
2260 
2261   if (!user_roots_.insert(global).second) return;
2262 
2263   SetUserGlobalReference(global);
2264 }
2265 
2266 const char* V8HeapExplorer::GetStrongGcSubrootName(Object object) {
2267   if (strong_gc_subroot_names_.empty()) {
2268     Isolate* isolate = Isolate::FromHeap(heap_);
2269     for (RootIndex root_index = RootIndex::kFirstStrongOrReadOnlyRoot;
2270          root_index <= RootIndex::kLastStrongOrReadOnlyRoot; ++root_index) {
2271       const char* name = RootsTable::name(root_index);
2272       strong_gc_subroot_names_.emplace(isolate->root(root_index), name);
2273     }
2274     CHECK(!strong_gc_subroot_names_.empty());
2275   }
2276   auto it = strong_gc_subroot_names_.find(object);
2277   return it != strong_gc_subroot_names_.end() ? it->second : nullptr;
2278 }
2279 
2280 void V8HeapExplorer::TagObject(Object obj, const char* tag,
2281                                base::Optional<HeapEntry::Type> type) {
2282   if (IsEssentialObject(obj)) {
2283     HeapEntry* entry = GetEntry(obj);
2284     if (entry->name()[0] == '\0') {
2285       entry->set_name(tag);
2286     }
2287     if (type.has_value()) {
2288       entry->set_type(*type);
2289     }
2290   }
2291 }
2292 
2293 void V8HeapExplorer::RecursivelyTagConstantPool(Object obj, const char* tag,
2294                                                 HeapEntry::Type type,
2295                                                 int recursion_limit) {
2296   --recursion_limit;
2297   if (obj.IsFixedArrayExact(isolate())) {
2298     FixedArray arr = FixedArray::cast(obj);
2299     TagObject(arr, tag, type);
2300     if (recursion_limit <= 0) return;
2301     for (int i = 0; i < arr.length(); ++i) {
2302       RecursivelyTagConstantPool(arr.get(i), tag, type, recursion_limit);
2303     }
2304   } else if (obj.IsNameDictionary(isolate()) ||
2305              obj.IsNumberDictionary(isolate())) {
2306     TagObject(obj, tag, type);
2307   }
2308 }
2309 
2310 class GlobalObjectsEnumerator : public RootVisitor {
2311  public:
2312   explicit GlobalObjectsEnumerator(Isolate* isolate) : isolate_(isolate) {}
2313 
2314   void VisitRootPointers(Root root, const char* description,
2315                          FullObjectSlot start, FullObjectSlot end) override {
2316     VisitRootPointersImpl(root, description, start, end);
2317   }
2318 
2319   void VisitRootPointers(Root root, const char* description,
2320                          OffHeapObjectSlot start,
2321                          OffHeapObjectSlot end) override {
2322     VisitRootPointersImpl(root, description, start, end);
2323   }
2324 
2325   int count() const { return static_cast<int>(objects_.size()); }
2326   Handle<JSGlobalObject>& at(int i) { return objects_[i]; }
2327 
2328  private:
2329   template <typename TSlot>
2330   void VisitRootPointersImpl(Root root, const char* description, TSlot start,
2331                              TSlot end) {
2332     for (TSlot p = start; p < end; ++p) {
2333       DCHECK(!MapWord::IsPacked(p.Relaxed_Load(isolate_).ptr()));
2334       Object o = p.load(isolate_);
2335       if (!o.IsNativeContext(isolate_)) continue;
2336       JSObject proxy = Context::cast(o).global_proxy();
2337       if (!proxy.IsJSGlobalProxy(isolate_)) continue;
2338       Object global = proxy.map(isolate_).prototype(isolate_);
2339       if (!global.IsJSGlobalObject(isolate_)) continue;
2340       objects_.push_back(handle(JSGlobalObject::cast(global), isolate_));
2341     }
2342   }
2343 
2344   Isolate* isolate_;
2345   std::vector<Handle<JSGlobalObject>> objects_;
2346 };
2347 
2348 
2349 // Modifies heap. Must not be run during heap traversal.
2350 void V8HeapExplorer::CollectGlobalObjectsTags() {
2351   if (!global_object_name_resolver_) return;
2352 
2353   Isolate* isolate = Isolate::FromHeap(heap_);
2354   GlobalObjectsEnumerator enumerator(isolate);
2355   isolate->global_handles()->IterateAllRoots(&enumerator);
2356   for (int i = 0, l = enumerator.count(); i < l; ++i) {
2357     Handle<JSGlobalObject> obj = enumerator.at(i);
2358     const char* tag = global_object_name_resolver_->GetName(
2359         Utils::ToLocal(Handle<JSObject>::cast(obj)));
2360     if (tag) {
2361       global_object_tag_pairs_.emplace_back(obj, tag);
2362     }
2363   }
2364 }
2365 
2366 void V8HeapExplorer::MakeGlobalObjectTagMap(
2367     const SafepointScope& safepoint_scope) {
2368   for (const auto& pair : global_object_tag_pairs_) {
2369     global_object_tag_map_.emplace(*pair.first, pair.second);
2370   }
2371 }
2372 
2373 class EmbedderGraphImpl : public EmbedderGraph {
2374  public:
2375   struct Edge {
2376     Node* from;
2377     Node* to;
2378     const char* name;
2379   };
2380 
2381   class V8NodeImpl : public Node {
2382    public:
2383     explicit V8NodeImpl(Object object) : object_(object) {}
2384     Object GetObject() { return object_; }
2385 
2386     // Node overrides.
2387     bool IsEmbedderNode() override { return false; }
2388     const char* Name() override {
2389       // The name should be retrieved via GetObject().
2390       UNREACHABLE();
2391     }
2392     size_t SizeInBytes() override {
2393       // The size should be retrieved via GetObject().
2394       UNREACHABLE();
2395     }
2396 
2397    private:
2398     Object object_;
2399   };
2400 
2401   Node* V8Node(const v8::Local<v8::Value>& value) final {
2402     Handle<Object> object = v8::Utils::OpenHandle(*value);
2403     DCHECK(!object.is_null());
2404     return AddNode(std::unique_ptr<Node>(new V8NodeImpl(*object)));
2405   }
2406 
2407   Node* AddNode(std::unique_ptr<Node> node) final {
2408     Node* result = node.get();
2409     nodes_.push_back(std::move(node));
2410     return result;
2411   }
2412 
2413   void AddEdge(Node* from, Node* to, const char* name) final {
2414     edges_.push_back({from, to, name});
2415   }
2416 
2417   const std::vector<std::unique_ptr<Node>>& nodes() { return nodes_; }
2418   const std::vector<Edge>& edges() { return edges_; }
2419 
2420  private:
2421   std::vector<std::unique_ptr<Node>> nodes_;
2422   std::vector<Edge> edges_;
2423 };
2424 
2425 class EmbedderGraphEntriesAllocator : public HeapEntriesAllocator {
2426  public:
2427   explicit EmbedderGraphEntriesAllocator(HeapSnapshot* snapshot)
2428       : snapshot_(snapshot),
2429         names_(snapshot_->profiler()->names()),
2430         heap_object_map_(snapshot_->profiler()->heap_object_map()) {}
2431   HeapEntry* AllocateEntry(HeapThing ptr) override;
2432   HeapEntry* AllocateEntry(Smi smi) override;
2433 
2434  private:
2435   HeapSnapshot* snapshot_;
2436   StringsStorage* names_;
2437   HeapObjectsMap* heap_object_map_;
2438 };
2439 
2440 namespace {
2441 
2442 const char* EmbedderGraphNodeName(StringsStorage* names,
2443                                   EmbedderGraphImpl::Node* node) {
2444   const char* prefix = node->NamePrefix();
2445   return prefix ? names->GetFormatted("%s %s", prefix, node->Name())
2446                 : names->GetCopy(node->Name());
2447 }
2448 
2449 HeapEntry::Type EmbedderGraphNodeType(EmbedderGraphImpl::Node* node) {
2450   return node->IsRootNode() ? HeapEntry::kSynthetic : HeapEntry::kNative;
2451 }
2452 
2453 // Merges the names of an embedder node and its wrapper node.
2454 // If the wrapper node name contains a tag suffix (part after '/') then the
2455 // result is the embedder node name concatenated with the tag suffix.
2456 // Otherwise, the result is the embedder node name.
2457 const char* MergeNames(StringsStorage* names, const char* embedder_name,
2458                        const char* wrapper_name) {
2459   const char* suffix = strchr(wrapper_name, '/');
2460   return suffix ? names->GetFormatted("%s %s", embedder_name, suffix)
2461                 : embedder_name;
2462 }
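// Name-merging sketch with hypothetical inputs: given embedder_name
// "HTMLDivElement" and wrapper_name "Detached / example.html", strchr() finds
// the suffix "/ example.html" and the merged name becomes
// "HTMLDivElement / example.html"; a wrapper_name without '/' leaves the
// embedder name unchanged.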
2463 
2464 }  // anonymous namespace
2465 
2466 HeapEntry* EmbedderGraphEntriesAllocator::AllocateEntry(HeapThing ptr) {
2467   EmbedderGraphImpl::Node* node =
2468       reinterpret_cast<EmbedderGraphImpl::Node*>(ptr);
2469   DCHECK(node->IsEmbedderNode());
2470   size_t size = node->SizeInBytes();
2471   Address lookup_address = reinterpret_cast<Address>(node->GetNativeObject());
2472   SnapshotObjectId id =
2473       (lookup_address) ? heap_object_map_->FindOrAddEntry(lookup_address, 0)
2474                        : static_cast<SnapshotObjectId>(
2475                              reinterpret_cast<uintptr_t>(node) << 1);
2476   auto* heap_entry = snapshot_->AddEntry(EmbedderGraphNodeType(node),
2477                                          EmbedderGraphNodeName(names_, node),
2478                                          id, static_cast<int>(size), 0);
2479   heap_entry->set_detachedness(node->GetDetachedness());
2480   return heap_entry;
2481 }
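// Id selection above: when the embedder node exposes a native object address,
// the entry reuses the stable id tracked by heap_object_map_, so the same
// native object keeps its id across snapshots; otherwise the node pointer
// itself, shifted left by one bit, serves as a synthetic one-off id.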
2482 
2483 HeapEntry* EmbedderGraphEntriesAllocator::AllocateEntry(Smi smi) {
2484   DCHECK(false);
2485   return nullptr;
2486 }
2487 
2488 NativeObjectsExplorer::NativeObjectsExplorer(
2489     HeapSnapshot* snapshot, SnapshottingProgressReportingInterface* progress)
2490     : isolate_(
2491           Isolate::FromHeap(snapshot->profiler()->heap_object_map()->heap())),
2492       snapshot_(snapshot),
2493       names_(snapshot_->profiler()->names()),
2494       heap_object_map_(snapshot_->profiler()->heap_object_map()),
2495       embedder_graph_entries_allocator_(
2496           new EmbedderGraphEntriesAllocator(snapshot)) {}
2497 
2498 void NativeObjectsExplorer::MergeNodeIntoEntry(
2499     HeapEntry* entry, EmbedderGraph::Node* original_node,
2500     EmbedderGraph::Node* wrapper_node) {
2501   // The wrapper node may be an embedder node (for testing purposes) or a V8
2502   // node (production code).
2503   if (!wrapper_node->IsEmbedderNode()) {
2504     // Only for V8 nodes can we add a lookup.
2505     EmbedderGraphImpl::V8NodeImpl* v8_node =
2506         static_cast<EmbedderGraphImpl::V8NodeImpl*>(wrapper_node);
2507     Object object = v8_node->GetObject();
2508     DCHECK(!object.IsSmi());
2509     if (original_node->GetNativeObject()) {
2510       HeapObject heap_object = HeapObject::cast(object);
2511       heap_object_map_->AddMergedNativeEntry(original_node->GetNativeObject(),
2512                                              heap_object.address());
2513       DCHECK_EQ(entry->id(), heap_object_map_->FindMergedNativeEntry(
2514                                  original_node->GetNativeObject()));
2515     }
2516   }
2517   entry->set_detachedness(original_node->GetDetachedness());
2518   entry->set_name(MergeNames(
2519       names_, EmbedderGraphNodeName(names_, original_node), entry->name()));
2520   entry->set_type(EmbedderGraphNodeType(original_node));
2521   DCHECK_GE(entry->self_size() + original_node->SizeInBytes(),
2522             entry->self_size());
2523   entry->add_self_size(original_node->SizeInBytes());
2524 }
2525 
2526 HeapEntry* NativeObjectsExplorer::EntryForEmbedderGraphNode(
2527     EmbedderGraphImpl::Node* node) {
2528   // Return the entry for the wrapper node if present.
2529   if (node->WrapperNode()) {
2530     node = node->WrapperNode();
2531   }
2532   // Node is EmbedderNode.
2533   if (node->IsEmbedderNode()) {
2534     return generator_->FindOrAddEntry(node,
2535                                       embedder_graph_entries_allocator_.get());
2536   }
2537   // Node is V8NodeImpl.
2538   Object object =
2539       static_cast<EmbedderGraphImpl::V8NodeImpl*>(node)->GetObject();
2540   if (object.IsSmi()) return nullptr;
2541   auto* entry = generator_->FindEntry(
2542       reinterpret_cast<void*>(Object::cast(object).ptr()));
2543   return entry;
2544 }
2545 
2546 bool NativeObjectsExplorer::IterateAndExtractReferences(
2547     HeapSnapshotGenerator* generator) {
2548   generator_ = generator;
2549 
2550   if (FLAG_heap_profiler_use_embedder_graph &&
2551       snapshot_->profiler()->HasBuildEmbedderGraphCallback()) {
2552     v8::HandleScope scope(reinterpret_cast<v8::Isolate*>(isolate_));
2553     DisallowGarbageCollection no_gc;
2554     EmbedderGraphImpl graph;
2555     snapshot_->profiler()->BuildEmbedderGraph(isolate_, &graph);
2556     for (const auto& node : graph.nodes()) {
2557       // Only add embedder nodes as V8 nodes have been added already by the
2558       // V8HeapExplorer.
2559       if (!node->IsEmbedderNode()) continue;
2560 
2561       if (auto* entry = EntryForEmbedderGraphNode(node.get())) {
2562         if (node->IsRootNode()) {
2563           snapshot_->root()->SetIndexedAutoIndexReference(
2564               HeapGraphEdge::kElement, entry, generator_,
2565               HeapEntry::kOffHeapPointer);
2566         }
2567         if (node->WrapperNode()) {
2568           MergeNodeIntoEntry(entry, node.get(), node->WrapperNode());
2569         }
2570       }
2571     }
2572     // Fill edges of the graph.
2573     for (const auto& edge : graph.edges()) {
2574       // |from| and |to| can be nullptr if the corresponding node is a V8 node
2575       // pointing to a Smi.
2576       HeapEntry* from = EntryForEmbedderGraphNode(edge.from);
2577       if (!from) continue;
2578       HeapEntry* to = EntryForEmbedderGraphNode(edge.to);
2579       if (!to) continue;
2580       if (edge.name == nullptr) {
2581         from->SetIndexedAutoIndexReference(HeapGraphEdge::kElement, to,
2582                                            generator_,
2583                                            HeapEntry::kOffHeapPointer);
2584       } else {
2585         from->SetNamedReference(HeapGraphEdge::kInternal,
2586                                 names_->GetCopy(edge.name), to, generator_,
2587                                 HeapEntry::kOffHeapPointer);
2588       }
2589     }
2590   }
2591   generator_ = nullptr;
2592   return true;
2593 }
2594 
2595 HeapSnapshotGenerator::HeapSnapshotGenerator(
2596     HeapSnapshot* snapshot, v8::ActivityControl* control,
2597     v8::HeapProfiler::ObjectNameResolver* resolver, Heap* heap)
2598     : snapshot_(snapshot),
2599       control_(control),
2600       v8_heap_explorer_(snapshot_, this, resolver),
2601       dom_explorer_(snapshot_, this),
2602       heap_(heap) {}
2603 
2604 namespace {
2605 class V8_NODISCARD NullContextForSnapshotScope {
2606  public:
2607   explicit NullContextForSnapshotScope(Isolate* isolate)
2608       : isolate_(isolate), prev_(isolate->context()) {
2609     isolate_->set_context(Context());
2610   }
2611   ~NullContextForSnapshotScope() { isolate_->set_context(prev_); }
2612 
2613  private:
2614   Isolate* isolate_;
2615   Context prev_;
2616 };
2617 }  // namespace
2618 
2619 bool HeapSnapshotGenerator::GenerateSnapshot() {
2620   Isolate* isolate = Isolate::FromHeap(heap_);
2621   base::Optional<HandleScope> handle_scope(base::in_place, isolate);
2622   v8_heap_explorer_.CollectGlobalObjectsTags();
2623 
2624   heap_->CollectAllAvailableGarbage(GarbageCollectionReason::kHeapProfiler);
2625 
2626   NullContextForSnapshotScope null_context_scope(isolate);
2627   SafepointScope scope(heap_);
2628   v8_heap_explorer_.MakeGlobalObjectTagMap(scope);
2629   handle_scope.reset();
2630 
2631 #ifdef VERIFY_HEAP
2632   Heap* debug_heap = heap_;
2633   if (FLAG_verify_heap) {
2634     debug_heap->Verify();
2635   }
2636 #endif
2637 
2638   InitProgressCounter();
2639 
2640 #ifdef VERIFY_HEAP
2641   if (FLAG_verify_heap) {
2642     debug_heap->Verify();
2643   }
2644 #endif
2645 
2646   snapshot_->AddSyntheticRootEntries();
2647 
2648   if (!FillReferences()) return false;
2649 
2650   snapshot_->FillChildren();
2651   snapshot_->RememberLastJSObjectId();
2652 
2653   progress_counter_ = progress_total_;
2654   if (!ProgressReport(true)) return false;
2655   return true;
2656 }
2657 
2658 void HeapSnapshotGenerator::ProgressStep() {
2659   // Only increment progress_counter_ until it reaches
2660   // progress_total_ - 1.
2661   // This ensures that intermediate ProgressReport calls will never signal
2662   // that the work is finished (i.e. progress_counter_ == progress_total_).
2663   // Only the forced ProgressReport() at the end of GenerateSnapshot() should,
2664   // after setting progress_counter_ = progress_total_, signal that the
2665   // work is finished because signalling finished twice
2666   // breaks the DevTools frontend.
2667   if (control_ != nullptr && progress_total_ > progress_counter_ + 1) {
2668     ++progress_counter_;
2669   }
2670 }
2671 
2672 bool HeapSnapshotGenerator::ProgressReport(bool force) {
2673   const int kProgressReportGranularity = 10000;
2674   if (control_ != nullptr &&
2675       (force || progress_counter_ % kProgressReportGranularity == 0)) {
2676     return control_->ReportProgressValue(progress_counter_, progress_total_) ==
2677            v8::ActivityControl::kContinue;
2678   }
2679   return true;
2680 }
2681 
2682 void HeapSnapshotGenerator::InitProgressCounter() {
2683   if (control_ == nullptr) return;
2684   progress_total_ = v8_heap_explorer_.EstimateObjectsCount();
2685   progress_counter_ = 0;
2686 }
2687 
2688 bool HeapSnapshotGenerator::FillReferences() {
2689   return v8_heap_explorer_.IterateAndExtractReferences(this) &&
2690          dom_explorer_.IterateAndExtractReferences(this);
2691 }
2692 
2693 template<int bytes> struct MaxDecimalDigitsIn;
2694 template <>
2695 struct MaxDecimalDigitsIn<1> {
2696   static const int kSigned = 3;
2697   static const int kUnsigned = 3;
2698 };
2699 template<> struct MaxDecimalDigitsIn<4> {
2700   static const int kSigned = 11;
2701   static const int kUnsigned = 10;
2702 };
2703 template<> struct MaxDecimalDigitsIn<8> {
2704   static const int kSigned = 20;
2705   static const int kUnsigned = 20;
2706 };
2707 
2708 class OutputStreamWriter {
2709  public:
2710   explicit OutputStreamWriter(v8::OutputStream* stream)
2711       : stream_(stream),
2712         chunk_size_(stream->GetChunkSize()),
2713         chunk_(chunk_size_),
2714         chunk_pos_(0),
2715         aborted_(false) {
2716     DCHECK_GT(chunk_size_, 0);
2717   }
2718   bool aborted() { return aborted_; }
2719   void AddCharacter(char c) {
2720     DCHECK_NE(c, '\0');
2721     DCHECK(chunk_pos_ < chunk_size_);
2722     chunk_[chunk_pos_++] = c;
2723     MaybeWriteChunk();
2724   }
2725   void AddString(const char* s) {
2726     size_t len = strlen(s);
2727     DCHECK_GE(kMaxInt, len);
2728     AddSubstring(s, static_cast<int>(len));
2729   }
2730   void AddSubstring(const char* s, int n) {
2731     if (n <= 0) return;
2732     DCHECK_LE(n, strlen(s));
2733     const char* s_end = s + n;
2734     while (s < s_end) {
2735       int s_chunk_size =
2736           std::min(chunk_size_ - chunk_pos_, static_cast<int>(s_end - s));
2737       DCHECK_GT(s_chunk_size, 0);
2738       MemCopy(chunk_.begin() + chunk_pos_, s, s_chunk_size);
2739       s += s_chunk_size;
2740       chunk_pos_ += s_chunk_size;
2741       MaybeWriteChunk();
2742     }
2743   }
2744   void AddNumber(unsigned n) { AddNumberImpl<unsigned>(n, "%u"); }
2745   void Finalize() {
2746     if (aborted_) return;
2747     DCHECK(chunk_pos_ < chunk_size_);
2748     if (chunk_pos_ != 0) {
2749       WriteChunk();
2750     }
2751     stream_->EndOfStream();
2752   }
2753 
2754  private:
2755   template<typename T>
2756   void AddNumberImpl(T n, const char* format) {
2757     // Buffer for the longest value plus trailing \0
2758     static const int kMaxNumberSize =
2759         MaxDecimalDigitsIn<sizeof(T)>::kUnsigned + 1;
2760     if (chunk_size_ - chunk_pos_ >= kMaxNumberSize) {
2761       int result = SNPrintF(
2762           chunk_.SubVector(chunk_pos_, chunk_size_), format, n);
2763       DCHECK_NE(result, -1);
2764       chunk_pos_ += result;
2765       MaybeWriteChunk();
2766     } else {
2767       base::EmbeddedVector<char, kMaxNumberSize> buffer;
2768       int result = SNPrintF(buffer, format, n);
2769       USE(result);
2770       DCHECK_NE(result, -1);
2771       AddString(buffer.begin());
2772     }
2773   }
2774   void MaybeWriteChunk() {
2775     DCHECK(chunk_pos_ <= chunk_size_);
2776     if (chunk_pos_ == chunk_size_) {
2777       WriteChunk();
2778     }
2779   }
2780   void WriteChunk() {
2781     if (aborted_) return;
2782     if (stream_->WriteAsciiChunk(chunk_.begin(), chunk_pos_) ==
2783         v8::OutputStream::kAbort)
2784       aborted_ = true;
2785     chunk_pos_ = 0;
2786   }
2787 
2788   v8::OutputStream* stream_;
2789   int chunk_size_;
2790   base::ScopedVector<char> chunk_;
2791   int chunk_pos_;
2792   bool aborted_;
2793 };
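// A minimal sketch of the consumer side, for illustration only: an embedder
// can collect the serialized JSON by implementing v8::OutputStream, which is
// what OutputStreamWriter flushes its chunks into (the class and member names
// below are hypothetical).
//
//   class StringOutputStream : public v8::OutputStream {
//    public:
//     void EndOfStream() override {}
//     WriteResult WriteAsciiChunk(char* data, int size) override {
//       json_.append(data, size);
//       return kContinue;
//     }
//     const std::string& json() const { return json_; }
//
//    private:
//     std::string json_;
//   };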
2794 
2795 
2796 // type, name|index, to_node.
2797 const int HeapSnapshotJSONSerializer::kEdgeFieldsCount = 3;
2798 // type, name, id, self_size, edge_count, trace_node_id, detachedness.
2799 const int HeapSnapshotJSONSerializer::kNodeFieldsCount = 7;
2800 
2801 void HeapSnapshotJSONSerializer::Serialize(v8::OutputStream* stream) {
2802   if (AllocationTracker* allocation_tracker =
2803       snapshot_->profiler()->allocation_tracker()) {
2804     allocation_tracker->PrepareForSerialization();
2805   }
2806   DCHECK_NULL(writer_);
2807   writer_ = new OutputStreamWriter(stream);
2808   SerializeImpl();
2809   delete writer_;
2810   writer_ = nullptr;
2811 }
2812 
2813 
2814 void HeapSnapshotJSONSerializer::SerializeImpl() {
2815   DCHECK_EQ(0, snapshot_->root()->index());
2816   writer_->AddCharacter('{');
2817   writer_->AddString("\"snapshot\":{");
2818   SerializeSnapshot();
2819   if (writer_->aborted()) return;
2820   writer_->AddString("},\n");
2821   writer_->AddString("\"nodes\":[");
2822   SerializeNodes();
2823   if (writer_->aborted()) return;
2824   writer_->AddString("],\n");
2825   writer_->AddString("\"edges\":[");
2826   SerializeEdges();
2827   if (writer_->aborted()) return;
2828   writer_->AddString("],\n");
2829 
2830   writer_->AddString("\"trace_function_infos\":[");
2831   SerializeTraceNodeInfos();
2832   if (writer_->aborted()) return;
2833   writer_->AddString("],\n");
2834   writer_->AddString("\"trace_tree\":[");
2835   SerializeTraceTree();
2836   if (writer_->aborted()) return;
2837   writer_->AddString("],\n");
2838 
2839   writer_->AddString("\"samples\":[");
2840   SerializeSamples();
2841   if (writer_->aborted()) return;
2842   writer_->AddString("],\n");
2843 
2844   writer_->AddString("\"locations\":[");
2845   SerializeLocations();
2846   if (writer_->aborted()) return;
2847   writer_->AddString("],\n");
2848 
2849   writer_->AddString("\"strings\":[");
2850   SerializeStrings();
2851   if (writer_->aborted()) return;
2852   writer_->AddCharacter(']');
2853   writer_->AddCharacter('}');
2854   writer_->Finalize();
2855 }
2856 
2857 
2858 int HeapSnapshotJSONSerializer::GetStringId(const char* s) {
2859   base::HashMap::Entry* cache_entry =
2860       strings_.LookupOrInsert(const_cast<char*>(s), StringHash(s));
2861   if (cache_entry->value == nullptr) {
2862     cache_entry->value = reinterpret_cast<void*>(next_string_id_++);
2863   }
2864   return static_cast<int>(reinterpret_cast<intptr_t>(cache_entry->value));
2865 }
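// Usage note: ids are handed out from next_string_id_ in insertion order and
// later become indices into the serialized "strings" array, whose slot 0 is
// the "<dummy>" placeholder written by SerializeStrings(); looking up the same
// interned string again returns the same id.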
2866 
2867 
2868 namespace {
2869 
2870 template<size_t size> struct ToUnsigned;
2871 
2872 template <>
2873 struct ToUnsigned<1> {
2874   using Type = uint8_t;
2875 };
2876 
2877 template<> struct ToUnsigned<4> {
2878   using Type = uint32_t;
2879 };
2880 
2881 template<> struct ToUnsigned<8> {
2882   using Type = uint64_t;
2883 };
2884 
2885 }  // namespace
2886 
2887 template <typename T>
2888 static int utoa_impl(T value, const base::Vector<char>& buffer,
2889                      int buffer_pos) {
2890   STATIC_ASSERT(static_cast<T>(-1) > 0);  // Check that T is unsigned
2891   int number_of_digits = 0;
2892   T t = value;
2893   do {
2894     ++number_of_digits;
2895   } while (t /= 10);
2896 
2897   buffer_pos += number_of_digits;
2898   int result = buffer_pos;
2899   do {
2900     int last_digit = static_cast<int>(value % 10);
2901     buffer[--buffer_pos] = '0' + last_digit;
2902     value /= 10;
2903   } while (value);
2904   return result;
2905 }
2906 
2907 template <typename T>
2908 static int utoa(T value, const base::Vector<char>& buffer, int buffer_pos) {
2909   typename ToUnsigned<sizeof(value)>::Type unsigned_value = value;
2910   STATIC_ASSERT(sizeof(value) == sizeof(unsigned_value));
2911   return utoa_impl(unsigned_value, buffer, buffer_pos);
2912 }
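// Worked example: utoa(305u, buffer, 2) writes '3', '0', '5' into
// buffer[2..4] and returns 5, the offset just past the last digit, which is
// why the serializers below keep appending separators at the returned
// position.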
2913 
2914 void HeapSnapshotJSONSerializer::SerializeEdge(HeapGraphEdge* edge,
2915                                                bool first_edge) {
2916   // The buffer needs space for 3 unsigned ints, 3 commas, \n and \0
2917   static const int kBufferSize =
2918       MaxDecimalDigitsIn<sizeof(unsigned)>::kUnsigned * 3 + 3 + 2;
2919   base::EmbeddedVector<char, kBufferSize> buffer;
2920   int edge_name_or_index = edge->type() == HeapGraphEdge::kElement
2921       || edge->type() == HeapGraphEdge::kHidden
2922       ? edge->index() : GetStringId(edge->name());
2923   int buffer_pos = 0;
2924   if (!first_edge) {
2925     buffer[buffer_pos++] = ',';
2926   }
2927   buffer_pos = utoa(edge->type(), buffer, buffer_pos);
2928   buffer[buffer_pos++] = ',';
2929   buffer_pos = utoa(edge_name_or_index, buffer, buffer_pos);
2930   buffer[buffer_pos++] = ',';
2931   buffer_pos = utoa(to_node_index(edge->to()), buffer, buffer_pos);
2932   buffer[buffer_pos++] = '\n';
2933   buffer[buffer_pos++] = '\0';
2934   writer_->AddString(buffer.begin());
2935 }
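// Illustrative edge row with hypothetical values: a kProperty edge whose name
// has string id 12 and whose target node starts at offset 21 in the flattened
// node array is emitted as "2,12,21\n" (type, name_or_index, to_node);
// kElement and kHidden edges store the numeric index in the middle field
// instead of a string id.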
2936 
2937 void HeapSnapshotJSONSerializer::SerializeEdges() {
2938   std::vector<HeapGraphEdge*>& edges = snapshot_->children();
2939   for (size_t i = 0; i < edges.size(); ++i) {
2940     DCHECK(i == 0 ||
2941            edges[i - 1]->from()->index() <= edges[i]->from()->index());
2942     SerializeEdge(edges[i], i == 0);
2943     if (writer_->aborted()) return;
2944   }
2945 }
2946 
2947 void HeapSnapshotJSONSerializer::SerializeNode(const HeapEntry* entry) {
2948   // The buffer needs space for 5 unsigned ints, 1 size_t, 1 uint8_t, 7 commas,
2949   // \n and \0
2950   static const int kBufferSize =
2951       5 * MaxDecimalDigitsIn<sizeof(unsigned)>::kUnsigned +
2952       MaxDecimalDigitsIn<sizeof(size_t)>::kUnsigned +
2953       MaxDecimalDigitsIn<sizeof(uint8_t)>::kUnsigned + 7 + 1 + 1;
2954   base::EmbeddedVector<char, kBufferSize> buffer;
2955   int buffer_pos = 0;
2956   if (to_node_index(entry) != 0) {
2957     buffer[buffer_pos++] = ',';
2958   }
2959   buffer_pos = utoa(entry->type(), buffer, buffer_pos);
2960   buffer[buffer_pos++] = ',';
2961   buffer_pos = utoa(GetStringId(entry->name()), buffer, buffer_pos);
2962   buffer[buffer_pos++] = ',';
2963   buffer_pos = utoa(entry->id(), buffer, buffer_pos);
2964   buffer[buffer_pos++] = ',';
2965   buffer_pos = utoa(entry->self_size(), buffer, buffer_pos);
2966   buffer[buffer_pos++] = ',';
2967   buffer_pos = utoa(entry->children_count(), buffer, buffer_pos);
2968   buffer[buffer_pos++] = ',';
2969   buffer_pos = utoa(entry->trace_node_id(), buffer, buffer_pos);
2970   buffer[buffer_pos++] = ',';
2971   buffer_pos = utoa(entry->detachedness(), buffer, buffer_pos);
2972   buffer[buffer_pos++] = '\n';
2973   buffer[buffer_pos++] = '\0';
2974   writer_->AddString(buffer.begin());
2975 }
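// Illustrative node row with hypothetical values: an entry of type kObject
// (3) whose name has string id 7, with snapshot id 42, self size 96, two
// outgoing edges, no allocation trace and detachedness 0 is emitted as
// "3,7,42,96,2,0,0\n", matching the "node_fields" order declared in
// SerializeSnapshot() below.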
2976 
2977 void HeapSnapshotJSONSerializer::SerializeNodes() {
2978   const std::deque<HeapEntry>& entries = snapshot_->entries();
2979   for (const HeapEntry& entry : entries) {
2980     SerializeNode(&entry);
2981     if (writer_->aborted()) return;
2982   }
2983 }
2984 
2985 void HeapSnapshotJSONSerializer::SerializeSnapshot() {
2986   writer_->AddString("\"meta\":");
2987   // The object describing node serialization layout.
2988   // We use a set of macros to improve readability.
2989 
2990 // clang-format off
2991 #define JSON_A(s) "[" s "]"
2992 #define JSON_O(s) "{" s "}"
2993 #define JSON_S(s) "\"" s "\""
2994   writer_->AddString(JSON_O(
2995     JSON_S("node_fields") ":" JSON_A(
2996         JSON_S("type") ","
2997         JSON_S("name") ","
2998         JSON_S("id") ","
2999         JSON_S("self_size") ","
3000         JSON_S("edge_count") ","
3001         JSON_S("trace_node_id") ","
3002         JSON_S("detachedness")) ","
3003     JSON_S("node_types") ":" JSON_A(
3004         JSON_A(
3005             JSON_S("hidden") ","
3006             JSON_S("array") ","
3007             JSON_S("string") ","
3008             JSON_S("object") ","
3009             JSON_S("code") ","
3010             JSON_S("closure") ","
3011             JSON_S("regexp") ","
3012             JSON_S("number") ","
3013             JSON_S("native") ","
3014             JSON_S("synthetic") ","
3015             JSON_S("concatenated string") ","
3016             JSON_S("sliced string") ","
3017             JSON_S("symbol") ","
3018             JSON_S("bigint")) ","
3019         JSON_S("string") ","
3020         JSON_S("number") ","
3021         JSON_S("number") ","
3022         JSON_S("number") ","
3023         JSON_S("number") ","
3024         JSON_S("number")) ","
3025     JSON_S("edge_fields") ":" JSON_A(
3026         JSON_S("type") ","
3027         JSON_S("name_or_index") ","
3028         JSON_S("to_node")) ","
3029     JSON_S("edge_types") ":" JSON_A(
3030         JSON_A(
3031             JSON_S("context") ","
3032             JSON_S("element") ","
3033             JSON_S("property") ","
3034             JSON_S("internal") ","
3035             JSON_S("hidden") ","
3036             JSON_S("shortcut") ","
3037             JSON_S("weak")) ","
3038         JSON_S("string_or_number") ","
3039         JSON_S("node")) ","
3040     JSON_S("trace_function_info_fields") ":" JSON_A(
3041         JSON_S("function_id") ","
3042         JSON_S("name") ","
3043         JSON_S("script_name") ","
3044         JSON_S("script_id") ","
3045         JSON_S("line") ","
3046         JSON_S("column")) ","
3047     JSON_S("trace_node_fields") ":" JSON_A(
3048         JSON_S("id") ","
3049         JSON_S("function_info_index") ","
3050         JSON_S("count") ","
3051         JSON_S("size") ","
3052         JSON_S("children")) ","
3053     JSON_S("sample_fields") ":" JSON_A(
3054         JSON_S("timestamp_us") ","
3055         JSON_S("last_assigned_id")) ","
3056     JSON_S("location_fields") ":" JSON_A(
3057         JSON_S("object_index") ","
3058         JSON_S("script_id") ","
3059         JSON_S("line") ","
3060         JSON_S("column"))));
3061 // clang-format on
3062 #undef JSON_S
3063 #undef JSON_O
3064 #undef JSON_A
3065   writer_->AddString(",\"node_count\":");
3066   writer_->AddNumber(static_cast<unsigned>(snapshot_->entries().size()));
3067   writer_->AddString(",\"edge_count\":");
3068   writer_->AddNumber(static_cast<double>(snapshot_->edges().size()));
3069   writer_->AddString(",\"trace_function_count\":");
3070   uint32_t count = 0;
3071   AllocationTracker* tracker = snapshot_->profiler()->allocation_tracker();
3072   if (tracker) {
3073     count = static_cast<uint32_t>(tracker->function_info_list().size());
3074   }
3075   writer_->AddNumber(count);
3076 }
3077 
3078 
3079 static void WriteUChar(OutputStreamWriter* w, unibrow::uchar u) {
3080   static const char hex_chars[] = "0123456789ABCDEF";
3081   w->AddString("\\u");
3082   w->AddCharacter(hex_chars[(u >> 12) & 0xF]);
3083   w->AddCharacter(hex_chars[(u >> 8) & 0xF]);
3084   w->AddCharacter(hex_chars[(u >> 4) & 0xF]);
3085   w->AddCharacter(hex_chars[u & 0xF]);
3086 }
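// Example: WriteUChar(writer, 0x00E9) emits the six characters "\u00E9"; this
// is how non-ASCII and raw control characters are escaped in the JSON output.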
3087 
3088 
3089 void HeapSnapshotJSONSerializer::SerializeTraceTree() {
3090   AllocationTracker* tracker = snapshot_->profiler()->allocation_tracker();
3091   if (!tracker) return;
3092   AllocationTraceTree* traces = tracker->trace_tree();
3093   SerializeTraceNode(traces->root());
3094 }
3095 
3096 
3097 void HeapSnapshotJSONSerializer::SerializeTraceNode(AllocationTraceNode* node) {
3098   // The buffer needs space for 4 unsigned ints, 4 commas, [ and \0
3099   const int kBufferSize =
3100       4 * MaxDecimalDigitsIn<sizeof(unsigned)>::kUnsigned + 4 + 1 + 1;
3101   base::EmbeddedVector<char, kBufferSize> buffer;
3102   int buffer_pos = 0;
3103   buffer_pos = utoa(node->id(), buffer, buffer_pos);
3104   buffer[buffer_pos++] = ',';
3105   buffer_pos = utoa(node->function_info_index(), buffer, buffer_pos);
3106   buffer[buffer_pos++] = ',';
3107   buffer_pos = utoa(node->allocation_count(), buffer, buffer_pos);
3108   buffer[buffer_pos++] = ',';
3109   buffer_pos = utoa(node->allocation_size(), buffer, buffer_pos);
3110   buffer[buffer_pos++] = ',';
3111   buffer[buffer_pos++] = '[';
3112   buffer[buffer_pos++] = '\0';
3113   writer_->AddString(buffer.begin());
3114 
3115   int i = 0;
3116   for (AllocationTraceNode* child : node->children()) {
3117     if (i++ > 0) {
3118       writer_->AddCharacter(',');
3119     }
3120     SerializeTraceNode(child);
3121   }
3122   writer_->AddCharacter(']');
3123 }
3124 
3125 
3126 // 0-based position is converted to 1-based during the serialization.
3127 static int SerializePosition(int position, const base::Vector<char>& buffer,
3128                              int buffer_pos) {
3129   if (position == -1) {
3130     buffer[buffer_pos++] = '0';
3131   } else {
3132     DCHECK_GE(position, 0);
3133     buffer_pos = utoa(static_cast<unsigned>(position + 1), buffer, buffer_pos);
3134   }
3135   return buffer_pos;
3136 }
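// Example: a position of -1 (unknown) is written as '0', while position 0 is
// written as '1', so consumers can treat 0 as "no position" and everything
// else as 1-based.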
3137 
3138 void HeapSnapshotJSONSerializer::SerializeTraceNodeInfos() {
3139   AllocationTracker* tracker = snapshot_->profiler()->allocation_tracker();
3140   if (!tracker) return;
3141   // The buffer needs space for 6 unsigned ints, 6 commas, \n and \0
3142   const int kBufferSize =
3143       6 * MaxDecimalDigitsIn<sizeof(unsigned)>::kUnsigned + 6 + 1 + 1;
3144   base::EmbeddedVector<char, kBufferSize> buffer;
3145   int i = 0;
3146   for (AllocationTracker::FunctionInfo* info : tracker->function_info_list()) {
3147     int buffer_pos = 0;
3148     if (i++ > 0) {
3149       buffer[buffer_pos++] = ',';
3150     }
3151     buffer_pos = utoa(info->function_id, buffer, buffer_pos);
3152     buffer[buffer_pos++] = ',';
3153     buffer_pos = utoa(GetStringId(info->name), buffer, buffer_pos);
3154     buffer[buffer_pos++] = ',';
3155     buffer_pos = utoa(GetStringId(info->script_name), buffer, buffer_pos);
3156     buffer[buffer_pos++] = ',';
3157     // The cast is safe because script id is a non-negative Smi.
3158     buffer_pos = utoa(static_cast<unsigned>(info->script_id), buffer,
3159         buffer_pos);
3160     buffer[buffer_pos++] = ',';
3161     buffer_pos = SerializePosition(info->line, buffer, buffer_pos);
3162     buffer[buffer_pos++] = ',';
3163     buffer_pos = SerializePosition(info->column, buffer, buffer_pos);
3164     buffer[buffer_pos++] = '\n';
3165     buffer[buffer_pos++] = '\0';
3166     writer_->AddString(buffer.begin());
3167   }
3168 }
3169 
3170 
3171 void HeapSnapshotJSONSerializer::SerializeSamples() {
3172   const std::vector<HeapObjectsMap::TimeInterval>& samples =
3173       snapshot_->profiler()->heap_object_map()->samples();
3174   if (samples.empty()) return;
3175   base::TimeTicks start_time = samples[0].timestamp;
3176   // The buffer needs space for 2 unsigned ints, 2 commas, \n and \0
3177   const int kBufferSize = MaxDecimalDigitsIn<sizeof(
3178                               base::TimeDelta().InMicroseconds())>::kUnsigned +
3179                           MaxDecimalDigitsIn<sizeof(samples[0].id)>::kUnsigned +
3180                           2 + 1 + 1;
3181   base::EmbeddedVector<char, kBufferSize> buffer;
3182   int i = 0;
3183   for (const HeapObjectsMap::TimeInterval& sample : samples) {
3184     int buffer_pos = 0;
3185     if (i++ > 0) {
3186       buffer[buffer_pos++] = ',';
3187     }
3188     base::TimeDelta time_delta = sample.timestamp - start_time;
3189     buffer_pos = utoa(time_delta.InMicroseconds(), buffer, buffer_pos);
3190     buffer[buffer_pos++] = ',';
3191     buffer_pos = utoa(sample.last_assigned_id(), buffer, buffer_pos);
3192     buffer[buffer_pos++] = '\n';
3193     buffer[buffer_pos++] = '\0';
3194     writer_->AddString(buffer.begin());
3195   }
3196 }
3197 
3198 
3199 void HeapSnapshotJSONSerializer::SerializeString(const unsigned char* s) {
3200   writer_->AddCharacter('\n');
3201   writer_->AddCharacter('\"');
3202   for ( ; *s != '\0'; ++s) {
3203     switch (*s) {
3204       case '\b':
3205         writer_->AddString("\\b");
3206         continue;
3207       case '\f':
3208         writer_->AddString("\\f");
3209         continue;
3210       case '\n':
3211         writer_->AddString("\\n");
3212         continue;
3213       case '\r':
3214         writer_->AddString("\\r");
3215         continue;
3216       case '\t':
3217         writer_->AddString("\\t");
3218         continue;
3219       case '\"':
3220       case '\\':
3221         writer_->AddCharacter('\\');
3222         writer_->AddCharacter(*s);
3223         continue;
3224       default:
3225         if (*s > 31 && *s < 128) {
3226           writer_->AddCharacter(*s);
3227         } else if (*s <= 31) {
3228           // Special character with no dedicated literal.
3229           WriteUChar(writer_, *s);
3230         } else {
3231           // Convert UTF-8 into \u UTF-16 literal.
3232           size_t length = 1, cursor = 0;
3233           for ( ; length <= 4 && *(s + length) != '\0'; ++length) { }
3234           unibrow::uchar c = unibrow::Utf8::CalculateValue(s, length, &cursor);
3235           if (c != unibrow::Utf8::kBadChar) {
3236             WriteUChar(writer_, c);
3237             DCHECK_NE(cursor, 0);
3238             s += cursor - 1;
3239           } else {
3240             writer_->AddCharacter('?');
3241           }
3242         }
3243     }
3244   }
3245   writer_->AddCharacter('\"');
3246 }
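// Escaping sketch with a hypothetical input: a string containing a tab, a
// quote and the two UTF-8 bytes 0xC3 0xA9 (U+00E9) is written as the JSON
// literal "\t\"\u00E9" on its own line; printable ASCII passes through
// unchanged and undecodable byte sequences fall back to '?'.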
3247 
3248 
3249 void HeapSnapshotJSONSerializer::SerializeStrings() {
3250   base::ScopedVector<const unsigned char*> sorted_strings(strings_.occupancy() +
3251                                                           1);
3252   for (base::HashMap::Entry* entry = strings_.Start(); entry != nullptr;
3253        entry = strings_.Next(entry)) {
3254     int index = static_cast<int>(reinterpret_cast<uintptr_t>(entry->value));
3255     sorted_strings[index] = reinterpret_cast<const unsigned char*>(entry->key);
3256   }
3257   writer_->AddString("\"<dummy>\"");
3258   for (int i = 1; i < sorted_strings.length(); ++i) {
3259     writer_->AddCharacter(',');
3260     SerializeString(sorted_strings[i]);
3261     if (writer_->aborted()) return;
3262   }
3263 }
3264 
3265 void HeapSnapshotJSONSerializer::SerializeLocation(
3266     const SourceLocation& location) {
3267   // The buffer needs space for 4 unsigned ints, 3 commas, \n and \0
3268   static const int kBufferSize =
3269       MaxDecimalDigitsIn<sizeof(unsigned)>::kUnsigned * 4 + 3 + 2;
3270   base::EmbeddedVector<char, kBufferSize> buffer;
3271   int buffer_pos = 0;
3272   buffer_pos = utoa(to_node_index(location.entry_index), buffer, buffer_pos);
3273   buffer[buffer_pos++] = ',';
3274   buffer_pos = utoa(location.scriptId, buffer, buffer_pos);
3275   buffer[buffer_pos++] = ',';
3276   buffer_pos = utoa(location.line, buffer, buffer_pos);
3277   buffer[buffer_pos++] = ',';
3278   buffer_pos = utoa(location.col, buffer, buffer_pos);
3279   buffer[buffer_pos++] = '\n';
3280   buffer[buffer_pos++] = '\0';
3281   writer_->AddString(buffer.begin());
3282 }
3283 
3284 void HeapSnapshotJSONSerializer::SerializeLocations() {
3285   const std::vector<SourceLocation>& locations = snapshot_->locations();
3286   for (size_t i = 0; i < locations.size(); i++) {
3287     if (i > 0) writer_->AddCharacter(',');
3288     SerializeLocation(locations[i]);
3289     if (writer_->aborted()) return;
3290   }
3291 }
3292 
3293 }  // namespace internal
3294 }  // namespace v8
3295