// Copyright 2013 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/profiler/heap-snapshot-generator.h"

#include <utility>

#include "src/api/api-inl.h"
#include "src/base/optional.h"
#include "src/base/vector.h"
#include "src/codegen/assembler-inl.h"
#include "src/common/globals.h"
#include "src/debug/debug.h"
#include "src/handles/global-handles.h"
#include "src/heap/combined-heap.h"
#include "src/heap/safepoint.h"
#include "src/numbers/conversions.h"
#include "src/objects/allocation-site-inl.h"
#include "src/objects/api-callbacks.h"
#include "src/objects/cell-inl.h"
#include "src/objects/feedback-cell-inl.h"
#include "src/objects/hash-table-inl.h"
#include "src/objects/js-array-buffer-inl.h"
#include "src/objects/js-array-inl.h"
#include "src/objects/js-collection-inl.h"
#include "src/objects/js-generator-inl.h"
#include "src/objects/js-promise-inl.h"
#include "src/objects/js-regexp-inl.h"
#include "src/objects/js-weak-refs-inl.h"
#include "src/objects/literal-objects-inl.h"
#include "src/objects/objects-inl.h"
#include "src/objects/prototype.h"
#include "src/objects/slots-inl.h"
#include "src/objects/struct-inl.h"
#include "src/objects/transitions-inl.h"
#include "src/objects/visitors.h"
#include "src/profiler/allocation-tracker.h"
#include "src/profiler/heap-profiler.h"
#include "src/profiler/heap-snapshot-generator-inl.h"
#include "src/profiler/output-stream-writer.h"

namespace v8 {
namespace internal {

#ifdef V8_ENABLE_HEAP_SNAPSHOT_VERIFY
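// Rough usage sketch (inferred from this file, not authoritative
// documentation): the snapshot generator constructs a HeapEntryVerifier on
// the stack while it emits the edges for a single object. Each added edge is
// routed via HeapEntry::VerifyReference below to CheckStrongReference or
// CheckWeakReference, and the destructor's CheckAllReferencesWereChecked call
// then confirms that nothing the marking visitor found was left out of the
// snapshot.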
class HeapEntryVerifier {
 public:
  HeapEntryVerifier(HeapSnapshotGenerator* generator, HeapObject obj)
      : generator_(generator),
        primary_object_(obj),
        reference_summary_(
            ReferenceSummary::SummarizeReferencesFrom(generator->heap(), obj)) {
    generator->set_verifier(this);
  }
  ~HeapEntryVerifier() {
    CheckAllReferencesWereChecked();
    generator_->set_verifier(nullptr);
  }

  // Checks that `host` retains `target`, according to the marking visitor. This
  // allows us to verify, when adding edges to the snapshot, that they
  // correspond to real retaining relationships.
  void CheckStrongReference(HeapObject host, HeapObject target) {
    // All references should be from the current primary object.
    CHECK_EQ(host, primary_object_);

    checked_objects_.insert(target);

    // Check whether there is a direct strong reference from host to target.
    if (reference_summary_.strong_references().find(target) !=
        reference_summary_.strong_references().end()) {
      return;
    }

    // There is no direct reference from host to target, but sometimes heap
    // snapshots include references that skip one, two, or three objects, such
    // as __proto__ on a JSObject referring to its Map's prototype, or a
    // property getter that bypasses the property array and accessor info. At
    // this point, we must check for those indirect references.
    for (size_t level = 0; level < 3; ++level) {
      const std::unordered_set<HeapObject, Object::Hasher>& indirect =
          GetIndirectStrongReferences(level);
      if (indirect.find(target) != indirect.end()) {
        return;
      }
    }

    FATAL("Could not find any matching reference");
  }

  // Checks that `host` has a weak reference to `target`, according to the
  // marking visitor.
  void CheckWeakReference(HeapObject host, HeapObject target) {
    // All references should be from the current primary object.
    CHECK_EQ(host, primary_object_);

    checked_objects_.insert(target);
    CHECK_NE(reference_summary_.weak_references().find(target),
             reference_summary_.weak_references().end());
  }

  // Marks the relationship between `host` and `target` as checked, even if the
  // marking visitor found no such relationship. This is necessary for
  // ephemerons, where a pair of objects is required to retain the target.
  // Use this function with care, since it bypasses verification.
  void MarkReferenceCheckedWithoutChecking(HeapObject host, HeapObject target) {
    if (host == primary_object_) {
      checked_objects_.insert(target);
    }
  }

  // Verifies that all of the references found by the marking visitor were
  // checked via a call to CheckStrongReference or CheckWeakReference, or
  // deliberately skipped via a call to MarkReferenceCheckedWithoutChecking.
  // This ensures that there aren't retaining relationships found by the marking
  // visitor which were omitted from the heap snapshot.
  void CheckAllReferencesWereChecked() {
    // Both loops below skip pointers to read-only objects, because the heap
    // snapshot deliberately omits many of those (see IsEssentialObject).
    // Read-only objects can't ever retain normal read-write objects, so these
    // are fine to skip.
    for (HeapObject obj : reference_summary_.strong_references()) {
      if (!BasicMemoryChunk::FromHeapObject(obj)->InReadOnlySpace()) {
        CHECK_NE(checked_objects_.find(obj), checked_objects_.end());
      }
    }
    for (HeapObject obj : reference_summary_.weak_references()) {
      if (!BasicMemoryChunk::FromHeapObject(obj)->InReadOnlySpace()) {
        CHECK_NE(checked_objects_.find(obj), checked_objects_.end());
      }
    }
  }

 private:
  const std::unordered_set<HeapObject, Object::Hasher>&
  GetIndirectStrongReferences(size_t level) {
    CHECK_GE(indirect_strong_references_.size(), level);

    if (indirect_strong_references_.size() == level) {
      // Expansion is needed.
      indirect_strong_references_.resize(level + 1);
      const std::unordered_set<HeapObject, Object::Hasher>& previous =
          level == 0 ? reference_summary_.strong_references()
                     : indirect_strong_references_[level - 1];
      for (HeapObject obj : previous) {
        if (BasicMemoryChunk::FromHeapObject(obj)->InReadOnlySpace()) {
          // Marking visitors don't expect to visit objects in read-only space,
          // and will fail DCHECKs if they are used on those objects. Read-only
          // objects can never retain anything outside read-only space, so
          // skipping those objects doesn't weaken verification.
          continue;
        }

        // Indirect references should only bypass internal structures, not
        // user-visible objects or contexts.
        if (obj.IsJSReceiver() || obj.IsString() || obj.IsContext()) {
          continue;
        }

        ReferenceSummary summary =
            ReferenceSummary::SummarizeReferencesFrom(generator_->heap(), obj);
        indirect_strong_references_[level].insert(
            summary.strong_references().begin(),
            summary.strong_references().end());
      }
    }

    return indirect_strong_references_[level];
  }

  DISALLOW_GARBAGE_COLLECTION(no_gc)
  HeapSnapshotGenerator* generator_;
  HeapObject primary_object_;

  // All objects referred to by primary_object_, according to a marking visitor.
  ReferenceSummary reference_summary_;

  // Objects that have been checked via a call to CheckStrongReference or
  // CheckWeakReference, or deliberately skipped via a call to
  // MarkReferenceCheckedWithoutChecking.
  std::unordered_set<HeapObject, Object::Hasher> checked_objects_;

  // Objects transitively retained by the primary object. The objects in the set
  // at index i are retained by the primary object via a chain of i+1
  // intermediate objects.
  std::vector<std::unordered_set<HeapObject, Object::Hasher>>
      indirect_strong_references_;
};
#endif

HeapGraphEdge::HeapGraphEdge(Type type, const char* name, HeapEntry* from,
                             HeapEntry* to)
    : bit_field_(TypeField::encode(type) |
                 FromIndexField::encode(from->index())),
      to_entry_(to),
      name_(name) {
  DCHECK(type == kContextVariable
      || type == kProperty
      || type == kInternal
      || type == kShortcut
      || type == kWeak);
}

HeapGraphEdge::HeapGraphEdge(Type type, int index, HeapEntry* from,
                             HeapEntry* to)
    : bit_field_(TypeField::encode(type) |
                 FromIndexField::encode(from->index())),
      to_entry_(to),
      index_(index) {
  DCHECK(type == kElement || type == kHidden);
}

HeapEntry::HeapEntry(HeapSnapshot* snapshot, int index, Type type,
                     const char* name, SnapshotObjectId id, size_t self_size,
                     unsigned trace_node_id)
    : type_(type),
      index_(index),
      children_count_(0),
      self_size_(self_size),
      snapshot_(snapshot),
      name_(name),
      id_(id),
      trace_node_id_(trace_node_id) {
  DCHECK_GE(index, 0);
}

void HeapEntry::VerifyReference(HeapGraphEdge::Type type, HeapEntry* entry,
                                HeapSnapshotGenerator* generator,
                                ReferenceVerification verification) {
#ifdef V8_ENABLE_HEAP_SNAPSHOT_VERIFY
  if (verification == kOffHeapPointer || generator->verifier() == nullptr) {
    // Off-heap pointers are outside the scope of this verification; we just
    // trust the embedder to provide accurate data. If the verifier is null,
    // then verification is disabled.
    return;
  }
  if (verification == kCustomWeakPointer) {
    // The caller declared that this is a weak pointer ignored by the marking
    // visitor. All we can verify at this point is that the edge type declares
    // it to be weak.
    CHECK_EQ(type, HeapGraphEdge::kWeak);
    return;
  }
  Address from_address =
      reinterpret_cast<Address>(generator->FindHeapThingForHeapEntry(this));
  Address to_address =
      reinterpret_cast<Address>(generator->FindHeapThingForHeapEntry(entry));
  if (from_address == kNullAddress || to_address == kNullAddress) {
    // One of these entries doesn't correspond to a real heap object.
    // Verification is not possible.
    return;
  }
  HeapObject from_obj = HeapObject::cast(Object(from_address));
  HeapObject to_obj = HeapObject::cast(Object(to_address));
  if (BasicMemoryChunk::FromHeapObject(to_obj)->InReadOnlySpace()) {
    // We can't verify pointers into read-only space, because marking visitors
    // might not mark those. For example, every Map has a pointer to the
    // MetaMap, but marking visitors don't bother with following that link.
    // Read-only objects are immortal and can never point to things outside of
    // read-only space, so ignoring these objects is safe from the perspective
    // of ensuring accurate retaining paths for normal read-write objects.
    // Therefore, do nothing.
  } else if (verification == kEphemeron) {
    // Ephemerons can't be verified because they aren't marked directly by the
    // marking visitor.
    generator->verifier()->MarkReferenceCheckedWithoutChecking(from_obj,
                                                               to_obj);
  } else if (type == HeapGraphEdge::kWeak) {
    generator->verifier()->CheckWeakReference(from_obj, to_obj);
  } else {
    generator->verifier()->CheckStrongReference(from_obj, to_obj);
  }
#endif
}

void HeapEntry::SetNamedReference(HeapGraphEdge::Type type, const char* name,
                                  HeapEntry* entry,
                                  HeapSnapshotGenerator* generator,
                                  ReferenceVerification verification) {
  ++children_count_;
  snapshot_->edges().emplace_back(type, name, this, entry);
  VerifyReference(type, entry, generator, verification);
}

void HeapEntry::SetIndexedReference(HeapGraphEdge::Type type, int index,
                                    HeapEntry* entry,
                                    HeapSnapshotGenerator* generator,
                                    ReferenceVerification verification) {
  ++children_count_;
  snapshot_->edges().emplace_back(type, index, this, entry);
  VerifyReference(type, entry, generator, verification);
}

void HeapEntry::SetNamedAutoIndexReference(HeapGraphEdge::Type type,
                                           const char* description,
                                           HeapEntry* child,
                                           StringsStorage* names,
                                           HeapSnapshotGenerator* generator,
                                           ReferenceVerification verification) {
  int index = children_count_ + 1;
  const char* name = description
                         ? names->GetFormatted("%d / %s", index, description)
                         : names->GetName(index);
  SetNamedReference(type, name, child, generator, verification);
}

void HeapEntry::Print(const char* prefix, const char* edge_name, int max_depth,
                      int indent) const {
  STATIC_ASSERT(sizeof(unsigned) == sizeof(id()));
  base::OS::Print("%6zu @%6u %*c %s%s: ", self_size(), id(), indent, ' ',
                  prefix, edge_name);
  if (type() != kString) {
    base::OS::Print("%s %.40s\n", TypeAsString(), name_);
  } else {
    base::OS::Print("\"");
    const char* c = name_;
    while (*c && (c - name_) <= 40) {
      if (*c != '\n')
        base::OS::Print("%c", *c);
      else
        base::OS::Print("\\n");
      ++c;
    }
    base::OS::Print("\"\n");
  }
  if (--max_depth == 0) return;
  for (auto i = children_begin(); i != children_end(); ++i) {
    HeapGraphEdge& edge = **i;
    const char* edge_prefix = "";
    base::EmbeddedVector<char, 64> index;
    edge_name = index.begin();
    switch (edge.type()) {
      case HeapGraphEdge::kContextVariable:
        edge_prefix = "#";
        edge_name = edge.name();
        break;
      case HeapGraphEdge::kElement:
        SNPrintF(index, "%d", edge.index());
        break;
      case HeapGraphEdge::kInternal:
        edge_prefix = "$";
        edge_name = edge.name();
        break;
      case HeapGraphEdge::kProperty:
        edge_name = edge.name();
        break;
      case HeapGraphEdge::kHidden:
        edge_prefix = "$";
        SNPrintF(index, "%d", edge.index());
        break;
      case HeapGraphEdge::kShortcut:
        edge_prefix = "^";
        edge_name = edge.name();
        break;
      case HeapGraphEdge::kWeak:
        edge_prefix = "w";
        edge_name = edge.name();
        break;
      default:
        SNPrintF(index, "!!! unknown edge type: %d ", edge.type());
    }
    edge.to()->Print(edge_prefix, edge_name, max_depth, indent + 2);
  }
}

const char* HeapEntry::TypeAsString() const {
  switch (type()) {
    case kHidden: return "/hidden/";
    case kObject: return "/object/";
    case kClosure: return "/closure/";
    case kString: return "/string/";
    case kCode: return "/code/";
    case kArray: return "/array/";
    case kRegExp: return "/regexp/";
    case kHeapNumber: return "/number/";
    case kNative: return "/native/";
    case kSynthetic: return "/synthetic/";
    case kConsString: return "/concatenated string/";
    case kSlicedString: return "/sliced string/";
    case kSymbol: return "/symbol/";
    case kBigInt:
      return "/bigint/";
    default: return "???";
  }
}

HeapSnapshot::HeapSnapshot(HeapProfiler* profiler, bool global_objects_as_roots,
                           bool capture_numeric_value)
    : profiler_(profiler),
      treat_global_objects_as_roots_(global_objects_as_roots),
      capture_numeric_value_(capture_numeric_value) {
  // It is very important to keep objects that form a heap snapshot
  // as small as possible. Check assumptions about data structure sizes.
  STATIC_ASSERT(kSystemPointerSize != 4 || sizeof(HeapGraphEdge) == 12);
  STATIC_ASSERT(kSystemPointerSize != 8 || sizeof(HeapGraphEdge) == 24);
  STATIC_ASSERT(kSystemPointerSize != 4 || sizeof(HeapEntry) == 32);
#if V8_CC_MSVC
  STATIC_ASSERT(kSystemPointerSize != 8 || sizeof(HeapEntry) == 48);
#else   // !V8_CC_MSVC
  STATIC_ASSERT(kSystemPointerSize != 8 || sizeof(HeapEntry) == 40);
#endif  // !V8_CC_MSVC
  memset(&gc_subroot_entries_, 0, sizeof(gc_subroot_entries_));
}

void HeapSnapshot::Delete() {
  profiler_->RemoveSnapshot(this);
}

void HeapSnapshot::RememberLastJSObjectId() {
  max_snapshot_js_object_id_ = profiler_->heap_object_map()->last_assigned_id();
}

void HeapSnapshot::AddSyntheticRootEntries() {
  AddRootEntry();
  AddGcRootsEntry();
  SnapshotObjectId id = HeapObjectsMap::kGcRootsFirstSubrootId;
  for (int root = 0; root < static_cast<int>(Root::kNumberOfRoots); root++) {
    AddGcSubrootEntry(static_cast<Root>(root), id);
    id += HeapObjectsMap::kObjectIdStep;
  }
  DCHECK_EQ(HeapObjectsMap::kFirstAvailableObjectId, id);
}

void HeapSnapshot::AddRootEntry() {
  DCHECK_NULL(root_entry_);
  DCHECK(entries_.empty());  // Root entry must be the first one.
  root_entry_ = AddEntry(HeapEntry::kSynthetic, "",
                         HeapObjectsMap::kInternalRootObjectId, 0, 0);
  DCHECK_EQ(1u, entries_.size());
  DCHECK_EQ(root_entry_, &entries_.front());
}

void HeapSnapshot::AddGcRootsEntry() {
  DCHECK_NULL(gc_roots_entry_);
  gc_roots_entry_ = AddEntry(HeapEntry::kSynthetic, "(GC roots)",
                             HeapObjectsMap::kGcRootsObjectId, 0, 0);
}

void HeapSnapshot::AddGcSubrootEntry(Root root, SnapshotObjectId id) {
  DCHECK_NULL(gc_subroot_entries_[static_cast<int>(root)]);
  gc_subroot_entries_[static_cast<int>(root)] =
      AddEntry(HeapEntry::kSynthetic, RootVisitor::RootName(root), id, 0, 0);
}

void HeapSnapshot::AddLocation(HeapEntry* entry, int scriptId, int line,
                               int col) {
  locations_.emplace_back(entry->index(), scriptId, line, col);
}

HeapEntry* HeapSnapshot::AddEntry(HeapEntry::Type type,
                                  const char* name,
                                  SnapshotObjectId id,
                                  size_t size,
                                  unsigned trace_node_id) {
  DCHECK(!is_complete());
  entries_.emplace_back(this, static_cast<int>(entries_.size()), type, name, id,
                        size, trace_node_id);
  return &entries_.back();
}

void HeapSnapshot::FillChildren() {
  DCHECK(children().empty());
  int children_index = 0;
  for (HeapEntry& entry : entries()) {
    children_index = entry.set_children_index(children_index);
  }
  DCHECK_EQ(edges().size(), static_cast<size_t>(children_index));
  children().resize(edges().size());
  for (HeapGraphEdge& edge : edges()) {
    edge.from()->add_child(&edge);
  }
}

HeapEntry* HeapSnapshot::GetEntryById(SnapshotObjectId id) {
  if (entries_by_id_cache_.empty()) {
    CHECK(is_complete());
    entries_by_id_cache_.reserve(entries_.size());
    for (HeapEntry& entry : entries_) {
      entries_by_id_cache_.emplace(entry.id(), &entry);
    }
  }
  auto it = entries_by_id_cache_.find(id);
  return it != entries_by_id_cache_.end() ? it->second : nullptr;
}

void HeapSnapshot::Print(int max_depth) {
  root()->Print("", "", max_depth, 0);
}

// We split IDs on evens for embedder objects (see
// HeapObjectsMap::GenerateId) and odds for native objects.
const SnapshotObjectId HeapObjectsMap::kInternalRootObjectId = 1;
const SnapshotObjectId HeapObjectsMap::kGcRootsObjectId =
    HeapObjectsMap::kInternalRootObjectId + HeapObjectsMap::kObjectIdStep;
const SnapshotObjectId HeapObjectsMap::kGcRootsFirstSubrootId =
    HeapObjectsMap::kGcRootsObjectId + HeapObjectsMap::kObjectIdStep;
const SnapshotObjectId HeapObjectsMap::kFirstAvailableObjectId =
    HeapObjectsMap::kGcRootsFirstSubrootId +
    static_cast<int>(Root::kNumberOfRoots) * HeapObjectsMap::kObjectIdStep;
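// The even/odd split above implies kObjectIdStep is 2, so the synthetic IDs
// come out as 1 (root), 3 (GC roots), then 5, 7, ... (one per Root), and real
// heap objects are assigned IDs starting at kFirstAvailableObjectId.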

HeapObjectsMap::HeapObjectsMap(Heap* heap)
    : next_id_(kFirstAvailableObjectId), heap_(heap) {
  // The dummy element at zero index is needed as entries_map_ cannot hold
  // an entry with zero value. Otherwise it's impossible to tell whether
  // LookupOrInsert has added a new item or just returned an existing one
  // whose value is zero.
  entries_.emplace_back(0, kNullAddress, 0, true);
}

bool HeapObjectsMap::MoveObject(Address from, Address to, int object_size) {
  DCHECK_NE(kNullAddress, to);
  DCHECK_NE(kNullAddress, from);
  if (from == to) return false;
  void* from_value = entries_map_.Remove(reinterpret_cast<void*>(from),
                                         ComputeAddressHash(from));
  if (from_value == nullptr) {
    // It may occur that some untracked object moves to an address X and there
    // is a tracked object at that address. In this case we should remove the
    // entry as we know that the object has died.
    void* to_value = entries_map_.Remove(reinterpret_cast<void*>(to),
                                         ComputeAddressHash(to));
    if (to_value != nullptr) {
      int to_entry_info_index =
          static_cast<int>(reinterpret_cast<intptr_t>(to_value));
      entries_.at(to_entry_info_index).addr = kNullAddress;
    }
  } else {
    base::HashMap::Entry* to_entry = entries_map_.LookupOrInsert(
        reinterpret_cast<void*>(to), ComputeAddressHash(to));
    if (to_entry->value != nullptr) {
      // We found an existing entry for the to address, belonging to an old
      // object. Without this operation we would end up with two EntryInfos
      // holding the same value in their addr field, which is bad because
      // later, in RemoveDeadEntries, one of those entries would be removed
      // along with the corresponding entries_map_ entry.
      int to_entry_info_index =
          static_cast<int>(reinterpret_cast<intptr_t>(to_entry->value));
      entries_.at(to_entry_info_index).addr = kNullAddress;
    }
    int from_entry_info_index =
        static_cast<int>(reinterpret_cast<intptr_t>(from_value));
    entries_.at(from_entry_info_index).addr = to;
    // Size of an object can change during its life, so to keep information
    // about the object in entries_ consistent, we have to adjust size when the
    // object is migrated.
    if (FLAG_heap_profiler_trace_objects) {
      PrintF("Move object from %p to %p old size %6d new size %6d\n",
             reinterpret_cast<void*>(from), reinterpret_cast<void*>(to),
             entries_.at(from_entry_info_index).size, object_size);
    }
    entries_.at(from_entry_info_index).size = object_size;
    to_entry->value = from_value;
  }
  return from_value != nullptr;
}


void HeapObjectsMap::UpdateObjectSize(Address addr, int size) {
  FindOrAddEntry(addr, size, false);
}


SnapshotObjectId HeapObjectsMap::FindEntry(Address addr) {
  base::HashMap::Entry* entry = entries_map_.Lookup(
      reinterpret_cast<void*>(addr), ComputeAddressHash(addr));
  if (entry == nullptr) return v8::HeapProfiler::kUnknownObjectId;
  int entry_index = static_cast<int>(reinterpret_cast<intptr_t>(entry->value));
  EntryInfo& entry_info = entries_.at(entry_index);
  DCHECK(static_cast<uint32_t>(entries_.size()) > entries_map_.occupancy());
  return entry_info.id;
}


SnapshotObjectId HeapObjectsMap::FindOrAddEntry(Address addr,
                                                unsigned int size,
                                                bool accessed) {
  DCHECK(static_cast<uint32_t>(entries_.size()) > entries_map_.occupancy());
  base::HashMap::Entry* entry = entries_map_.LookupOrInsert(
      reinterpret_cast<void*>(addr), ComputeAddressHash(addr));
  if (entry->value != nullptr) {
    int entry_index =
        static_cast<int>(reinterpret_cast<intptr_t>(entry->value));
    EntryInfo& entry_info = entries_.at(entry_index);
    entry_info.accessed = accessed;
    if (FLAG_heap_profiler_trace_objects) {
      PrintF("Update object size : %p with old size %d and new size %d\n",
             reinterpret_cast<void*>(addr), entry_info.size, size);
    }
    entry_info.size = size;
    return entry_info.id;
  }
  entry->value = reinterpret_cast<void*>(entries_.size());
  SnapshotObjectId id = get_next_id();
  entries_.push_back(EntryInfo(id, addr, size, accessed));
  DCHECK(static_cast<uint32_t>(entries_.size()) > entries_map_.occupancy());
  return id;
}

SnapshotObjectId HeapObjectsMap::FindMergedNativeEntry(NativeObject addr) {
  auto it = merged_native_entries_map_.find(addr);
  if (it == merged_native_entries_map_.end())
    return v8::HeapProfiler::kUnknownObjectId;
  return entries_[it->second].id;
}

void HeapObjectsMap::AddMergedNativeEntry(NativeObject addr,
                                          Address canonical_addr) {
  base::HashMap::Entry* entry =
      entries_map_.Lookup(reinterpret_cast<void*>(canonical_addr),
                          ComputeAddressHash(canonical_addr));
  auto result = merged_native_entries_map_.insert(
      {addr, reinterpret_cast<size_t>(entry->value)});
  if (!result.second) {
    result.first->second = reinterpret_cast<size_t>(entry->value);
  }
}

void HeapObjectsMap::StopHeapObjectsTracking() { time_intervals_.clear(); }

void HeapObjectsMap::UpdateHeapObjectsMap() {
  if (FLAG_heap_profiler_trace_objects) {
    PrintF("Begin HeapObjectsMap::UpdateHeapObjectsMap. map has %d entries.\n",
           entries_map_.occupancy());
  }
  heap_->PreciseCollectAllGarbage(Heap::kNoGCFlags,
                                  GarbageCollectionReason::kHeapProfiler);
  PtrComprCageBase cage_base(heap_->isolate());
  CombinedHeapObjectIterator iterator(heap_);
  for (HeapObject obj = iterator.Next(); !obj.is_null();
       obj = iterator.Next()) {
    int object_size = obj.Size(cage_base);
    FindOrAddEntry(obj.address(), object_size);
    if (FLAG_heap_profiler_trace_objects) {
      PrintF("Update object      : %p %6d. Next address is %p\n",
             reinterpret_cast<void*>(obj.address()), object_size,
             reinterpret_cast<void*>(obj.address() + object_size));
    }
  }
  RemoveDeadEntries();
  if (FLAG_heap_profiler_trace_objects) {
    PrintF("End HeapObjectsMap::UpdateHeapObjectsMap. map has %d entries.\n",
           entries_map_.occupancy());
  }
}

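// Streams one v8::HeapStatsUpdate for every recorded time interval whose live
// object count or total size has changed, flushing in chunks of the stream's
// preferred size, and returns the last assigned object id.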
SnapshotObjectId HeapObjectsMap::PushHeapObjectsStats(OutputStream* stream,
                                                      int64_t* timestamp_us) {
  UpdateHeapObjectsMap();
  time_intervals_.emplace_back(next_id_);
  int prefered_chunk_size = stream->GetChunkSize();
  std::vector<v8::HeapStatsUpdate> stats_buffer;
  DCHECK(!entries_.empty());
  EntryInfo* entry_info = &entries_.front();
  EntryInfo* end_entry_info = &entries_.back() + 1;
  for (size_t time_interval_index = 0;
       time_interval_index < time_intervals_.size(); ++time_interval_index) {
    TimeInterval& time_interval = time_intervals_[time_interval_index];
    SnapshotObjectId time_interval_id = time_interval.id;
    uint32_t entries_size = 0;
    EntryInfo* start_entry_info = entry_info;
    while (entry_info < end_entry_info && entry_info->id < time_interval_id) {
      entries_size += entry_info->size;
      ++entry_info;
    }
    uint32_t entries_count =
        static_cast<uint32_t>(entry_info - start_entry_info);
    if (time_interval.count != entries_count ||
        time_interval.size != entries_size) {
      stats_buffer.emplace_back(static_cast<uint32_t>(time_interval_index),
                                time_interval.count = entries_count,
                                time_interval.size = entries_size);
      if (static_cast<int>(stats_buffer.size()) >= prefered_chunk_size) {
        OutputStream::WriteResult result = stream->WriteHeapStatsChunk(
            &stats_buffer.front(), static_cast<int>(stats_buffer.size()));
        if (result == OutputStream::kAbort) return last_assigned_id();
        stats_buffer.clear();
      }
    }
  }
  DCHECK(entry_info == end_entry_info);
  if (!stats_buffer.empty()) {
    OutputStream::WriteResult result = stream->WriteHeapStatsChunk(
        &stats_buffer.front(), static_cast<int>(stats_buffer.size()));
    if (result == OutputStream::kAbort) return last_assigned_id();
  }
  stream->EndOfStream();
  if (timestamp_us) {
    *timestamp_us =
        (time_intervals_.back().timestamp - time_intervals_.front().timestamp)
            .InMicroseconds();
  }
  return last_assigned_id();
}


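// Compacts entries_: entries whose objects were not marked as accessed since
// the last update are dropped, surviving entries are moved down, and the
// indices stored in entries_map_ and merged_native_entries_map_ are rewritten
// to match their new positions.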
void HeapObjectsMap::RemoveDeadEntries() {
  DCHECK(entries_.size() > 0 && entries_.at(0).id == 0 &&
         entries_.at(0).addr == kNullAddress);

  // Build up temporary reverse map.
  std::unordered_map<size_t, NativeObject> reverse_merged_native_entries_map;
  for (const auto& it : merged_native_entries_map_) {
    auto result =
        reverse_merged_native_entries_map.emplace(it.second, it.first);
    DCHECK(result.second);
    USE(result);
  }

  size_t first_free_entry = 1;
  for (size_t i = 1; i < entries_.size(); ++i) {
    EntryInfo& entry_info = entries_.at(i);
    auto merged_reverse_it = reverse_merged_native_entries_map.find(i);
    if (entry_info.accessed) {
      if (first_free_entry != i) {
        entries_.at(first_free_entry) = entry_info;
      }
      entries_.at(first_free_entry).accessed = false;
      base::HashMap::Entry* entry =
          entries_map_.Lookup(reinterpret_cast<void*>(entry_info.addr),
                              ComputeAddressHash(entry_info.addr));
      DCHECK(entry);
      entry->value = reinterpret_cast<void*>(first_free_entry);
      if (merged_reverse_it != reverse_merged_native_entries_map.end()) {
        auto it = merged_native_entries_map_.find(merged_reverse_it->second);
        DCHECK_NE(merged_native_entries_map_.end(), it);
        it->second = first_free_entry;
      }
      ++first_free_entry;
    } else {
      if (entry_info.addr) {
        entries_map_.Remove(reinterpret_cast<void*>(entry_info.addr),
                            ComputeAddressHash(entry_info.addr));
        if (merged_reverse_it != reverse_merged_native_entries_map.end()) {
          merged_native_entries_map_.erase(merged_reverse_it->second);
        }
      }
    }
  }
  entries_.erase(entries_.begin() + first_free_entry, entries_.end());

  DCHECK(static_cast<uint32_t>(entries_.size()) - 1 ==
         entries_map_.occupancy());
}

V8HeapExplorer::V8HeapExplorer(HeapSnapshot* snapshot,
                               SnapshottingProgressReportingInterface* progress,
                               v8::HeapProfiler::ObjectNameResolver* resolver)
    : heap_(snapshot->profiler()->heap_object_map()->heap()),
      snapshot_(snapshot),
      names_(snapshot_->profiler()->names()),
      heap_object_map_(snapshot_->profiler()->heap_object_map()),
      progress_(progress),
      generator_(nullptr),
      global_object_name_resolver_(resolver) {}

HeapEntry* V8HeapExplorer::AllocateEntry(HeapThing ptr) {
  return AddEntry(HeapObject::cast(Object(reinterpret_cast<Address>(ptr))));
}

HeapEntry* V8HeapExplorer::AllocateEntry(Smi smi) {
  SnapshotObjectId id = heap_object_map_->get_next_id();
  HeapEntry* entry =
      snapshot_->AddEntry(HeapEntry::kHeapNumber, "smi number", id, 0, 0);
  // XXX: Smis do not appear in CombinedHeapObjectIterator, so we need to
  // extract the references here
  ExtractNumberReference(entry, smi);
  return entry;
}

void V8HeapExplorer::ExtractLocation(HeapEntry* entry, HeapObject object) {
  if (object.IsJSFunction()) {
    JSFunction func = JSFunction::cast(object);
    ExtractLocationForJSFunction(entry, func);

  } else if (object.IsJSGeneratorObject()) {
    JSGeneratorObject gen = JSGeneratorObject::cast(object);
    ExtractLocationForJSFunction(entry, gen.function());

  } else if (object.IsJSObject()) {
    JSObject obj = JSObject::cast(object);
    JSFunction maybe_constructor = GetConstructor(heap_->isolate(), obj);

    if (!maybe_constructor.is_null()) {
      ExtractLocationForJSFunction(entry, maybe_constructor);
    }
  }
}

void V8HeapExplorer::ExtractLocationForJSFunction(HeapEntry* entry,
                                                  JSFunction func) {
  if (!func.shared().script().IsScript()) return;
  Script script = Script::cast(func.shared().script());
  int scriptId = script.id();
  int start = func.shared().StartPosition();
  Script::PositionInfo info;
  script.GetPositionInfo(start, &info, Script::WITH_OFFSET);
  snapshot_->AddLocation(entry, scriptId, info.line, info.column);
}

HeapEntry* V8HeapExplorer::AddEntry(HeapObject object) {
  if (object.IsJSFunction()) {
    JSFunction func = JSFunction::cast(object);
    SharedFunctionInfo shared = func.shared();
    const char* name = names_->GetName(shared.Name());
    return AddEntry(object, HeapEntry::kClosure, name);
  } else if (object.IsJSBoundFunction()) {
    return AddEntry(object, HeapEntry::kClosure, "native_bind");
  } else if (object.IsJSRegExp()) {
    JSRegExp re = JSRegExp::cast(object);
    return AddEntry(object, HeapEntry::kRegExp, names_->GetName(re.source()));
  } else if (object.IsJSObject()) {
    // TODO(v8:12674) Fix and run full gcmole.
    DisableGCMole no_gcmole;
    const char* name = names_->GetName(
        GetConstructorName(heap_->isolate(), JSObject::cast(object)));
    if (object.IsJSGlobalObject()) {
      auto it = global_object_tag_map_.find(JSGlobalObject::cast(object));
      if (it != global_object_tag_map_.end()) {
        name = names_->GetFormatted("%s / %s", name, it->second);
      }
    }
    return AddEntry(object, HeapEntry::kObject, name);
  } else if (object.IsString()) {
    String string = String::cast(object);
    if (string.IsConsString()) {
      return AddEntry(object, HeapEntry::kConsString, "(concatenated string)");
    } else if (string.IsSlicedString()) {
      return AddEntry(object, HeapEntry::kSlicedString, "(sliced string)");
    } else {
      return AddEntry(object, HeapEntry::kString,
                      names_->GetName(String::cast(object)));
    }
  } else if (object.IsSymbol()) {
    if (Symbol::cast(object).is_private())
      return AddEntry(object, HeapEntry::kHidden, "private symbol");
    else
      return AddEntry(object, HeapEntry::kSymbol, "symbol");
  } else if (object.IsBigInt()) {
    return AddEntry(object, HeapEntry::kBigInt, "bigint");
  } else if (object.IsCode()) {
    return AddEntry(object, HeapEntry::kCode, "");
  } else if (object.IsSharedFunctionInfo()) {
    String name = SharedFunctionInfo::cast(object).Name();
    return AddEntry(object, HeapEntry::kCode, names_->GetName(name));
  } else if (object.IsScript()) {
    Object name = Script::cast(object).name();
    return AddEntry(object, HeapEntry::kCode,
                    name.IsString() ? names_->GetName(String::cast(name)) : "");
  } else if (object.IsNativeContext()) {
    return AddEntry(object, HeapEntry::kHidden, "system / NativeContext");
  } else if (object.IsContext()) {
    return AddEntry(object, HeapEntry::kObject, "system / Context");
  } else if (object.IsHeapNumber()) {
    return AddEntry(object, HeapEntry::kHeapNumber, "heap number");
  }
  return AddEntry(object, GetSystemEntryType(object),
                  GetSystemEntryName(object));
}

HeapEntry* V8HeapExplorer::AddEntry(HeapObject object, HeapEntry::Type type,
                                    const char* name) {
  if (FLAG_heap_profiler_show_hidden_objects && type == HeapEntry::kHidden) {
    type = HeapEntry::kNative;
  }
  PtrComprCageBase cage_base(isolate());
  return AddEntry(object.address(), type, name, object.Size(cage_base));
}

HeapEntry* V8HeapExplorer::AddEntry(Address address,
                                    HeapEntry::Type type,
                                    const char* name,
                                    size_t size) {
  SnapshotObjectId object_id = heap_object_map_->FindOrAddEntry(
      address, static_cast<unsigned int>(size));
  unsigned trace_node_id = 0;
  if (AllocationTracker* allocation_tracker =
      snapshot_->profiler()->allocation_tracker()) {
    trace_node_id =
        allocation_tracker->address_to_trace()->GetTraceNodeId(address);
  }
  return snapshot_->AddEntry(type, name, object_id, size, trace_node_id);
}

const char* V8HeapExplorer::GetSystemEntryName(HeapObject object) {
  if (object.IsMap()) {
    switch (Map::cast(object).instance_type()) {
#define MAKE_STRING_MAP_CASE(instance_type, size, name, Name) \
        case instance_type: return "system / Map (" #Name ")";
      STRING_TYPE_LIST(MAKE_STRING_MAP_CASE)
#undef MAKE_STRING_MAP_CASE
        default: return "system / Map";
    }
  }

  InstanceType type = object.map().instance_type();

  // Empty string names are special: TagObject can overwrite them, and devtools
  // will report them as "(internal array)".
  if (InstanceTypeChecker::IsFixedArray(type) ||
      InstanceTypeChecker::IsFixedDoubleArray(type) ||
      InstanceTypeChecker::IsByteArray(type)) {
    return "";
  }

  switch (type) {
#define MAKE_TORQUE_CASE(Name, TYPE) \
  case TYPE:                         \
    return "system / " #Name;
    // The following lists include every non-String instance type.
    // This includes a few types that already have non-"system" names assigned
    // by AddEntry, but this is a convenient way to avoid manual upkeep here.
    TORQUE_INSTANCE_CHECKERS_SINGLE_FULLY_DEFINED(MAKE_TORQUE_CASE)
    TORQUE_INSTANCE_CHECKERS_MULTIPLE_FULLY_DEFINED(MAKE_TORQUE_CASE)
    TORQUE_INSTANCE_CHECKERS_SINGLE_ONLY_DECLARED(MAKE_TORQUE_CASE)
    TORQUE_INSTANCE_CHECKERS_MULTIPLE_ONLY_DECLARED(MAKE_TORQUE_CASE)
#undef MAKE_TORQUE_CASE

    // Strings were already handled by AddEntry.
#define MAKE_STRING_CASE(instance_type, size, name, Name) \
  case instance_type:                                     \
    UNREACHABLE();
    STRING_TYPE_LIST(MAKE_STRING_CASE)
#undef MAKE_STRING_CASE
  }
}

HeapEntry::Type V8HeapExplorer::GetSystemEntryType(HeapObject object) {
  InstanceType type = object.map().instance_type();
  if (InstanceTypeChecker::IsAllocationSite(type) ||
      InstanceTypeChecker::IsArrayBoilerplateDescription(type) ||
      InstanceTypeChecker::IsBytecodeArray(type) ||
      InstanceTypeChecker::IsClosureFeedbackCellArray(type) ||
      InstanceTypeChecker::IsCodeDataContainer(type) ||
      InstanceTypeChecker::IsFeedbackCell(type) ||
      InstanceTypeChecker::IsFeedbackMetadata(type) ||
      InstanceTypeChecker::IsFeedbackVector(type) ||
      InstanceTypeChecker::IsInterpreterData(type) ||
      InstanceTypeChecker::IsLoadHandler(type) ||
      InstanceTypeChecker::IsObjectBoilerplateDescription(type) ||
      InstanceTypeChecker::IsPreparseData(type) ||
      InstanceTypeChecker::IsRegExpBoilerplateDescription(type) ||
      InstanceTypeChecker::IsScopeInfo(type) ||
      InstanceTypeChecker::IsStoreHandler(type) ||
      InstanceTypeChecker::IsTemplateObjectDescription(type) ||
      InstanceTypeChecker::IsTurbofanType(type) ||
      InstanceTypeChecker::IsUncompiledData(type)) {
    return HeapEntry::kCode;
  }

  // This check must come second, because some subtypes of FixedArray are
  // determined above to represent code content.
  if (InstanceTypeChecker::IsFixedArray(type) ||
      InstanceTypeChecker::IsFixedDoubleArray(type) ||
      InstanceTypeChecker::IsByteArray(type)) {
    return HeapEntry::kArray;
  }

  return HeapEntry::kHidden;
}

uint32_t V8HeapExplorer::EstimateObjectsCount() {
  CombinedHeapObjectIterator it(heap_, HeapObjectIterator::kFilterUnreachable);
  uint32_t objects_count = 0;
  // Avoid overflowing the objects count. In worst case, we will show the same
  // progress for a longer period of time, but we do not expect to have that
  // many objects.
  while (!it.Next().is_null() &&
         objects_count != std::numeric_limits<uint32_t>::max())
    ++objects_count;
  return objects_count;
}

class IndexedReferencesExtractor : public ObjectVisitorWithCageBases {
 public:
  IndexedReferencesExtractor(V8HeapExplorer* generator, HeapObject parent_obj,
                             HeapEntry* parent)
      : ObjectVisitorWithCageBases(generator->isolate()),
        generator_(generator),
        parent_obj_(parent_obj),
        parent_start_(parent_obj_.RawMaybeWeakField(0)),
        parent_end_(
            parent_obj_.RawMaybeWeakField(parent_obj_.Size(cage_base()))),
        parent_(parent),
        next_index_(0) {}
  void VisitPointers(HeapObject host, ObjectSlot start,
                     ObjectSlot end) override {
    VisitPointers(host, MaybeObjectSlot(start), MaybeObjectSlot(end));
  }
  void VisitMapPointer(HeapObject object) override {
    VisitSlotImpl(cage_base(), object.map_slot());
  }
  void VisitPointers(HeapObject host, MaybeObjectSlot start,
                     MaybeObjectSlot end) override {
    // [start, end) must be a sub-region of [parent_start_, parent_end_), i.e.
    // all the slots must point inside the object.
    CHECK_LE(parent_start_, start);
    CHECK_LE(end, parent_end_);
    for (MaybeObjectSlot slot = start; slot < end; ++slot) {
      VisitSlotImpl(cage_base(), slot);
    }
  }

  void VisitCodePointer(HeapObject host, CodeObjectSlot slot) override {
    CHECK(V8_EXTERNAL_CODE_SPACE_BOOL);
    VisitSlotImpl(code_cage_base(), slot);
  }

  void VisitCodeTarget(Code host, RelocInfo* rinfo) override {
    Code target = Code::GetCodeFromTargetAddress(rinfo->target_address());
    VisitHeapObjectImpl(target, -1);
  }

  void VisitEmbeddedPointer(Code host, RelocInfo* rinfo) override {
    HeapObject object = rinfo->target_object(cage_base());
    if (host.IsWeakObject(object)) {
      generator_->SetWeakReference(parent_, next_index_++, object, {});
    } else {
      VisitHeapObjectImpl(object, -1);
    }
  }

 private:
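  // VisitSlotImpl skips (and resets) any slot whose index is flagged in
  // generator_->visited_fields_; the flags are presumably set elsewhere when
  // the explorer records an explicit named reference for that field, so the
  // same slot is not also reported as a hidden edge.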
1024   template <typename TSlot>
VisitSlotImpl(PtrComprCageBase cage_base,TSlot slot)1025   V8_INLINE void VisitSlotImpl(PtrComprCageBase cage_base, TSlot slot) {
1026     int field_index =
1027         static_cast<int>(MaybeObjectSlot(slot.address()) - parent_start_);
1028     if (generator_->visited_fields_[field_index]) {
1029       generator_->visited_fields_[field_index] = false;
1030     } else {
1031       HeapObject heap_object;
1032       auto loaded_value = slot.load(cage_base);
1033       if (loaded_value.GetHeapObjectIfStrong(&heap_object)) {
1034         VisitHeapObjectImpl(heap_object, field_index);
1035       } else if (loaded_value.GetHeapObjectIfWeak(&heap_object)) {
1036         generator_->SetWeakReference(parent_, next_index_++, heap_object, {});
1037       }
1038     }
1039   }
1040 
VisitHeapObjectImpl(HeapObject heap_object,int field_index)1041   V8_INLINE void VisitHeapObjectImpl(HeapObject heap_object, int field_index) {
1042     DCHECK_LE(-1, field_index);
1043     // The last parameter {field_offset} is only used to check some well-known
1044     // skipped references, so passing -1 * kTaggedSize for objects embedded
1045     // into code is fine.
1046     generator_->SetHiddenReference(parent_obj_, parent_, next_index_++,
1047                                    heap_object, field_index * kTaggedSize);
1048   }
1049 
1050   V8HeapExplorer* generator_;
1051   HeapObject parent_obj_;
1052   MaybeObjectSlot parent_start_;
1053   MaybeObjectSlot parent_end_;
1054   HeapEntry* parent_;
1055   int next_index_;
1056 };
1057 
ExtractReferences(HeapEntry * entry,HeapObject obj)1058 void V8HeapExplorer::ExtractReferences(HeapEntry* entry, HeapObject obj) {
1059   if (obj.IsJSGlobalProxy()) {
1060     ExtractJSGlobalProxyReferences(entry, JSGlobalProxy::cast(obj));
1061   } else if (obj.IsJSArrayBuffer()) {
1062     ExtractJSArrayBufferReferences(entry, JSArrayBuffer::cast(obj));
1063   } else if (obj.IsJSObject()) {
1064     if (obj.IsJSWeakSet()) {
1065       ExtractJSWeakCollectionReferences(entry, JSWeakSet::cast(obj));
1066     } else if (obj.IsJSWeakMap()) {
1067       ExtractJSWeakCollectionReferences(entry, JSWeakMap::cast(obj));
1068     } else if (obj.IsJSSet()) {
1069       ExtractJSCollectionReferences(entry, JSSet::cast(obj));
1070     } else if (obj.IsJSMap()) {
1071       ExtractJSCollectionReferences(entry, JSMap::cast(obj));
1072     } else if (obj.IsJSPromise()) {
1073       ExtractJSPromiseReferences(entry, JSPromise::cast(obj));
1074     } else if (obj.IsJSGeneratorObject()) {
1075       ExtractJSGeneratorObjectReferences(entry, JSGeneratorObject::cast(obj));
1076     } else if (obj.IsJSWeakRef()) {
1077       ExtractJSWeakRefReferences(entry, JSWeakRef::cast(obj));
1078     }
1079     ExtractJSObjectReferences(entry, JSObject::cast(obj));
1080   } else if (obj.IsString()) {
1081     ExtractStringReferences(entry, String::cast(obj));
1082   } else if (obj.IsSymbol()) {
1083     ExtractSymbolReferences(entry, Symbol::cast(obj));
1084   } else if (obj.IsMap()) {
1085     ExtractMapReferences(entry, Map::cast(obj));
1086   } else if (obj.IsSharedFunctionInfo()) {
1087     ExtractSharedFunctionInfoReferences(entry, SharedFunctionInfo::cast(obj));
1088   } else if (obj.IsScript()) {
1089     ExtractScriptReferences(entry, Script::cast(obj));
1090   } else if (obj.IsAccessorInfo()) {
1091     ExtractAccessorInfoReferences(entry, AccessorInfo::cast(obj));
1092   } else if (obj.IsAccessorPair()) {
1093     ExtractAccessorPairReferences(entry, AccessorPair::cast(obj));
1094   } else if (obj.IsCode()) {
1095     ExtractCodeReferences(entry, Code::cast(obj));
1096   } else if (obj.IsCell()) {
1097     ExtractCellReferences(entry, Cell::cast(obj));
1098   } else if (obj.IsFeedbackCell()) {
1099     ExtractFeedbackCellReferences(entry, FeedbackCell::cast(obj));
1100   } else if (obj.IsPropertyCell()) {
1101     ExtractPropertyCellReferences(entry, PropertyCell::cast(obj));
1102   } else if (obj.IsAllocationSite()) {
1103     ExtractAllocationSiteReferences(entry, AllocationSite::cast(obj));
1104   } else if (obj.IsArrayBoilerplateDescription()) {
1105     ExtractArrayBoilerplateDescriptionReferences(
1106         entry, ArrayBoilerplateDescription::cast(obj));
1107   } else if (obj.IsRegExpBoilerplateDescription()) {
1108     ExtractRegExpBoilerplateDescriptionReferences(
1109         entry, RegExpBoilerplateDescription::cast(obj));
1110   } else if (obj.IsFeedbackVector()) {
1111     ExtractFeedbackVectorReferences(entry, FeedbackVector::cast(obj));
1112   } else if (obj.IsDescriptorArray()) {
1113     ExtractDescriptorArrayReferences(entry, DescriptorArray::cast(obj));
1114   } else if (obj.IsWeakFixedArray()) {
1115     ExtractWeakArrayReferences(WeakFixedArray::kHeaderSize, entry,
1116                                WeakFixedArray::cast(obj));
1117   } else if (obj.IsWeakArrayList()) {
1118     ExtractWeakArrayReferences(WeakArrayList::kHeaderSize, entry,
1119                                WeakArrayList::cast(obj));
1120   } else if (obj.IsContext()) {
1121     ExtractContextReferences(entry, Context::cast(obj));
1122   } else if (obj.IsEphemeronHashTable()) {
1123     ExtractEphemeronHashTableReferences(entry, EphemeronHashTable::cast(obj));
1124   } else if (obj.IsFixedArray()) {
1125     ExtractFixedArrayReferences(entry, FixedArray::cast(obj));
1126   } else if (obj.IsWeakCell()) {
1127     ExtractWeakCellReferences(entry, WeakCell::cast(obj));
1128   } else if (obj.IsHeapNumber()) {
1129     if (snapshot_->capture_numeric_value()) {
1130       ExtractNumberReference(entry, obj);
1131     }
1132   } else if (obj.IsBytecodeArray()) {
1133     ExtractBytecodeArrayReferences(entry, BytecodeArray::cast(obj));
1134   } else if (obj.IsScopeInfo()) {
1135     ExtractScopeInfoReferences(entry, ScopeInfo::cast(obj));
1136   }
1137 }
1138 
ExtractJSGlobalProxyReferences(HeapEntry * entry,JSGlobalProxy proxy)1139 void V8HeapExplorer::ExtractJSGlobalProxyReferences(HeapEntry* entry,
1140                                                     JSGlobalProxy proxy) {
1141   SetInternalReference(entry, "native_context", proxy.native_context(),
1142                        JSGlobalProxy::kNativeContextOffset);
1143 }
1144 
ExtractJSObjectReferences(HeapEntry * entry,JSObject js_obj)1145 void V8HeapExplorer::ExtractJSObjectReferences(HeapEntry* entry,
1146                                                JSObject js_obj) {
1147   HeapObject obj = js_obj;
1148   ExtractPropertyReferences(js_obj, entry);
1149   ExtractElementReferences(js_obj, entry);
1150   ExtractInternalReferences(js_obj, entry);
1151   Isolate* isolate = Isolate::FromHeap(heap_);
1152   PrototypeIterator iter(isolate, js_obj);
1153   ReadOnlyRoots roots(isolate);
1154   SetPropertyReference(entry, roots.proto_string(), iter.GetCurrent());
1155   if (obj.IsJSBoundFunction()) {
1156     JSBoundFunction js_fun = JSBoundFunction::cast(obj);
1157     TagObject(js_fun.bound_arguments(), "(bound arguments)");
1158     SetInternalReference(entry, "bindings", js_fun.bound_arguments(),
1159                          JSBoundFunction::kBoundArgumentsOffset);
1160     SetInternalReference(entry, "bound_this", js_fun.bound_this(),
1161                          JSBoundFunction::kBoundThisOffset);
1162     SetInternalReference(entry, "bound_function",
1163                          js_fun.bound_target_function(),
1164                          JSBoundFunction::kBoundTargetFunctionOffset);
1165     FixedArray bindings = js_fun.bound_arguments();
1166     for (int i = 0; i < bindings.length(); i++) {
1167       const char* reference_name = names_->GetFormatted("bound_argument_%d", i);
1168       SetNativeBindReference(entry, reference_name, bindings.get(i));
1169     }
1170   } else if (obj.IsJSFunction()) {
1171     JSFunction js_fun = JSFunction::cast(js_obj);
1172     if (js_fun.has_prototype_slot()) {
1173       Object proto_or_map = js_fun.prototype_or_initial_map(kAcquireLoad);
1174       if (!proto_or_map.IsTheHole(isolate)) {
1175         if (!proto_or_map.IsMap()) {
1176           SetPropertyReference(entry, roots.prototype_string(), proto_or_map,
1177                                nullptr,
1178                                JSFunction::kPrototypeOrInitialMapOffset);
1179         } else {
1180           SetPropertyReference(entry, roots.prototype_string(),
1181                                js_fun.prototype());
1182           SetInternalReference(entry, "initial_map", proto_or_map,
1183                                JSFunction::kPrototypeOrInitialMapOffset);
1184         }
1185       }
1186     }
1187     SharedFunctionInfo shared_info = js_fun.shared();
1188     TagObject(js_fun.raw_feedback_cell(), "(function feedback cell)");
1189     SetInternalReference(entry, "feedback_cell", js_fun.raw_feedback_cell(),
1190                          JSFunction::kFeedbackCellOffset);
1191     TagObject(shared_info, "(shared function info)");
1192     SetInternalReference(entry, "shared", shared_info,
1193                          JSFunction::kSharedFunctionInfoOffset);
1194     TagObject(js_fun.context(), "(context)");
1195     SetInternalReference(entry, "context", js_fun.context(),
1196                          JSFunction::kContextOffset);
1197     SetInternalReference(entry, "code", js_fun.code(), JSFunction::kCodeOffset);
1198   } else if (obj.IsJSGlobalObject()) {
1199     JSGlobalObject global_obj = JSGlobalObject::cast(obj);
1200     SetInternalReference(entry, "native_context", global_obj.native_context(),
1201                          JSGlobalObject::kNativeContextOffset);
1202     SetInternalReference(entry, "global_proxy", global_obj.global_proxy(),
1203                          JSGlobalObject::kGlobalProxyOffset);
1204     STATIC_ASSERT(JSGlobalObject::kHeaderSize - JSObject::kHeaderSize ==
1205                   2 * kTaggedSize);
1206   } else if (obj.IsJSArrayBufferView()) {
1207     JSArrayBufferView view = JSArrayBufferView::cast(obj);
1208     SetInternalReference(entry, "buffer", view.buffer(),
1209                          JSArrayBufferView::kBufferOffset);
1210   }
1211 
1212   TagObject(js_obj.raw_properties_or_hash(), "(object properties)");
1213   SetInternalReference(entry, "properties", js_obj.raw_properties_or_hash(),
1214                        JSObject::kPropertiesOrHashOffset);
1215 
1216   TagObject(js_obj.elements(), "(object elements)");
1217   SetInternalReference(entry, "elements", js_obj.elements(),
1218                        JSObject::kElementsOffset);
1219 }
1220 
1221 void V8HeapExplorer::ExtractStringReferences(HeapEntry* entry, String string) {
1222   if (string.IsConsString()) {
1223     ConsString cs = ConsString::cast(string);
1224     SetInternalReference(entry, "first", cs.first(), ConsString::kFirstOffset);
1225     SetInternalReference(entry, "second", cs.second(),
1226                          ConsString::kSecondOffset);
1227   } else if (string.IsSlicedString()) {
1228     SlicedString ss = SlicedString::cast(string);
1229     SetInternalReference(entry, "parent", ss.parent(),
1230                          SlicedString::kParentOffset);
1231   } else if (string.IsThinString()) {
1232     ThinString ts = ThinString::cast(string);
1233     SetInternalReference(entry, "actual", ts.actual(),
1234                          ThinString::kActualOffset);
1235   }
1236 }
1237 
1238 void V8HeapExplorer::ExtractSymbolReferences(HeapEntry* entry, Symbol symbol) {
1239   SetInternalReference(entry, "name", symbol.description(),
1240                        Symbol::kDescriptionOffset);
1241 }
1242 
1243 void V8HeapExplorer::ExtractJSCollectionReferences(HeapEntry* entry,
1244                                                    JSCollection collection) {
1245   SetInternalReference(entry, "table", collection.table(),
1246                        JSCollection::kTableOffset);
1247 }
1248 
1249 void V8HeapExplorer::ExtractJSWeakCollectionReferences(HeapEntry* entry,
1250                                                        JSWeakCollection obj) {
1251   SetInternalReference(entry, "table", obj.table(),
1252                        JSWeakCollection::kTableOffset);
1253 }
1254 
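// WeakMap (EphemeronHashTable) entries: the table holds each key and value
// weakly, and an additional ephemeron edge from the key to the value makes
// the key -> value retention path visible in the snapshot.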
1255 void V8HeapExplorer::ExtractEphemeronHashTableReferences(
1256     HeapEntry* entry, EphemeronHashTable table) {
1257   for (InternalIndex i : table.IterateEntries()) {
1258     int key_index = EphemeronHashTable::EntryToIndex(i) +
1259                     EphemeronHashTable::kEntryKeyIndex;
1260     int value_index = EphemeronHashTable::EntryToValueIndex(i);
1261     Object key = table.get(key_index);
1262     Object value = table.get(value_index);
1263     SetWeakReference(entry, key_index, key, table.OffsetOfElementAt(key_index));
1264     SetWeakReference(entry, value_index, value,
1265                      table.OffsetOfElementAt(value_index));
1266     HeapEntry* key_entry = GetEntry(key);
1267     HeapEntry* value_entry = GetEntry(value);
1268     HeapEntry* table_entry = GetEntry(table);
1269     if (key_entry && value_entry && !key.IsUndefined()) {
1270       const char* edge_name = names_->GetFormatted(
1271           "part of key (%s @%u) -> value (%s @%u) pair in WeakMap (table @%u)",
1272           key_entry->name(), key_entry->id(), value_entry->name(),
1273           value_entry->id(), table_entry->id());
1274       key_entry->SetNamedAutoIndexReference(HeapGraphEdge::kInternal, edge_name,
1275                                             value_entry, names_, generator_,
1276                                             HeapEntry::kEphemeron);
1277       table_entry->SetNamedAutoIndexReference(
1278           HeapGraphEdge::kInternal, edge_name, value_entry, names_, generator_,
1279           HeapEntry::kEphemeron);
1280     }
1281   }
1282 }
1283 
1284 // This static array is used to prevent excessive code size in
1285 // ExtractContextReferences below, which would happen if we called
1286 // SetInternalReference for every native context field in a macro.
1287 static const struct {
1288   int index;
1289   const char* name;
1290 } native_context_names[] = {
1291 #define CONTEXT_FIELD_INDEX_NAME(index, _, name) {Context::index, #name},
1292     NATIVE_CONTEXT_FIELDS(CONTEXT_FIELD_INDEX_NAME)
1293 #undef CONTEXT_FIELD_INDEX_NAME
1294 };
1295 
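// For declaration contexts, context-allocated locals are reported as
// context-variable edges named after the variable. Native contexts
// additionally report every named native context field and keep their
// optimized/deoptimized code lists as custom weak pointers.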
1296 void V8HeapExplorer::ExtractContextReferences(HeapEntry* entry,
1297                                               Context context) {
1298   DisallowGarbageCollection no_gc;
1299   if (!context.IsNativeContext() && context.is_declaration_context()) {
1300     ScopeInfo scope_info = context.scope_info();
1301     // Add context allocated locals.
1302     for (auto it : ScopeInfo::IterateLocalNames(&scope_info, no_gc)) {
1303       int idx = scope_info.ContextHeaderLength() + it->index();
1304       SetContextReference(entry, it->name(), context.get(idx),
1305                           Context::OffsetOfElementAt(idx));
1306     }
1307     if (scope_info.HasContextAllocatedFunctionName()) {
1308       String name = String::cast(scope_info.FunctionName());
1309       int idx = scope_info.FunctionContextSlotIndex(name);
1310       if (idx >= 0) {
1311         SetContextReference(entry, name, context.get(idx),
1312                             Context::OffsetOfElementAt(idx));
1313       }
1314     }
1315   }
1316 
1317   SetInternalReference(
1318       entry, "scope_info", context.get(Context::SCOPE_INFO_INDEX),
1319       FixedArray::OffsetOfElementAt(Context::SCOPE_INFO_INDEX));
1320   SetInternalReference(entry, "previous", context.get(Context::PREVIOUS_INDEX),
1321                        FixedArray::OffsetOfElementAt(Context::PREVIOUS_INDEX));
1322   if (context.has_extension()) {
1323     SetInternalReference(
1324         entry, "extension", context.get(Context::EXTENSION_INDEX),
1325         FixedArray::OffsetOfElementAt(Context::EXTENSION_INDEX));
1326   }
1327 
1328   if (context.IsNativeContext()) {
1329     TagObject(context.normalized_map_cache(), "(context norm. map cache)");
1330     TagObject(context.embedder_data(), "(context data)");
1331     for (size_t i = 0; i < arraysize(native_context_names); i++) {
1332       int index = native_context_names[i].index;
1333       const char* name = native_context_names[i].name;
1334       SetInternalReference(entry, name, context.get(index),
1335                            FixedArray::OffsetOfElementAt(index));
1336     }
1337 
1338     SetWeakReference(entry, "optimized_code_list",
1339                      context.get(Context::OPTIMIZED_CODE_LIST),
1340                      Context::OffsetOfElementAt(Context::OPTIMIZED_CODE_LIST),
1341                      HeapEntry::kCustomWeakPointer);
1342     SetWeakReference(entry, "deoptimized_code_list",
1343                      context.get(Context::DEOPTIMIZED_CODE_LIST),
1344                      Context::OffsetOfElementAt(Context::DEOPTIMIZED_CODE_LIST),
1345                      HeapEntry::kCustomWeakPointer);
1346     STATIC_ASSERT(Context::OPTIMIZED_CODE_LIST == Context::FIRST_WEAK_SLOT);
1347     STATIC_ASSERT(Context::NEXT_CONTEXT_LINK + 1 ==
1348                   Context::NATIVE_CONTEXT_SLOTS);
1349     STATIC_ASSERT(Context::FIRST_WEAK_SLOT + 3 ==
1350                   Context::NATIVE_CONTEXT_SLOTS);
1351   }
1352 }
1353 
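// Map::raw_transitions() is overloaded: a weak reference means a single
// transition target Map, a strong TransitionArray or FixedArray holds
// multiple transitions, and for prototype maps the slot holds PrototypeInfo.
// Each case is reported under a distinct edge name.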
1354 void V8HeapExplorer::ExtractMapReferences(HeapEntry* entry, Map map) {
1355   MaybeObject maybe_raw_transitions_or_prototype_info = map.raw_transitions();
1356   HeapObject raw_transitions_or_prototype_info;
1357   if (maybe_raw_transitions_or_prototype_info->GetHeapObjectIfWeak(
1358           &raw_transitions_or_prototype_info)) {
1359     DCHECK(raw_transitions_or_prototype_info.IsMap());
1360     SetWeakReference(entry, "transition", raw_transitions_or_prototype_info,
1361                      Map::kTransitionsOrPrototypeInfoOffset);
1362   } else if (maybe_raw_transitions_or_prototype_info->GetHeapObjectIfStrong(
1363                  &raw_transitions_or_prototype_info)) {
1364     if (raw_transitions_or_prototype_info.IsTransitionArray()) {
1365       TransitionArray transitions =
1366           TransitionArray::cast(raw_transitions_or_prototype_info);
1367       if (map.CanTransition() && transitions.HasPrototypeTransitions()) {
1368         TagObject(transitions.GetPrototypeTransitions(),
1369                   "(prototype transitions)");
1370       }
1371       TagObject(transitions, "(transition array)");
1372       SetInternalReference(entry, "transitions", transitions,
1373                            Map::kTransitionsOrPrototypeInfoOffset);
1374     } else if (raw_transitions_or_prototype_info.IsFixedArray()) {
1375       TagObject(raw_transitions_or_prototype_info, "(transition)");
1376       SetInternalReference(entry, "transition",
1377                            raw_transitions_or_prototype_info,
1378                            Map::kTransitionsOrPrototypeInfoOffset);
1379     } else if (map.is_prototype_map()) {
1380       TagObject(raw_transitions_or_prototype_info, "prototype_info");
1381       SetInternalReference(entry, "prototype_info",
1382                            raw_transitions_or_prototype_info,
1383                            Map::kTransitionsOrPrototypeInfoOffset);
1384     }
1385   }
1386   DescriptorArray descriptors = map.instance_descriptors();
1387   TagObject(descriptors, "(map descriptors)");
1388   SetInternalReference(entry, "descriptors", descriptors,
1389                        Map::kInstanceDescriptorsOffset);
1390   SetInternalReference(entry, "prototype", map.prototype(),
1391                        Map::kPrototypeOffset);
1392   if (map.IsContextMap()) {
1393     Object native_context = map.native_context();
1394     TagObject(native_context, "(native context)");
1395     SetInternalReference(entry, "native_context", native_context,
1396                          Map::kConstructorOrBackPointerOrNativeContextOffset);
1397   } else {
1398     Object constructor_or_back_pointer = map.constructor_or_back_pointer();
1399     if (constructor_or_back_pointer.IsMap()) {
1400       TagObject(constructor_or_back_pointer, "(back pointer)");
1401       SetInternalReference(entry, "back_pointer", constructor_or_back_pointer,
1402                            Map::kConstructorOrBackPointerOrNativeContextOffset);
1403     } else if (constructor_or_back_pointer.IsFunctionTemplateInfo()) {
1404       TagObject(constructor_or_back_pointer, "(constructor function data)");
1405       SetInternalReference(entry, "constructor_function_data",
1406                            constructor_or_back_pointer,
1407                            Map::kConstructorOrBackPointerOrNativeContextOffset);
1408     } else {
1409       SetInternalReference(entry, "constructor", constructor_or_back_pointer,
1410                            Map::kConstructorOrBackPointerOrNativeContextOffset);
1411     }
1412   }
1413   TagObject(map.dependent_code(), "(dependent code)");
1414   SetInternalReference(entry, "dependent_code", map.dependent_code(),
1415                        Map::kDependentCodeOffset);
1416 }
1417 
1418 void V8HeapExplorer::ExtractSharedFunctionInfoReferences(
1419     HeapEntry* entry, SharedFunctionInfo shared) {
1420   std::unique_ptr<char[]> name = shared.DebugNameCStr();
1421   if (name[0] != '\0') {
1422     TagObject(FromCodeT(shared.GetCode()),
1423               names_->GetFormatted("(code for %s)", name.get()));
1424   } else {
1425     TagObject(FromCodeT(shared.GetCode()),
1426               names_->GetFormatted("(%s code)",
1427                                    CodeKindToString(shared.GetCode().kind())));
1428   }
1429 
1430   Object name_or_scope_info = shared.name_or_scope_info(kAcquireLoad);
1431   if (name_or_scope_info.IsScopeInfo()) {
1432     TagObject(name_or_scope_info, "(function scope info)");
1433   }
1434   SetInternalReference(entry, "name_or_scope_info", name_or_scope_info,
1435                        SharedFunctionInfo::kNameOrScopeInfoOffset);
1436   SetInternalReference(entry, "script_or_debug_info",
1437                        shared.script_or_debug_info(kAcquireLoad),
1438                        SharedFunctionInfo::kScriptOrDebugInfoOffset);
1439   SetInternalReference(entry, "function_data",
1440                        shared.function_data(kAcquireLoad),
1441                        SharedFunctionInfo::kFunctionDataOffset);
1442   SetInternalReference(
1443       entry, "raw_outer_scope_info_or_feedback_metadata",
1444       shared.raw_outer_scope_info_or_feedback_metadata(),
1445       SharedFunctionInfo::kOuterScopeInfoOrFeedbackMetadataOffset);
1446 }
1447 
1448 void V8HeapExplorer::ExtractScriptReferences(HeapEntry* entry, Script script) {
1449   SetInternalReference(entry, "source", script.source(), Script::kSourceOffset);
1450   SetInternalReference(entry, "name", script.name(), Script::kNameOffset);
1451   SetInternalReference(entry, "context_data", script.context_data(),
1452                        Script::kContextDataOffset);
1453   TagObject(script.line_ends(), "(script line ends)", HeapEntry::kCode);
1454   SetInternalReference(entry, "line_ends", script.line_ends(),
1455                        Script::kLineEndsOffset);
1456   TagObject(script.shared_function_infos(), "(shared function infos)",
1457             HeapEntry::kCode);
1458   TagObject(script.host_defined_options(), "(host-defined options)",
1459             HeapEntry::kCode);
1460 }
1461 
1462 void V8HeapExplorer::ExtractAccessorInfoReferences(HeapEntry* entry,
1463                                                    AccessorInfo accessor_info) {
1464   SetInternalReference(entry, "name", accessor_info.name(),
1465                        AccessorInfo::kNameOffset);
1466   SetInternalReference(entry, "expected_receiver_type",
1467                        accessor_info.expected_receiver_type(),
1468                        AccessorInfo::kExpectedReceiverTypeOffset);
1469   SetInternalReference(entry, "getter", accessor_info.getter(),
1470                        AccessorInfo::kGetterOffset);
1471   SetInternalReference(entry, "setter", accessor_info.setter(),
1472                        AccessorInfo::kSetterOffset);
1473   SetInternalReference(entry, "data", accessor_info.data(),
1474                        AccessorInfo::kDataOffset);
1475 }
1476 
1477 void V8HeapExplorer::ExtractAccessorPairReferences(HeapEntry* entry,
1478                                                    AccessorPair accessors) {
1479   SetInternalReference(entry, "getter", accessors.getter(),
1480                        AccessorPair::kGetterOffset);
1481   SetInternalReference(entry, "setter", accessors.setter(),
1482                        AccessorPair::kSetterOffset);
1483 }
1484 
1485 void V8HeapExplorer::ExtractJSWeakRefReferences(HeapEntry* entry,
1486                                                 JSWeakRef js_weak_ref) {
1487   SetWeakReference(entry, "target", js_weak_ref.target(),
1488                    JSWeakRef::kTargetOffset);
1489 }
1490 
1491 void V8HeapExplorer::ExtractWeakCellReferences(HeapEntry* entry,
1492                                                WeakCell weak_cell) {
1493   SetWeakReference(entry, "target", weak_cell.target(),
1494                    WeakCell::kTargetOffset);
1495   SetWeakReference(entry, "unregister_token", weak_cell.unregister_token(),
1496                    WeakCell::kUnregisterTokenOffset);
1497 }
1498 
1499 void V8HeapExplorer::TagBuiltinCodeObject(CodeT code, const char* name) {
1500   TagObject(FromCodeT(code), names_->GetFormatted("(%s builtin)", name));
1501 }
1502 
1503 void V8HeapExplorer::ExtractCodeReferences(HeapEntry* entry, Code code) {
1504   TagObject(code.relocation_info(), "(code relocation info)", HeapEntry::kCode);
1505   SetInternalReference(entry, "relocation_info", code.relocation_info(),
1506                        Code::kRelocationInfoOffset);
1507 
1508   if (code.kind() == CodeKind::BASELINE) {
1509     TagObject(code.bytecode_or_interpreter_data(), "(interpreter data)");
1510     SetInternalReference(entry, "interpreter_data",
1511                          code.bytecode_or_interpreter_data(),
1512                          Code::kDeoptimizationDataOrInterpreterDataOffset);
1513     TagObject(code.bytecode_offset_table(), "(bytecode offset table)",
1514               HeapEntry::kCode);
1515     SetInternalReference(entry, "bytecode_offset_table",
1516                          code.bytecode_offset_table(),
1517                          Code::kPositionTableOffset);
1518   } else {
1519     DeoptimizationData deoptimization_data =
1520         DeoptimizationData::cast(code.deoptimization_data());
1521     TagObject(deoptimization_data, "(code deopt data)", HeapEntry::kCode);
1522     SetInternalReference(entry, "deoptimization_data", deoptimization_data,
1523                          Code::kDeoptimizationDataOrInterpreterDataOffset);
1524     if (deoptimization_data.length() > 0) {
1525       TagObject(deoptimization_data.TranslationByteArray(), "(code deopt data)",
1526                 HeapEntry::kCode);
1527       TagObject(deoptimization_data.LiteralArray(), "(code deopt data)",
1528                 HeapEntry::kCode);
1529       TagObject(deoptimization_data.InliningPositions(), "(code deopt data)",
1530                 HeapEntry::kCode);
1531     }
1532     TagObject(code.source_position_table(), "(source position table)",
1533               HeapEntry::kCode);
1534     SetInternalReference(entry, "source_position_table",
1535                          code.source_position_table(),
1536                          Code::kPositionTableOffset);
1537   }
1538 }
1539 
1540 void V8HeapExplorer::ExtractCellReferences(HeapEntry* entry, Cell cell) {
1541   SetInternalReference(entry, "value", cell.value(), Cell::kValueOffset);
1542 }
1543 
1544 void V8HeapExplorer::ExtractFeedbackCellReferences(HeapEntry* entry,
1545                                                    FeedbackCell feedback_cell) {
1546   TagObject(feedback_cell, "(feedback cell)");
1547   SetInternalReference(entry, "value", feedback_cell.value(),
1548                        FeedbackCell::kValueOffset);
1549 }
1550 
1551 void V8HeapExplorer::ExtractPropertyCellReferences(HeapEntry* entry,
1552                                                    PropertyCell cell) {
1553   SetInternalReference(entry, "value", cell.value(),
1554                        PropertyCell::kValueOffset);
1555   TagObject(cell.dependent_code(), "(dependent code)");
1556   SetInternalReference(entry, "dependent_code", cell.dependent_code(),
1557                        PropertyCell::kDependentCodeOffset);
1558 }
1559 
1560 void V8HeapExplorer::ExtractAllocationSiteReferences(HeapEntry* entry,
1561                                                      AllocationSite site) {
1562   SetInternalReference(entry, "transition_info",
1563                        site.transition_info_or_boilerplate(),
1564                        AllocationSite::kTransitionInfoOrBoilerplateOffset);
1565   SetInternalReference(entry, "nested_site", site.nested_site(),
1566                        AllocationSite::kNestedSiteOffset);
1567   TagObject(site.dependent_code(), "(dependent code)", HeapEntry::kCode);
1568   SetInternalReference(entry, "dependent_code", site.dependent_code(),
1569                        AllocationSite::kDependentCodeOffset);
1570 }
1571 
1572 void V8HeapExplorer::ExtractArrayBoilerplateDescriptionReferences(
1573     HeapEntry* entry, ArrayBoilerplateDescription value) {
1574   FixedArrayBase constant_elements = value.constant_elements();
1575   SetInternalReference(entry, "constant_elements", constant_elements,
1576                        ArrayBoilerplateDescription::kConstantElementsOffset);
1577   TagObject(constant_elements, "(constant elements)", HeapEntry::kCode);
1578 }
1579 
1580 void V8HeapExplorer::ExtractRegExpBoilerplateDescriptionReferences(
1581     HeapEntry* entry, RegExpBoilerplateDescription value) {
1582   TagObject(value.data(), "(RegExp data)", HeapEntry::kCode);
1583 }
1584 
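// Allocates synthetic "system / JSArrayBufferData" entries so that the
// off-heap backing store of a JSArrayBuffer shows up in the snapshot with
// its byte length attributed to it.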
1585 class JSArrayBufferDataEntryAllocator : public HeapEntriesAllocator {
1586  public:
1587   JSArrayBufferDataEntryAllocator(size_t size, V8HeapExplorer* explorer)
1588       : size_(size)
1589       , explorer_(explorer) {
1590   }
1591   HeapEntry* AllocateEntry(HeapThing ptr) override {
1592     return explorer_->AddEntry(reinterpret_cast<Address>(ptr),
1593                                HeapEntry::kNative, "system / JSArrayBufferData",
1594                                size_);
1595   }
1596   HeapEntry* AllocateEntry(Smi smi) override {
1597     DCHECK(false);
1598     return nullptr;
1599   }
1600 
1601  private:
1602   size_t size_;
1603   V8HeapExplorer* explorer_;
1604 };
1605 
1606 void V8HeapExplorer::ExtractJSArrayBufferReferences(HeapEntry* entry,
1607                                                     JSArrayBuffer buffer) {
1608   // Set up a reference to a native memory backing_store object.
1609   if (!buffer.backing_store()) return;
1610   size_t data_size = buffer.byte_length();
1611   JSArrayBufferDataEntryAllocator allocator(data_size, this);
1612   HeapEntry* data_entry =
1613       generator_->FindOrAddEntry(buffer.backing_store(), &allocator);
1614   entry->SetNamedReference(HeapGraphEdge::kInternal, "backing_store",
1615                            data_entry, generator_, HeapEntry::kOffHeapPointer);
1616 }
1617 
1618 void V8HeapExplorer::ExtractJSPromiseReferences(HeapEntry* entry,
1619                                                 JSPromise promise) {
1620   SetInternalReference(entry, "reactions_or_result",
1621                        promise.reactions_or_result(),
1622                        JSPromise::kReactionsOrResultOffset);
1623 }
1624 
1625 void V8HeapExplorer::ExtractJSGeneratorObjectReferences(
1626     HeapEntry* entry, JSGeneratorObject generator) {
1627   SetInternalReference(entry, "function", generator.function(),
1628                        JSGeneratorObject::kFunctionOffset);
1629   SetInternalReference(entry, "context", generator.context(),
1630                        JSGeneratorObject::kContextOffset);
1631   SetInternalReference(entry, "receiver", generator.receiver(),
1632                        JSGeneratorObject::kReceiverOffset);
1633   SetInternalReference(entry, "parameters_and_registers",
1634                        generator.parameters_and_registers(),
1635                        JSGeneratorObject::kParametersAndRegistersOffset);
1636 }
1637 
1638 void V8HeapExplorer::ExtractFixedArrayReferences(HeapEntry* entry,
1639                                                  FixedArray array) {
1640   for (int i = 0, l = array.length(); i < l; ++i) {
1641     DCHECK(!HasWeakHeapObjectTag(array.get(i)));
1642     SetInternalReference(entry, i, array.get(i), array.OffsetOfElementAt(i));
1643   }
1644 }
1645 
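// When numeric values are captured, a number is represented by a synthetic
// string entry holding its printed value, attached via an internal "value"
// edge.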
1646 void V8HeapExplorer::ExtractNumberReference(HeapEntry* entry, Object number) {
1647   DCHECK(number.IsNumber());
1648 
1649   // Must be large enough to fit any double, int, or size_t.
1650   char arr[32];
1651   base::Vector<char> buffer(arr, arraysize(arr));
1652 
1653   const char* string;
1654   if (number.IsSmi()) {
1655     int int_value = Smi::ToInt(number);
1656     string = IntToCString(int_value, buffer);
1657   } else {
1658     double double_value = HeapNumber::cast(number).value();
1659     string = DoubleToCString(double_value, buffer);
1660   }
1661 
1662   const char* name = names_->GetCopy(string);
1663 
1664   SnapshotObjectId id = heap_object_map_->get_next_id();
1665   HeapEntry* child_entry =
1666       snapshot_->AddEntry(HeapEntry::kString, name, id, 0, 0);
1667   entry->SetNamedReference(HeapGraphEdge::kInternal, "value", child_entry,
1668                            generator_);
1669 }
1670 
1671 void V8HeapExplorer::ExtractBytecodeArrayReferences(HeapEntry* entry,
1672                                                     BytecodeArray bytecode) {
1673   RecursivelyTagConstantPool(bytecode.constant_pool(), "(constant pool)",
1674                              HeapEntry::kCode, 3);
1675   TagObject(bytecode.handler_table(), "(handler table)", HeapEntry::kCode);
1676   TagObject(bytecode.source_position_table(kAcquireLoad),
1677             "(source position table)", HeapEntry::kCode);
1678 }
1679 
1680 void V8HeapExplorer::ExtractScopeInfoReferences(HeapEntry* entry,
1681                                                 ScopeInfo info) {
1682   if (!info.HasInlinedLocalNames()) {
1683     TagObject(info.context_local_names_hashtable(), "(context local names)",
1684               HeapEntry::kCode);
1685   }
1686 }
1687 
1688 void V8HeapExplorer::ExtractFeedbackVectorReferences(
1689     HeapEntry* entry, FeedbackVector feedback_vector) {
1690   MaybeObject code = feedback_vector.maybe_optimized_code();
1691   HeapObject code_heap_object;
1692   if (code->GetHeapObjectIfWeak(&code_heap_object)) {
1693     SetWeakReference(entry, "optimized code", code_heap_object,
1694                      FeedbackVector::kMaybeOptimizedCodeOffset);
1695   }
1696   for (int i = 0; i < feedback_vector.length(); ++i) {
1697     MaybeObject maybe_entry = *(feedback_vector.slots_start() + i);
1698     HeapObject entry;
1699     if (maybe_entry.GetHeapObjectIfStrong(&entry) &&
1700         (entry.map(isolate()).instance_type() == WEAK_FIXED_ARRAY_TYPE ||
1701          entry.IsFixedArrayExact())) {
1702       TagObject(entry, "(feedback)", HeapEntry::kCode);
1703     }
1704   }
1705 }
1706 
1707 void V8HeapExplorer::ExtractDescriptorArrayReferences(HeapEntry* entry,
1708                                                       DescriptorArray array) {
1709   SetInternalReference(entry, "enum_cache", array.enum_cache(),
1710                        DescriptorArray::kEnumCacheOffset);
1711   MaybeObjectSlot start = MaybeObjectSlot(array.GetDescriptorSlot(0));
1712   MaybeObjectSlot end = MaybeObjectSlot(
1713       array.GetDescriptorSlot(array.number_of_all_descriptors()));
1714   for (int i = 0; start + i < end; ++i) {
1715     MaybeObjectSlot slot = start + i;
1716     int offset = static_cast<int>(slot.address() - array.address());
1717     MaybeObject object = *slot;
1718     HeapObject heap_object;
1719     if (object->GetHeapObjectIfWeak(&heap_object)) {
1720       SetWeakReference(entry, i, heap_object, offset);
1721     } else if (object->GetHeapObjectIfStrong(&heap_object)) {
1722       SetInternalReference(entry, i, heap_object, offset);
1723     }
1724   }
1725 }
1726 
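// Reports each slot of a weak array-like object, using a weak edge for
// weakly held targets and an internal edge for strongly held ones.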
1727 template <typename T>
1728 void V8HeapExplorer::ExtractWeakArrayReferences(int header_size,
1729                                                 HeapEntry* entry, T array) {
1730   for (int i = 0; i < array.length(); ++i) {
1731     MaybeObject object = array.Get(i);
1732     HeapObject heap_object;
1733     if (object->GetHeapObjectIfWeak(&heap_object)) {
1734       SetWeakReference(entry, i, heap_object, header_size + i * kTaggedSize);
1735     } else if (object->GetHeapObjectIfStrong(&heap_object)) {
1736       SetInternalReference(entry, i, heap_object,
1737                            header_size + i * kTaggedSize);
1738     }
1739   }
1740 }
1741 
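// Own properties are extracted from whichever representation the object
// uses: fast descriptors (optionally skipping Smi/double fields when
// numeric capture is off), the global dictionary of JSGlobalObjects, or a
// Swiss/Name property dictionary.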
1742 void V8HeapExplorer::ExtractPropertyReferences(JSObject js_obj,
1743                                                HeapEntry* entry) {
1744   Isolate* isolate = js_obj.GetIsolate();
1745   if (js_obj.HasFastProperties()) {
1746     DescriptorArray descs = js_obj.map().instance_descriptors(isolate);
1747     for (InternalIndex i : js_obj.map().IterateOwnDescriptors()) {
1748       PropertyDetails details = descs.GetDetails(i);
1749       switch (details.location()) {
1750         case PropertyLocation::kField: {
1751           if (!snapshot_->capture_numeric_value()) {
1752             Representation r = details.representation();
1753             if (r.IsSmi() || r.IsDouble()) break;
1754           }
1755 
1756           Name k = descs.GetKey(i);
1757           FieldIndex field_index = FieldIndex::ForDescriptor(js_obj.map(), i);
1758           Object value = js_obj.RawFastPropertyAt(field_index);
1759           int field_offset =
1760               field_index.is_inobject() ? field_index.offset() : -1;
1761 
1762           SetDataOrAccessorPropertyReference(details.kind(), entry, k, value,
1763                                              nullptr, field_offset);
1764           break;
1765         }
1766         case PropertyLocation::kDescriptor:
1767           SetDataOrAccessorPropertyReference(
1768               details.kind(), entry, descs.GetKey(i), descs.GetStrongValue(i));
1769           break;
1770       }
1771     }
1772   } else if (js_obj.IsJSGlobalObject()) {
1773     // We assume that global objects can only have slow properties.
1774     GlobalDictionary dictionary =
1775         JSGlobalObject::cast(js_obj).global_dictionary(kAcquireLoad);
1776     ReadOnlyRoots roots(isolate);
1777     for (InternalIndex i : dictionary.IterateEntries()) {
1778       if (!dictionary.IsKey(roots, dictionary.KeyAt(i))) continue;
1779       PropertyCell cell = dictionary.CellAt(i);
1780       Name name = cell.name();
1781       Object value = cell.value();
1782       PropertyDetails details = cell.property_details();
1783       SetDataOrAccessorPropertyReference(details.kind(), entry, name, value);
1784     }
1785   } else if (V8_ENABLE_SWISS_NAME_DICTIONARY_BOOL) {
1786     // SwissNameDictionary::IterateEntries creates a Handle, which should not
1787     // leak out of here.
1788     HandleScope scope(isolate);
1789 
1790     SwissNameDictionary dictionary = js_obj.property_dictionary_swiss();
1791     ReadOnlyRoots roots(isolate);
1792     for (InternalIndex i : dictionary.IterateEntries()) {
1793       Object k = dictionary.KeyAt(i);
1794       if (!dictionary.IsKey(roots, k)) continue;
1795       Object value = dictionary.ValueAt(i);
1796       PropertyDetails details = dictionary.DetailsAt(i);
1797       SetDataOrAccessorPropertyReference(details.kind(), entry, Name::cast(k),
1798                                          value);
1799     }
1800   } else {
1801     NameDictionary dictionary = js_obj.property_dictionary();
1802     ReadOnlyRoots roots(isolate);
1803     for (InternalIndex i : dictionary.IterateEntries()) {
1804       Object k = dictionary.KeyAt(i);
1805       if (!dictionary.IsKey(roots, k)) continue;
1806       Object value = dictionary.ValueAt(i);
1807       PropertyDetails details = dictionary.DetailsAt(i);
1808       SetDataOrAccessorPropertyReference(details.kind(), entry, Name::cast(k),
1809                                          value);
1810     }
1811   }
1812 }
1813 
1814 void V8HeapExplorer::ExtractAccessorPairProperty(HeapEntry* entry, Name key,
1815                                                  Object callback_obj,
1816                                                  int field_offset) {
1817   if (!callback_obj.IsAccessorPair()) return;
1818   AccessorPair accessors = AccessorPair::cast(callback_obj);
1819   SetPropertyReference(entry, key, accessors, nullptr, field_offset);
1820   Object getter = accessors.getter();
1821   if (!getter.IsOddball()) {
1822     SetPropertyReference(entry, key, getter, "get %s");
1823   }
1824   Object setter = accessors.setter();
1825   if (!setter.IsOddball()) {
1826     SetPropertyReference(entry, key, setter, "set %s");
1827   }
1828 }
1829 
1830 void V8HeapExplorer::ExtractElementReferences(JSObject js_obj,
1831                                               HeapEntry* entry) {
1832   ReadOnlyRoots roots = js_obj.GetReadOnlyRoots();
1833   if (js_obj.HasObjectElements()) {
1834     FixedArray elements = FixedArray::cast(js_obj.elements());
1835     int length = js_obj.IsJSArray() ? Smi::ToInt(JSArray::cast(js_obj).length())
1836                                     : elements.length();
1837     for (int i = 0; i < length; ++i) {
1838       if (!elements.get(i).IsTheHole(roots)) {
1839         SetElementReference(entry, i, elements.get(i));
1840       }
1841     }
1842   } else if (js_obj.HasDictionaryElements()) {
1843     NumberDictionary dictionary = js_obj.element_dictionary();
1844     for (InternalIndex i : dictionary.IterateEntries()) {
1845       Object k = dictionary.KeyAt(i);
1846       if (!dictionary.IsKey(roots, k)) continue;
1847       DCHECK(k.IsNumber());
1848       uint32_t index = static_cast<uint32_t>(k.Number());
1849       SetElementReference(entry, index, dictionary.ValueAt(i));
1850     }
1851   }
1852 }
1853 
1854 void V8HeapExplorer::ExtractInternalReferences(JSObject js_obj,
1855                                                HeapEntry* entry) {
1856   int length = js_obj.GetEmbedderFieldCount();
1857   for (int i = 0; i < length; ++i) {
1858     Object o = js_obj.GetEmbedderField(i);
1859     SetInternalReference(entry, i, o, js_obj.GetEmbedderFieldOffset(i));
1860   }
1861 }
1862 
1863 JSFunction V8HeapExplorer::GetConstructor(Isolate* isolate,
1864                                           JSReceiver receiver) {
1865   DisallowGarbageCollection no_gc;
1866   HandleScope scope(isolate);
1867   MaybeHandle<JSFunction> maybe_constructor =
1868       JSReceiver::GetConstructor(isolate, handle(receiver, isolate));
1869 
1870   if (maybe_constructor.is_null()) return JSFunction();
1871 
1872   return *maybe_constructor.ToHandleChecked();
1873 }
1874 
1875 String V8HeapExplorer::GetConstructorName(Isolate* isolate, JSObject object) {
1876   if (object.IsJSFunction()) return ReadOnlyRoots(isolate).closure_string();
1877   DisallowGarbageCollection no_gc;
1878   HandleScope scope(isolate);
1879   return *JSReceiver::GetConstructorName(isolate, handle(object, isolate));
1880 }
1881 
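// Returns the snapshot entry for an object, creating it on first use.
// Smis only get entries when numeric value capture is enabled; otherwise
// nullptr is returned and callers drop the reference.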
1882 HeapEntry* V8HeapExplorer::GetEntry(Object obj) {
1883   if (obj.IsHeapObject()) {
1884     return generator_->FindOrAddEntry(reinterpret_cast<void*>(obj.ptr()), this);
1885   }
1886 
1887   DCHECK(obj.IsSmi());
1888   if (!snapshot_->capture_numeric_value()) {
1889     return nullptr;
1890   }
1891   return generator_->FindOrAddEntry(Smi::cast(obj), this);
1892 }
1893 
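// Visits VM roots and turns each one into a gc-subroot edge. Builtin roots
// are also tagged with their builtin name. After SetVisitingWeakRoots() is
// called, subsequent roots are recorded as weak edges.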
1894 class RootsReferencesExtractor : public RootVisitor {
1895  public:
1896   explicit RootsReferencesExtractor(V8HeapExplorer* explorer)
1897       : explorer_(explorer), visiting_weak_roots_(false) {}
1898 
1899   void SetVisitingWeakRoots() { visiting_weak_roots_ = true; }
1900 
1901   void VisitRootPointer(Root root, const char* description,
1902                         FullObjectSlot object) override {
1903     if (root == Root::kBuiltins) {
1904       explorer_->TagBuiltinCodeObject(CodeT::cast(*object), description);
1905     }
1906     explorer_->SetGcSubrootReference(root, description, visiting_weak_roots_,
1907                                      *object);
1908   }
1909 
1910   void VisitRootPointers(Root root, const char* description,
1911                          FullObjectSlot start, FullObjectSlot end) override {
1912     for (FullObjectSlot p = start; p < end; ++p) {
1913       DCHECK(!MapWord::IsPacked(p.Relaxed_Load().ptr()));
1914       VisitRootPointer(root, description, p);
1915     }
1916   }
1917 
1918   void VisitRootPointers(Root root, const char* description,
1919                          OffHeapObjectSlot start,
1920                          OffHeapObjectSlot end) override {
1921     DCHECK_EQ(root, Root::kStringTable);
1922     PtrComprCageBase cage_base(explorer_->heap_->isolate());
1923     for (OffHeapObjectSlot p = start; p < end; ++p) {
1924       explorer_->SetGcSubrootReference(root, description, visiting_weak_roots_,
1925                                        p.load(cage_base));
1926     }
1927   }
1928 
1929   void VisitRunningCode(FullObjectSlot p) override {
1930     // Must match behavior in
1931     // MarkCompactCollector::RootMarkingVisitor::VisitRunningCode, which treats
1932     // deoptimization literals in running code as stack roots.
1933     Code code = Code::cast(*p);
1934     if (code.kind() != CodeKind::BASELINE) {
1935       DeoptimizationData deopt_data =
1936           DeoptimizationData::cast(code.deoptimization_data());
1937       if (deopt_data.length() > 0) {
1938         DeoptimizationLiteralArray literals = deopt_data.LiteralArray();
1939         int literals_length = literals.length();
1940         for (int i = 0; i < literals_length; ++i) {
1941           MaybeObject maybe_literal = literals.Get(i);
1942           HeapObject heap_literal;
1943           if (maybe_literal.GetHeapObject(&heap_literal)) {
1944             VisitRootPointer(Root::kStackRoots, nullptr,
1945                              FullObjectSlot(&heap_literal));
1946           }
1947         }
1948       }
1949     }
1950 
1951     // Finally visit the Code itself.
1952     VisitRootPointer(Root::kStackRoots, nullptr, p);
1953   }
1954 
1955  private:
1956   V8HeapExplorer* explorer_;
1957   bool visiting_weak_roots_;
1958 };
1959 
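// Main extraction pass: wires up the synthetic root entries, visits all VM
// roots (read-only, strong, weak, and weak global handles), and then walks
// every reachable heap object, emitting its typed references followed by
// hidden references for any fields not covered above.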
1960 bool V8HeapExplorer::IterateAndExtractReferences(
1961     HeapSnapshotGenerator* generator) {
1962   generator_ = generator;
1963 
1964   // Create references to the synthetic roots.
1965   SetRootGcRootsReference();
1966   for (int root = 0; root < static_cast<int>(Root::kNumberOfRoots); root++) {
1967     SetGcRootsReference(static_cast<Root>(root));
1968   }
1969 
1970   // Make sure builtin code objects get their builtin tags
1971   // first. Otherwise a particular JSFunction object could set
1972   // its custom name to a generic builtin.
1973   RootsReferencesExtractor extractor(this);
1974   ReadOnlyRoots(heap_).Iterate(&extractor);
1975   heap_->IterateRoots(&extractor, base::EnumSet<SkipRoot>{SkipRoot::kWeak});
1976   // TODO(v8:11800): The heap snapshot generator incorrectly considers the weak
1977   // string tables as strong retainers. Move IterateWeakRoots after
1978   // SetVisitingWeakRoots.
1979   heap_->IterateWeakRoots(&extractor, {});
1980   extractor.SetVisitingWeakRoots();
1981   heap_->IterateWeakGlobalHandles(&extractor);
1982 
1983   bool interrupted = false;
1984 
1985   CombinedHeapObjectIterator iterator(heap_,
1986                                       HeapObjectIterator::kFilterUnreachable);
1987   PtrComprCageBase cage_base(heap_->isolate());
1988   // Heap iteration with filtering must be finished in any case.
1989   for (HeapObject obj = iterator.Next(); !obj.is_null();
1990        obj = iterator.Next(), progress_->ProgressStep()) {
1991     if (interrupted) continue;
1992 
1993     size_t max_pointer = obj.Size(cage_base) / kTaggedSize;
1994     if (max_pointer > visited_fields_.size()) {
1995       // Clear the current bits.
1996       std::vector<bool>().swap(visited_fields_);
1997       // Reallocate to right size.
1998       visited_fields_.resize(max_pointer, false);
1999     }
2000 
2001 #ifdef V8_ENABLE_HEAP_SNAPSHOT_VERIFY
2002     std::unique_ptr<HeapEntryVerifier> verifier;
2003     // MarkingVisitorBase doesn't expect that we will ever visit read-only
2004     // objects, and fails DCHECKs if we attempt to. Read-only objects can
2005     // never retain read-write objects, so there is no risk in skipping
2006     // verification for them.
2007     if (FLAG_heap_snapshot_verify &&
2008         !BasicMemoryChunk::FromHeapObject(obj)->InReadOnlySpace()) {
2009       verifier = std::make_unique<HeapEntryVerifier>(generator, obj);
2010     }
2011 #endif
2012 
2013     HeapEntry* entry = GetEntry(obj);
2014     ExtractReferences(entry, obj);
2015     SetInternalReference(entry, "map", obj.map(cage_base),
2016                          HeapObject::kMapOffset);
2017     // Extract unvisited fields as hidden references and restore tags
2018     // of visited fields.
2019     IndexedReferencesExtractor refs_extractor(this, obj, entry);
2020     obj.Iterate(cage_base, &refs_extractor);
2021 
2022     // Ensure visited_fields_ doesn't leak to the next object.
2023     for (size_t i = 0; i < max_pointer; ++i) {
2024       DCHECK(!visited_fields_[i]);
2025     }
2026 
2027     // Extract the location for specific object types.
2028     ExtractLocation(entry, obj);
2029 
2030     if (!progress_->ProgressReport(false)) interrupted = true;
2031   }
2032 
2033   generator_ = nullptr;
2034   return interrupted ? false : progress_->ProgressReport(true);
2035 }
2036 
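// Filters out objects that would only add noise to the snapshot: oddballs
// and a handful of well-known empty arrays and maps that are shared by
// almost every object.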
2037 bool V8HeapExplorer::IsEssentialObject(Object object) {
2038   Isolate* isolate = heap_->isolate();
2039   ReadOnlyRoots roots(isolate);
2040   return object.IsHeapObject() && !object.IsOddball(isolate) &&
2041          object != roots.empty_byte_array() &&
2042          object != roots.empty_fixed_array() &&
2043          object != roots.empty_weak_fixed_array() &&
2044          object != roots.empty_descriptor_array() &&
2045          object != roots.fixed_array_map() && object != roots.cell_map() &&
2046          object != roots.global_property_cell_map() &&
2047          object != roots.shared_function_info_map() &&
2048          object != roots.free_space_map() &&
2049          object != roots.one_pointer_filler_map() &&
2050          object != roots.two_pointer_filler_map();
2051 }
2052 
2053 bool V8HeapExplorer::IsEssentialHiddenReference(Object parent,
2054                                                 int field_offset) {
2055   if (parent.IsAllocationSite() &&
2056       field_offset == AllocationSite::kWeakNextOffset)
2057     return false;
2058   if (parent.IsCodeDataContainer() &&
2059       field_offset == CodeDataContainer::kNextCodeLinkOffset)
2060     return false;
2061   if (parent.IsContext() &&
2062       field_offset == Context::OffsetOfElementAt(Context::NEXT_CONTEXT_LINK))
2063     return false;
2064   if (parent.IsJSFinalizationRegistry() &&
2065       field_offset == JSFinalizationRegistry::kNextDirtyOffset)
2066     return false;
2067   return true;
2068 }
2069 
2070 void V8HeapExplorer::SetContextReference(HeapEntry* parent_entry,
2071                                          String reference_name,
2072                                          Object child_obj, int field_offset) {
2073   HeapEntry* child_entry = GetEntry(child_obj);
2074   if (child_entry == nullptr) return;
2075   parent_entry->SetNamedReference(HeapGraphEdge::kContextVariable,
2076                                   names_->GetName(reference_name), child_entry,
2077                                   generator_);
2078   MarkVisitedField(field_offset);
2079 }
2080 
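// Remembers which tagged fields of the current object have already been
// reported as named edges, so that the later indexed-references pass emits
// hidden edges only for fields that were not already covered.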
2081 void V8HeapExplorer::MarkVisitedField(int offset) {
2082   if (offset < 0) return;
2083   int index = offset / kTaggedSize;
2084   DCHECK(!visited_fields_[index]);
2085   visited_fields_[index] = true;
2086 }
2087 
2088 void V8HeapExplorer::SetNativeBindReference(HeapEntry* parent_entry,
2089                                             const char* reference_name,
2090                                             Object child_obj) {
2091   HeapEntry* child_entry = GetEntry(child_obj);
2092   if (child_entry == nullptr) return;
2093   parent_entry->SetNamedReference(HeapGraphEdge::kShortcut, reference_name,
2094                                   child_entry, generator_);
2095 }
2096 
2097 void V8HeapExplorer::SetElementReference(HeapEntry* parent_entry, int index,
2098                                          Object child_obj) {
2099   HeapEntry* child_entry = GetEntry(child_obj);
2100   if (child_entry == nullptr) return;
2101   parent_entry->SetIndexedReference(HeapGraphEdge::kElement, index, child_entry,
2102                                     generator_);
2103 }
2104 
2105 void V8HeapExplorer::SetInternalReference(HeapEntry* parent_entry,
2106                                           const char* reference_name,
2107                                           Object child_obj, int field_offset) {
2108   if (!IsEssentialObject(child_obj)) {
2109     return;
2110   }
2111   HeapEntry* child_entry = GetEntry(child_obj);
2112   DCHECK_NOT_NULL(child_entry);
2113   parent_entry->SetNamedReference(HeapGraphEdge::kInternal, reference_name,
2114                                   child_entry, generator_);
2115   MarkVisitedField(field_offset);
2116 }
2117 
2118 void V8HeapExplorer::SetInternalReference(HeapEntry* parent_entry, int index,
2119                                           Object child_obj, int field_offset) {
2120   if (!IsEssentialObject(child_obj)) {
2121     return;
2122   }
2123   HeapEntry* child_entry = GetEntry(child_obj);
2124   DCHECK_NOT_NULL(child_entry);
2125   parent_entry->SetNamedReference(HeapGraphEdge::kInternal,
2126                                   names_->GetName(index), child_entry,
2127                                   generator_);
2128   MarkVisitedField(field_offset);
2129 }
2130 
2131 void V8HeapExplorer::SetHiddenReference(HeapObject parent_obj,
2132                                         HeapEntry* parent_entry, int index,
2133                                         Object child_obj, int field_offset) {
2134   DCHECK_EQ(parent_entry, GetEntry(parent_obj));
2135   DCHECK(!MapWord::IsPacked(child_obj.ptr()));
2136   if (!IsEssentialObject(child_obj)) {
2137     return;
2138   }
2139   HeapEntry* child_entry = GetEntry(child_obj);
2140   DCHECK_NOT_NULL(child_entry);
2141   if (IsEssentialHiddenReference(parent_obj, field_offset)) {
2142     parent_entry->SetIndexedReference(HeapGraphEdge::kHidden, index,
2143                                       child_entry, generator_);
2144   }
2145 }
2146 
2147 void V8HeapExplorer::SetWeakReference(
2148     HeapEntry* parent_entry, const char* reference_name, Object child_obj,
2149     int field_offset, HeapEntry::ReferenceVerification verification) {
2150   if (!IsEssentialObject(child_obj)) {
2151     return;
2152   }
2153   HeapEntry* child_entry = GetEntry(child_obj);
2154   DCHECK_NOT_NULL(child_entry);
2155   parent_entry->SetNamedReference(HeapGraphEdge::kWeak, reference_name,
2156                                   child_entry, generator_, verification);
2157   MarkVisitedField(field_offset);
2158 }
2159 
2160 void V8HeapExplorer::SetWeakReference(HeapEntry* parent_entry, int index,
2161                                       Object child_obj,
2162                                       base::Optional<int> field_offset) {
2163   if (!IsEssentialObject(child_obj)) {
2164     return;
2165   }
2166   HeapEntry* child_entry = GetEntry(child_obj);
2167   DCHECK_NOT_NULL(child_entry);
2168   parent_entry->SetNamedReference(HeapGraphEdge::kWeak,
2169                                   names_->GetFormatted("%d", index),
2170                                   child_entry, generator_);
2171   if (field_offset.has_value()) {
2172     MarkVisitedField(*field_offset);
2173   }
2174 }
2175 
2176 void V8HeapExplorer::SetDataOrAccessorPropertyReference(
2177     PropertyKind kind, HeapEntry* parent_entry, Name reference_name,
2178     Object child_obj, const char* name_format_string, int field_offset) {
2179   if (kind == PropertyKind::kAccessor) {
2180     ExtractAccessorPairProperty(parent_entry, reference_name, child_obj,
2181                                 field_offset);
2182   } else {
2183     SetPropertyReference(parent_entry, reference_name, child_obj,
2184                          name_format_string, field_offset);
2185   }
2186 }
2187 
2188 void V8HeapExplorer::SetPropertyReference(HeapEntry* parent_entry,
2189                                           Name reference_name, Object child_obj,
2190                                           const char* name_format_string,
2191                                           int field_offset) {
2192   HeapEntry* child_entry = GetEntry(child_obj);
2193   if (child_entry == nullptr) return;
2194   HeapGraphEdge::Type type =
2195       reference_name.IsSymbol() || String::cast(reference_name).length() > 0
2196           ? HeapGraphEdge::kProperty
2197           : HeapGraphEdge::kInternal;
2198   const char* name =
2199       name_format_string != nullptr && reference_name.IsString()
2200           ? names_->GetFormatted(
2201                 name_format_string,
2202                 String::cast(reference_name)
2203                     .ToCString(DISALLOW_NULLS, ROBUST_STRING_TRAVERSAL)
2204                     .get())
2205           : names_->GetName(reference_name);
2206 
2207   parent_entry->SetNamedReference(type, name, child_entry, generator_);
2208   MarkVisitedField(field_offset);
2209 }
2210 
2211 void V8HeapExplorer::SetRootGcRootsReference() {
2212   snapshot_->root()->SetIndexedAutoIndexReference(
2213       HeapGraphEdge::kElement, snapshot_->gc_roots(), generator_);
2214 }
2215 
2216 void V8HeapExplorer::SetUserGlobalReference(Object child_obj) {
2217   HeapEntry* child_entry = GetEntry(child_obj);
2218   DCHECK_NOT_NULL(child_entry);
2219   snapshot_->root()->SetNamedAutoIndexReference(
2220       HeapGraphEdge::kShortcut, nullptr, child_entry, names_, generator_);
2221 }
2222 
2223 void V8HeapExplorer::SetGcRootsReference(Root root) {
2224   snapshot_->gc_roots()->SetIndexedAutoIndexReference(
2225       HeapGraphEdge::kElement, snapshot_->gc_subroot(root), generator_);
2226 }
2227 
2228 void V8HeapExplorer::SetGcSubrootReference(Root root, const char* description,
2229                                            bool is_weak, Object child_obj) {
2230   if (child_obj.IsSmi()) {
2231     // TODO(arenevier): if we handle smis here, the snapshot gets 2 to 3 times
2232     // slower on large heaps. According to perf, the bulk of the extra work
2233     // happens in the TemplateHashMapImpl::Probe method when trying to get
2234     // names->GetFormatted("%d / %s", index, description)
2235     return;
2236   }
2237   HeapEntry* child_entry = GetEntry(child_obj);
2238   if (child_entry == nullptr) return;
2239   const char* name = GetStrongGcSubrootName(child_obj);
2240   HeapGraphEdge::Type edge_type =
2241       is_weak ? HeapGraphEdge::kWeak : HeapGraphEdge::kInternal;
2242   if (name != nullptr) {
2243     snapshot_->gc_subroot(root)->SetNamedReference(edge_type, name, child_entry,
2244                                                    generator_);
2245   } else {
2246     snapshot_->gc_subroot(root)->SetNamedAutoIndexReference(
2247         edge_type, description, child_entry, names_, generator_);
2248   }
2249 
2250   // For full heap snapshots we do not emit user roots but rather rely on
2251   // regular GC roots to retain objects.
2252   if (!snapshot_->treat_global_objects_as_roots()) return;
2253 
2254   // Add a shortcut to the JS global object reference at the snapshot root.
2255   // That allows the user to easily find global objects. They are
2256   // also used as starting points in distance calculations.
2257   if (is_weak || !child_obj.IsNativeContext()) return;
2258 
2259   JSGlobalObject global = Context::cast(child_obj).global_object();
2260   if (!global.IsJSGlobalObject()) return;
2261 
2262   if (!user_roots_.insert(global).second) return;
2263 
2264   SetUserGlobalReference(global);
2265 }
2266 
2267 const char* V8HeapExplorer::GetStrongGcSubrootName(Object object) {
2268   if (strong_gc_subroot_names_.empty()) {
2269     Isolate* isolate = Isolate::FromHeap(heap_);
2270     for (RootIndex root_index = RootIndex::kFirstStrongOrReadOnlyRoot;
2271          root_index <= RootIndex::kLastStrongOrReadOnlyRoot; ++root_index) {
2272       const char* name = RootsTable::name(root_index);
2273       strong_gc_subroot_names_.emplace(isolate->root(root_index), name);
2274     }
2275     CHECK(!strong_gc_subroot_names_.empty());
2276   }
2277   auto it = strong_gc_subroot_names_.find(object);
2278   return it != strong_gc_subroot_names_.end() ? it->second : nullptr;
2279 }
2280 
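// Names the heap entry for `obj` with `tag` if it is still unnamed, and
// optionally overrides the entry type. Non-essential objects are ignored.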
2281 void V8HeapExplorer::TagObject(Object obj, const char* tag,
2282                                base::Optional<HeapEntry::Type> type) {
2283   if (IsEssentialObject(obj)) {
2284     HeapEntry* entry = GetEntry(obj);
2285     if (entry->name()[0] == '\0') {
2286       entry->set_name(tag);
2287     }
2288     if (type.has_value()) {
2289       entry->set_type(*type);
2290     }
2291   }
2292 }
2293 
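// Tags a constant pool entry and, up to `recursion_limit` levels deep, any
// fixed arrays nested inside it; name and number dictionaries are tagged
// without further recursion.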
2294 void V8HeapExplorer::RecursivelyTagConstantPool(Object obj, const char* tag,
2295                                                 HeapEntry::Type type,
2296                                                 int recursion_limit) {
2297   --recursion_limit;
2298   if (obj.IsFixedArrayExact(isolate())) {
2299     FixedArray arr = FixedArray::cast(obj);
2300     TagObject(arr, tag, type);
2301     if (recursion_limit <= 0) return;
2302     for (int i = 0; i < arr.length(); ++i) {
2303       RecursivelyTagConstantPool(arr.get(i), tag, type, recursion_limit);
2304     }
2305   } else if (obj.IsNameDictionary(isolate()) ||
2306              obj.IsNumberDictionary(isolate())) {
2307     TagObject(obj, tag, type);
2308   }
2309 }
2310 
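// Root visitor that collects the JSGlobalObject behind every native context
// encountered among the visited roots.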
2311 class GlobalObjectsEnumerator : public RootVisitor {
2312  public:
2313   explicit GlobalObjectsEnumerator(Isolate* isolate) : isolate_(isolate) {}
2314 
2315   void VisitRootPointers(Root root, const char* description,
2316                          FullObjectSlot start, FullObjectSlot end) override {
2317     VisitRootPointersImpl(root, description, start, end);
2318   }
2319 
2320   void VisitRootPointers(Root root, const char* description,
2321                          OffHeapObjectSlot start,
2322                          OffHeapObjectSlot end) override {
2323     VisitRootPointersImpl(root, description, start, end);
2324   }
2325 
2326   int count() const { return static_cast<int>(objects_.size()); }
2327   Handle<JSGlobalObject>& at(int i) { return objects_[i]; }
2328 
2329  private:
2330   template <typename TSlot>
2331   void VisitRootPointersImpl(Root root, const char* description, TSlot start,
2332                              TSlot end) {
2333     for (TSlot p = start; p < end; ++p) {
2334       DCHECK(!MapWord::IsPacked(p.Relaxed_Load(isolate_).ptr()));
2335       Object o = p.load(isolate_);
2336       if (!o.IsNativeContext(isolate_)) continue;
2337       JSObject proxy = Context::cast(o).global_proxy();
2338       if (!proxy.IsJSGlobalProxy(isolate_)) continue;
2339       Object global = proxy.map(isolate_).prototype(isolate_);
2340       if (!global.IsJSGlobalObject(isolate_)) continue;
2341       objects_.push_back(handle(JSGlobalObject::cast(global), isolate_));
2342     }
2343   }
2344 
2345   Isolate* isolate_;
2346   std::vector<Handle<JSGlobalObject>> objects_;
2347 };
2348 
2349 
2350 // Modifies heap. Must not be run during heap traversal.
2351 void V8HeapExplorer::CollectGlobalObjectsTags() {
2352   if (!global_object_name_resolver_) return;
2353 
2354   Isolate* isolate = Isolate::FromHeap(heap_);
2355   GlobalObjectsEnumerator enumerator(isolate);
2356   isolate->global_handles()->IterateAllRoots(&enumerator);
2357   for (int i = 0, l = enumerator.count(); i < l; ++i) {
2358     Handle<JSGlobalObject> obj = enumerator.at(i);
2359     const char* tag = global_object_name_resolver_->GetName(
2360         Utils::ToLocal(Handle<JSObject>::cast(obj)));
2361     if (tag) {
2362       global_object_tag_pairs_.emplace_back(obj, tag);
2363     }
2364   }
2365 }
2366 
2367 void V8HeapExplorer::MakeGlobalObjectTagMap(
2368     const SafepointScope& safepoint_scope) {
2369   for (const auto& pair : global_object_tag_pairs_) {
2370     global_object_tag_map_.emplace(*pair.first, pair.second);
2371   }
2372 }
2373 
2374 class EmbedderGraphImpl : public EmbedderGraph {
2375  public:
2376   struct Edge {
2377     Node* from;
2378     Node* to;
2379     const char* name;
2380   };
2381 
2382   class V8NodeImpl : public Node {
2383    public:
2384     explicit V8NodeImpl(Object object) : object_(object) {}
2385     Object GetObject() { return object_; }
2386 
2387     // Node overrides.
2388     bool IsEmbedderNode() override { return false; }
2389     const char* Name() override {
2390       // The name should be retrieved via GetObject().
2391       UNREACHABLE();
2392     }
2393     size_t SizeInBytes() override {
2394       // The size should be retrieved via GetObject().
2395       UNREACHABLE();
2396     }
2397 
2398    private:
2399     Object object_;
2400   };
2401 
2402   Node* V8Node(const v8::Local<v8::Value>& value) final {
2403     Handle<Object> object = v8::Utils::OpenHandle(*value);
2404     DCHECK(!object.is_null());
2405     return AddNode(std::unique_ptr<Node>(new V8NodeImpl(*object)));
2406   }
2407 
2408   Node* AddNode(std::unique_ptr<Node> node) final {
2409     Node* result = node.get();
2410     nodes_.push_back(std::move(node));
2411     return result;
2412   }
2413 
2414   void AddEdge(Node* from, Node* to, const char* name) final {
2415     edges_.push_back({from, to, name});
2416   }
2417 
2418   const std::vector<std::unique_ptr<Node>>& nodes() { return nodes_; }
2419   const std::vector<Edge>& edges() { return edges_; }
2420 
2421  private:
2422   std::vector<std::unique_ptr<Node>> nodes_;
2423   std::vector<Edge> edges_;
2424 };
2425 
2426 class EmbedderGraphEntriesAllocator : public HeapEntriesAllocator {
2427  public:
2428   explicit EmbedderGraphEntriesAllocator(HeapSnapshot* snapshot)
2429       : snapshot_(snapshot),
2430         names_(snapshot_->profiler()->names()),
2431         heap_object_map_(snapshot_->profiler()->heap_object_map()) {}
2432   HeapEntry* AllocateEntry(HeapThing ptr) override;
2433   HeapEntry* AllocateEntry(Smi smi) override;
2434 
2435  private:
2436   HeapSnapshot* snapshot_;
2437   StringsStorage* names_;
2438   HeapObjectsMap* heap_object_map_;
2439 };
2440 
2441 namespace {
2442 
2443 const char* EmbedderGraphNodeName(StringsStorage* names,
2444                                   EmbedderGraphImpl::Node* node) {
2445   const char* prefix = node->NamePrefix();
2446   return prefix ? names->GetFormatted("%s %s", prefix, node->Name())
2447                 : names->GetCopy(node->Name());
2448 }
2449 
2450 HeapEntry::Type EmbedderGraphNodeType(EmbedderGraphImpl::Node* node) {
2451   return node->IsRootNode() ? HeapEntry::kSynthetic : HeapEntry::kNative;
2452 }
2453 
2454 // Merges the names of an embedder node and its wrapper node.
2455 // If the wrapper node name contains a tag suffix (part after '/') then the
2456 // result is the embedder node name concatenated with the tag suffix.
2457 // Otherwise, the result is the embedder node name.
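// For example (illustrative names only): MergeNames(names, "EmbedderNode",
// "JSObject / Detached") yields "EmbedderNode / Detached", whereas
// MergeNames(names, "EmbedderNode", "JSObject") yields "EmbedderNode".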
2458 const char* MergeNames(StringsStorage* names, const char* embedder_name,
2459                        const char* wrapper_name) {
2460   const char* suffix = strchr(wrapper_name, '/');
2461   return suffix ? names->GetFormatted("%s %s", embedder_name, suffix)
2462                 : embedder_name;
2463 }
2464 
2465 }  // anonymous namespace
2466 
2467 HeapEntry* EmbedderGraphEntriesAllocator::AllocateEntry(HeapThing ptr) {
2468   EmbedderGraphImpl::Node* node =
2469       reinterpret_cast<EmbedderGraphImpl::Node*>(ptr);
2470   DCHECK(node->IsEmbedderNode());
2471   size_t size = node->SizeInBytes();
2472   Address lookup_address = reinterpret_cast<Address>(node->GetNativeObject());
2473   SnapshotObjectId id =
2474       (lookup_address) ? heap_object_map_->FindOrAddEntry(lookup_address, 0)
2475                        : static_cast<SnapshotObjectId>(
2476                              reinterpret_cast<uintptr_t>(node) << 1);
2477   auto* heap_entry = snapshot_->AddEntry(EmbedderGraphNodeType(node),
2478                                          EmbedderGraphNodeName(names_, node),
2479                                          id, static_cast<int>(size), 0);
2480   heap_entry->set_detachedness(node->GetDetachedness());
2481   return heap_entry;
2482 }
2483 
2484 HeapEntry* EmbedderGraphEntriesAllocator::AllocateEntry(Smi smi) {
2485   DCHECK(false);
2486   return nullptr;
2487 }
2488 
2489 NativeObjectsExplorer::NativeObjectsExplorer(
2490     HeapSnapshot* snapshot, SnapshottingProgressReportingInterface* progress)
2491     : isolate_(
2492           Isolate::FromHeap(snapshot->profiler()->heap_object_map()->heap())),
2493       snapshot_(snapshot),
2494       names_(snapshot_->profiler()->names()),
2495       heap_object_map_(snapshot_->profiler()->heap_object_map()),
2496       embedder_graph_entries_allocator_(
2497           new EmbedderGraphEntriesAllocator(snapshot)) {}
2498 
2499 void NativeObjectsExplorer::MergeNodeIntoEntry(
2500     HeapEntry* entry, EmbedderGraph::Node* original_node,
2501     EmbedderGraph::Node* wrapper_node) {
2502   // The wrapper node may be an embedder node (for testing purposes) or a V8
2503   // node (production code).
2504   if (!wrapper_node->IsEmbedderNode()) {
2505     // Only for V8 nodes can we add a lookup.
2506     EmbedderGraphImpl::V8NodeImpl* v8_node =
2507         static_cast<EmbedderGraphImpl::V8NodeImpl*>(wrapper_node);
2508     Object object = v8_node->GetObject();
2509     DCHECK(!object.IsSmi());
2510     if (original_node->GetNativeObject()) {
2511       HeapObject heap_object = HeapObject::cast(object);
2512       heap_object_map_->AddMergedNativeEntry(original_node->GetNativeObject(),
2513                                              heap_object.address());
2514       DCHECK_EQ(entry->id(), heap_object_map_->FindMergedNativeEntry(
2515                                  original_node->GetNativeObject()));
2516     }
2517   }
2518   entry->set_detachedness(original_node->GetDetachedness());
2519   entry->set_name(MergeNames(
2520       names_, EmbedderGraphNodeName(names_, original_node), entry->name()));
2521   entry->set_type(EmbedderGraphNodeType(original_node));
2522   DCHECK_GE(entry->self_size() + original_node->SizeInBytes(),
2523             entry->self_size());
2524   entry->add_self_size(original_node->SizeInBytes());
2525 }
2526 
2527 HeapEntry* NativeObjectsExplorer::EntryForEmbedderGraphNode(
2528     EmbedderGraphImpl::Node* node) {
2529   // Return the entry for the wrapper node if present.
2530   if (node->WrapperNode()) {
2531     node = node->WrapperNode();
2532   }
2533   // Node is EmbedderNode.
2534   if (node->IsEmbedderNode()) {
2535     return generator_->FindOrAddEntry(node,
2536                                       embedder_graph_entries_allocator_.get());
2537   }
2538   // Node is V8NodeImpl.
2539   Object object =
2540       static_cast<EmbedderGraphImpl::V8NodeImpl*>(node)->GetObject();
2541   if (object.IsSmi()) return nullptr;
2542   auto* entry = generator_->FindEntry(
2543       reinterpret_cast<void*>(Object::cast(object).ptr()));
2544   return entry;
2545 }
2546 
2547 bool NativeObjectsExplorer::IterateAndExtractReferences(
2548     HeapSnapshotGenerator* generator) {
2549   generator_ = generator;
2550 
2551   if (FLAG_heap_profiler_use_embedder_graph &&
2552       snapshot_->profiler()->HasBuildEmbedderGraphCallback()) {
2553     v8::HandleScope scope(reinterpret_cast<v8::Isolate*>(isolate_));
2554     DisallowGarbageCollection no_gc;
2555     EmbedderGraphImpl graph;
2556     snapshot_->profiler()->BuildEmbedderGraph(isolate_, &graph);
2557     for (const auto& node : graph.nodes()) {
2558       // Only add embedder nodes as V8 nodes have been added already by the
2559       // V8HeapExplorer.
2560       if (!node->IsEmbedderNode()) continue;
2561 
2562       if (auto* entry = EntryForEmbedderGraphNode(node.get())) {
2563         if (node->IsRootNode()) {
2564           snapshot_->root()->SetIndexedAutoIndexReference(
2565               HeapGraphEdge::kElement, entry, generator_,
2566               HeapEntry::kOffHeapPointer);
2567         }
2568         if (node->WrapperNode()) {
2569           MergeNodeIntoEntry(entry, node.get(), node->WrapperNode());
2570         }
2571       }
2572     }
2573     // Fill edges of the graph.
2574     for (const auto& edge : graph.edges()) {
2575       // |from| and |to| can be nullptr if the corresponding node is a V8 node
2576       // pointing to a Smi.
2577       HeapEntry* from = EntryForEmbedderGraphNode(edge.from);
2578       if (!from) continue;
2579       HeapEntry* to = EntryForEmbedderGraphNode(edge.to);
2580       if (!to) continue;
2581       if (edge.name == nullptr) {
2582         from->SetIndexedAutoIndexReference(HeapGraphEdge::kElement, to,
2583                                            generator_,
2584                                            HeapEntry::kOffHeapPointer);
2585       } else {
2586         from->SetNamedReference(HeapGraphEdge::kInternal,
2587                                 names_->GetCopy(edge.name), to, generator_,
2588                                 HeapEntry::kOffHeapPointer);
2589       }
2590     }
2591   }
2592   generator_ = nullptr;
2593   return true;
2594 }
2595 
2596 HeapSnapshotGenerator::HeapSnapshotGenerator(
2597     HeapSnapshot* snapshot, v8::ActivityControl* control,
2598     v8::HeapProfiler::ObjectNameResolver* resolver, Heap* heap)
2599     : snapshot_(snapshot),
2600       control_(control),
2601       v8_heap_explorer_(snapshot_, this, resolver),
2602       dom_explorer_(snapshot_, this),
2603       heap_(heap) {}
2604 
2605 namespace {
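// RAII scope that clears the isolate's current context and restores the
// previous context on destruction.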
2606 class V8_NODISCARD NullContextForSnapshotScope {
2607  public:
2608   explicit NullContextForSnapshotScope(Isolate* isolate)
2609       : isolate_(isolate), prev_(isolate->context()) {
2610     isolate_->set_context(Context());
2611   }
2612   ~NullContextForSnapshotScope() { isolate_->set_context(prev_); }
2613 
2614  private:
2615   Isolate* isolate_;
2616   Context prev_;
2617 };
2618 }  // namespace
2619 
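// Drives snapshot generation: collects global object tags, forces a full GC,
// enters a safepoint, adds the synthetic root entries, fills references via
// the V8 and embedder explorers, and finally wires up children lists and
// reports completion.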
2620 bool HeapSnapshotGenerator::GenerateSnapshot() {
2621   Isolate* isolate = Isolate::FromHeap(heap_);
2622   base::Optional<HandleScope> handle_scope(base::in_place, isolate);
2623   v8_heap_explorer_.CollectGlobalObjectsTags();
2624 
2625   heap_->CollectAllAvailableGarbage(GarbageCollectionReason::kHeapProfiler);
2626 
2627   NullContextForSnapshotScope null_context_scope(isolate);
2628   SafepointScope scope(heap_);
2629   v8_heap_explorer_.MakeGlobalObjectTagMap(scope);
2630   handle_scope.reset();
2631 
2632 #ifdef VERIFY_HEAP
2633   Heap* debug_heap = heap_;
2634   if (FLAG_verify_heap) {
2635     debug_heap->Verify();
2636   }
2637 #endif
2638 
2639   InitProgressCounter();
2640 
2641 #ifdef VERIFY_HEAP
2642   if (FLAG_verify_heap) {
2643     debug_heap->Verify();
2644   }
2645 #endif
2646 
2647   snapshot_->AddSyntheticRootEntries();
2648 
2649   if (!FillReferences()) return false;
2650 
2651   snapshot_->FillChildren();
2652   snapshot_->RememberLastJSObjectId();
2653 
2654   progress_counter_ = progress_total_;
2655   if (!ProgressReport(true)) return false;
2656   return true;
2657 }
2658 
2659 void HeapSnapshotGenerator::ProgressStep() {
2660   // Only increment progress_counter_ while it is still smaller than
2661   // progress_total_ - 1.
2662   // This ensures that intermediate ProgressReport calls will never signal
2663   // that the work is finished (i.e. progress_counter_ == progress_total_).
2664   // Only the forced ProgressReport() at the end of GenerateSnapshot() should,
2665   // after setting progress_counter_ = progress_total_, signal that the
2666   // work is finished, because signalling finished twice
2667   // breaks the DevTools frontend.
2668   if (control_ != nullptr && progress_total_ > progress_counter_ + 1) {
2669     ++progress_counter_;
2670   }
2671 }
2672 
2673 bool HeapSnapshotGenerator::ProgressReport(bool force) {
2674   const int kProgressReportGranularity = 10000;
2675   if (control_ != nullptr &&
2676       (force || progress_counter_ % kProgressReportGranularity == 0)) {
2677     return control_->ReportProgressValue(progress_counter_, progress_total_) ==
2678            v8::ActivityControl::kContinue;
2679   }
2680   return true;
2681 }
2682 
2683 void HeapSnapshotGenerator::InitProgressCounter() {
2684   if (control_ == nullptr) return;
2685   progress_total_ = v8_heap_explorer_.EstimateObjectsCount();
2686   progress_counter_ = 0;
2687 }
2688 
2689 bool HeapSnapshotGenerator::FillReferences() {
2690   return v8_heap_explorer_.IterateAndExtractReferences(this) &&
2691          dom_explorer_.IterateAndExtractReferences(this);
2692 }
2693 
2694 // type, name|index, to_node.
2695 const int HeapSnapshotJSONSerializer::kEdgeFieldsCount = 3;
2696 // type, name, id, self_size, edge_count, trace_node_id, detachedness.
2697 const int HeapSnapshotJSONSerializer::kNodeFieldsCount = 7;
2698 
2699 void HeapSnapshotJSONSerializer::Serialize(v8::OutputStream* stream) {
2700   if (AllocationTracker* allocation_tracker =
2701       snapshot_->profiler()->allocation_tracker()) {
2702     allocation_tracker->PrepareForSerialization();
2703   }
2704   DCHECK_NULL(writer_);
2705   writer_ = new OutputStreamWriter(stream);
2706   SerializeImpl();
2707   delete writer_;
2708   writer_ = nullptr;
2709 }
2710 
2711 
2712 void HeapSnapshotJSONSerializer::SerializeImpl() {
2713   DCHECK_EQ(0, snapshot_->root()->index());
2714   writer_->AddCharacter('{');
2715   writer_->AddString("\"snapshot\":{");
2716   SerializeSnapshot();
2717   if (writer_->aborted()) return;
2718   writer_->AddString("},\n");
2719   writer_->AddString("\"nodes\":[");
2720   SerializeNodes();
2721   if (writer_->aborted()) return;
2722   writer_->AddString("],\n");
2723   writer_->AddString("\"edges\":[");
2724   SerializeEdges();
2725   if (writer_->aborted()) return;
2726   writer_->AddString("],\n");
2727 
2728   writer_->AddString("\"trace_function_infos\":[");
2729   SerializeTraceNodeInfos();
2730   if (writer_->aborted()) return;
2731   writer_->AddString("],\n");
2732   writer_->AddString("\"trace_tree\":[");
2733   SerializeTraceTree();
2734   if (writer_->aborted()) return;
2735   writer_->AddString("],\n");
2736 
2737   writer_->AddString("\"samples\":[");
2738   SerializeSamples();
2739   if (writer_->aborted()) return;
2740   writer_->AddString("],\n");
2741 
2742   writer_->AddString("\"locations\":[");
2743   SerializeLocations();
2744   if (writer_->aborted()) return;
2745   writer_->AddString("],\n");
2746 
2747   writer_->AddString("\"strings\":[");
2748   SerializeStrings();
2749   if (writer_->aborted()) return;
2750   writer_->AddCharacter(']');
2751   writer_->AddCharacter('}');
2752   writer_->Finalize();
2753 }
2754 
2755 
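// Returns the string table id for `s`, interning it on first use. Ids are
// assigned sequentially; slot 0 of the serialized "strings" array holds a
// dummy entry (see SerializeStrings).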
2756 int HeapSnapshotJSONSerializer::GetStringId(const char* s) {
2757   base::HashMap::Entry* cache_entry =
2758       strings_.LookupOrInsert(const_cast<char*>(s), StringHash(s));
2759   if (cache_entry->value == nullptr) {
2760     cache_entry->value = reinterpret_cast<void*>(next_string_id_++);
2761   }
2762   return static_cast<int>(reinterpret_cast<intptr_t>(cache_entry->value));
2763 }
2764 
2765 
2766 namespace {
2767 
2768 template<size_t size> struct ToUnsigned;
2769 
2770 template <>
2771 struct ToUnsigned<1> {
2772   using Type = uint8_t;
2773 };
2774 
2775 template<> struct ToUnsigned<4> {
2776   using Type = uint32_t;
2777 };
2778 
2779 template<> struct ToUnsigned<8> {
2780   using Type = uint64_t;
2781 };
2782 
2783 }  // namespace
2784 
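// Writes the decimal representation of `value` into `buffer` starting at
// `buffer_pos` and returns the position just past the last digit, e.g.
// utoa_impl(42u, buffer, 0) writes "42" and returns 2. No terminator is added.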
2785 template <typename T>
2786 static int utoa_impl(T value, const base::Vector<char>& buffer,
2787                      int buffer_pos) {
2788   STATIC_ASSERT(static_cast<T>(-1) > 0);  // Check that T is unsigned
2789   int number_of_digits = 0;
2790   T t = value;
2791   do {
2792     ++number_of_digits;
2793   } while (t /= 10);
2794 
2795   buffer_pos += number_of_digits;
2796   int result = buffer_pos;
2797   do {
2798     int last_digit = static_cast<int>(value % 10);
2799     buffer[--buffer_pos] = '0' + last_digit;
2800     value /= 10;
2801   } while (value);
2802   return result;
2803 }
2804 
2805 template <typename T>
2806 static int utoa(T value, const base::Vector<char>& buffer, int buffer_pos) {
2807   typename ToUnsigned<sizeof(value)>::Type unsigned_value = value;
2808   STATIC_ASSERT(sizeof(value) == sizeof(unsigned_value));
2809   return utoa_impl(unsigned_value, buffer, buffer_pos);
2810 }
2811 
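// Emits one edge as "type,name_or_index,to_node" (preceded by ',' for all but
// the first edge). For element and hidden edges the middle field is the
// numeric index; otherwise it is a string table id.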
2812 void HeapSnapshotJSONSerializer::SerializeEdge(HeapGraphEdge* edge,
2813                                                bool first_edge) {
2814   // The buffer needs space for 3 unsigned ints, 3 commas, \n and \0
2815   static const int kBufferSize =
2816       MaxDecimalDigitsIn<sizeof(unsigned)>::kUnsigned * 3 + 3 + 2;
2817   base::EmbeddedVector<char, kBufferSize> buffer;
2818   int edge_name_or_index = edge->type() == HeapGraphEdge::kElement
2819       || edge->type() == HeapGraphEdge::kHidden
2820       ? edge->index() : GetStringId(edge->name());
2821   int buffer_pos = 0;
2822   if (!first_edge) {
2823     buffer[buffer_pos++] = ',';
2824   }
2825   buffer_pos = utoa(edge->type(), buffer, buffer_pos);
2826   buffer[buffer_pos++] = ',';
2827   buffer_pos = utoa(edge_name_or_index, buffer, buffer_pos);
2828   buffer[buffer_pos++] = ',';
2829   buffer_pos = utoa(to_node_index(edge->to()), buffer, buffer_pos);
2830   buffer[buffer_pos++] = '\n';
2831   buffer[buffer_pos++] = '\0';
2832   writer_->AddString(buffer.begin());
2833 }
2834 
2835 void HeapSnapshotJSONSerializer::SerializeEdges() {
2836   std::vector<HeapGraphEdge*>& edges = snapshot_->children();
2837   for (size_t i = 0; i < edges.size(); ++i) {
2838     DCHECK(i == 0 ||
2839            edges[i - 1]->from()->index() <= edges[i]->from()->index());
2840     SerializeEdge(edges[i], i == 0);
2841     if (writer_->aborted()) return;
2842   }
2843 }
2844 
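// Emits one node as "type,name,id,self_size,edge_count,trace_node_id,
// detachedness", matching kNodeFieldsCount and the "node_fields" meta
// description written by SerializeSnapshot().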
2845 void HeapSnapshotJSONSerializer::SerializeNode(const HeapEntry* entry) {
2846   // The buffer needs space for 5 unsigned ints, 1 size_t, 1 uint8_t, 7 commas,
2847   // \n and \0
2848   static const int kBufferSize =
2849       5 * MaxDecimalDigitsIn<sizeof(unsigned)>::kUnsigned +
2850       MaxDecimalDigitsIn<sizeof(size_t)>::kUnsigned +
2851       MaxDecimalDigitsIn<sizeof(uint8_t)>::kUnsigned + 7 + 1 + 1;
2852   base::EmbeddedVector<char, kBufferSize> buffer;
2853   int buffer_pos = 0;
2854   if (to_node_index(entry) != 0) {
2855     buffer[buffer_pos++] = ',';
2856   }
2857   buffer_pos = utoa(entry->type(), buffer, buffer_pos);
2858   buffer[buffer_pos++] = ',';
2859   buffer_pos = utoa(GetStringId(entry->name()), buffer, buffer_pos);
2860   buffer[buffer_pos++] = ',';
2861   buffer_pos = utoa(entry->id(), buffer, buffer_pos);
2862   buffer[buffer_pos++] = ',';
2863   buffer_pos = utoa(entry->self_size(), buffer, buffer_pos);
2864   buffer[buffer_pos++] = ',';
2865   buffer_pos = utoa(entry->children_count(), buffer, buffer_pos);
2866   buffer[buffer_pos++] = ',';
2867   buffer_pos = utoa(entry->trace_node_id(), buffer, buffer_pos);
2868   buffer[buffer_pos++] = ',';
2869   buffer_pos = utoa(entry->detachedness(), buffer, buffer_pos);
2870   buffer[buffer_pos++] = '\n';
2871   buffer[buffer_pos++] = '\0';
2872   writer_->AddString(buffer.begin());
2873 }
2874 
2875 void HeapSnapshotJSONSerializer::SerializeNodes() {
2876   const std::deque<HeapEntry>& entries = snapshot_->entries();
2877   for (const HeapEntry& entry : entries) {
2878     SerializeNode(&entry);
2879     if (writer_->aborted()) return;
2880   }
2881 }
2882 
2883 void HeapSnapshotJSONSerializer::SerializeSnapshot() {
2884   writer_->AddString("\"meta\":");
2885   // The object describing node serialization layout.
2886   // We use a set of macros to improve readability.
2887 
2888 // clang-format off
2889 #define JSON_A(s) "[" s "]"
2890 #define JSON_O(s) "{" s "}"
2891 #define JSON_S(s) "\"" s "\""
2892   writer_->AddString(JSON_O(
2893     JSON_S("node_fields") ":" JSON_A(
2894         JSON_S("type") ","
2895         JSON_S("name") ","
2896         JSON_S("id") ","
2897         JSON_S("self_size") ","
2898         JSON_S("edge_count") ","
2899         JSON_S("trace_node_id") ","
2900         JSON_S("detachedness")) ","
2901     JSON_S("node_types") ":" JSON_A(
2902         JSON_A(
2903             JSON_S("hidden") ","
2904             JSON_S("array") ","
2905             JSON_S("string") ","
2906             JSON_S("object") ","
2907             JSON_S("code") ","
2908             JSON_S("closure") ","
2909             JSON_S("regexp") ","
2910             JSON_S("number") ","
2911             JSON_S("native") ","
2912             JSON_S("synthetic") ","
2913             JSON_S("concatenated string") ","
2914             JSON_S("sliced string") ","
2915             JSON_S("symbol") ","
2916             JSON_S("bigint")) ","
2917         JSON_S("string") ","
2918         JSON_S("number") ","
2919         JSON_S("number") ","
2920         JSON_S("number") ","
2921         JSON_S("number") ","
2922         JSON_S("number")) ","
2923     JSON_S("edge_fields") ":" JSON_A(
2924         JSON_S("type") ","
2925         JSON_S("name_or_index") ","
2926         JSON_S("to_node")) ","
2927     JSON_S("edge_types") ":" JSON_A(
2928         JSON_A(
2929             JSON_S("context") ","
2930             JSON_S("element") ","
2931             JSON_S("property") ","
2932             JSON_S("internal") ","
2933             JSON_S("hidden") ","
2934             JSON_S("shortcut") ","
2935             JSON_S("weak")) ","
2936         JSON_S("string_or_number") ","
2937         JSON_S("node")) ","
2938     JSON_S("trace_function_info_fields") ":" JSON_A(
2939         JSON_S("function_id") ","
2940         JSON_S("name") ","
2941         JSON_S("script_name") ","
2942         JSON_S("script_id") ","
2943         JSON_S("line") ","
2944         JSON_S("column")) ","
2945     JSON_S("trace_node_fields") ":" JSON_A(
2946         JSON_S("id") ","
2947         JSON_S("function_info_index") ","
2948         JSON_S("count") ","
2949         JSON_S("size") ","
2950         JSON_S("children")) ","
2951     JSON_S("sample_fields") ":" JSON_A(
2952         JSON_S("timestamp_us") ","
2953         JSON_S("last_assigned_id")) ","
2954     JSON_S("location_fields") ":" JSON_A(
2955         JSON_S("object_index") ","
2956         JSON_S("script_id") ","
2957         JSON_S("line") ","
2958         JSON_S("column"))));
2959 // clang-format on
2960 #undef JSON_S
2961 #undef JSON_O
2962 #undef JSON_A
2963   writer_->AddString(",\"node_count\":");
2964   writer_->AddNumber(static_cast<unsigned>(snapshot_->entries().size()));
2965   writer_->AddString(",\"edge_count\":");
2966   writer_->AddNumber(static_cast<double>(snapshot_->edges().size()));
2967   writer_->AddString(",\"trace_function_count\":");
2968   uint32_t count = 0;
2969   AllocationTracker* tracker = snapshot_->profiler()->allocation_tracker();
2970   if (tracker) {
2971     count = static_cast<uint32_t>(tracker->function_info_list().size());
2972   }
2973   writer_->AddNumber(count);
2974 }
2975 
2976 
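// Writes `u` as a JSON "\uXXXX" escape, e.g. WriteUChar(w, 0x1F) emits
// "\u001F".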
2977 static void WriteUChar(OutputStreamWriter* w, unibrow::uchar u) {
2978   static const char hex_chars[] = "0123456789ABCDEF";
2979   w->AddString("\\u");
2980   w->AddCharacter(hex_chars[(u >> 12) & 0xF]);
2981   w->AddCharacter(hex_chars[(u >> 8) & 0xF]);
2982   w->AddCharacter(hex_chars[(u >> 4) & 0xF]);
2983   w->AddCharacter(hex_chars[u & 0xF]);
2984 }
2985 
2986 
2987 void HeapSnapshotJSONSerializer::SerializeTraceTree() {
2988   AllocationTracker* tracker = snapshot_->profiler()->allocation_tracker();
2989   if (!tracker) return;
2990   AllocationTraceTree* traces = tracker->trace_tree();
2991   SerializeTraceNode(traces->root());
2992 }
2993 
2994 
2995 void HeapSnapshotJSONSerializer::SerializeTraceNode(AllocationTraceNode* node) {
2996   // The buffer needs space for 4 unsigned ints, 4 commas, [ and \0
2997   const int kBufferSize =
2998       4 * MaxDecimalDigitsIn<sizeof(unsigned)>::kUnsigned + 4 + 1 + 1;
2999   base::EmbeddedVector<char, kBufferSize> buffer;
3000   int buffer_pos = 0;
3001   buffer_pos = utoa(node->id(), buffer, buffer_pos);
3002   buffer[buffer_pos++] = ',';
3003   buffer_pos = utoa(node->function_info_index(), buffer, buffer_pos);
3004   buffer[buffer_pos++] = ',';
3005   buffer_pos = utoa(node->allocation_count(), buffer, buffer_pos);
3006   buffer[buffer_pos++] = ',';
3007   buffer_pos = utoa(node->allocation_size(), buffer, buffer_pos);
3008   buffer[buffer_pos++] = ',';
3009   buffer[buffer_pos++] = '[';
3010   buffer[buffer_pos++] = '\0';
3011   writer_->AddString(buffer.begin());
3012 
3013   int i = 0;
3014   for (AllocationTraceNode* child : node->children()) {
3015     if (i++ > 0) {
3016       writer_->AddCharacter(',');
3017     }
3018     SerializeTraceNode(child);
3019   }
3020   writer_->AddCharacter(']');
3021 }
3022 
3023 
3024 // 0-based position is converted to 1-based during the serialization.
3025 static int SerializePosition(int position, const base::Vector<char>& buffer,
3026                              int buffer_pos) {
3027   if (position == -1) {
3028     buffer[buffer_pos++] = '0';
3029   } else {
3030     DCHECK_GE(position, 0);
3031     buffer_pos = utoa(static_cast<unsigned>(position + 1), buffer, buffer_pos);
3032   }
3033   return buffer_pos;
3034 }
3035 
3036 void HeapSnapshotJSONSerializer::SerializeTraceNodeInfos() {
3037   AllocationTracker* tracker = snapshot_->profiler()->allocation_tracker();
3038   if (!tracker) return;
3039   // The buffer needs space for 6 unsigned ints, 6 commas, \n and \0
3040   const int kBufferSize =
3041       6 * MaxDecimalDigitsIn<sizeof(unsigned)>::kUnsigned + 6 + 1 + 1;
3042   base::EmbeddedVector<char, kBufferSize> buffer;
3043   int i = 0;
3044   for (AllocationTracker::FunctionInfo* info : tracker->function_info_list()) {
3045     int buffer_pos = 0;
3046     if (i++ > 0) {
3047       buffer[buffer_pos++] = ',';
3048     }
3049     buffer_pos = utoa(info->function_id, buffer, buffer_pos);
3050     buffer[buffer_pos++] = ',';
3051     buffer_pos = utoa(GetStringId(info->name), buffer, buffer_pos);
3052     buffer[buffer_pos++] = ',';
3053     buffer_pos = utoa(GetStringId(info->script_name), buffer, buffer_pos);
3054     buffer[buffer_pos++] = ',';
3055     // The cast is safe because script id is a non-negative Smi.
3056     buffer_pos = utoa(static_cast<unsigned>(info->script_id), buffer,
3057         buffer_pos);
3058     buffer[buffer_pos++] = ',';
3059     buffer_pos = SerializePosition(info->line, buffer, buffer_pos);
3060     buffer[buffer_pos++] = ',';
3061     buffer_pos = SerializePosition(info->column, buffer, buffer_pos);
3062     buffer[buffer_pos++] = '\n';
3063     buffer[buffer_pos++] = '\0';
3064     writer_->AddString(buffer.begin());
3065   }
3066 }
3067 
3068 
3069 void HeapSnapshotJSONSerializer::SerializeSamples() {
3070   const std::vector<HeapObjectsMap::TimeInterval>& samples =
3071       snapshot_->profiler()->heap_object_map()->samples();
3072   if (samples.empty()) return;
3073   base::TimeTicks start_time = samples[0].timestamp;
3074   // The buffer needs space for 2 unsigned ints, 2 commas, \n and \0
3075   const int kBufferSize = MaxDecimalDigitsIn<sizeof(
3076                               base::TimeDelta().InMicroseconds())>::kUnsigned +
3077                           MaxDecimalDigitsIn<sizeof(samples[0].id)>::kUnsigned +
3078                           2 + 1 + 1;
3079   base::EmbeddedVector<char, kBufferSize> buffer;
3080   int i = 0;
3081   for (const HeapObjectsMap::TimeInterval& sample : samples) {
3082     int buffer_pos = 0;
3083     if (i++ > 0) {
3084       buffer[buffer_pos++] = ',';
3085     }
3086     base::TimeDelta time_delta = sample.timestamp - start_time;
3087     buffer_pos = utoa(time_delta.InMicroseconds(), buffer, buffer_pos);
3088     buffer[buffer_pos++] = ',';
3089     buffer_pos = utoa(sample.last_assigned_id(), buffer, buffer_pos);
3090     buffer[buffer_pos++] = '\n';
3091     buffer[buffer_pos++] = '\0';
3092     writer_->AddString(buffer.begin());
3093   }
3094 }
3095 
3096 
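// Emits `s` as a quoted JSON string: common escape sequences are used where
// available, other control characters become "\uXXXX", and non-ASCII UTF-8
// sequences are re-encoded as "\uXXXX" escapes ('?' for invalid sequences).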
3097 void HeapSnapshotJSONSerializer::SerializeString(const unsigned char* s) {
3098   writer_->AddCharacter('\n');
3099   writer_->AddCharacter('\"');
3100   for ( ; *s != '\0'; ++s) {
3101     switch (*s) {
3102       case '\b':
3103         writer_->AddString("\\b");
3104         continue;
3105       case '\f':
3106         writer_->AddString("\\f");
3107         continue;
3108       case '\n':
3109         writer_->AddString("\\n");
3110         continue;
3111       case '\r':
3112         writer_->AddString("\\r");
3113         continue;
3114       case '\t':
3115         writer_->AddString("\\t");
3116         continue;
3117       case '\"':
3118       case '\\':
3119         writer_->AddCharacter('\\');
3120         writer_->AddCharacter(*s);
3121         continue;
3122       default:
3123         if (*s > 31 && *s < 128) {
3124           writer_->AddCharacter(*s);
3125         } else if (*s <= 31) {
3126           // Special character with no dedicated literal.
3127           WriteUChar(writer_, *s);
3128         } else {
3129           // Convert UTF-8 into \u UTF-16 literal.
3130           size_t length = 1, cursor = 0;
3131           for ( ; length <= 4 && *(s + length) != '\0'; ++length) { }
3132           unibrow::uchar c = unibrow::Utf8::CalculateValue(s, length, &cursor);
3133           if (c != unibrow::Utf8::kBadChar) {
3134             WriteUChar(writer_, c);
3135             DCHECK_NE(cursor, 0);
3136             s += cursor - 1;
3137           } else {
3138             writer_->AddCharacter('?');
3139           }
3140         }
3141     }
3142   }
3143   writer_->AddCharacter('\"');
3144 }
3145 
3146 
3147 void HeapSnapshotJSONSerializer::SerializeStrings() {
3148   base::ScopedVector<const unsigned char*> sorted_strings(strings_.occupancy() +
3149                                                           1);
3150   for (base::HashMap::Entry* entry = strings_.Start(); entry != nullptr;
3151        entry = strings_.Next(entry)) {
3152     int index = static_cast<int>(reinterpret_cast<uintptr_t>(entry->value));
3153     sorted_strings[index] = reinterpret_cast<const unsigned char*>(entry->key);
3154   }
3155   writer_->AddString("\"<dummy>\"");
3156   for (int i = 1; i < sorted_strings.length(); ++i) {
3157     writer_->AddCharacter(',');
3158     SerializeString(sorted_strings[i]);
3159     if (writer_->aborted()) return;
3160   }
3161 }
3162 
3163 void HeapSnapshotJSONSerializer::SerializeLocation(
3164     const SourceLocation& location) {
3165   // The buffer needs space for 4 unsigned ints, 3 commas, \n and \0
3166   static const int kBufferSize =
3167       MaxDecimalDigitsIn<sizeof(unsigned)>::kUnsigned * 4 + 3 + 2;
3168   base::EmbeddedVector<char, kBufferSize> buffer;
3169   int buffer_pos = 0;
3170   buffer_pos = utoa(to_node_index(location.entry_index), buffer, buffer_pos);
3171   buffer[buffer_pos++] = ',';
3172   buffer_pos = utoa(location.scriptId, buffer, buffer_pos);
3173   buffer[buffer_pos++] = ',';
3174   buffer_pos = utoa(location.line, buffer, buffer_pos);
3175   buffer[buffer_pos++] = ',';
3176   buffer_pos = utoa(location.col, buffer, buffer_pos);
3177   buffer[buffer_pos++] = '\n';
3178   buffer[buffer_pos++] = '\0';
3179   writer_->AddString(buffer.begin());
3180 }
3181 
3182 void HeapSnapshotJSONSerializer::SerializeLocations() {
3183   const std::vector<SourceLocation>& locations = snapshot_->locations();
3184   for (size_t i = 0; i < locations.size(); i++) {
3185     if (i > 0) writer_->AddCharacter(',');
3186     SerializeLocation(locations[i]);
3187     if (writer_->aborted()) return;
3188   }
3189 }
3190 
3191 }  // namespace internal
3192 }  // namespace v8
3193