1 // Copyright 2013 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4
5 #include "src/profiler/heap-snapshot-generator.h"
6
7 #include <utility>
8
9 #include "src/api.h"
10 #include "src/code-stubs.h"
11 #include "src/conversions.h"
12 #include "src/debug/debug.h"
13 #include "src/layout-descriptor.h"
14 #include "src/objects-body-descriptors.h"
15 #include "src/objects-inl.h"
16 #include "src/profiler/allocation-tracker.h"
17 #include "src/profiler/heap-profiler.h"
18 #include "src/profiler/heap-snapshot-generator-inl.h"
19 #include "src/prototype.h"
20 #include "src/transitions.h"
21
22 namespace v8 {
23 namespace internal {
24
25
HeapGraphEdge::HeapGraphEdge(Type type, const char* name, int from, int to)
27 : bit_field_(TypeField::encode(type) | FromIndexField::encode(from)),
28 to_index_(to),
29 name_(name) {
30 DCHECK(type == kContextVariable
31 || type == kProperty
32 || type == kInternal
33 || type == kShortcut
34 || type == kWeak);
35 }
36
37
HeapGraphEdge::HeapGraphEdge(Type type, int index, int from, int to)
39 : bit_field_(TypeField::encode(type) | FromIndexField::encode(from)),
40 to_index_(to),
41 index_(index) {
42 DCHECK(type == kElement || type == kHidden);
43 }
44
45
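// Edges are created with the destination stored as an index into the
// snapshot's entry list; once all entries have been allocated,
// HeapSnapshot::FillChildren() calls this to swap the index for a direct
// HeapEntry pointer.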
void HeapGraphEdge::ReplaceToIndexWithEntry(HeapSnapshot* snapshot) {
47 to_entry_ = &snapshot->entries()[to_index_];
48 }
49
50
51 const int HeapEntry::kNoEntry = -1;
52
HeapEntry::HeapEntry(HeapSnapshot* snapshot,
54 Type type,
55 const char* name,
56 SnapshotObjectId id,
57 size_t self_size,
58 unsigned trace_node_id)
59 : type_(type),
60 children_count_(0),
61 children_index_(-1),
62 self_size_(self_size),
63 snapshot_(snapshot),
64 name_(name),
65 id_(id),
66 trace_node_id_(trace_node_id) { }
67
68
void HeapEntry::SetNamedReference(HeapGraphEdge::Type type,
70 const char* name,
71 HeapEntry* entry) {
72 HeapGraphEdge edge(type, name, this->index(), entry->index());
73 snapshot_->edges().push_back(edge);
74 ++children_count_;
75 }
76
77
void HeapEntry::SetIndexedReference(HeapGraphEdge::Type type,
79 int index,
80 HeapEntry* entry) {
81 HeapGraphEdge edge(type, index, this->index(), entry->index());
82 snapshot_->edges().push_back(edge);
83 ++children_count_;
84 }
85
86
void HeapEntry::Print(
88 const char* prefix, const char* edge_name, int max_depth, int indent) {
89 STATIC_ASSERT(sizeof(unsigned) == sizeof(id()));
90 base::OS::Print("%6" PRIuS " @%6u %*c %s%s: ", self_size(), id(), indent, ' ',
91 prefix, edge_name);
92 if (type() != kString) {
93 base::OS::Print("%s %.40s\n", TypeAsString(), name_);
94 } else {
95 base::OS::Print("\"");
96 const char* c = name_;
97 while (*c && (c - name_) <= 40) {
98 if (*c != '\n')
99 base::OS::Print("%c", *c);
100 else
101 base::OS::Print("\\n");
102 ++c;
103 }
104 base::OS::Print("\"\n");
105 }
106 if (--max_depth == 0) return;
107 for (auto i = children_begin(); i != children_end(); ++i) {
108 HeapGraphEdge& edge = **i;
109 const char* edge_prefix = "";
110 EmbeddedVector<char, 64> index;
111 const char* edge_name = index.start();
112 switch (edge.type()) {
113 case HeapGraphEdge::kContextVariable:
114 edge_prefix = "#";
115 edge_name = edge.name();
116 break;
117 case HeapGraphEdge::kElement:
118 SNPrintF(index, "%d", edge.index());
119 break;
120 case HeapGraphEdge::kInternal:
121 edge_prefix = "$";
122 edge_name = edge.name();
123 break;
124 case HeapGraphEdge::kProperty:
125 edge_name = edge.name();
126 break;
127 case HeapGraphEdge::kHidden:
128 edge_prefix = "$";
129 SNPrintF(index, "%d", edge.index());
130 break;
131 case HeapGraphEdge::kShortcut:
132 edge_prefix = "^";
133 edge_name = edge.name();
134 break;
135 case HeapGraphEdge::kWeak:
136 edge_prefix = "w";
137 edge_name = edge.name();
138 break;
139 default:
140 SNPrintF(index, "!!! unknown edge type: %d ", edge.type());
141 }
142 edge.to()->Print(edge_prefix, edge_name, max_depth, indent + 2);
143 }
144 }
145
146
const char* HeapEntry::TypeAsString() {
148 switch (type()) {
149 case kHidden: return "/hidden/";
150 case kObject: return "/object/";
151 case kClosure: return "/closure/";
152 case kString: return "/string/";
153 case kCode: return "/code/";
154 case kArray: return "/array/";
155 case kRegExp: return "/regexp/";
156 case kHeapNumber: return "/number/";
157 case kNative: return "/native/";
158 case kSynthetic: return "/synthetic/";
159 case kConsString: return "/concatenated string/";
160 case kSlicedString: return "/sliced string/";
161 case kSymbol: return "/symbol/";
162 default: return "???";
163 }
164 }
165
166
167 // It is very important to keep objects that form a heap snapshot
168 // as small as possible.
169 namespace { // Avoid littering the global namespace.
170
171 template <size_t ptr_size> struct SnapshotSizeConstants;
172
173 template <> struct SnapshotSizeConstants<4> {
174 static const int kExpectedHeapGraphEdgeSize = 12;
175 static const int kExpectedHeapEntrySize = 28;
176 };
177
178 template <> struct SnapshotSizeConstants<8> {
179 static const int kExpectedHeapGraphEdgeSize = 24;
180 static const int kExpectedHeapEntrySize = 40;
181 };
182
183 } // namespace
184
185
HeapSnapshot::HeapSnapshot(HeapProfiler* profiler)
187 : profiler_(profiler),
188 root_index_(HeapEntry::kNoEntry),
189 gc_roots_index_(HeapEntry::kNoEntry),
190 max_snapshot_js_object_id_(0) {
191 STATIC_ASSERT(
192 sizeof(HeapGraphEdge) ==
193 SnapshotSizeConstants<kPointerSize>::kExpectedHeapGraphEdgeSize);
194 STATIC_ASSERT(
195 sizeof(HeapEntry) ==
196 SnapshotSizeConstants<kPointerSize>::kExpectedHeapEntrySize);
197 USE(SnapshotSizeConstants<4>::kExpectedHeapGraphEdgeSize);
198 USE(SnapshotSizeConstants<4>::kExpectedHeapEntrySize);
199 USE(SnapshotSizeConstants<8>::kExpectedHeapGraphEdgeSize);
200 USE(SnapshotSizeConstants<8>::kExpectedHeapEntrySize);
201 for (int i = 0; i < VisitorSynchronization::kNumberOfSyncTags; ++i) {
202 gc_subroot_indexes_[i] = HeapEntry::kNoEntry;
203 }
204 }
205
206
void HeapSnapshot::Delete() {
208 profiler_->RemoveSnapshot(this);
209 delete this;
210 }
211
212
void HeapSnapshot::RememberLastJSObjectId() {
214 max_snapshot_js_object_id_ = profiler_->heap_object_map()->last_assigned_id();
215 }
216
217
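// Creates the synthetic root entry, the "(GC roots)" entry and one subroot
// entry per VisitorSynchronization tag. Their ids start at
// kGcRootsFirstSubrootId and advance in steps of kObjectIdStep, so they do
// not collide with ids assigned to real heap objects.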
void HeapSnapshot::AddSyntheticRootEntries() {
219 AddRootEntry();
220 AddGcRootsEntry();
221 SnapshotObjectId id = HeapObjectsMap::kGcRootsFirstSubrootId;
222 for (int tag = 0; tag < VisitorSynchronization::kNumberOfSyncTags; tag++) {
223 AddGcSubrootEntry(tag, id);
224 id += HeapObjectsMap::kObjectIdStep;
225 }
226 DCHECK(HeapObjectsMap::kFirstAvailableObjectId == id);
227 }
228
229
HeapEntry* HeapSnapshot::AddRootEntry() {
231 DCHECK(root_index_ == HeapEntry::kNoEntry);
232 DCHECK(entries_.is_empty()); // Root entry must be the first one.
233 HeapEntry* entry = AddEntry(HeapEntry::kSynthetic,
234 "",
235 HeapObjectsMap::kInternalRootObjectId,
236 0,
237 0);
238 root_index_ = entry->index();
239 DCHECK(root_index_ == 0);
240 return entry;
241 }
242
243
HeapEntry* HeapSnapshot::AddGcRootsEntry() {
245 DCHECK(gc_roots_index_ == HeapEntry::kNoEntry);
246 HeapEntry* entry = AddEntry(HeapEntry::kSynthetic,
247 "(GC roots)",
248 HeapObjectsMap::kGcRootsObjectId,
249 0,
250 0);
251 gc_roots_index_ = entry->index();
252 return entry;
253 }
254
255
HeapEntry* HeapSnapshot::AddGcSubrootEntry(int tag, SnapshotObjectId id) {
257 DCHECK(gc_subroot_indexes_[tag] == HeapEntry::kNoEntry);
258 DCHECK(0 <= tag && tag < VisitorSynchronization::kNumberOfSyncTags);
259 HeapEntry* entry = AddEntry(HeapEntry::kSynthetic,
260 VisitorSynchronization::kTagNames[tag], id, 0, 0);
261 gc_subroot_indexes_[tag] = entry->index();
262 return entry;
263 }
264
265
HeapEntry* HeapSnapshot::AddEntry(HeapEntry::Type type,
267 const char* name,
268 SnapshotObjectId id,
269 size_t size,
270 unsigned trace_node_id) {
271 HeapEntry entry(this, type, name, id, size, trace_node_id);
272 entries_.Add(entry);
273 return &entries_.last();
274 }
275
276
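// First pass: give every entry a contiguous slice of the children_ vector
// based on its children_count_. Second pass: resolve each edge's destination
// index to a HeapEntry pointer and register the edge with its parent entry.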
void HeapSnapshot::FillChildren() {
278 DCHECK(children().empty());
279 children().resize(edges().size());
280 int children_index = 0;
281 for (int i = 0; i < entries().length(); ++i) {
282 HeapEntry* entry = &entries()[i];
283 children_index = entry->set_children_index(children_index);
284 }
285 DCHECK_EQ(edges().size(), static_cast<size_t>(children_index));
286 for (size_t i = 0; i < edges().size(); ++i) {
287 HeapGraphEdge* edge = &edges()[i];
288 edge->ReplaceToIndexWithEntry(this);
289 edge->from()->add_child(edge);
290 }
291 }
292
293
294 class FindEntryById {
295 public:
  explicit FindEntryById(SnapshotObjectId id) : id_(id) { }
  int operator()(HeapEntry* const* entry) {
298 if ((*entry)->id() == id_) return 0;
299 return (*entry)->id() < id_ ? -1 : 1;
300 }
301 private:
302 SnapshotObjectId id_;
303 };
304
305
HeapEntry* HeapSnapshot::GetEntryById(SnapshotObjectId id) {
307 List<HeapEntry*>* entries_by_id = GetSortedEntriesList();
308 // Perform a binary search by id.
309 int index = SortedListBSearch(*entries_by_id, FindEntryById(id));
310 if (index == -1)
311 return NULL;
312 return entries_by_id->at(index);
313 }
314
315
316 template<class T>
static int SortByIds(const T* entry1_ptr,
318 const T* entry2_ptr) {
319 if ((*entry1_ptr)->id() == (*entry2_ptr)->id()) return 0;
320 return (*entry1_ptr)->id() < (*entry2_ptr)->id() ? -1 : 1;
321 }
322
323
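// Lazily builds a list of entry pointers sorted by snapshot object id; this
// list is what GetEntryById() binary-searches.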
List<HeapEntry*>* HeapSnapshot::GetSortedEntriesList() {
325 if (sorted_entries_.is_empty()) {
326 sorted_entries_.Allocate(entries_.length());
327 for (int i = 0; i < entries_.length(); ++i) {
328 sorted_entries_[i] = &entries_[i];
329 }
330 sorted_entries_.Sort<int (*)(HeapEntry* const*, HeapEntry* const*)>(
331 SortByIds);
332 }
333 return &sorted_entries_;
334 }
335
336
void HeapSnapshot::Print(int max_depth) {
338 root()->Print("", "", max_depth, 0);
339 }
340
341
size_t HeapSnapshot::RawSnapshotSize() const {
343 return sizeof(*this) + GetMemoryUsedByList(entries_) +
344 edges_.size() * sizeof(decltype(edges_)::value_type) +
345 children_.size() * sizeof(decltype(children_)::value_type) +
346 GetMemoryUsedByList(sorted_entries_);
347 }
348
349
350 // We split IDs on evens for embedder objects (see
351 // HeapObjectsMap::GenerateId) and odds for native objects.
352 const SnapshotObjectId HeapObjectsMap::kInternalRootObjectId = 1;
353 const SnapshotObjectId HeapObjectsMap::kGcRootsObjectId =
354 HeapObjectsMap::kInternalRootObjectId + HeapObjectsMap::kObjectIdStep;
355 const SnapshotObjectId HeapObjectsMap::kGcRootsFirstSubrootId =
356 HeapObjectsMap::kGcRootsObjectId + HeapObjectsMap::kObjectIdStep;
357 const SnapshotObjectId HeapObjectsMap::kFirstAvailableObjectId =
358 HeapObjectsMap::kGcRootsFirstSubrootId +
359 VisitorSynchronization::kNumberOfSyncTags * HeapObjectsMap::kObjectIdStep;
360
HeapObjectsMap::HeapObjectsMap(Heap* heap)
362 : next_id_(kFirstAvailableObjectId), heap_(heap) {
  // This dummy element solves a problem with entries_map_.
  // When we do a lookup in the HashMap we cannot distinguish between two
  // cases: the map already had an entry with NULL as the value, or it has
  // just created a new entry on the fly with NULL as the default value.
  // With this dummy element we are guaranteed that every entries_map_ entry
  // has a value field greater than 0, a fact that MoveObject() relies on.
370 entries_.Add(EntryInfo(0, NULL, 0));
371 }
372
373
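// Invoked when a heap object moves from |from| to |to| (e.g. during GC
// compaction). Returns true if the moved object was already tracked by this
// map.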
bool HeapObjectsMap::MoveObject(Address from, Address to, int object_size) {
375 DCHECK(to != NULL);
376 DCHECK(from != NULL);
377 if (from == to) return false;
378 void* from_value = entries_map_.Remove(from, ComputePointerHash(from));
379 if (from_value == NULL) {
380 // It may occur that some untracked object moves to an address X and there
381 // is a tracked object at that address. In this case we should remove the
382 // entry as we know that the object has died.
383 void* to_value = entries_map_.Remove(to, ComputePointerHash(to));
384 if (to_value != NULL) {
385 int to_entry_info_index =
386 static_cast<int>(reinterpret_cast<intptr_t>(to_value));
387 entries_.at(to_entry_info_index).addr = NULL;
388 }
389 } else {
390 base::HashMap::Entry* to_entry =
391 entries_map_.LookupOrInsert(to, ComputePointerHash(to));
392 if (to_entry->value != NULL) {
      // We found an existing entry for the to address that belongs to an old
      // object. Without this step we would end up with two EntryInfos whose
      // addr fields hold the same value, and RemoveDeadEntries would later
      // drop one of them together with the corresponding entries_map_ entry.
398 int to_entry_info_index =
399 static_cast<int>(reinterpret_cast<intptr_t>(to_entry->value));
400 entries_.at(to_entry_info_index).addr = NULL;
401 }
402 int from_entry_info_index =
403 static_cast<int>(reinterpret_cast<intptr_t>(from_value));
404 entries_.at(from_entry_info_index).addr = to;
405 // Size of an object can change during its life, so to keep information
406 // about the object in entries_ consistent, we have to adjust size when the
407 // object is migrated.
408 if (FLAG_heap_profiler_trace_objects) {
409 PrintF("Move object from %p to %p old size %6d new size %6d\n",
410 static_cast<void*>(from), static_cast<void*>(to),
411 entries_.at(from_entry_info_index).size, object_size);
412 }
413 entries_.at(from_entry_info_index).size = object_size;
414 to_entry->value = from_value;
415 }
416 return from_value != NULL;
417 }
418
419
void HeapObjectsMap::UpdateObjectSize(Address addr, int size) {
421 FindOrAddEntry(addr, size, false);
422 }
423
424
SnapshotObjectId HeapObjectsMap::FindEntry(Address addr) {
426 base::HashMap::Entry* entry =
427 entries_map_.Lookup(addr, ComputePointerHash(addr));
428 if (entry == NULL) return 0;
429 int entry_index = static_cast<int>(reinterpret_cast<intptr_t>(entry->value));
430 EntryInfo& entry_info = entries_.at(entry_index);
431 DCHECK(static_cast<uint32_t>(entries_.length()) > entries_map_.occupancy());
432 return entry_info.id;
433 }
434
435
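// Returns the id of the object at |addr|, updating its recorded size if it
// is already tracked; otherwise assigns a fresh id (next_id_, advanced by
// kObjectIdStep) and registers a new entry.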
SnapshotObjectId HeapObjectsMap::FindOrAddEntry(Address addr,
437 unsigned int size,
438 bool accessed) {
439 DCHECK(static_cast<uint32_t>(entries_.length()) > entries_map_.occupancy());
440 base::HashMap::Entry* entry =
441 entries_map_.LookupOrInsert(addr, ComputePointerHash(addr));
442 if (entry->value != NULL) {
443 int entry_index =
444 static_cast<int>(reinterpret_cast<intptr_t>(entry->value));
445 EntryInfo& entry_info = entries_.at(entry_index);
446 entry_info.accessed = accessed;
447 if (FLAG_heap_profiler_trace_objects) {
448 PrintF("Update object size : %p with old size %d and new size %d\n",
449 static_cast<void*>(addr), entry_info.size, size);
450 }
451 entry_info.size = size;
452 return entry_info.id;
453 }
454 entry->value = reinterpret_cast<void*>(entries_.length());
455 SnapshotObjectId id = next_id_;
456 next_id_ += kObjectIdStep;
457 entries_.Add(EntryInfo(id, addr, size, accessed));
458 DCHECK(static_cast<uint32_t>(entries_.length()) > entries_map_.occupancy());
459 return id;
460 }
461
462
void HeapObjectsMap::StopHeapObjectsTracking() {
464 time_intervals_.Clear();
465 }
466
467
void HeapObjectsMap::UpdateHeapObjectsMap() {
469 if (FLAG_heap_profiler_trace_objects) {
470 PrintF("Begin HeapObjectsMap::UpdateHeapObjectsMap. map has %d entries.\n",
471 entries_map_.occupancy());
472 }
473 heap_->CollectAllGarbage(Heap::kMakeHeapIterableMask,
474 GarbageCollectionReason::kHeapProfiler);
475 HeapIterator iterator(heap_);
476 for (HeapObject* obj = iterator.next();
477 obj != NULL;
478 obj = iterator.next()) {
479 FindOrAddEntry(obj->address(), obj->Size());
480 if (FLAG_heap_profiler_trace_objects) {
481 PrintF("Update object : %p %6d. Next address is %p\n",
482 static_cast<void*>(obj->address()), obj->Size(),
483 static_cast<void*>(obj->address() + obj->Size()));
484 }
485 }
486 RemoveDeadEntries();
487 if (FLAG_heap_profiler_trace_objects) {
488 PrintF("End HeapObjectsMap::UpdateHeapObjectsMap. map has %d entries.\n",
489 entries_map_.occupancy());
490 }
491 }
492
493
494 namespace {
495
496
497 struct HeapObjectInfo {
  HeapObjectInfo(HeapObject* obj, int expected_size)
499 : obj(obj),
500 expected_size(expected_size) {
501 }
502
503 HeapObject* obj;
504 int expected_size;
505
  bool IsValid() const { return expected_size == obj->Size(); }
507
  void Print() const {
509 if (expected_size == 0) {
510 PrintF("Untracked object : %p %6d. Next address is %p\n",
511 static_cast<void*>(obj->address()), obj->Size(),
512 static_cast<void*>(obj->address() + obj->Size()));
513 } else if (obj->Size() != expected_size) {
514 PrintF("Wrong size %6d: %p %6d. Next address is %p\n", expected_size,
515 static_cast<void*>(obj->address()), obj->Size(),
516 static_cast<void*>(obj->address() + obj->Size()));
517 } else {
518 PrintF("Good object : %p %6d. Next address is %p\n",
519 static_cast<void*>(obj->address()), expected_size,
520 static_cast<void*>(obj->address() + obj->Size()));
521 }
522 }
523 };
524
525
static int comparator(const HeapObjectInfo* a, const HeapObjectInfo* b) {
527 if (a->obj < b->obj) return -1;
528 if (a->obj > b->obj) return 1;
529 return 0;
530 }
531
532
533 } // namespace
534
535
int HeapObjectsMap::FindUntrackedObjects() {
537 List<HeapObjectInfo> heap_objects(1000);
538
539 HeapIterator iterator(heap_);
540 int untracked = 0;
541 for (HeapObject* obj = iterator.next();
542 obj != NULL;
543 obj = iterator.next()) {
544 base::HashMap::Entry* entry =
545 entries_map_.Lookup(obj->address(), ComputePointerHash(obj->address()));
546 if (entry == NULL) {
547 ++untracked;
548 if (FLAG_heap_profiler_trace_objects) {
549 heap_objects.Add(HeapObjectInfo(obj, 0));
550 }
551 } else {
552 int entry_index = static_cast<int>(
553 reinterpret_cast<intptr_t>(entry->value));
554 EntryInfo& entry_info = entries_.at(entry_index);
555 if (FLAG_heap_profiler_trace_objects) {
556 heap_objects.Add(HeapObjectInfo(obj,
557 static_cast<int>(entry_info.size)));
558 if (obj->Size() != static_cast<int>(entry_info.size))
559 ++untracked;
560 } else {
561 CHECK_EQ(obj->Size(), static_cast<int>(entry_info.size));
562 }
563 }
564 }
565 if (FLAG_heap_profiler_trace_objects) {
566 PrintF("\nBegin HeapObjectsMap::FindUntrackedObjects. %d entries in map.\n",
567 entries_map_.occupancy());
568 heap_objects.Sort(comparator);
569 int last_printed_object = -1;
570 bool print_next_object = false;
571 for (int i = 0; i < heap_objects.length(); ++i) {
572 const HeapObjectInfo& object_info = heap_objects[i];
573 if (!object_info.IsValid()) {
574 ++untracked;
575 if (last_printed_object != i - 1) {
576 if (i > 0) {
577 PrintF("%d objects were skipped\n", i - 1 - last_printed_object);
578 heap_objects[i - 1].Print();
579 }
580 }
581 object_info.Print();
582 last_printed_object = i;
583 print_next_object = true;
584 } else if (print_next_object) {
585 object_info.Print();
586 print_next_object = false;
587 last_printed_object = i;
588 }
589 }
590 if (last_printed_object < heap_objects.length() - 1) {
591 PrintF("Last %d objects were skipped\n",
592 heap_objects.length() - 1 - last_printed_object);
593 }
594 PrintF("End HeapObjectsMap::FindUntrackedObjects. %d entries in map.\n\n",
595 entries_map_.occupancy());
596 }
597 return untracked;
598 }
599
600
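// Refreshes the object map (which triggers a GC), then streams one
// v8::HeapStatsUpdate per time interval whose object count or total size has
// changed, flushing whenever the buffer reaches the stream's preferred chunk
// size.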
SnapshotObjectId HeapObjectsMap::PushHeapObjectsStats(OutputStream* stream,
602 int64_t* timestamp_us) {
603 UpdateHeapObjectsMap();
604 time_intervals_.Add(TimeInterval(next_id_));
605 int prefered_chunk_size = stream->GetChunkSize();
606 List<v8::HeapStatsUpdate> stats_buffer;
607 DCHECK(!entries_.is_empty());
608 EntryInfo* entry_info = &entries_.first();
609 EntryInfo* end_entry_info = &entries_.last() + 1;
610 for (int time_interval_index = 0;
611 time_interval_index < time_intervals_.length();
612 ++time_interval_index) {
613 TimeInterval& time_interval = time_intervals_[time_interval_index];
614 SnapshotObjectId time_interval_id = time_interval.id;
615 uint32_t entries_size = 0;
616 EntryInfo* start_entry_info = entry_info;
617 while (entry_info < end_entry_info && entry_info->id < time_interval_id) {
618 entries_size += entry_info->size;
619 ++entry_info;
620 }
621 uint32_t entries_count =
622 static_cast<uint32_t>(entry_info - start_entry_info);
623 if (time_interval.count != entries_count ||
624 time_interval.size != entries_size) {
625 stats_buffer.Add(v8::HeapStatsUpdate(
626 time_interval_index,
627 time_interval.count = entries_count,
628 time_interval.size = entries_size));
629 if (stats_buffer.length() >= prefered_chunk_size) {
630 OutputStream::WriteResult result = stream->WriteHeapStatsChunk(
631 &stats_buffer.first(), stats_buffer.length());
632 if (result == OutputStream::kAbort) return last_assigned_id();
633 stats_buffer.Clear();
634 }
635 }
636 }
637 DCHECK(entry_info == end_entry_info);
638 if (!stats_buffer.is_empty()) {
639 OutputStream::WriteResult result = stream->WriteHeapStatsChunk(
640 &stats_buffer.first(), stats_buffer.length());
641 if (result == OutputStream::kAbort) return last_assigned_id();
642 }
643 stream->EndOfStream();
644 if (timestamp_us) {
645 *timestamp_us = (time_intervals_.last().timestamp -
646 time_intervals_[0].timestamp).InMicroseconds();
647 }
648 return last_assigned_id();
649 }
650
651
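// Compacts entries_: entries that were not marked 'accessed' since the last
// update are dropped together with their entries_map_ slots, while surviving
// entries keep their ids and have their map values re-pointed to the new
// indices.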
void HeapObjectsMap::RemoveDeadEntries() {
653 DCHECK(entries_.length() > 0 &&
654 entries_.at(0).id == 0 &&
655 entries_.at(0).addr == NULL);
656 int first_free_entry = 1;
657 for (int i = 1; i < entries_.length(); ++i) {
658 EntryInfo& entry_info = entries_.at(i);
659 if (entry_info.accessed) {
660 if (first_free_entry != i) {
661 entries_.at(first_free_entry) = entry_info;
662 }
663 entries_.at(first_free_entry).accessed = false;
664 base::HashMap::Entry* entry = entries_map_.Lookup(
665 entry_info.addr, ComputePointerHash(entry_info.addr));
666 DCHECK(entry);
667 entry->value = reinterpret_cast<void*>(first_free_entry);
668 ++first_free_entry;
669 } else {
670 if (entry_info.addr) {
671 entries_map_.Remove(entry_info.addr,
672 ComputePointerHash(entry_info.addr));
673 }
674 }
675 }
676 entries_.Rewind(first_free_entry);
677 DCHECK(static_cast<uint32_t>(entries_.length()) - 1 ==
678 entries_map_.occupancy());
679 }
680
681
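// Ids for embedder-provided (RetainedObjectInfo) objects are derived from the
// info's hash, label and element count, then shifted left by one so they are
// always even and cannot collide with the odd ids handed out to heap objects
// (see the comment above kInternalRootObjectId).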
SnapshotObjectId HeapObjectsMap::GenerateId(v8::RetainedObjectInfo* info) {
683 SnapshotObjectId id = static_cast<SnapshotObjectId>(info->GetHash());
684 const char* label = info->GetLabel();
685 id ^= StringHasher::HashSequentialString(label,
686 static_cast<int>(strlen(label)),
687 heap_->HashSeed());
688 intptr_t element_count = info->GetElementCount();
689 if (element_count != -1)
690 id ^= ComputeIntegerHash(static_cast<uint32_t>(element_count),
691 v8::internal::kZeroHashSeed);
692 return id << 1;
693 }
694
695
size_t HeapObjectsMap::GetUsedMemorySize() const {
697 return sizeof(*this) +
698 sizeof(base::HashMap::Entry) * entries_map_.capacity() +
699 GetMemoryUsedByList(entries_) + GetMemoryUsedByList(time_intervals_);
700 }
701
HeapEntriesMap::HeapEntriesMap() : entries_() {}
703
int HeapEntriesMap::Map(HeapThing thing) {
705 base::HashMap::Entry* cache_entry = entries_.Lookup(thing, Hash(thing));
706 if (cache_entry == NULL) return HeapEntry::kNoEntry;
707 return static_cast<int>(reinterpret_cast<intptr_t>(cache_entry->value));
708 }
709
710
void HeapEntriesMap::Pair(HeapThing thing, int entry) {
712 base::HashMap::Entry* cache_entry =
713 entries_.LookupOrInsert(thing, Hash(thing));
714 DCHECK(cache_entry->value == NULL);
715 cache_entry->value = reinterpret_cast<void*>(static_cast<intptr_t>(entry));
716 }
717
HeapObjectsSet::HeapObjectsSet() : entries_() {}
719
void HeapObjectsSet::Clear() {
721 entries_.Clear();
722 }
723
724
bool HeapObjectsSet::Contains(Object* obj) {
726 if (!obj->IsHeapObject()) return false;
727 HeapObject* object = HeapObject::cast(obj);
728 return entries_.Lookup(object, HeapEntriesMap::Hash(object)) != NULL;
729 }
730
731
void HeapObjectsSet::Insert(Object* obj) {
733 if (!obj->IsHeapObject()) return;
734 HeapObject* object = HeapObject::cast(obj);
735 entries_.LookupOrInsert(object, HeapEntriesMap::Hash(object));
736 }
737
738
const char* HeapObjectsSet::GetTag(Object* obj) {
740 HeapObject* object = HeapObject::cast(obj);
741 base::HashMap::Entry* cache_entry =
742 entries_.Lookup(object, HeapEntriesMap::Hash(object));
743 return cache_entry != NULL
744 ? reinterpret_cast<const char*>(cache_entry->value)
745 : NULL;
746 }
747
748
V8_NOINLINE void HeapObjectsSet::SetTag(Object* obj, const char* tag) {
750 if (!obj->IsHeapObject()) return;
751 HeapObject* object = HeapObject::cast(obj);
752 base::HashMap::Entry* cache_entry =
753 entries_.LookupOrInsert(object, HeapEntriesMap::Hash(object));
754 cache_entry->value = const_cast<char*>(tag);
755 }
756
757
V8HeapExplorer::V8HeapExplorer(
759 HeapSnapshot* snapshot,
760 SnapshottingProgressReportingInterface* progress,
761 v8::HeapProfiler::ObjectNameResolver* resolver)
762 : heap_(snapshot->profiler()->heap_object_map()->heap()),
763 snapshot_(snapshot),
764 names_(snapshot_->profiler()->names()),
765 heap_object_map_(snapshot_->profiler()->heap_object_map()),
766 progress_(progress),
767 filler_(NULL),
768 global_object_name_resolver_(resolver) {
769 }
770
771
V8HeapExplorer::~V8HeapExplorer() {
773 }
774
775
HeapEntry* V8HeapExplorer::AllocateEntry(HeapThing ptr) {
777 return AddEntry(reinterpret_cast<HeapObject*>(ptr));
778 }
779
780
HeapEntry* V8HeapExplorer::AddEntry(HeapObject* object) {
782 if (object->IsJSFunction()) {
783 JSFunction* func = JSFunction::cast(object);
784 SharedFunctionInfo* shared = func->shared();
785 const char* name = names_->GetName(String::cast(shared->name()));
786 return AddEntry(object, HeapEntry::kClosure, name);
787 } else if (object->IsJSBoundFunction()) {
788 return AddEntry(object, HeapEntry::kClosure, "native_bind");
789 } else if (object->IsJSRegExp()) {
790 JSRegExp* re = JSRegExp::cast(object);
791 return AddEntry(object,
792 HeapEntry::kRegExp,
793 names_->GetName(re->Pattern()));
794 } else if (object->IsJSObject()) {
795 const char* name = names_->GetName(
796 GetConstructorName(JSObject::cast(object)));
797 if (object->IsJSGlobalObject()) {
798 const char* tag = objects_tags_.GetTag(object);
799 if (tag != NULL) {
800 name = names_->GetFormatted("%s / %s", name, tag);
801 }
802 }
803 return AddEntry(object, HeapEntry::kObject, name);
804 } else if (object->IsString()) {
805 String* string = String::cast(object);
806 if (string->IsConsString())
807 return AddEntry(object,
808 HeapEntry::kConsString,
809 "(concatenated string)");
810 if (string->IsSlicedString())
811 return AddEntry(object,
812 HeapEntry::kSlicedString,
813 "(sliced string)");
814 return AddEntry(object,
815 HeapEntry::kString,
816 names_->GetName(String::cast(object)));
817 } else if (object->IsSymbol()) {
818 if (Symbol::cast(object)->is_private())
819 return AddEntry(object, HeapEntry::kHidden, "private symbol");
820 else
821 return AddEntry(object, HeapEntry::kSymbol, "symbol");
822 } else if (object->IsCode()) {
823 return AddEntry(object, HeapEntry::kCode, "");
824 } else if (object->IsSharedFunctionInfo()) {
825 String* name = String::cast(SharedFunctionInfo::cast(object)->name());
826 return AddEntry(object,
827 HeapEntry::kCode,
828 names_->GetName(name));
829 } else if (object->IsScript()) {
830 Object* name = Script::cast(object)->name();
831 return AddEntry(object,
832 HeapEntry::kCode,
833 name->IsString()
834 ? names_->GetName(String::cast(name))
835 : "");
836 } else if (object->IsNativeContext()) {
837 return AddEntry(object, HeapEntry::kHidden, "system / NativeContext");
838 } else if (object->IsContext()) {
839 return AddEntry(object, HeapEntry::kObject, "system / Context");
840 } else if (object->IsFixedArray() || object->IsFixedDoubleArray() ||
841 object->IsByteArray()) {
842 return AddEntry(object, HeapEntry::kArray, "");
843 } else if (object->IsHeapNumber()) {
844 return AddEntry(object, HeapEntry::kHeapNumber, "number");
845 }
846 return AddEntry(object, HeapEntry::kHidden, GetSystemEntryName(object));
847 }
848
849
HeapEntry* V8HeapExplorer::AddEntry(HeapObject* object,
851 HeapEntry::Type type,
852 const char* name) {
853 return AddEntry(object->address(), type, name, object->Size());
854 }
855
856
HeapEntry* V8HeapExplorer::AddEntry(Address address,
858 HeapEntry::Type type,
859 const char* name,
860 size_t size) {
861 SnapshotObjectId object_id = heap_object_map_->FindOrAddEntry(
862 address, static_cast<unsigned int>(size));
863 unsigned trace_node_id = 0;
864 if (AllocationTracker* allocation_tracker =
865 snapshot_->profiler()->allocation_tracker()) {
866 trace_node_id =
867 allocation_tracker->address_to_trace()->GetTraceNodeId(address);
868 }
869 return snapshot_->AddEntry(type, name, object_id, size, trace_node_id);
870 }
871
872
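// SnapshotFiller mediates between the heap explorers and the snapshot being
// built: it allocates entries on demand via a HeapEntriesAllocator, caches
// HeapThing -> entry index pairs in a HeapEntriesMap, and forwards
// reference-setting calls to the appropriate parent HeapEntry.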
873 class SnapshotFiller {
874 public:
  explicit SnapshotFiller(HeapSnapshot* snapshot, HeapEntriesMap* entries)
876 : snapshot_(snapshot),
877 names_(snapshot->profiler()->names()),
878 entries_(entries) { }
  HeapEntry* AddEntry(HeapThing ptr, HeapEntriesAllocator* allocator) {
880 HeapEntry* entry = allocator->AllocateEntry(ptr);
881 entries_->Pair(ptr, entry->index());
882 return entry;
883 }
  HeapEntry* FindEntry(HeapThing ptr) {
885 int index = entries_->Map(ptr);
886 return index != HeapEntry::kNoEntry ? &snapshot_->entries()[index] : NULL;
887 }
  HeapEntry* FindOrAddEntry(HeapThing ptr, HeapEntriesAllocator* allocator) {
889 HeapEntry* entry = FindEntry(ptr);
890 return entry != NULL ? entry : AddEntry(ptr, allocator);
891 }
  void SetIndexedReference(HeapGraphEdge::Type type,
893 int parent,
894 int index,
895 HeapEntry* child_entry) {
896 HeapEntry* parent_entry = &snapshot_->entries()[parent];
897 parent_entry->SetIndexedReference(type, index, child_entry);
898 }
  void SetIndexedAutoIndexReference(HeapGraphEdge::Type type,
900 int parent,
901 HeapEntry* child_entry) {
902 HeapEntry* parent_entry = &snapshot_->entries()[parent];
903 int index = parent_entry->children_count() + 1;
904 parent_entry->SetIndexedReference(type, index, child_entry);
905 }
  void SetNamedReference(HeapGraphEdge::Type type,
907 int parent,
908 const char* reference_name,
909 HeapEntry* child_entry) {
910 HeapEntry* parent_entry = &snapshot_->entries()[parent];
911 parent_entry->SetNamedReference(type, reference_name, child_entry);
912 }
  void SetNamedAutoIndexReference(HeapGraphEdge::Type type,
914 int parent,
915 HeapEntry* child_entry) {
916 HeapEntry* parent_entry = &snapshot_->entries()[parent];
917 int index = parent_entry->children_count() + 1;
918 parent_entry->SetNamedReference(
919 type,
920 names_->GetName(index),
921 child_entry);
922 }
923
924 private:
925 HeapSnapshot* snapshot_;
926 StringsStorage* names_;
927 HeapEntriesMap* entries_;
928 };
929
930
const char* V8HeapExplorer::GetSystemEntryName(HeapObject* object) {
932 switch (object->map()->instance_type()) {
933 case MAP_TYPE:
934 switch (Map::cast(object)->instance_type()) {
935 #define MAKE_STRING_MAP_CASE(instance_type, size, name, Name) \
936 case instance_type: return "system / Map (" #Name ")";
937 STRING_TYPE_LIST(MAKE_STRING_MAP_CASE)
938 #undef MAKE_STRING_MAP_CASE
939 default: return "system / Map";
940 }
941 case CELL_TYPE: return "system / Cell";
942 case PROPERTY_CELL_TYPE: return "system / PropertyCell";
943 case FOREIGN_TYPE: return "system / Foreign";
944 case ODDBALL_TYPE: return "system / Oddball";
945 #define MAKE_STRUCT_CASE(NAME, Name, name) \
946 case NAME##_TYPE: return "system / "#Name;
947 STRUCT_LIST(MAKE_STRUCT_CASE)
948 #undef MAKE_STRUCT_CASE
949 default: return "system";
950 }
951 }
952
953
int V8HeapExplorer::EstimateObjectsCount(HeapIterator* iterator) {
955 int objects_count = 0;
956 for (HeapObject* obj = iterator->next();
957 obj != NULL;
958 obj = iterator->next()) {
959 objects_count++;
960 }
961 return objects_count;
962 }
963
964
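// Visits the raw pointer fields of |parent_obj_| and reports every field that
// has not already been recorded as a named reference (tracked via
// generator_->marks_) as a hidden reference on the parent entry.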
965 class IndexedReferencesExtractor : public ObjectVisitor {
966 public:
  IndexedReferencesExtractor(V8HeapExplorer* generator, HeapObject* parent_obj,
968 int parent)
969 : generator_(generator),
970 parent_obj_(parent_obj),
971 parent_start_(HeapObject::RawField(parent_obj_, 0)),
972 parent_end_(HeapObject::RawField(parent_obj_, parent_obj_->Size())),
973 parent_(parent),
974 next_index_(0) {}
  void VisitCodeEntry(Address entry_address) override {
976 Code* code = Code::cast(Code::GetObjectFromEntryAddress(entry_address));
977 generator_->SetInternalReference(parent_obj_, parent_, "code", code);
978 generator_->TagCodeObject(code);
979 }
  void VisitPointers(Object** start, Object** end) override {
981 for (Object** p = start; p < end; p++) {
982 int index = static_cast<int>(p - HeapObject::RawField(parent_obj_, 0));
983 ++next_index_;
984 // |p| could be outside of the object, e.g., while visiting RelocInfo of
985 // code objects.
986 if (p >= parent_start_ && p < parent_end_ && generator_->marks_[index]) {
987 generator_->marks_[index] = false;
988 continue;
989 }
990 generator_->SetHiddenReference(parent_obj_, parent_, next_index_, *p,
991 index * kPointerSize);
992 }
993 }
994
995 private:
996 V8HeapExplorer* generator_;
997 HeapObject* parent_obj_;
998 Object** parent_start_;
999 Object** parent_end_;
1000 int parent_;
1001 int next_index_;
1002 };
1003
1004
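// Reference extraction runs in two passes: pass 1 handles every object except
// FixedArrays, pass 2 handles FixedArrays (including Contexts). This lets
// sub-type tags recorded during pass 1 (e.g. for weak collection tables) be
// used when the arrays themselves are processed.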
bool V8HeapExplorer::ExtractReferencesPass1(int entry, HeapObject* obj) {
1006 if (obj->IsFixedArray()) return false; // FixedArrays are processed on pass 2
1007
1008 if (obj->IsJSGlobalProxy()) {
1009 ExtractJSGlobalProxyReferences(entry, JSGlobalProxy::cast(obj));
1010 } else if (obj->IsJSArrayBuffer()) {
1011 ExtractJSArrayBufferReferences(entry, JSArrayBuffer::cast(obj));
1012 } else if (obj->IsJSObject()) {
1013 if (obj->IsJSWeakSet()) {
1014 ExtractJSWeakCollectionReferences(entry, JSWeakSet::cast(obj));
1015 } else if (obj->IsJSWeakMap()) {
1016 ExtractJSWeakCollectionReferences(entry, JSWeakMap::cast(obj));
1017 } else if (obj->IsJSSet()) {
1018 ExtractJSCollectionReferences(entry, JSSet::cast(obj));
1019 } else if (obj->IsJSMap()) {
1020 ExtractJSCollectionReferences(entry, JSMap::cast(obj));
1021 }
1022 ExtractJSObjectReferences(entry, JSObject::cast(obj));
1023 } else if (obj->IsString()) {
1024 ExtractStringReferences(entry, String::cast(obj));
1025 } else if (obj->IsSymbol()) {
1026 ExtractSymbolReferences(entry, Symbol::cast(obj));
1027 } else if (obj->IsMap()) {
1028 ExtractMapReferences(entry, Map::cast(obj));
1029 } else if (obj->IsSharedFunctionInfo()) {
1030 ExtractSharedFunctionInfoReferences(entry, SharedFunctionInfo::cast(obj));
1031 } else if (obj->IsScript()) {
1032 ExtractScriptReferences(entry, Script::cast(obj));
1033 } else if (obj->IsAccessorInfo()) {
1034 ExtractAccessorInfoReferences(entry, AccessorInfo::cast(obj));
1035 } else if (obj->IsAccessorPair()) {
1036 ExtractAccessorPairReferences(entry, AccessorPair::cast(obj));
1037 } else if (obj->IsCode()) {
1038 ExtractCodeReferences(entry, Code::cast(obj));
1039 } else if (obj->IsCell()) {
1040 ExtractCellReferences(entry, Cell::cast(obj));
1041 } else if (obj->IsWeakCell()) {
1042 ExtractWeakCellReferences(entry, WeakCell::cast(obj));
1043 } else if (obj->IsPropertyCell()) {
1044 ExtractPropertyCellReferences(entry, PropertyCell::cast(obj));
1045 } else if (obj->IsAllocationSite()) {
1046 ExtractAllocationSiteReferences(entry, AllocationSite::cast(obj));
1047 }
1048 return true;
1049 }
1050
1051
bool V8HeapExplorer::ExtractReferencesPass2(int entry, HeapObject* obj) {
1053 if (!obj->IsFixedArray()) return false;
1054
1055 if (obj->IsContext()) {
1056 ExtractContextReferences(entry, Context::cast(obj));
1057 } else {
1058 ExtractFixedArrayReferences(entry, FixedArray::cast(obj));
1059 }
1060 return true;
1061 }
1062
1063
void V8HeapExplorer::ExtractJSGlobalProxyReferences(
1065 int entry, JSGlobalProxy* proxy) {
1066 SetInternalReference(proxy, entry,
1067 "native_context", proxy->native_context(),
1068 JSGlobalProxy::kNativeContextOffset);
1069 }
1070
1071
void V8HeapExplorer::ExtractJSObjectReferences(
1073 int entry, JSObject* js_obj) {
1074 HeapObject* obj = js_obj;
1075 ExtractPropertyReferences(js_obj, entry);
1076 ExtractElementReferences(js_obj, entry);
1077 ExtractInternalReferences(js_obj, entry);
1078 PrototypeIterator iter(heap_->isolate(), js_obj);
1079 SetPropertyReference(obj, entry, heap_->proto_string(), iter.GetCurrent());
1080 if (obj->IsJSBoundFunction()) {
1081 JSBoundFunction* js_fun = JSBoundFunction::cast(obj);
1082 TagObject(js_fun->bound_arguments(), "(bound arguments)");
1083 SetInternalReference(js_fun, entry, "bindings", js_fun->bound_arguments(),
1084 JSBoundFunction::kBoundArgumentsOffset);
1085 SetInternalReference(js_obj, entry, "bound_this", js_fun->bound_this(),
1086 JSBoundFunction::kBoundThisOffset);
1087 SetInternalReference(js_obj, entry, "bound_function",
1088 js_fun->bound_target_function(),
1089 JSBoundFunction::kBoundTargetFunctionOffset);
1090 FixedArray* bindings = js_fun->bound_arguments();
1091 for (int i = 0; i < bindings->length(); i++) {
1092 const char* reference_name = names_->GetFormatted("bound_argument_%d", i);
1093 SetNativeBindReference(js_obj, entry, reference_name, bindings->get(i));
1094 }
1095 } else if (obj->IsJSFunction()) {
1096 JSFunction* js_fun = JSFunction::cast(js_obj);
1097 Object* proto_or_map = js_fun->prototype_or_initial_map();
1098 if (!proto_or_map->IsTheHole(heap_->isolate())) {
1099 if (!proto_or_map->IsMap()) {
1100 SetPropertyReference(
1101 obj, entry,
1102 heap_->prototype_string(), proto_or_map,
1103 NULL,
1104 JSFunction::kPrototypeOrInitialMapOffset);
1105 } else {
1106 SetPropertyReference(
1107 obj, entry,
1108 heap_->prototype_string(), js_fun->prototype());
1109 SetInternalReference(
1110 obj, entry, "initial_map", proto_or_map,
1111 JSFunction::kPrototypeOrInitialMapOffset);
1112 }
1113 }
1114 SharedFunctionInfo* shared_info = js_fun->shared();
1115 TagObject(js_fun->feedback_vector_cell(),
1116 "(function feedback vector cell)");
1117 SetInternalReference(js_fun, entry, "feedback_vector_cell",
1118 js_fun->feedback_vector_cell(),
1119 JSFunction::kFeedbackVectorOffset);
1120 TagObject(shared_info, "(shared function info)");
1121 SetInternalReference(js_fun, entry,
1122 "shared", shared_info,
1123 JSFunction::kSharedFunctionInfoOffset);
1124 TagObject(js_fun->context(), "(context)");
1125 SetInternalReference(js_fun, entry,
1126 "context", js_fun->context(),
1127 JSFunction::kContextOffset);
1128 // Ensure no new weak references appeared in JSFunction.
1129 STATIC_ASSERT(JSFunction::kCodeEntryOffset ==
1130 JSFunction::kNonWeakFieldsEndOffset);
1131 STATIC_ASSERT(JSFunction::kCodeEntryOffset + kPointerSize ==
1132 JSFunction::kNextFunctionLinkOffset);
1133 STATIC_ASSERT(JSFunction::kNextFunctionLinkOffset + kPointerSize
1134 == JSFunction::kSize);
1135 } else if (obj->IsJSGlobalObject()) {
1136 JSGlobalObject* global_obj = JSGlobalObject::cast(obj);
1137 SetInternalReference(global_obj, entry, "native_context",
1138 global_obj->native_context(),
1139 JSGlobalObject::kNativeContextOffset);
1140 SetInternalReference(global_obj, entry, "global_proxy",
1141 global_obj->global_proxy(),
1142 JSGlobalObject::kGlobalProxyOffset);
1143 STATIC_ASSERT(JSGlobalObject::kSize - JSObject::kHeaderSize ==
1144 2 * kPointerSize);
1145 } else if (obj->IsJSArrayBufferView()) {
1146 JSArrayBufferView* view = JSArrayBufferView::cast(obj);
1147 SetInternalReference(view, entry, "buffer", view->buffer(),
1148 JSArrayBufferView::kBufferOffset);
1149 }
1150 TagObject(js_obj->properties(), "(object properties)");
1151 SetInternalReference(obj, entry,
1152 "properties", js_obj->properties(),
1153 JSObject::kPropertiesOffset);
1154 TagObject(js_obj->elements(), "(object elements)");
1155 SetInternalReference(obj, entry,
1156 "elements", js_obj->elements(),
1157 JSObject::kElementsOffset);
1158 }
1159
1160
void V8HeapExplorer::ExtractStringReferences(int entry, String* string) {
1162 if (string->IsConsString()) {
1163 ConsString* cs = ConsString::cast(string);
1164 SetInternalReference(cs, entry, "first", cs->first(),
1165 ConsString::kFirstOffset);
1166 SetInternalReference(cs, entry, "second", cs->second(),
1167 ConsString::kSecondOffset);
1168 } else if (string->IsSlicedString()) {
1169 SlicedString* ss = SlicedString::cast(string);
1170 SetInternalReference(ss, entry, "parent", ss->parent(),
1171 SlicedString::kParentOffset);
1172 } else if (string->IsThinString()) {
1173 ThinString* ts = ThinString::cast(string);
1174 SetInternalReference(ts, entry, "actual", ts->actual(),
1175 ThinString::kActualOffset);
1176 }
1177 }
1178
1179
void V8HeapExplorer::ExtractSymbolReferences(int entry, Symbol* symbol) {
1181 SetInternalReference(symbol, entry,
1182 "name", symbol->name(),
1183 Symbol::kNameOffset);
1184 }
1185
1186
void V8HeapExplorer::ExtractJSCollectionReferences(int entry,
1188 JSCollection* collection) {
1189 SetInternalReference(collection, entry, "table", collection->table(),
1190 JSCollection::kTableOffset);
1191 }
1192
void V8HeapExplorer::ExtractJSWeakCollectionReferences(int entry,
1194 JSWeakCollection* obj) {
1195 if (obj->table()->IsHashTable()) {
1196 ObjectHashTable* table = ObjectHashTable::cast(obj->table());
1197 TagFixedArraySubType(table, JS_WEAK_COLLECTION_SUB_TYPE);
1198 }
1199 SetInternalReference(obj, entry, "table", obj->table(),
1200 JSWeakCollection::kTableOffset);
1201 }
1202
void V8HeapExplorer::ExtractContextReferences(int entry, Context* context) {
1204 if (context == context->declaration_context()) {
1205 ScopeInfo* scope_info = context->closure()->shared()->scope_info();
    // Add context-allocated locals.
1207 int context_locals = scope_info->ContextLocalCount();
1208 for (int i = 0; i < context_locals; ++i) {
1209 String* local_name = scope_info->ContextLocalName(i);
1210 int idx = Context::MIN_CONTEXT_SLOTS + i;
1211 SetContextReference(context, entry, local_name, context->get(idx),
1212 Context::OffsetOfElementAt(idx));
1213 }
1214 if (scope_info->HasFunctionName()) {
1215 String* name = scope_info->FunctionName();
1216 int idx = scope_info->FunctionContextSlotIndex(name);
1217 if (idx >= 0) {
1218 SetContextReference(context, entry, name, context->get(idx),
1219 Context::OffsetOfElementAt(idx));
1220 }
1221 }
1222 }
1223
1224 #define EXTRACT_CONTEXT_FIELD(index, type, name) \
1225 if (Context::index < Context::FIRST_WEAK_SLOT || \
1226 Context::index == Context::MAP_CACHE_INDEX) { \
1227 SetInternalReference(context, entry, #name, context->get(Context::index), \
1228 FixedArray::OffsetOfElementAt(Context::index)); \
1229 } else { \
1230 SetWeakReference(context, entry, #name, context->get(Context::index), \
1231 FixedArray::OffsetOfElementAt(Context::index)); \
1232 }
1233 EXTRACT_CONTEXT_FIELD(CLOSURE_INDEX, JSFunction, closure);
1234 EXTRACT_CONTEXT_FIELD(PREVIOUS_INDEX, Context, previous);
1235 EXTRACT_CONTEXT_FIELD(EXTENSION_INDEX, HeapObject, extension);
1236 EXTRACT_CONTEXT_FIELD(NATIVE_CONTEXT_INDEX, Context, native_context);
1237 if (context->IsNativeContext()) {
1238 TagObject(context->normalized_map_cache(), "(context norm. map cache)");
1239 TagObject(context->embedder_data(), "(context data)");
1240 NATIVE_CONTEXT_FIELDS(EXTRACT_CONTEXT_FIELD)
1241 EXTRACT_CONTEXT_FIELD(OPTIMIZED_FUNCTIONS_LIST, unused,
1242 optimized_functions_list);
1243 EXTRACT_CONTEXT_FIELD(OPTIMIZED_CODE_LIST, unused, optimized_code_list);
1244 EXTRACT_CONTEXT_FIELD(DEOPTIMIZED_CODE_LIST, unused, deoptimized_code_list);
1245 #undef EXTRACT_CONTEXT_FIELD
1246 STATIC_ASSERT(Context::OPTIMIZED_FUNCTIONS_LIST ==
1247 Context::FIRST_WEAK_SLOT);
1248 STATIC_ASSERT(Context::NEXT_CONTEXT_LINK + 1 ==
1249 Context::NATIVE_CONTEXT_SLOTS);
1250 STATIC_ASSERT(Context::FIRST_WEAK_SLOT + 4 ==
1251 Context::NATIVE_CONTEXT_SLOTS);
1252 }
1253 }
1254
1255
void V8HeapExplorer::ExtractMapReferences(int entry, Map* map) {
1257 Object* raw_transitions_or_prototype_info = map->raw_transitions();
1258 if (TransitionArray::IsFullTransitionArray(
1259 raw_transitions_or_prototype_info)) {
1260 TransitionArray* transitions =
1261 TransitionArray::cast(raw_transitions_or_prototype_info);
1262 if (map->CanTransition() && transitions->HasPrototypeTransitions()) {
1263 TagObject(transitions->GetPrototypeTransitions(),
1264 "(prototype transitions)");
1265 }
1266
1267 TagObject(transitions, "(transition array)");
1268 SetInternalReference(map, entry, "transitions", transitions,
1269 Map::kTransitionsOrPrototypeInfoOffset);
1270 } else if (TransitionArray::IsSimpleTransition(
1271 raw_transitions_or_prototype_info)) {
1272 TagObject(raw_transitions_or_prototype_info, "(transition)");
1273 SetInternalReference(map, entry, "transition",
1274 raw_transitions_or_prototype_info,
1275 Map::kTransitionsOrPrototypeInfoOffset);
1276 } else if (map->is_prototype_map()) {
1277 TagObject(raw_transitions_or_prototype_info, "prototype_info");
1278 SetInternalReference(map, entry, "prototype_info",
1279 raw_transitions_or_prototype_info,
1280 Map::kTransitionsOrPrototypeInfoOffset);
1281 }
1282 DescriptorArray* descriptors = map->instance_descriptors();
1283 TagObject(descriptors, "(map descriptors)");
1284 SetInternalReference(map, entry, "descriptors", descriptors,
1285 Map::kDescriptorsOffset);
1286 SetInternalReference(map, entry, "code_cache", map->code_cache(),
1287 Map::kCodeCacheOffset);
1288 SetInternalReference(map, entry, "prototype", map->prototype(),
1289 Map::kPrototypeOffset);
1290 #if V8_DOUBLE_FIELDS_UNBOXING
1291 if (FLAG_unbox_double_fields) {
1292 SetInternalReference(map, entry, "layout_descriptor",
1293 map->layout_descriptor(),
1294 Map::kLayoutDescriptorOffset);
1295 }
1296 #endif
1297 Object* constructor_or_backpointer = map->constructor_or_backpointer();
1298 if (constructor_or_backpointer->IsMap()) {
1299 TagObject(constructor_or_backpointer, "(back pointer)");
1300 SetInternalReference(map, entry, "back_pointer", constructor_or_backpointer,
1301 Map::kConstructorOrBackPointerOffset);
1302 } else {
1303 SetInternalReference(map, entry, "constructor", constructor_or_backpointer,
1304 Map::kConstructorOrBackPointerOffset);
1305 }
1306 TagObject(map->dependent_code(), "(dependent code)");
1307 SetInternalReference(map, entry, "dependent_code", map->dependent_code(),
1308 Map::kDependentCodeOffset);
1309 TagObject(map->weak_cell_cache(), "(weak cell)");
1310 SetInternalReference(map, entry, "weak_cell_cache", map->weak_cell_cache(),
1311 Map::kWeakCellCacheOffset);
1312 }
1313
1314
void V8HeapExplorer::ExtractSharedFunctionInfoReferences(
1316 int entry, SharedFunctionInfo* shared) {
1317 HeapObject* obj = shared;
1318 String* shared_name = shared->DebugName();
1319 const char* name = NULL;
1320 if (shared_name != heap_->empty_string()) {
1321 name = names_->GetName(shared_name);
1322 TagObject(shared->code(), names_->GetFormatted("(code for %s)", name));
1323 } else {
1324 TagObject(shared->code(), names_->GetFormatted("(%s code)",
1325 Code::Kind2String(shared->code()->kind())));
1326 }
1327
1328 SetInternalReference(obj, entry,
1329 "name", shared->name(),
1330 SharedFunctionInfo::kNameOffset);
1331 SetInternalReference(obj, entry,
1332 "code", shared->code(),
1333 SharedFunctionInfo::kCodeOffset);
1334 TagObject(shared->scope_info(), "(function scope info)");
1335 SetInternalReference(obj, entry,
1336 "scope_info", shared->scope_info(),
1337 SharedFunctionInfo::kScopeInfoOffset);
1338 SetInternalReference(obj, entry,
1339 "instance_class_name", shared->instance_class_name(),
1340 SharedFunctionInfo::kInstanceClassNameOffset);
1341 SetInternalReference(obj, entry,
1342 "script", shared->script(),
1343 SharedFunctionInfo::kScriptOffset);
1344 const char* construct_stub_name = name ?
1345 names_->GetFormatted("(construct stub code for %s)", name) :
1346 "(construct stub code)";
1347 TagObject(shared->construct_stub(), construct_stub_name);
1348 SetInternalReference(obj, entry,
1349 "construct_stub", shared->construct_stub(),
1350 SharedFunctionInfo::kConstructStubOffset);
1351 SetInternalReference(obj, entry,
1352 "function_data", shared->function_data(),
1353 SharedFunctionInfo::kFunctionDataOffset);
1354 SetInternalReference(obj, entry,
1355 "debug_info", shared->debug_info(),
1356 SharedFunctionInfo::kDebugInfoOffset);
1357 SetInternalReference(obj, entry, "function_identifier",
1358 shared->function_identifier(),
1359 SharedFunctionInfo::kFunctionIdentifierOffset);
1360 SetInternalReference(obj, entry,
1361 "optimized_code_map", shared->optimized_code_map(),
1362 SharedFunctionInfo::kOptimizedCodeMapOffset);
1363 SetInternalReference(obj, entry, "feedback_metadata",
1364 shared->feedback_metadata(),
1365 SharedFunctionInfo::kFeedbackMetadataOffset);
1366 }
1367
1368
void V8HeapExplorer::ExtractScriptReferences(int entry, Script* script) {
1370 HeapObject* obj = script;
1371 SetInternalReference(obj, entry,
1372 "source", script->source(),
1373 Script::kSourceOffset);
1374 SetInternalReference(obj, entry,
1375 "name", script->name(),
1376 Script::kNameOffset);
1377 SetInternalReference(obj, entry,
1378 "context_data", script->context_data(),
1379 Script::kContextOffset);
1380 TagObject(script->line_ends(), "(script line ends)");
1381 SetInternalReference(obj, entry,
1382 "line_ends", script->line_ends(),
1383 Script::kLineEndsOffset);
1384 }
1385
1386
void V8HeapExplorer::ExtractAccessorInfoReferences(
1388 int entry, AccessorInfo* accessor_info) {
1389 SetInternalReference(accessor_info, entry, "name", accessor_info->name(),
1390 AccessorInfo::kNameOffset);
1391 SetInternalReference(accessor_info, entry, "expected_receiver_type",
1392 accessor_info->expected_receiver_type(),
1393 AccessorInfo::kExpectedReceiverTypeOffset);
1394 if (accessor_info->IsAccessorInfo()) {
1395 AccessorInfo* executable_accessor_info = AccessorInfo::cast(accessor_info);
1396 SetInternalReference(executable_accessor_info, entry, "getter",
1397 executable_accessor_info->getter(),
1398 AccessorInfo::kGetterOffset);
1399 SetInternalReference(executable_accessor_info, entry, "setter",
1400 executable_accessor_info->setter(),
1401 AccessorInfo::kSetterOffset);
1402 SetInternalReference(executable_accessor_info, entry, "data",
1403 executable_accessor_info->data(),
1404 AccessorInfo::kDataOffset);
1405 }
1406 }
1407
1408
void V8HeapExplorer::ExtractAccessorPairReferences(
1410 int entry, AccessorPair* accessors) {
1411 SetInternalReference(accessors, entry, "getter", accessors->getter(),
1412 AccessorPair::kGetterOffset);
1413 SetInternalReference(accessors, entry, "setter", accessors->setter(),
1414 AccessorPair::kSetterOffset);
1415 }
1416
1417
void V8HeapExplorer::TagBuiltinCodeObject(Code* code, const char* name) {
1419 TagObject(code, names_->GetFormatted("(%s builtin)", name));
1420 }
1421
1422
void V8HeapExplorer::TagCodeObject(Code* code) {
1424 if (code->kind() == Code::STUB) {
1425 TagObject(code, names_->GetFormatted(
1426 "(%s code)",
1427 CodeStub::MajorName(CodeStub::GetMajorKey(code))));
1428 }
1429 }
1430
1431
void V8HeapExplorer::ExtractCodeReferences(int entry, Code* code) {
1433 TagCodeObject(code);
1434 TagObject(code->relocation_info(), "(code relocation info)");
1435 SetInternalReference(code, entry,
1436 "relocation_info", code->relocation_info(),
1437 Code::kRelocationInfoOffset);
1438 SetInternalReference(code, entry,
1439 "handler_table", code->handler_table(),
1440 Code::kHandlerTableOffset);
1441 TagObject(code->deoptimization_data(), "(code deopt data)");
1442 SetInternalReference(code, entry,
1443 "deoptimization_data", code->deoptimization_data(),
1444 Code::kDeoptimizationDataOffset);
1445 TagObject(code->source_position_table(), "(source position table)");
1446 SetInternalReference(code, entry, "source_position_table",
1447 code->source_position_table(),
1448 Code::kSourcePositionTableOffset);
1449 if (code->kind() == Code::FUNCTION) {
1450 SetInternalReference(code, entry, "type_feedback_info",
1451 code->type_feedback_info(),
1452 Code::kTypeFeedbackInfoOffset);
1453 }
1454 SetInternalReference(code, entry, "gc_metadata", code->gc_metadata(),
1455 Code::kGCMetadataOffset);
1456 }
1457
void V8HeapExplorer::ExtractCellReferences(int entry, Cell* cell) {
1459 SetInternalReference(cell, entry, "value", cell->value(), Cell::kValueOffset);
1460 }
1461
void V8HeapExplorer::ExtractWeakCellReferences(int entry, WeakCell* weak_cell) {
1463 TagObject(weak_cell, "(weak cell)");
1464 SetWeakReference(weak_cell, entry, "value", weak_cell->value(),
1465 WeakCell::kValueOffset);
1466 }
1467
void V8HeapExplorer::ExtractPropertyCellReferences(int entry,
1469 PropertyCell* cell) {
1470 SetInternalReference(cell, entry, "value", cell->value(),
1471 PropertyCell::kValueOffset);
1472 TagObject(cell->dependent_code(), "(dependent code)");
1473 SetInternalReference(cell, entry, "dependent_code", cell->dependent_code(),
1474 PropertyCell::kDependentCodeOffset);
1475 }
1476
1477
1478 void V8HeapExplorer::ExtractAllocationSiteReferences(int entry,
1479 AllocationSite* site) {
1480 SetInternalReference(site, entry, "transition_info", site->transition_info(),
1481 AllocationSite::kTransitionInfoOffset);
1482 SetInternalReference(site, entry, "nested_site", site->nested_site(),
1483 AllocationSite::kNestedSiteOffset);
1484 TagObject(site->dependent_code(), "(dependent code)");
1485 SetInternalReference(site, entry, "dependent_code", site->dependent_code(),
1486 AllocationSite::kDependentCodeOffset);
1487 // Do not visit weak_next: it is not visited by the StaticVisitor,
1488 // and the weak_next field is of no interest here.
1489 STATIC_ASSERT(AllocationSite::kWeakNextOffset >=
1490 AllocationSite::kPointerFieldsEndOffset);
1491 }
1492
1493
1494 class JSArrayBufferDataEntryAllocator : public HeapEntriesAllocator {
1495 public:
1496 JSArrayBufferDataEntryAllocator(size_t size, V8HeapExplorer* explorer)
1497 : size_(size)
1498 , explorer_(explorer) {
1499 }
1500 virtual HeapEntry* AllocateEntry(HeapThing ptr) {
1501 return explorer_->AddEntry(
1502 static_cast<Address>(ptr),
1503 HeapEntry::kNative, "system / JSArrayBufferData", size_);
1504 }
1505 private:
1506 size_t size_;
1507 V8HeapExplorer* explorer_;
1508 };
1509
1510
1511 void V8HeapExplorer::ExtractJSArrayBufferReferences(
1512 int entry, JSArrayBuffer* buffer) {
1513 // Set up a reference to a native memory backing_store object.
1514 if (!buffer->backing_store())
1515 return;
1516 size_t data_size = NumberToSize(buffer->byte_length());
1517 JSArrayBufferDataEntryAllocator allocator(data_size, this);
1518 HeapEntry* data_entry =
1519 filler_->FindOrAddEntry(buffer->backing_store(), &allocator);
1520 filler_->SetNamedReference(HeapGraphEdge::kInternal,
1521 entry, "backing_store", data_entry);
1522 }
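// Note on the function above: the backing store of a JSArrayBuffer lives
// outside the V8 heap, so it is represented by a synthetic native entry named
// "system / JSArrayBufferData", sized to the buffer's byte length, and linked
// from the buffer through an internal "backing_store" edge.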
1523
1524 void V8HeapExplorer::ExtractFixedArrayReferences(int entry, FixedArray* array) {
1525 auto it = array_types_.find(array);
1526 if (it == array_types_.end()) {
1527 for (int i = 0, l = array->length(); i < l; ++i) {
1528 SetInternalReference(array, entry, i, array->get(i),
1529 array->OffsetOfElementAt(i));
1530 }
1531 return;
1532 }
1533 switch (it->second) {
1534 case JS_WEAK_COLLECTION_SUB_TYPE:
1535 for (int i = 0, l = array->length(); i < l; ++i) {
1536 SetWeakReference(array, entry, i, array->get(i),
1537 array->OffsetOfElementAt(i));
1538 }
1539 break;
1540
1541 // TODO(alph): Add special processing for other types of FixedArrays.
1542
1543 default:
1544 for (int i = 0, l = array->length(); i < l; ++i) {
1545 SetInternalReference(array, entry, i, array->get(i),
1546 array->OffsetOfElementAt(i));
1547 }
1548 break;
1549 }
1550 }
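// Note on the function above: only arrays previously registered through
// TagFixedArraySubType as JS_WEAK_COLLECTION_SUB_TYPE get weak element edges;
// any other FixedArray gets plain internal edges, so that weak-collection
// backing stores are not reported as strong retainers of their keys and
// values.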
1551
1552 void V8HeapExplorer::ExtractPropertyReferences(JSObject* js_obj, int entry) {
1553 Isolate* isolate = js_obj->GetIsolate();
1554 if (js_obj->HasFastProperties()) {
1555 DescriptorArray* descs = js_obj->map()->instance_descriptors();
1556 int real_size = js_obj->map()->NumberOfOwnDescriptors();
1557 for (int i = 0; i < real_size; i++) {
1558 PropertyDetails details = descs->GetDetails(i);
1559 switch (details.location()) {
1560 case kField: {
1561 Representation r = details.representation();
1562 if (r.IsSmi() || r.IsDouble()) break;
1563
1564 Name* k = descs->GetKey(i);
1565 FieldIndex field_index = FieldIndex::ForDescriptor(js_obj->map(), i);
1566 Object* value = js_obj->RawFastPropertyAt(field_index);
1567 int field_offset =
1568 field_index.is_inobject() ? field_index.offset() : -1;
1569
1570 SetDataOrAccessorPropertyReference(details.kind(), js_obj, entry, k,
1571 value, NULL, field_offset);
1572 break;
1573 }
1574 case kDescriptor:
1575 SetDataOrAccessorPropertyReference(details.kind(), js_obj, entry,
1576 descs->GetKey(i),
1577 descs->GetValue(i));
1578 break;
1579 }
1580 }
1581 } else if (js_obj->IsJSGlobalObject()) {
1582 // We assume that global objects can only have slow properties.
1583 GlobalDictionary* dictionary = js_obj->global_dictionary();
1584 int length = dictionary->Capacity();
1585 for (int i = 0; i < length; ++i) {
1586 Object* k = dictionary->KeyAt(i);
1587 if (dictionary->IsKey(isolate, k)) {
1588 DCHECK(dictionary->ValueAt(i)->IsPropertyCell());
1589 PropertyCell* cell = PropertyCell::cast(dictionary->ValueAt(i));
1590 Object* value = cell->value();
1591 PropertyDetails details = cell->property_details();
1592 SetDataOrAccessorPropertyReference(details.kind(), js_obj, entry,
1593 Name::cast(k), value);
1594 }
1595 }
1596 } else {
1597 NameDictionary* dictionary = js_obj->property_dictionary();
1598 int length = dictionary->Capacity();
1599 for (int i = 0; i < length; ++i) {
1600 Object* k = dictionary->KeyAt(i);
1601 if (dictionary->IsKey(isolate, k)) {
1602 Object* value = dictionary->ValueAt(i);
1603 PropertyDetails details = dictionary->DetailsAt(i);
1604 SetDataOrAccessorPropertyReference(details.kind(), js_obj, entry,
1605 Name::cast(k), value);
1606 }
1607 }
1608 }
1609 }
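// Note on the function above: three property layouts are handled. Fast
// properties come from the map's descriptor array (Smi and double fields are
// skipped since they hold no heap pointers), JSGlobalObject slow properties
// come from the GlobalDictionary whose values are PropertyCells, and all
// other slow properties come from the NameDictionary.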
1610
1611
1612 void V8HeapExplorer::ExtractAccessorPairProperty(JSObject* js_obj, int entry,
1613 Name* key,
1614 Object* callback_obj,
1615 int field_offset) {
1616 if (!callback_obj->IsAccessorPair()) return;
1617 AccessorPair* accessors = AccessorPair::cast(callback_obj);
1618 SetPropertyReference(js_obj, entry, key, accessors, NULL, field_offset);
1619 Object* getter = accessors->getter();
1620 if (!getter->IsOddball()) {
1621 SetPropertyReference(js_obj, entry, key, getter, "get %s");
1622 }
1623 Object* setter = accessors->setter();
1624 if (!setter->IsOddball()) {
1625 SetPropertyReference(js_obj, entry, key, setter, "set %s");
1626 }
1627 }
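// Note on the function above: besides the property edge to the AccessorPair
// itself, non-oddball getters and setters get their own edges named with the
// "get %s" / "set %s" formats, so a property "foo" shows up with additional
// "get foo" and "set foo" edges in the snapshot.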
1628
1629
1630 void V8HeapExplorer::ExtractElementReferences(JSObject* js_obj, int entry) {
1631 Isolate* isolate = js_obj->GetIsolate();
1632 if (js_obj->HasFastObjectElements()) {
1633 FixedArray* elements = FixedArray::cast(js_obj->elements());
1634 int length = js_obj->IsJSArray() ?
1635 Smi::cast(JSArray::cast(js_obj)->length())->value() :
1636 elements->length();
1637 for (int i = 0; i < length; ++i) {
1638 if (!elements->get(i)->IsTheHole(isolate)) {
1639 SetElementReference(js_obj, entry, i, elements->get(i));
1640 }
1641 }
1642 } else if (js_obj->HasDictionaryElements()) {
1643 SeededNumberDictionary* dictionary = js_obj->element_dictionary();
1644 int length = dictionary->Capacity();
1645 for (int i = 0; i < length; ++i) {
1646 Object* k = dictionary->KeyAt(i);
1647 if (dictionary->IsKey(isolate, k)) {
1648 DCHECK(k->IsNumber());
1649 uint32_t index = static_cast<uint32_t>(k->Number());
1650 SetElementReference(js_obj, entry, index, dictionary->ValueAt(i));
1651 }
1652 }
1653 }
1654 }
1655
1656
1657 void V8HeapExplorer::ExtractInternalReferences(JSObject* js_obj, int entry) {
1658 int length = js_obj->GetInternalFieldCount();
1659 for (int i = 0; i < length; ++i) {
1660 Object* o = js_obj->GetInternalField(i);
1661 SetInternalReference(
1662 js_obj, entry, i, o, js_obj->GetInternalFieldOffset(i));
1663 }
1664 }
1665
1666
1667 String* V8HeapExplorer::GetConstructorName(JSObject* object) {
1668 Isolate* isolate = object->GetIsolate();
1669 if (object->IsJSFunction()) return isolate->heap()->closure_string();
1670 DisallowHeapAllocation no_gc;
1671 HandleScope scope(isolate);
1672 return *JSReceiver::GetConstructorName(handle(object, isolate));
1673 }
1674
1675
1676 HeapEntry* V8HeapExplorer::GetEntry(Object* obj) {
1677 if (!obj->IsHeapObject()) return NULL;
1678 return filler_->FindOrAddEntry(obj, this);
1679 }
1680
1681
1682 class RootsReferencesExtractor : public ObjectVisitor {
1683 private:
1684 struct IndexTag {
1685 IndexTag(int index, VisitorSynchronization::SyncTag tag)
1686 : index(index), tag(tag) { }
1687 int index;
1688 VisitorSynchronization::SyncTag tag;
1689 };
1690
1691 public:
1692 explicit RootsReferencesExtractor(Heap* heap)
1693 : collecting_all_references_(false),
1694 previous_reference_count_(0),
1695 heap_(heap) {
1696 }
1697
1698 void VisitPointers(Object** start, Object** end) override {
1699 if (collecting_all_references_) {
1700 for (Object** p = start; p < end; p++) all_references_.Add(*p);
1701 } else {
1702 for (Object** p = start; p < end; p++) strong_references_.Add(*p);
1703 }
1704 }
1705
1706 void SetCollectingAllReferences() { collecting_all_references_ = true; }
1707
1708 void FillReferences(V8HeapExplorer* explorer) {
1709 DCHECK(strong_references_.length() <= all_references_.length());
1710 Builtins* builtins = heap_->isolate()->builtins();
1711 int strong_index = 0, all_index = 0, tags_index = 0, builtin_index = 0;
1712 while (all_index < all_references_.length()) {
1713 bool is_strong = strong_index < strong_references_.length()
1714 && strong_references_[strong_index] == all_references_[all_index];
1715 explorer->SetGcSubrootReference(reference_tags_[tags_index].tag,
1716 !is_strong,
1717 all_references_[all_index]);
1718 if (reference_tags_[tags_index].tag ==
1719 VisitorSynchronization::kBuiltins) {
1720 DCHECK(all_references_[all_index]->IsCode());
1721 explorer->TagBuiltinCodeObject(
1722 Code::cast(all_references_[all_index]),
1723 builtins->name(builtin_index++));
1724 }
1725 ++all_index;
1726 if (is_strong) ++strong_index;
1727 if (reference_tags_[tags_index].index == all_index) ++tags_index;
1728 }
1729 }
1730
1731 void Synchronize(VisitorSynchronization::SyncTag tag) override {
1732 if (collecting_all_references_ &&
1733 previous_reference_count_ != all_references_.length()) {
1734 previous_reference_count_ = all_references_.length();
1735 reference_tags_.Add(IndexTag(previous_reference_count_, tag));
1736 }
1737 }
1738
1739 private:
1740 bool collecting_all_references_;
1741 List<Object*> strong_references_;
1742 List<Object*> all_references_;
1743 int previous_reference_count_;
1744 List<IndexTag> reference_tags_;
1745 Heap* heap_;
1746 };
1747
1748
1749 bool V8HeapExplorer::IterateAndExtractReferences(
1750 SnapshotFiller* filler) {
1751 filler_ = filler;
1752
1753 // Create references to the synthetic roots.
1754 SetRootGcRootsReference();
1755 for (int tag = 0; tag < VisitorSynchronization::kNumberOfSyncTags; tag++) {
1756 SetGcRootsReference(static_cast<VisitorSynchronization::SyncTag>(tag));
1757 }
1758
1759 // Make sure builtin code objects get their builtin tags
1760 // first. Otherwise a particular JSFunction object could have
1761 // its custom name set to a generic builtin.
1762 RootsReferencesExtractor extractor(heap_);
1763 heap_->IterateRoots(&extractor, VISIT_ONLY_STRONG);
1764 extractor.SetCollectingAllReferences();
1765 heap_->IterateRoots(&extractor, VISIT_ALL);
1766 extractor.FillReferences(this);
1767
1768 // We have to do two passes as sometimes FixedArrays are used
1769 // to weakly hold their items, and it's impossible to distinguish
1770 // between these cases without processing the array owner first.
1771 bool interrupted =
1772 IterateAndExtractSinglePass<&V8HeapExplorer::ExtractReferencesPass1>() ||
1773 IterateAndExtractSinglePass<&V8HeapExplorer::ExtractReferencesPass2>();
1774
1775 if (interrupted) {
1776 filler_ = NULL;
1777 return false;
1778 }
1779
1780 filler_ = NULL;
1781 return progress_->ProgressReport(true);
1782 }
1783
1784
1785 template<V8HeapExplorer::ExtractReferencesMethod extractor>
1786 bool V8HeapExplorer::IterateAndExtractSinglePass() {
1787 // Now iterate the whole heap.
1788 bool interrupted = false;
1789 HeapIterator iterator(heap_, HeapIterator::kFilterUnreachable);
1790 // Heap iteration with filtering must run to completion, even if interrupted.
1791 for (HeapObject* obj = iterator.next();
1792 obj != NULL;
1793 obj = iterator.next(), progress_->ProgressStep()) {
1794 if (interrupted) continue;
1795
1796 size_t max_pointer = obj->Size() / kPointerSize;
1797 if (max_pointer > marks_.size()) {
1798 // Clear the current bits.
1799 std::vector<bool>().swap(marks_);
1800 // Reallocate to the right size.
1801 marks_.resize(max_pointer, false);
1802 }
1803
1804 HeapEntry* heap_entry = GetEntry(obj);
1805 int entry = heap_entry->index();
1806 if ((this->*extractor)(entry, obj)) {
1807 SetInternalReference(obj, entry,
1808 "map", obj->map(), HeapObject::kMapOffset);
1809 // Extract unvisited fields as hidden references and restore tags
1810 // of visited fields.
1811 IndexedReferencesExtractor refs_extractor(this, obj, entry);
1812 obj->Iterate(&refs_extractor);
1813 }
1814
1815 if (!progress_->ProgressReport(false)) interrupted = true;
1816 }
1817 return interrupted;
1818 }
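// Note on the pass above: marks_ acts as a per-object bitmap of field offsets
// that were already reported by the type-specific extractors (they call
// MarkVisitedField for each emitted field). IndexedReferencesExtractor is then
// presumably responsible for turning the remaining, unmarked pointer fields
// into hidden edges, so each field ends up in the snapshot exactly once.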
1819
1820
1821 bool V8HeapExplorer::IsEssentialObject(Object* object) {
1822 return object->IsHeapObject() && !object->IsOddball() &&
1823 object != heap_->empty_byte_array() &&
1824 object != heap_->empty_fixed_array() &&
1825 object != heap_->empty_descriptor_array() &&
1826 object != heap_->fixed_array_map() && object != heap_->cell_map() &&
1827 object != heap_->global_property_cell_map() &&
1828 object != heap_->shared_function_info_map() &&
1829 object != heap_->free_space_map() &&
1830 object != heap_->one_pointer_filler_map() &&
1831 object != heap_->two_pointer_filler_map();
1832 }
1833
1834 bool V8HeapExplorer::IsEssentialHiddenReference(Object* parent,
1835 int field_offset) {
1836 if (parent->IsAllocationSite() &&
1837 field_offset == AllocationSite::kWeakNextOffset)
1838 return false;
1839 if (parent->IsJSFunction() &&
1840 field_offset == JSFunction::kNextFunctionLinkOffset)
1841 return false;
1842 if (parent->IsCode() && field_offset == Code::kNextCodeLinkOffset)
1843 return false;
1844 if (parent->IsContext() &&
1845 field_offset == Context::OffsetOfElementAt(Context::NEXT_CONTEXT_LINK))
1846 return false;
1847 if (parent->IsWeakCell() && field_offset == WeakCell::kNextOffset)
1848 return false;
1849 return true;
1850 }
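// Note on the predicate above: the excluded offsets are list links maintained
// by the GC and the runtime (AllocationSite weak_next, JSFunction and Code
// next-link fields, Context NEXT_CONTEXT_LINK, WeakCell next), so reporting
// them as hidden edges would only suggest retention between otherwise
// unrelated objects.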
1851
1852 void V8HeapExplorer::SetContextReference(HeapObject* parent_obj,
1853 int parent_entry,
1854 String* reference_name,
1855 Object* child_obj,
1856 int field_offset) {
1857 DCHECK(parent_entry == GetEntry(parent_obj)->index());
1858 HeapEntry* child_entry = GetEntry(child_obj);
1859 if (child_entry != NULL) {
1860 filler_->SetNamedReference(HeapGraphEdge::kContextVariable,
1861 parent_entry,
1862 names_->GetName(reference_name),
1863 child_entry);
1864 MarkVisitedField(parent_obj, field_offset);
1865 }
1866 }
1867
1868
1869 void V8HeapExplorer::MarkVisitedField(HeapObject* obj, int offset) {
1870 if (offset < 0) return;
1871 int index = offset / kPointerSize;
1872 DCHECK(!marks_[index]);
1873 marks_[index] = true;
1874 }
1875
1876
1877 void V8HeapExplorer::SetNativeBindReference(HeapObject* parent_obj,
1878 int parent_entry,
1879 const char* reference_name,
1880 Object* child_obj) {
1881 DCHECK(parent_entry == GetEntry(parent_obj)->index());
1882 HeapEntry* child_entry = GetEntry(child_obj);
1883 if (child_entry != NULL) {
1884 filler_->SetNamedReference(HeapGraphEdge::kShortcut,
1885 parent_entry,
1886 reference_name,
1887 child_entry);
1888 }
1889 }
1890
1891
1892 void V8HeapExplorer::SetElementReference(HeapObject* parent_obj,
1893 int parent_entry,
1894 int index,
1895 Object* child_obj) {
1896 DCHECK(parent_entry == GetEntry(parent_obj)->index());
1897 HeapEntry* child_entry = GetEntry(child_obj);
1898 if (child_entry != NULL) {
1899 filler_->SetIndexedReference(HeapGraphEdge::kElement,
1900 parent_entry,
1901 index,
1902 child_entry);
1903 }
1904 }
1905
1906
1907 void V8HeapExplorer::SetInternalReference(HeapObject* parent_obj,
1908 int parent_entry,
1909 const char* reference_name,
1910 Object* child_obj,
1911 int field_offset) {
1912 DCHECK(parent_entry == GetEntry(parent_obj)->index());
1913 HeapEntry* child_entry = GetEntry(child_obj);
1914 if (child_entry == NULL) return;
1915 if (IsEssentialObject(child_obj)) {
1916 filler_->SetNamedReference(HeapGraphEdge::kInternal,
1917 parent_entry,
1918 reference_name,
1919 child_entry);
1920 }
1921 MarkVisitedField(parent_obj, field_offset);
1922 }
1923
1924
1925 void V8HeapExplorer::SetInternalReference(HeapObject* parent_obj,
1926 int parent_entry,
1927 int index,
1928 Object* child_obj,
1929 int field_offset) {
1930 DCHECK(parent_entry == GetEntry(parent_obj)->index());
1931 HeapEntry* child_entry = GetEntry(child_obj);
1932 if (child_entry == NULL) return;
1933 if (IsEssentialObject(child_obj)) {
1934 filler_->SetNamedReference(HeapGraphEdge::kInternal,
1935 parent_entry,
1936 names_->GetName(index),
1937 child_entry);
1938 }
1939 MarkVisitedField(parent_obj, field_offset);
1940 }
1941
1942 void V8HeapExplorer::SetHiddenReference(HeapObject* parent_obj,
1943 int parent_entry, int index,
1944 Object* child_obj, int field_offset) {
1945 DCHECK(parent_entry == GetEntry(parent_obj)->index());
1946 HeapEntry* child_entry = GetEntry(child_obj);
1947 if (child_entry != nullptr && IsEssentialObject(child_obj) &&
1948 IsEssentialHiddenReference(parent_obj, field_offset)) {
1949 filler_->SetIndexedReference(HeapGraphEdge::kHidden, parent_entry, index,
1950 child_entry);
1951 }
1952 }
1953
1954
1955 void V8HeapExplorer::SetWeakReference(HeapObject* parent_obj,
1956 int parent_entry,
1957 const char* reference_name,
1958 Object* child_obj,
1959 int field_offset) {
1960 DCHECK(parent_entry == GetEntry(parent_obj)->index());
1961 HeapEntry* child_entry = GetEntry(child_obj);
1962 if (child_entry == NULL) return;
1963 if (IsEssentialObject(child_obj)) {
1964 filler_->SetNamedReference(HeapGraphEdge::kWeak,
1965 parent_entry,
1966 reference_name,
1967 child_entry);
1968 }
1969 MarkVisitedField(parent_obj, field_offset);
1970 }
1971
1972
1973 void V8HeapExplorer::SetWeakReference(HeapObject* parent_obj,
1974 int parent_entry,
1975 int index,
1976 Object* child_obj,
1977 int field_offset) {
1978 DCHECK(parent_entry == GetEntry(parent_obj)->index());
1979 HeapEntry* child_entry = GetEntry(child_obj);
1980 if (child_entry == NULL) return;
1981 if (IsEssentialObject(child_obj)) {
1982 filler_->SetNamedReference(HeapGraphEdge::kWeak,
1983 parent_entry,
1984 names_->GetFormatted("%d", index),
1985 child_entry);
1986 }
1987 MarkVisitedField(parent_obj, field_offset);
1988 }
1989
1990
1991 void V8HeapExplorer::SetDataOrAccessorPropertyReference(
1992 PropertyKind kind, JSObject* parent_obj, int parent_entry,
1993 Name* reference_name, Object* child_obj, const char* name_format_string,
1994 int field_offset) {
1995 if (kind == kAccessor) {
1996 ExtractAccessorPairProperty(parent_obj, parent_entry, reference_name,
1997 child_obj, field_offset);
1998 } else {
1999 SetPropertyReference(parent_obj, parent_entry, reference_name, child_obj,
2000 name_format_string, field_offset);
2001 }
2002 }
2003
2004
2005 void V8HeapExplorer::SetPropertyReference(HeapObject* parent_obj,
2006 int parent_entry,
2007 Name* reference_name,
2008 Object* child_obj,
2009 const char* name_format_string,
2010 int field_offset) {
2011 DCHECK(parent_entry == GetEntry(parent_obj)->index());
2012 HeapEntry* child_entry = GetEntry(child_obj);
2013 if (child_entry != NULL) {
2014 HeapGraphEdge::Type type =
2015 reference_name->IsSymbol() || String::cast(reference_name)->length() > 0
2016 ? HeapGraphEdge::kProperty : HeapGraphEdge::kInternal;
2017 const char* name = name_format_string != NULL && reference_name->IsString()
2018 ? names_->GetFormatted(
2019 name_format_string,
2020 String::cast(reference_name)->ToCString(
2021 DISALLOW_NULLS, ROBUST_STRING_TRAVERSAL).get()) :
2022 names_->GetName(reference_name);
2023
2024 filler_->SetNamedReference(type,
2025 parent_entry,
2026 name,
2027 child_entry);
2028 MarkVisitedField(parent_obj, field_offset);
2029 }
2030 }
2031
2032
2033 void V8HeapExplorer::SetRootGcRootsReference() {
2034 filler_->SetIndexedAutoIndexReference(
2035 HeapGraphEdge::kElement,
2036 snapshot_->root()->index(),
2037 snapshot_->gc_roots());
2038 }
2039
2040
2041 void V8HeapExplorer::SetUserGlobalReference(Object* child_obj) {
2042 HeapEntry* child_entry = GetEntry(child_obj);
2043 DCHECK(child_entry != NULL);
2044 filler_->SetNamedAutoIndexReference(
2045 HeapGraphEdge::kShortcut,
2046 snapshot_->root()->index(),
2047 child_entry);
2048 }
2049
2050
2051 void V8HeapExplorer::SetGcRootsReference(VisitorSynchronization::SyncTag tag) {
2052 filler_->SetIndexedAutoIndexReference(
2053 HeapGraphEdge::kElement,
2054 snapshot_->gc_roots()->index(),
2055 snapshot_->gc_subroot(tag));
2056 }
2057
2058
2059 void V8HeapExplorer::SetGcSubrootReference(
2060 VisitorSynchronization::SyncTag tag, bool is_weak, Object* child_obj) {
2061 HeapEntry* child_entry = GetEntry(child_obj);
2062 if (child_entry != NULL) {
2063 const char* name = GetStrongGcSubrootName(child_obj);
2064 if (name != NULL) {
2065 filler_->SetNamedReference(
2066 HeapGraphEdge::kInternal,
2067 snapshot_->gc_subroot(tag)->index(),
2068 name,
2069 child_entry);
2070 } else {
2071 if (is_weak) {
2072 filler_->SetNamedAutoIndexReference(
2073 HeapGraphEdge::kWeak,
2074 snapshot_->gc_subroot(tag)->index(),
2075 child_entry);
2076 } else {
2077 filler_->SetIndexedAutoIndexReference(
2078 HeapGraphEdge::kElement,
2079 snapshot_->gc_subroot(tag)->index(),
2080 child_entry);
2081 }
2082 }
2083
2084 // Add a shortcut reference from the snapshot root to the JS global object.
2085 if (child_obj->IsNativeContext()) {
2086 Context* context = Context::cast(child_obj);
2087 JSGlobalObject* global = context->global_object();
2088 if (global->IsJSGlobalObject()) {
2089 bool is_debug_object = heap_->isolate()->debug()->IsDebugGlobal(global);
2091 if (!is_debug_object && !user_roots_.Contains(global)) {
2092 user_roots_.Insert(global);
2093 SetUserGlobalReference(global);
2094 }
2095 }
2096 }
2097 }
2098 }
2099
2100
2101 const char* V8HeapExplorer::GetStrongGcSubrootName(Object* object) {
2102 if (strong_gc_subroot_names_.is_empty()) {
2103 #define NAME_ENTRY(name) strong_gc_subroot_names_.SetTag(heap_->name(), #name);
2104 #define ROOT_NAME(type, name, camel_name) NAME_ENTRY(name)
2105 STRONG_ROOT_LIST(ROOT_NAME)
2106 #undef ROOT_NAME
2107 #define STRUCT_MAP_NAME(NAME, Name, name) NAME_ENTRY(name##_map)
2108 STRUCT_LIST(STRUCT_MAP_NAME)
2109 #undef STRUCT_MAP_NAME
2110 #define STRING_NAME(name, str) NAME_ENTRY(name)
2111 INTERNALIZED_STRING_LIST(STRING_NAME)
2112 #undef STRING_NAME
2113 #define SYMBOL_NAME(name) NAME_ENTRY(name)
2114 PRIVATE_SYMBOL_LIST(SYMBOL_NAME)
2115 #undef SYMBOL_NAME
2116 #define SYMBOL_NAME(name, description) NAME_ENTRY(name)
2117 PUBLIC_SYMBOL_LIST(SYMBOL_NAME)
2118 WELL_KNOWN_SYMBOL_LIST(SYMBOL_NAME)
2119 #undef SYMBOL_NAME
2120 #undef NAME_ENTRY
2121 CHECK(!strong_gc_subroot_names_.is_empty());
2122 }
2123 return strong_gc_subroot_names_.GetTag(object);
2124 }
2125
2126
2127 void V8HeapExplorer::TagObject(Object* obj, const char* tag) {
2128 if (IsEssentialObject(obj)) {
2129 HeapEntry* entry = GetEntry(obj);
2130 if (entry->name()[0] == '\0') {
2131 entry->set_name(tag);
2132 }
2133 }
2134 }
2135
2136 void V8HeapExplorer::TagFixedArraySubType(const FixedArray* array,
2137 FixedArraySubInstanceType type) {
2138 DCHECK(array_types_.find(array) == array_types_.end());
2139 array_types_[array] = type;
2140 }
2141
2142 class GlobalObjectsEnumerator : public ObjectVisitor {
2143 public:
2144 void VisitPointers(Object** start, Object** end) override {
2145 for (Object** p = start; p < end; p++) {
2146 if ((*p)->IsNativeContext()) {
2147 Context* context = Context::cast(*p);
2148 JSObject* proxy = context->global_proxy();
2149 if (proxy->IsJSGlobalProxy()) {
2150 Object* global = proxy->map()->prototype();
2151 if (global->IsJSGlobalObject()) {
2152 objects_.Add(Handle<JSGlobalObject>(JSGlobalObject::cast(global)));
2153 }
2154 }
2155 }
2156 }
2157 }
2158 int count() { return objects_.length(); }
2159 Handle<JSGlobalObject>& at(int i) { return objects_[i]; }
2160
2161 private:
2162 List<Handle<JSGlobalObject> > objects_;
2163 };
2164
2165
2166 // Modifies heap. Must not be run during heap traversal.
2167 void V8HeapExplorer::TagGlobalObjects() {
2168 Isolate* isolate = heap_->isolate();
2169 HandleScope scope(isolate);
2170 GlobalObjectsEnumerator enumerator;
2171 isolate->global_handles()->IterateAllRoots(&enumerator);
2172 const char** urls = NewArray<const char*>(enumerator.count());
2173 for (int i = 0, l = enumerator.count(); i < l; ++i) {
2174 if (global_object_name_resolver_) {
2175 HandleScope scope(isolate);
2176 Handle<JSGlobalObject> global_obj = enumerator.at(i);
2177 urls[i] = global_object_name_resolver_->GetName(
2178 Utils::ToLocal(Handle<JSObject>::cast(global_obj)));
2179 } else {
2180 urls[i] = NULL;
2181 }
2182 }
2183
2184 DisallowHeapAllocation no_allocation;
2185 for (int i = 0, l = enumerator.count(); i < l; ++i) {
2186 objects_tags_.SetTag(*enumerator.at(i), urls[i]);
2187 }
2188
2189 DeleteArray(urls);
2190 }
2191
2192
2193 class GlobalHandlesExtractor : public ObjectVisitor {
2194 public:
2195 explicit GlobalHandlesExtractor(NativeObjectsExplorer* explorer)
2196 : explorer_(explorer) {}
2197 ~GlobalHandlesExtractor() override {}
2198 void VisitPointers(Object** start, Object** end) override { UNREACHABLE(); }
2199 void VisitEmbedderReference(Object** p, uint16_t class_id) override {
2200 explorer_->VisitSubtreeWrapper(p, class_id);
2201 }
2202 private:
2203 NativeObjectsExplorer* explorer_;
2204 };
2205
2206
2207 class BasicHeapEntriesAllocator : public HeapEntriesAllocator {
2208 public:
2209 BasicHeapEntriesAllocator(
2210 HeapSnapshot* snapshot,
2211 HeapEntry::Type entries_type)
2212 : snapshot_(snapshot),
2213 names_(snapshot_->profiler()->names()),
2214 heap_object_map_(snapshot_->profiler()->heap_object_map()),
2215 entries_type_(entries_type) {
2216 }
2217 virtual HeapEntry* AllocateEntry(HeapThing ptr);
2218 private:
2219 HeapSnapshot* snapshot_;
2220 StringsStorage* names_;
2221 HeapObjectsMap* heap_object_map_;
2222 HeapEntry::Type entries_type_;
2223 };
2224
2225
2226 HeapEntry* BasicHeapEntriesAllocator::AllocateEntry(HeapThing ptr) {
2227 v8::RetainedObjectInfo* info = reinterpret_cast<v8::RetainedObjectInfo*>(ptr);
2228 intptr_t elements = info->GetElementCount();
2229 intptr_t size = info->GetSizeInBytes();
2230 const char* name = elements != -1
2231 ? names_->GetFormatted("%s / %" V8PRIdPTR " entries",
2232 info->GetLabel(), elements)
2233 : names_->GetCopy(info->GetLabel());
2234 return snapshot_->AddEntry(
2235 entries_type_,
2236 name,
2237 heap_object_map_->GenerateId(info),
2238 size != -1 ? static_cast<int>(size) : 0,
2239 0);
2240 }
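// Illustrative example for AllocateEntry() above: a RetainedObjectInfo with a
// (hypothetical) label "Node" and GetElementCount() == 3 yields an entry named
// "Node / 3 entries"; when the element count is -1 the plain label is used,
// and a size of -1 is recorded as 0.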
2241
2242
2243 NativeObjectsExplorer::NativeObjectsExplorer(
2244 HeapSnapshot* snapshot,
2245 SnapshottingProgressReportingInterface* progress)
2246 : isolate_(snapshot->profiler()->heap_object_map()->heap()->isolate()),
2247 snapshot_(snapshot),
2248 names_(snapshot_->profiler()->names()),
2249 embedder_queried_(false),
2250 objects_by_info_(RetainedInfosMatch),
2251 native_groups_(StringsMatch),
2252 filler_(NULL) {
2253 synthetic_entries_allocator_ =
2254 new BasicHeapEntriesAllocator(snapshot, HeapEntry::kSynthetic);
2255 native_entries_allocator_ =
2256 new BasicHeapEntriesAllocator(snapshot, HeapEntry::kNative);
2257 }
2258
2259
2260 NativeObjectsExplorer::~NativeObjectsExplorer() {
2261 for (base::HashMap::Entry* p = objects_by_info_.Start(); p != NULL;
2262 p = objects_by_info_.Next(p)) {
2263 v8::RetainedObjectInfo* info =
2264 reinterpret_cast<v8::RetainedObjectInfo*>(p->key);
2265 info->Dispose();
2266 List<HeapObject*>* objects =
2267 reinterpret_cast<List<HeapObject*>* >(p->value);
2268 delete objects;
2269 }
2270 for (base::HashMap::Entry* p = native_groups_.Start(); p != NULL;
2271 p = native_groups_.Next(p)) {
2272 v8::RetainedObjectInfo* info =
2273 reinterpret_cast<v8::RetainedObjectInfo*>(p->value);
2274 info->Dispose();
2275 }
2276 delete synthetic_entries_allocator_;
2277 delete native_entries_allocator_;
2278 }
2279
2280
2281 int NativeObjectsExplorer::EstimateObjectsCount() {
2282 FillRetainedObjects();
2283 return objects_by_info_.occupancy();
2284 }
2285
2286
2287 void NativeObjectsExplorer::FillRetainedObjects() {
2288 if (embedder_queried_) return;
2289 v8::HandleScope scope(reinterpret_cast<v8::Isolate*>(isolate_));
2290 v8::HeapProfiler::RetainerInfos infos =
2291 snapshot_->profiler()->GetRetainerInfos(isolate_);
2292 for (auto& pair : infos.groups) {
2293 List<HeapObject*>* list = GetListMaybeDisposeInfo(pair.first);
2294 for (auto& persistent : pair.second) {
2295 if (persistent->IsEmpty()) continue;
2296
2297 Handle<Object> object = v8::Utils::OpenHandle(
2298 *persistent->Get(reinterpret_cast<v8::Isolate*>(isolate_)));
2299 DCHECK(!object.is_null());
2300 HeapObject* heap_object = HeapObject::cast(*object);
2301 list->Add(heap_object);
2302 in_groups_.Insert(heap_object);
2303 }
2304 }
2305
2306 // Record objects that are not in ObjectGroups but have a class ID.
2307 GlobalHandlesExtractor extractor(this);
2308 isolate_->global_handles()->IterateAllRootsWithClassIds(&extractor);
2309
2310 edges_ = std::move(infos.edges);
2311 embedder_queried_ = true;
2312 }
2313
2314 void NativeObjectsExplorer::FillEdges() {
2315 v8::HandleScope scope(reinterpret_cast<v8::Isolate*>(isolate_));
2316 // Fill in actual edges found.
2317 for (auto& pair : edges_) {
2318 if (pair.first->IsEmpty() || pair.second->IsEmpty()) continue;
2319
2320 Handle<Object> parent_object = v8::Utils::OpenHandle(
2321 *pair.first->Get(reinterpret_cast<v8::Isolate*>(isolate_)));
2322 HeapObject* parent = HeapObject::cast(*parent_object);
2323 int parent_entry =
2324 filler_->FindOrAddEntry(parent, native_entries_allocator_)->index();
2325 DCHECK(parent_entry != HeapEntry::kNoEntry);
2326 Handle<Object> child_object = v8::Utils::OpenHandle(
2327 *pair.second->Get(reinterpret_cast<v8::Isolate*>(isolate_)));
2328 HeapObject* child = HeapObject::cast(*child_object);
2329 HeapEntry* child_entry =
2330 filler_->FindOrAddEntry(child, native_entries_allocator_);
2331 filler_->SetNamedReference(HeapGraphEdge::kInternal, parent_entry, "native",
2332 child_entry);
2333 }
2334 edges_.clear();
2335 }
2336
2337 List<HeapObject*>* NativeObjectsExplorer::GetListMaybeDisposeInfo(
2338 v8::RetainedObjectInfo* info) {
2339 base::HashMap::Entry* entry =
2340 objects_by_info_.LookupOrInsert(info, InfoHash(info));
2341 if (entry->value != NULL) {
2342 info->Dispose();
2343 } else {
2344 entry->value = new List<HeapObject*>(4);
2345 }
2346 return reinterpret_cast<List<HeapObject*>* >(entry->value);
2347 }
2348
2349
2350 bool NativeObjectsExplorer::IterateAndExtractReferences(
2351 SnapshotFiller* filler) {
2352 filler_ = filler;
2353 FillRetainedObjects();
2354 FillEdges();
2355 if (EstimateObjectsCount() > 0) {
2356 for (base::HashMap::Entry* p = objects_by_info_.Start(); p != NULL;
2357 p = objects_by_info_.Next(p)) {
2358 v8::RetainedObjectInfo* info =
2359 reinterpret_cast<v8::RetainedObjectInfo*>(p->key);
2360 SetNativeRootReference(info);
2361 List<HeapObject*>* objects =
2362 reinterpret_cast<List<HeapObject*>* >(p->value);
2363 for (int i = 0; i < objects->length(); ++i) {
2364 SetWrapperNativeReferences(objects->at(i), info);
2365 }
2366 }
2367 SetRootNativeRootsReference();
2368 }
2369 filler_ = NULL;
2370 return true;
2371 }
2372
2373
2374 class NativeGroupRetainedObjectInfo : public v8::RetainedObjectInfo {
2375 public:
2376 explicit NativeGroupRetainedObjectInfo(const char* label)
2377 : disposed_(false),
2378 hash_(reinterpret_cast<intptr_t>(label)),
2379 label_(label) {
2380 }
2381
2382 virtual ~NativeGroupRetainedObjectInfo() {}
2383 virtual void Dispose() {
2384 CHECK(!disposed_);
2385 disposed_ = true;
2386 delete this;
2387 }
2388 virtual bool IsEquivalent(RetainedObjectInfo* other) {
2389 return hash_ == other->GetHash() && !strcmp(label_, other->GetLabel());
2390 }
2391 virtual intptr_t GetHash() { return hash_; }
2392 virtual const char* GetLabel() { return label_; }
2393
2394 private:
2395 bool disposed_;
2396 intptr_t hash_;
2397 const char* label_;
2398 };
2399
2400
2401 NativeGroupRetainedObjectInfo* NativeObjectsExplorer::FindOrAddGroupInfo(
2402 const char* label) {
2403 const char* label_copy = names_->GetCopy(label);
2404 uint32_t hash = StringHasher::HashSequentialString(
2405 label_copy,
2406 static_cast<int>(strlen(label_copy)),
2407 isolate_->heap()->HashSeed());
2408 base::HashMap::Entry* entry =
2409 native_groups_.LookupOrInsert(const_cast<char*>(label_copy), hash);
2410 if (entry->value == NULL) {
2411 entry->value = new NativeGroupRetainedObjectInfo(label);
2412 }
2413 return static_cast<NativeGroupRetainedObjectInfo*>(entry->value);
2414 }
2415
2416
2417 void NativeObjectsExplorer::SetNativeRootReference(
2418 v8::RetainedObjectInfo* info) {
2419 HeapEntry* child_entry =
2420 filler_->FindOrAddEntry(info, native_entries_allocator_);
2421 DCHECK(child_entry != NULL);
2422 NativeGroupRetainedObjectInfo* group_info =
2423 FindOrAddGroupInfo(info->GetGroupLabel());
2424 HeapEntry* group_entry =
2425 filler_->FindOrAddEntry(group_info, synthetic_entries_allocator_);
2426 // |FindOrAddEntry| can move and resize the entries backing store. Reload
2427 // potentially-stale pointer.
2428 child_entry = filler_->FindEntry(info);
2429 filler_->SetNamedAutoIndexReference(
2430 HeapGraphEdge::kInternal,
2431 group_entry->index(),
2432 child_entry);
2433 }
2434
2435
2436 void NativeObjectsExplorer::SetWrapperNativeReferences(
2437 HeapObject* wrapper, v8::RetainedObjectInfo* info) {
2438 HeapEntry* wrapper_entry = filler_->FindEntry(wrapper);
2439 DCHECK(wrapper_entry != NULL);
2440 HeapEntry* info_entry =
2441 filler_->FindOrAddEntry(info, native_entries_allocator_);
2442 DCHECK(info_entry != NULL);
2443 filler_->SetNamedReference(HeapGraphEdge::kInternal,
2444 wrapper_entry->index(),
2445 "native",
2446 info_entry);
2447 filler_->SetIndexedAutoIndexReference(HeapGraphEdge::kElement,
2448 info_entry->index(),
2449 wrapper_entry);
2450 }
2451
2452
2453 void NativeObjectsExplorer::SetRootNativeRootsReference() {
2454 for (base::HashMap::Entry* entry = native_groups_.Start(); entry;
2455 entry = native_groups_.Next(entry)) {
2456 NativeGroupRetainedObjectInfo* group_info =
2457 static_cast<NativeGroupRetainedObjectInfo*>(entry->value);
2458 HeapEntry* group_entry =
2459 filler_->FindOrAddEntry(group_info, native_entries_allocator_);
2460 DCHECK(group_entry != NULL);
2461 filler_->SetIndexedAutoIndexReference(
2462 HeapGraphEdge::kElement,
2463 snapshot_->root()->index(),
2464 group_entry);
2465 }
2466 }
2467
2468
2469 void NativeObjectsExplorer::VisitSubtreeWrapper(Object** p, uint16_t class_id) {
2470 if (in_groups_.Contains(*p)) return;
2471 Isolate* isolate = isolate_;
2472 v8::RetainedObjectInfo* info =
2473 isolate->heap_profiler()->ExecuteWrapperClassCallback(class_id, p);
2474 if (info == NULL) return;
2475 GetListMaybeDisposeInfo(info)->Add(HeapObject::cast(*p));
2476 }
2477
2478
2479 HeapSnapshotGenerator::HeapSnapshotGenerator(
2480 HeapSnapshot* snapshot,
2481 v8::ActivityControl* control,
2482 v8::HeapProfiler::ObjectNameResolver* resolver,
2483 Heap* heap)
2484 : snapshot_(snapshot),
2485 control_(control),
2486 v8_heap_explorer_(snapshot_, this, resolver),
2487 dom_explorer_(snapshot_, this),
2488 heap_(heap) {
2489 }
2490
2491 namespace {
2492 class NullContextScope {
2493 public:
2494 explicit NullContextScope(Isolate* isolate)
2495 : isolate_(isolate), prev_(isolate->context()) {
2496 isolate_->set_context(nullptr);
2497 }
2498 ~NullContextScope() { isolate_->set_context(prev_); }
2499
2500 private:
2501 Isolate* isolate_;
2502 Context* prev_;
2503 };
2504 } // namespace
2505
2506 bool HeapSnapshotGenerator::GenerateSnapshot() {
2507 v8_heap_explorer_.TagGlobalObjects();
2508
2509 // TODO(1562) Profiler assumes that any object that is in the heap after
2510 // full GC is reachable from the root when computing dominators.
2511 // This is not true for weakly reachable objects.
2512 // As a temporary solution we call GC twice.
2513 heap_->CollectAllGarbage(Heap::kMakeHeapIterableMask,
2514 GarbageCollectionReason::kHeapProfiler);
2515 heap_->CollectAllGarbage(Heap::kMakeHeapIterableMask,
2516 GarbageCollectionReason::kHeapProfiler);
2517
2518 NullContextScope null_context_scope(heap_->isolate());
2519
2520 #ifdef VERIFY_HEAP
2521 Heap* debug_heap = heap_;
2522 if (FLAG_verify_heap) {
2523 debug_heap->Verify();
2524 }
2525 #endif
2526
2527 SetProgressTotal(2); // 2 passes.
2528
2529 #ifdef VERIFY_HEAP
2530 if (FLAG_verify_heap) {
2531 debug_heap->Verify();
2532 }
2533 #endif
2534
2535 snapshot_->AddSyntheticRootEntries();
2536
2537 if (!FillReferences()) return false;
2538
2539 snapshot_->FillChildren();
2540 snapshot_->RememberLastJSObjectId();
2541
2542 progress_counter_ = progress_total_;
2543 if (!ProgressReport(true)) return false;
2544 return true;
2545 }
2546
2547
2548 void HeapSnapshotGenerator::ProgressStep() {
2549 ++progress_counter_;
2550 }
2551
2552
2553 bool HeapSnapshotGenerator::ProgressReport(bool force) {
2554 const int kProgressReportGranularity = 10000;
2555 if (control_ != NULL
2556 && (force || progress_counter_ % kProgressReportGranularity == 0)) {
2557 return
2558 control_->ReportProgressValue(progress_counter_, progress_total_) ==
2559 v8::ActivityControl::kContinue;
2560 }
2561 return true;
2562 }
2563
2564
2565 void HeapSnapshotGenerator::SetProgressTotal(int iterations_count) {
2566 if (control_ == NULL) return;
2567 HeapIterator iterator(heap_, HeapIterator::kFilterUnreachable);
2568 progress_total_ = iterations_count * (
2569 v8_heap_explorer_.EstimateObjectsCount(&iterator) +
2570 dom_explorer_.EstimateObjectsCount());
2571 progress_counter_ = 0;
2572 }
2573
2574
2575 bool HeapSnapshotGenerator::FillReferences() {
2576 SnapshotFiller filler(snapshot_, &entries_);
2577 return v8_heap_explorer_.IterateAndExtractReferences(&filler)
2578 && dom_explorer_.IterateAndExtractReferences(&filler);
2579 }
2580
2581
2582 template<int bytes> struct MaxDecimalDigitsIn;
2583 template<> struct MaxDecimalDigitsIn<4> {
2584 static const int kSigned = 11;
2585 static const int kUnsigned = 10;
2586 };
2587 template<> struct MaxDecimalDigitsIn<8> {
2588 static const int kSigned = 20;
2589 static const int kUnsigned = 20;
2590 };
2591
2592
2593 class OutputStreamWriter {
2594 public:
2595 explicit OutputStreamWriter(v8::OutputStream* stream)
2596 : stream_(stream),
2597 chunk_size_(stream->GetChunkSize()),
2598 chunk_(chunk_size_),
2599 chunk_pos_(0),
2600 aborted_(false) {
2601 DCHECK(chunk_size_ > 0);
2602 }
2603 bool aborted() { return aborted_; }
2604 void AddCharacter(char c) {
2605 DCHECK(c != '\0');
2606 DCHECK(chunk_pos_ < chunk_size_);
2607 chunk_[chunk_pos_++] = c;
2608 MaybeWriteChunk();
2609 }
2610 void AddString(const char* s) {
2611 AddSubstring(s, StrLength(s));
2612 }
2613 void AddSubstring(const char* s, int n) {
2614 if (n <= 0) return;
2615 DCHECK(static_cast<size_t>(n) <= strlen(s));
2616 const char* s_end = s + n;
2617 while (s < s_end) {
2618 int s_chunk_size =
2619 Min(chunk_size_ - chunk_pos_, static_cast<int>(s_end - s));
2620 DCHECK(s_chunk_size > 0);
2621 MemCopy(chunk_.start() + chunk_pos_, s, s_chunk_size);
2622 s += s_chunk_size;
2623 chunk_pos_ += s_chunk_size;
2624 MaybeWriteChunk();
2625 }
2626 }
2627 void AddNumber(unsigned n) { AddNumberImpl<unsigned>(n, "%u"); }
2628 void Finalize() {
2629 if (aborted_) return;
2630 DCHECK(chunk_pos_ < chunk_size_);
2631 if (chunk_pos_ != 0) {
2632 WriteChunk();
2633 }
2634 stream_->EndOfStream();
2635 }
2636
2637 private:
2638 template<typename T>
2639 void AddNumberImpl(T n, const char* format) {
2640 // Buffer for the longest value plus trailing \0
2641 static const int kMaxNumberSize =
2642 MaxDecimalDigitsIn<sizeof(T)>::kUnsigned + 1;
2643 if (chunk_size_ - chunk_pos_ >= kMaxNumberSize) {
2644 int result = SNPrintF(
2645 chunk_.SubVector(chunk_pos_, chunk_size_), format, n);
2646 DCHECK(result != -1);
2647 chunk_pos_ += result;
2648 MaybeWriteChunk();
2649 } else {
2650 EmbeddedVector<char, kMaxNumberSize> buffer;
2651 int result = SNPrintF(buffer, format, n);
2652 USE(result);
2653 DCHECK(result != -1);
2654 AddString(buffer.start());
2655 }
2656 }
2657 void MaybeWriteChunk() {
2658 DCHECK(chunk_pos_ <= chunk_size_);
2659 if (chunk_pos_ == chunk_size_) {
2660 WriteChunk();
2661 }
2662 }
2663 void WriteChunk() {
2664 if (aborted_) return;
2665 if (stream_->WriteAsciiChunk(chunk_.start(), chunk_pos_) ==
2666 v8::OutputStream::kAbort) aborted_ = true;
2667 chunk_pos_ = 0;
2668 }
2669
2670 v8::OutputStream* stream_;
2671 int chunk_size_;
2672 ScopedVector<char> chunk_;
2673 int chunk_pos_;
2674 bool aborted_;
2675 };
2676
2677
2678 // type, name|index, to_node.
2679 const int HeapSnapshotJSONSerializer::kEdgeFieldsCount = 3;
2680 // type, name, id, self_size, edge_count, trace_node_id.
2681 const int HeapSnapshotJSONSerializer::kNodeFieldsCount = 6;
2682
2683 void HeapSnapshotJSONSerializer::Serialize(v8::OutputStream* stream) {
2684 if (AllocationTracker* allocation_tracker =
2685 snapshot_->profiler()->allocation_tracker()) {
2686 allocation_tracker->PrepareForSerialization();
2687 }
2688 DCHECK(writer_ == NULL);
2689 writer_ = new OutputStreamWriter(stream);
2690 SerializeImpl();
2691 delete writer_;
2692 writer_ = NULL;
2693 }
2694
2695
2696 void HeapSnapshotJSONSerializer::SerializeImpl() {
2697 DCHECK(0 == snapshot_->root()->index());
2698 writer_->AddCharacter('{');
2699 writer_->AddString("\"snapshot\":{");
2700 SerializeSnapshot();
2701 if (writer_->aborted()) return;
2702 writer_->AddString("},\n");
2703 writer_->AddString("\"nodes\":[");
2704 SerializeNodes();
2705 if (writer_->aborted()) return;
2706 writer_->AddString("],\n");
2707 writer_->AddString("\"edges\":[");
2708 SerializeEdges();
2709 if (writer_->aborted()) return;
2710 writer_->AddString("],\n");
2711
2712 writer_->AddString("\"trace_function_infos\":[");
2713 SerializeTraceNodeInfos();
2714 if (writer_->aborted()) return;
2715 writer_->AddString("],\n");
2716 writer_->AddString("\"trace_tree\":[");
2717 SerializeTraceTree();
2718 if (writer_->aborted()) return;
2719 writer_->AddString("],\n");
2720
2721 writer_->AddString("\"samples\":[");
2722 SerializeSamples();
2723 if (writer_->aborted()) return;
2724 writer_->AddString("],\n");
2725
2726 writer_->AddString("\"strings\":[");
2727 SerializeStrings();
2728 if (writer_->aborted()) return;
2729 writer_->AddCharacter(']');
2730 writer_->AddCharacter('}');
2731 writer_->Finalize();
2732 }
2733
2734
2735 int HeapSnapshotJSONSerializer::GetStringId(const char* s) {
2736 base::HashMap::Entry* cache_entry =
2737 strings_.LookupOrInsert(const_cast<char*>(s), StringHash(s));
2738 if (cache_entry->value == NULL) {
2739 cache_entry->value = reinterpret_cast<void*>(next_string_id_++);
2740 }
2741 return static_cast<int>(reinterpret_cast<intptr_t>(cache_entry->value));
2742 }
2743
2744
2745 namespace {
2746
2747 template<size_t size> struct ToUnsigned;
2748
2749 template<> struct ToUnsigned<4> {
2750 typedef uint32_t Type;
2751 };
2752
2753 template<> struct ToUnsigned<8> {
2754 typedef uint64_t Type;
2755 };
2756
2757 } // namespace
2758
2759
2760 template<typename T>
2761 static int utoa_impl(T value, const Vector<char>& buffer, int buffer_pos) {
2762 STATIC_ASSERT(static_cast<T>(-1) > 0); // Check that T is unsigned
2763 int number_of_digits = 0;
2764 T t = value;
2765 do {
2766 ++number_of_digits;
2767 } while (t /= 10);
2768
2769 buffer_pos += number_of_digits;
2770 int result = buffer_pos;
2771 do {
2772 int last_digit = static_cast<int>(value % 10);
2773 buffer[--buffer_pos] = '0' + last_digit;
2774 value /= 10;
2775 } while (value);
2776 return result;
2777 }
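// Worked example for utoa_impl() above: utoa_impl(305u, buffer, 0) writes the
// characters '3', '0' and '5' into buffer[0..2] and returns 3, the position
// just past the last digit; note that no terminating '\0' is appended here.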
2778
2779
2780 template<typename T>
2781 static int utoa(T value, const Vector<char>& buffer, int buffer_pos) {
2782 typename ToUnsigned<sizeof(value)>::Type unsigned_value = value;
2783 STATIC_ASSERT(sizeof(value) == sizeof(unsigned_value));
2784 return utoa_impl(unsigned_value, buffer, buffer_pos);
2785 }
2786
2787
2788 void HeapSnapshotJSONSerializer::SerializeEdge(HeapGraphEdge* edge,
2789 bool first_edge) {
2790 // The buffer needs space for 3 unsigned ints, 3 commas, \n and \0
2791 static const int kBufferSize =
2792 MaxDecimalDigitsIn<sizeof(unsigned)>::kUnsigned * 3 + 3 + 2; // NOLINT
2793 EmbeddedVector<char, kBufferSize> buffer;
2794 int edge_name_or_index = edge->type() == HeapGraphEdge::kElement
2795 || edge->type() == HeapGraphEdge::kHidden
2796 ? edge->index() : GetStringId(edge->name());
2797 int buffer_pos = 0;
2798 if (!first_edge) {
2799 buffer[buffer_pos++] = ',';
2800 }
2801 buffer_pos = utoa(edge->type(), buffer, buffer_pos);
2802 buffer[buffer_pos++] = ',';
2803 buffer_pos = utoa(edge_name_or_index, buffer, buffer_pos);
2804 buffer[buffer_pos++] = ',';
2805 buffer_pos = utoa(entry_index(edge->to()), buffer, buffer_pos);
2806 buffer[buffer_pos++] = '\n';
2807 buffer[buffer_pos++] = '\0';
2808 writer_->AddString(buffer.start());
2809 }
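// Illustrative example for SerializeEdge() above: fields are emitted in the
// order type, name_or_index, to_node (matching "edge_fields" in the meta
// section). A non-first element edge at index 0 could serialize as ",1,0,6",
// assuming element edges encode as type 1 (their position in "edge_types")
// and to_node is the target node's offset into the flat "nodes" array.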
2810
2811
2812 void HeapSnapshotJSONSerializer::SerializeEdges() {
2813 std::deque<HeapGraphEdge*>& edges = snapshot_->children();
2814 for (size_t i = 0; i < edges.size(); ++i) {
2815 DCHECK(i == 0 ||
2816 edges[i - 1]->from()->index() <= edges[i]->from()->index());
2817 SerializeEdge(edges[i], i == 0);
2818 if (writer_->aborted()) return;
2819 }
2820 }
2821
2822
2823 void HeapSnapshotJSONSerializer::SerializeNode(HeapEntry* entry) {
2824 // The buffer needs space for 5 unsigned ints, 1 size_t, 6 commas, \n and \0
2825 static const int kBufferSize =
2826 5 * MaxDecimalDigitsIn<sizeof(unsigned)>::kUnsigned // NOLINT
2827 + MaxDecimalDigitsIn<sizeof(size_t)>::kUnsigned // NOLINT
2828 + 6 + 1 + 1;
2829 EmbeddedVector<char, kBufferSize> buffer;
2830 int buffer_pos = 0;
2831 if (entry_index(entry) != 0) {
2832 buffer[buffer_pos++] = ',';
2833 }
2834 buffer_pos = utoa(entry->type(), buffer, buffer_pos);
2835 buffer[buffer_pos++] = ',';
2836 buffer_pos = utoa(GetStringId(entry->name()), buffer, buffer_pos);
2837 buffer[buffer_pos++] = ',';
2838 buffer_pos = utoa(entry->id(), buffer, buffer_pos);
2839 buffer[buffer_pos++] = ',';
2840 buffer_pos = utoa(entry->self_size(), buffer, buffer_pos);
2841 buffer[buffer_pos++] = ',';
2842 buffer_pos = utoa(entry->children_count(), buffer, buffer_pos);
2843 buffer[buffer_pos++] = ',';
2844 buffer_pos = utoa(entry->trace_node_id(), buffer, buffer_pos);
2845 buffer[buffer_pos++] = '\n';
2846 buffer[buffer_pos++] = '\0';
2847 writer_->AddString(buffer.start());
2848 }
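// Illustrative example for SerializeNode() above: fields are emitted in the
// order type, name, id, self_size, edge_count, trace_node_id (matching the
// "node_fields" meta description), so a hypothetical row could look like
// "3,7,209,40,2,0" followed by a newline.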
2849
2850
2851 void HeapSnapshotJSONSerializer::SerializeNodes() {
2852 List<HeapEntry>& entries = snapshot_->entries();
2853 for (int i = 0; i < entries.length(); ++i) {
2854 SerializeNode(&entries[i]);
2855 if (writer_->aborted()) return;
2856 }
2857 }
2858
2859
2860 void HeapSnapshotJSONSerializer::SerializeSnapshot() {
2861 writer_->AddString("\"meta\":");
2862 // The object describing node serialization layout.
2863 // We use a set of macros to improve readability.
2864 #define JSON_A(s) "[" s "]"
2865 #define JSON_O(s) "{" s "}"
2866 #define JSON_S(s) "\"" s "\""
2867 writer_->AddString(JSON_O(
2868 JSON_S("node_fields") ":" JSON_A(
2869 JSON_S("type") ","
2870 JSON_S("name") ","
2871 JSON_S("id") ","
2872 JSON_S("self_size") ","
2873 JSON_S("edge_count") ","
2874 JSON_S("trace_node_id")) ","
2875 JSON_S("node_types") ":" JSON_A(
2876 JSON_A(
2877 JSON_S("hidden") ","
2878 JSON_S("array") ","
2879 JSON_S("string") ","
2880 JSON_S("object") ","
2881 JSON_S("code") ","
2882 JSON_S("closure") ","
2883 JSON_S("regexp") ","
2884 JSON_S("number") ","
2885 JSON_S("native") ","
2886 JSON_S("synthetic") ","
2887 JSON_S("concatenated string") ","
2888 JSON_S("sliced string")) ","
2889 JSON_S("string") ","
2890 JSON_S("number") ","
2891 JSON_S("number") ","
2892 JSON_S("number") ","
2893 JSON_S("number") ","
2894 JSON_S("number")) ","
2895 JSON_S("edge_fields") ":" JSON_A(
2896 JSON_S("type") ","
2897 JSON_S("name_or_index") ","
2898 JSON_S("to_node")) ","
2899 JSON_S("edge_types") ":" JSON_A(
2900 JSON_A(
2901 JSON_S("context") ","
2902 JSON_S("element") ","
2903 JSON_S("property") ","
2904 JSON_S("internal") ","
2905 JSON_S("hidden") ","
2906 JSON_S("shortcut") ","
2907 JSON_S("weak")) ","
2908 JSON_S("string_or_number") ","
2909 JSON_S("node")) ","
2910 JSON_S("trace_function_info_fields") ":" JSON_A(
2911 JSON_S("function_id") ","
2912 JSON_S("name") ","
2913 JSON_S("script_name") ","
2914 JSON_S("script_id") ","
2915 JSON_S("line") ","
2916 JSON_S("column")) ","
2917 JSON_S("trace_node_fields") ":" JSON_A(
2918 JSON_S("id") ","
2919 JSON_S("function_info_index") ","
2920 JSON_S("count") ","
2921 JSON_S("size") ","
2922 JSON_S("children")) ","
2923 JSON_S("sample_fields") ":" JSON_A(
2924 JSON_S("timestamp_us") ","
2925 JSON_S("last_assigned_id"))));
2926 #undef JSON_S
2927 #undef JSON_O
2928 #undef JSON_A
2929 writer_->AddString(",\"node_count\":");
2930 writer_->AddNumber(snapshot_->entries().length());
2931 writer_->AddString(",\"edge_count\":");
2932 writer_->AddNumber(static_cast<double>(snapshot_->edges().size()));
2933 writer_->AddString(",\"trace_function_count\":");
2934 uint32_t count = 0;
2935 AllocationTracker* tracker = snapshot_->profiler()->allocation_tracker();
2936 if (tracker) {
2937 count = tracker->function_info_list().length();
2938 }
2939 writer_->AddNumber(count);
2940 }
2941
2942
2943 static void WriteUChar(OutputStreamWriter* w, unibrow::uchar u) {
2944 static const char hex_chars[] = "0123456789ABCDEF";
2945 w->AddString("\\u");
2946 w->AddCharacter(hex_chars[(u >> 12) & 0xf]);
2947 w->AddCharacter(hex_chars[(u >> 8) & 0xf]);
2948 w->AddCharacter(hex_chars[(u >> 4) & 0xf]);
2949 w->AddCharacter(hex_chars[u & 0xf]);
2950 }
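// Example for WriteUChar() above: WriteUChar(w, 0x00A9) emits the six
// characters \u00A9; code units are always written as four uppercase hex
// digits.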
2951
2952
2953 void HeapSnapshotJSONSerializer::SerializeTraceTree() {
2954 AllocationTracker* tracker = snapshot_->profiler()->allocation_tracker();
2955 if (!tracker) return;
2956 AllocationTraceTree* traces = tracker->trace_tree();
2957 SerializeTraceNode(traces->root());
2958 }
2959
2960
2961 void HeapSnapshotJSONSerializer::SerializeTraceNode(AllocationTraceNode* node) {
2962 // The buffer needs space for 4 unsigned ints, 4 commas, [ and \0
2963 const int kBufferSize =
2964 4 * MaxDecimalDigitsIn<sizeof(unsigned)>::kUnsigned // NOLINT
2965 + 4 + 1 + 1;
2966 EmbeddedVector<char, kBufferSize> buffer;
2967 int buffer_pos = 0;
2968 buffer_pos = utoa(node->id(), buffer, buffer_pos);
2969 buffer[buffer_pos++] = ',';
2970 buffer_pos = utoa(node->function_info_index(), buffer, buffer_pos);
2971 buffer[buffer_pos++] = ',';
2972 buffer_pos = utoa(node->allocation_count(), buffer, buffer_pos);
2973 buffer[buffer_pos++] = ',';
2974 buffer_pos = utoa(node->allocation_size(), buffer, buffer_pos);
2975 buffer[buffer_pos++] = ',';
2976 buffer[buffer_pos++] = '[';
2977 buffer[buffer_pos++] = '\0';
2978 writer_->AddString(buffer.start());
2979
2980 Vector<AllocationTraceNode*> children = node->children();
2981 for (int i = 0; i < children.length(); i++) {
2982 if (i > 0) {
2983 writer_->AddCharacter(',');
2984 }
2985 SerializeTraceNode(children[i]);
2986 }
2987 writer_->AddCharacter(']');
2988 }
2989
2990
2991 // 0-based position is converted to 1-based during the serialization.
2992 static int SerializePosition(int position, const Vector<char>& buffer,
2993 int buffer_pos) {
2994 if (position == -1) {
2995 buffer[buffer_pos++] = '0';
2996 } else {
2997 DCHECK(position >= 0);
2998 buffer_pos = utoa(static_cast<unsigned>(position + 1), buffer, buffer_pos);
2999 }
3000 return buffer_pos;
3001 }
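// Examples for SerializePosition() above: -1 (no position) is written as "0",
// position 0 as "1" and position 41 as "42", i.e. valid positions are shifted
// to 1-based values in the JSON output.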
3002
3003
3004 void HeapSnapshotJSONSerializer::SerializeTraceNodeInfos() {
3005 AllocationTracker* tracker = snapshot_->profiler()->allocation_tracker();
3006 if (!tracker) return;
3007 // The buffer needs space for 6 unsigned ints, 6 commas, \n and \0
3008 const int kBufferSize =
3009 6 * MaxDecimalDigitsIn<sizeof(unsigned)>::kUnsigned // NOLINT
3010 + 6 + 1 + 1;
3011 EmbeddedVector<char, kBufferSize> buffer;
3012 const List<AllocationTracker::FunctionInfo*>& list =
3013 tracker->function_info_list();
3014 for (int i = 0; i < list.length(); i++) {
3015 AllocationTracker::FunctionInfo* info = list[i];
3016 int buffer_pos = 0;
3017 if (i > 0) {
3018 buffer[buffer_pos++] = ',';
3019 }
3020 buffer_pos = utoa(info->function_id, buffer, buffer_pos);
3021 buffer[buffer_pos++] = ',';
3022 buffer_pos = utoa(GetStringId(info->name), buffer, buffer_pos);
3023 buffer[buffer_pos++] = ',';
3024 buffer_pos = utoa(GetStringId(info->script_name), buffer, buffer_pos);
3025 buffer[buffer_pos++] = ',';
3026 // The cast is safe because script id is a non-negative Smi.
3027 buffer_pos = utoa(static_cast<unsigned>(info->script_id), buffer,
3028 buffer_pos);
3029 buffer[buffer_pos++] = ',';
3030 buffer_pos = SerializePosition(info->line, buffer, buffer_pos);
3031 buffer[buffer_pos++] = ',';
3032 buffer_pos = SerializePosition(info->column, buffer, buffer_pos);
3033 buffer[buffer_pos++] = '\n';
3034 buffer[buffer_pos++] = '\0';
3035 writer_->AddString(buffer.start());
3036 }
3037 }
3038
3039
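// Emits one "time_delta_us,last_assigned_id" pair per heap sample, where the
// time delta is measured in microseconds from the first sample's timestamp.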
void HeapSnapshotJSONSerializer::SerializeSamples() {
  const List<HeapObjectsMap::TimeInterval>& samples =
      snapshot_->profiler()->heap_object_map()->samples();
  if (samples.is_empty()) return;
  base::TimeTicks start_time = samples[0].timestamp;
  // The buffer needs space for 2 unsigned ints, 2 commas, \n and \0
  const int kBufferSize = MaxDecimalDigitsIn<sizeof(
                              base::TimeDelta().InMicroseconds())>::kUnsigned +
                          MaxDecimalDigitsIn<sizeof(samples[0].id)>::kUnsigned +
                          2 + 1 + 1;
  EmbeddedVector<char, kBufferSize> buffer;
  for (int i = 0; i < samples.length(); i++) {
    HeapObjectsMap::TimeInterval& sample = samples[i];
    int buffer_pos = 0;
    if (i > 0) {
      buffer[buffer_pos++] = ',';
    }
    base::TimeDelta time_delta = sample.timestamp - start_time;
    buffer_pos = utoa(time_delta.InMicroseconds(), buffer, buffer_pos);
    buffer[buffer_pos++] = ',';
    buffer_pos = utoa(sample.last_assigned_id(), buffer, buffer_pos);
    buffer[buffer_pos++] = '\n';
    buffer[buffer_pos++] = '\0';
    writer_->AddString(buffer.start());
  }
}


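// Writes a string as a JSON literal: printable ASCII is copied verbatim,
// control characters, quotes and backslashes are escaped, and multi-byte
// UTF-8 sequences are re-encoded as \uXXXX escapes (or '?' when the sequence
// cannot be decoded).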
void HeapSnapshotJSONSerializer::SerializeString(const unsigned char* s) {
  writer_->AddCharacter('\n');
  writer_->AddCharacter('\"');
  for ( ; *s != '\0'; ++s) {
    switch (*s) {
      case '\b':
        writer_->AddString("\\b");
        continue;
      case '\f':
        writer_->AddString("\\f");
        continue;
      case '\n':
        writer_->AddString("\\n");
        continue;
      case '\r':
        writer_->AddString("\\r");
        continue;
      case '\t':
        writer_->AddString("\\t");
        continue;
      case '\"':
      case '\\':
        writer_->AddCharacter('\\');
        writer_->AddCharacter(*s);
        continue;
      default:
        if (*s > 31 && *s < 128) {
          writer_->AddCharacter(*s);
        } else if (*s <= 31) {
          // Special character with no dedicated literal.
          WriteUChar(writer_, *s);
        } else {
          // Convert UTF-8 into \u UTF-16 literal.
          size_t length = 1, cursor = 0;
          for ( ; length <= 4 && *(s + length) != '\0'; ++length) { }
          unibrow::uchar c = unibrow::Utf8::CalculateValue(s, length, &cursor);
          if (c != unibrow::Utf8::kBadChar) {
            WriteUChar(writer_, c);
            DCHECK(cursor != 0);
            s += cursor - 1;
          } else {
            writer_->AddCharacter('?');
          }
        }
    }
  }
  writer_->AddCharacter('\"');
}


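// Writes the strings table in id order. Slot 0 is not used by interned
// strings, so it is emitted as the "<dummy>" placeholder; the remaining
// entries are serialized with SerializeString().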
void HeapSnapshotJSONSerializer::SerializeStrings() {
  ScopedVector<const unsigned char*> sorted_strings(
      strings_.occupancy() + 1);
  for (base::HashMap::Entry* entry = strings_.Start(); entry != NULL;
       entry = strings_.Next(entry)) {
    int index = static_cast<int>(reinterpret_cast<uintptr_t>(entry->value));
    sorted_strings[index] = reinterpret_cast<const unsigned char*>(entry->key);
  }
  writer_->AddString("\"<dummy>\"");
  for (int i = 1; i < sorted_strings.length(); ++i) {
    writer_->AddCharacter(',');
    SerializeString(sorted_strings[i]);
    if (writer_->aborted()) return;
  }
}


}  // namespace internal
}  // namespace v8