1 // Copyright 2013 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are
4 // met:
5 //
6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided
11 // with the distribution.
12 // * Neither the name of Google Inc. nor the names of its
13 // contributors may be used to endorse or promote products derived
14 // from this software without specific prior written permission.
15 //
16 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27
28 #include "v8.h"
29
30 #include "heap-snapshot-generator-inl.h"
31
32 #include "allocation-tracker.h"
33 #include "code-stubs.h"
34 #include "heap-profiler.h"
35 #include "debug.h"
36 #include "types.h"
37
38 namespace v8 {
39 namespace internal {
40
41
42 HeapGraphEdge::HeapGraphEdge(Type type, const char* name, int from, int to)
43 : type_(type),
44 from_index_(from),
45 to_index_(to),
46 name_(name) {
47 ASSERT(type == kContextVariable
48 || type == kProperty
49 || type == kInternal
50 || type == kShortcut);
51 }
52
53
54 HeapGraphEdge::HeapGraphEdge(Type type, int index, int from, int to)
55 : type_(type),
56 from_index_(from),
57 to_index_(to),
58 index_(index) {
59 ASSERT(type == kElement || type == kHidden || type == kWeak);
60 }
61
62
63 void HeapGraphEdge::ReplaceToIndexWithEntry(HeapSnapshot* snapshot) {
64 to_entry_ = &snapshot->entries()[to_index_];
65 }
66
67
68 const int HeapEntry::kNoEntry = -1;
69
70 HeapEntry::HeapEntry(HeapSnapshot* snapshot,
71 Type type,
72 const char* name,
73 SnapshotObjectId id,
74 int self_size)
75 : type_(type),
76 children_count_(0),
77 children_index_(-1),
78 self_size_(self_size),
79 id_(id),
80 snapshot_(snapshot),
81 name_(name) { }
82
83
84 void HeapEntry::SetNamedReference(HeapGraphEdge::Type type,
85 const char* name,
86 HeapEntry* entry) {
87 HeapGraphEdge edge(type, name, this->index(), entry->index());
88 snapshot_->edges().Add(edge);
89 ++children_count_;
90 }
91
92
93 void HeapEntry::SetIndexedReference(HeapGraphEdge::Type type,
94 int index,
95 HeapEntry* entry) {
96 HeapGraphEdge edge(type, index, this->index(), entry->index());
97 snapshot_->edges().Add(edge);
98 ++children_count_;
99 }
100
101
102 Handle<HeapObject> HeapEntry::GetHeapObject() {
103 return snapshot_->profiler()->FindHeapObjectById(id());
104 }
105
106
107 void HeapEntry::Print(
108 const char* prefix, const char* edge_name, int max_depth, int indent) {
109 STATIC_CHECK(sizeof(unsigned) == sizeof(id()));
110 OS::Print("%6d @%6u %*c %s%s: ",
111 self_size(), id(), indent, ' ', prefix, edge_name);
112 if (type() != kString) {
113 OS::Print("%s %.40s\n", TypeAsString(), name_);
114 } else {
115 OS::Print("\"");
116 const char* c = name_;
117 while (*c && (c - name_) <= 40) {
118 if (*c != '\n')
119 OS::Print("%c", *c);
120 else
121 OS::Print("\\n");
122 ++c;
123 }
124 OS::Print("\"\n");
125 }
126 if (--max_depth == 0) return;
127 Vector<HeapGraphEdge*> ch = children();
128 for (int i = 0; i < ch.length(); ++i) {
129 HeapGraphEdge& edge = *ch[i];
130 const char* edge_prefix = "";
131 EmbeddedVector<char, 64> index;
132 const char* edge_name = index.start();
133 switch (edge.type()) {
134 case HeapGraphEdge::kContextVariable:
135 edge_prefix = "#";
136 edge_name = edge.name();
137 break;
138 case HeapGraphEdge::kElement:
139 OS::SNPrintF(index, "%d", edge.index());
140 break;
141 case HeapGraphEdge::kInternal:
142 edge_prefix = "$";
143 edge_name = edge.name();
144 break;
145 case HeapGraphEdge::kProperty:
146 edge_name = edge.name();
147 break;
148 case HeapGraphEdge::kHidden:
149 edge_prefix = "$";
150 OS::SNPrintF(index, "%d", edge.index());
151 break;
152 case HeapGraphEdge::kShortcut:
153 edge_prefix = "^";
154 edge_name = edge.name();
155 break;
156 case HeapGraphEdge::kWeak:
157 edge_prefix = "w";
158 OS::SNPrintF(index, "%d", edge.index());
159 break;
160 default:
161 OS::SNPrintF(index, "!!! unknown edge type: %d ", edge.type());
162 }
163 edge.to()->Print(edge_prefix, edge_name, max_depth, indent + 2);
164 }
165 }
166
167
168 const char* HeapEntry::TypeAsString() {
169 switch (type()) {
170 case kHidden: return "/hidden/";
171 case kObject: return "/object/";
172 case kClosure: return "/closure/";
173 case kString: return "/string/";
174 case kCode: return "/code/";
175 case kArray: return "/array/";
176 case kRegExp: return "/regexp/";
177 case kHeapNumber: return "/number/";
178 case kNative: return "/native/";
179 case kSynthetic: return "/synthetic/";
180 case kConsString: return "/concatenated string/";
181 case kSlicedString: return "/sliced string/";
182 default: return "???";
183 }
184 }
185
186
187 // It is very important to keep objects that form a heap snapshot
188 // as small as possible.
189 namespace { // Avoid littering the global namespace.
190
191 template <size_t ptr_size> struct SnapshotSizeConstants;
192
193 template <> struct SnapshotSizeConstants<4> {
194 static const int kExpectedHeapGraphEdgeSize = 12;
195 static const int kExpectedHeapEntrySize = 24;
196 };
197
198 template <> struct SnapshotSizeConstants<8> {
199 static const int kExpectedHeapGraphEdgeSize = 24;
200 static const int kExpectedHeapEntrySize = 32;
201 };
202
203 } // namespace
204
205 HeapSnapshot::HeapSnapshot(HeapProfiler* profiler,
206 const char* title,
207 unsigned uid)
208 : profiler_(profiler),
209 title_(title),
210 uid_(uid),
211 root_index_(HeapEntry::kNoEntry),
212 gc_roots_index_(HeapEntry::kNoEntry),
213 natives_root_index_(HeapEntry::kNoEntry),
214 max_snapshot_js_object_id_(0) {
215 STATIC_CHECK(
216 sizeof(HeapGraphEdge) ==
217 SnapshotSizeConstants<kPointerSize>::kExpectedHeapGraphEdgeSize);
218 STATIC_CHECK(
219 sizeof(HeapEntry) ==
220 SnapshotSizeConstants<kPointerSize>::kExpectedHeapEntrySize);
221 for (int i = 0; i < VisitorSynchronization::kNumberOfSyncTags; ++i) {
222 gc_subroot_indexes_[i] = HeapEntry::kNoEntry;
223 }
224 }
225
226
227 void HeapSnapshot::Delete() {
228 profiler_->RemoveSnapshot(this);
229 delete this;
230 }
231
232
233 void HeapSnapshot::RememberLastJSObjectId() {
234 max_snapshot_js_object_id_ = profiler_->heap_object_map()->last_assigned_id();
235 }
236
237
238 HeapEntry* HeapSnapshot::AddRootEntry() {
239 ASSERT(root_index_ == HeapEntry::kNoEntry);
240 ASSERT(entries_.is_empty()); // Root entry must be the first one.
241 HeapEntry* entry = AddEntry(HeapEntry::kSynthetic,
242 "",
243 HeapObjectsMap::kInternalRootObjectId,
244 0);
245 root_index_ = entry->index();
246 ASSERT(root_index_ == 0);
247 return entry;
248 }
249
250
251 HeapEntry* HeapSnapshot::AddGcRootsEntry() {
252 ASSERT(gc_roots_index_ == HeapEntry::kNoEntry);
253 HeapEntry* entry = AddEntry(HeapEntry::kSynthetic,
254 "(GC roots)",
255 HeapObjectsMap::kGcRootsObjectId,
256 0);
257 gc_roots_index_ = entry->index();
258 return entry;
259 }
260
261
262 HeapEntry* HeapSnapshot::AddGcSubrootEntry(int tag) {
263 ASSERT(gc_subroot_indexes_[tag] == HeapEntry::kNoEntry);
264 ASSERT(0 <= tag && tag < VisitorSynchronization::kNumberOfSyncTags);
265 HeapEntry* entry = AddEntry(
266 HeapEntry::kSynthetic,
267 VisitorSynchronization::kTagNames[tag],
268 HeapObjectsMap::GetNthGcSubrootId(tag),
269 0);
270 gc_subroot_indexes_[tag] = entry->index();
271 return entry;
272 }
273
274
275 HeapEntry* HeapSnapshot::AddEntry(HeapEntry::Type type,
276 const char* name,
277 SnapshotObjectId id,
278 int size) {
279 HeapEntry entry(this, type, name, id, size);
280 entries_.Add(entry);
281 return &entries_.last();
282 }
283
284
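// Builds the children() vector in two passes: first every entry reserves a
// contiguous slice of the vector according to its children_count_, then
// every edge resolves its to_index_ into an entry pointer and registers
// itself with its source entry.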
285 void HeapSnapshot::FillChildren() {
286 ASSERT(children().is_empty());
287 children().Allocate(edges().length());
288 int children_index = 0;
289 for (int i = 0; i < entries().length(); ++i) {
290 HeapEntry* entry = &entries()[i];
291 children_index = entry->set_children_index(children_index);
292 }
293 ASSERT(edges().length() == children_index);
294 for (int i = 0; i < edges().length(); ++i) {
295 HeapGraphEdge* edge = &edges()[i];
296 edge->ReplaceToIndexWithEntry(this);
297 edge->from()->add_child(edge);
298 }
299 }
300
301
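// Comparison functor used by HeapSnapshot::GetEntryById for the binary
// search over the id-sorted entries list.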
302 class FindEntryById {
303 public:
304 explicit FindEntryById(SnapshotObjectId id) : id_(id) { }
305 int operator()(HeapEntry* const* entry) {
306 if ((*entry)->id() == id_) return 0;
307 return (*entry)->id() < id_ ? -1 : 1;
308 }
309 private:
310 SnapshotObjectId id_;
311 };
312
313
314 HeapEntry* HeapSnapshot::GetEntryById(SnapshotObjectId id) {
315 List<HeapEntry*>* entries_by_id = GetSortedEntriesList();
316 // Perform a binary search by id.
317 int index = SortedListBSearch(*entries_by_id, FindEntryById(id));
318 if (index == -1)
319 return NULL;
320 return entries_by_id->at(index);
321 }
322
323
324 template<class T>
325 static int SortByIds(const T* entry1_ptr,
326 const T* entry2_ptr) {
327 if ((*entry1_ptr)->id() == (*entry2_ptr)->id()) return 0;
328 return (*entry1_ptr)->id() < (*entry2_ptr)->id() ? -1 : 1;
329 }
330
331
332 List<HeapEntry*>* HeapSnapshot::GetSortedEntriesList() {
333 if (sorted_entries_.is_empty()) {
334 sorted_entries_.Allocate(entries_.length());
335 for (int i = 0; i < entries_.length(); ++i) {
336 sorted_entries_[i] = &entries_[i];
337 }
338 sorted_entries_.Sort(SortByIds);
339 }
340 return &sorted_entries_;
341 }
342
343
344 void HeapSnapshot::Print(int max_depth) {
345 root()->Print("", "", max_depth, 0);
346 }
347
348
349 size_t HeapSnapshot::RawSnapshotSize() const {
350 return
351 sizeof(*this) +
352 GetMemoryUsedByList(entries_) +
353 GetMemoryUsedByList(edges_) +
354 GetMemoryUsedByList(children_) +
355 GetMemoryUsedByList(sorted_entries_);
356 }
357
358
359 // We split IDs on evens for embedder objects (see
360 // HeapObjectsMap::GenerateId) and odds for native objects.
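// Consistently with the split above, kInternalRootObjectId starts at 1 and
// the synthetic ids below advance by kObjectIdStep, while GenerateId()
// shifts its result left by one bit, so embedder-described objects end up
// with even ids.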
361 const SnapshotObjectId HeapObjectsMap::kInternalRootObjectId = 1;
362 const SnapshotObjectId HeapObjectsMap::kGcRootsObjectId =
363 HeapObjectsMap::kInternalRootObjectId + HeapObjectsMap::kObjectIdStep;
364 const SnapshotObjectId HeapObjectsMap::kGcRootsFirstSubrootId =
365 HeapObjectsMap::kGcRootsObjectId + HeapObjectsMap::kObjectIdStep;
366 const SnapshotObjectId HeapObjectsMap::kFirstAvailableObjectId =
367 HeapObjectsMap::kGcRootsFirstSubrootId +
368 VisitorSynchronization::kNumberOfSyncTags * HeapObjectsMap::kObjectIdStep;
369
370
371 static bool AddressesMatch(void* key1, void* key2) {
372 return key1 == key2;
373 }
374
375
376 HeapObjectsMap::HeapObjectsMap(Heap* heap)
377 : next_id_(kFirstAvailableObjectId),
378 entries_map_(AddressesMatch),
379 heap_(heap) {
380 // This dummy element solves a problem with entries_map_.
381 // When we do a lookup in the HashMap we see no difference between two cases:
382 // the map has an entry with NULL as the value, or it has just created
383 // a new entry on the fly with NULL as the default value.
384 // With such a dummy element we have a guarantee that all entries_map_ entries
385 // will have a value field greater than 0.
386 // This fact is used in the MoveObject method.
387 entries_.Add(EntryInfo(0, NULL, 0));
388 }
389
390
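// Keeps the address -> id mapping in sync when an object is moved, so that
// the object keeps its snapshot id across moves.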
391 void HeapObjectsMap::MoveObject(Address from, Address to, int object_size) {
392 ASSERT(to != NULL);
393 ASSERT(from != NULL);
394 if (from == to) return;
395 void* from_value = entries_map_.Remove(from, ComputePointerHash(from));
396 if (from_value == NULL) {
397 // It may occur that some untracked object moves to an address X and there
398 // is a tracked object at that address. In this case we should remove the
399 // entry as we know that the object has died.
400 void* to_value = entries_map_.Remove(to, ComputePointerHash(to));
401 if (to_value != NULL) {
402 int to_entry_info_index =
403 static_cast<int>(reinterpret_cast<intptr_t>(to_value));
404 entries_.at(to_entry_info_index).addr = NULL;
405 }
406 } else {
407 HashMap::Entry* to_entry = entries_map_.Lookup(to, ComputePointerHash(to),
408 true);
409 if (to_entry->value != NULL) {
410 // We found an existing entry with the 'to' address for an old object.
411 // Without this operation we would have two EntryInfo records with the
412 // same value in the addr field. That is bad, because later, in
413 // RemoveDeadEntries, one of these entries would be removed together with
414 // the corresponding entries_map_ entry.
415 int to_entry_info_index =
416 static_cast<int>(reinterpret_cast<intptr_t>(to_entry->value));
417 entries_.at(to_entry_info_index).addr = NULL;
418 }
419 int from_entry_info_index =
420 static_cast<int>(reinterpret_cast<intptr_t>(from_value));
421 entries_.at(from_entry_info_index).addr = to;
422 // The size of an object can change during its lifetime, so to keep the
423 // information about the object in entries_ consistent, we have to adjust
424 // its size when the object is migrated.
425 if (FLAG_heap_profiler_trace_objects) {
426 PrintF("Move object from %p to %p old size %6d new size %6d\n",
427 from,
428 to,
429 entries_.at(from_entry_info_index).size,
430 object_size);
431 }
432 entries_.at(from_entry_info_index).size = object_size;
433 to_entry->value = from_value;
434 }
435 }
436
437
438 void HeapObjectsMap::UpdateObjectSize(Address addr, int size) {
439 FindOrAddEntry(addr, size, false);
440 }
441
442
443 SnapshotObjectId HeapObjectsMap::FindEntry(Address addr) {
444 HashMap::Entry* entry = entries_map_.Lookup(addr, ComputePointerHash(addr),
445 false);
446 if (entry == NULL) return 0;
447 int entry_index = static_cast<int>(reinterpret_cast<intptr_t>(entry->value));
448 EntryInfo& entry_info = entries_.at(entry_index);
449 ASSERT(static_cast<uint32_t>(entries_.length()) > entries_map_.occupancy());
450 return entry_info.id;
451 }
452
453
454 SnapshotObjectId HeapObjectsMap::FindOrAddEntry(Address addr,
455 unsigned int size,
456 bool accessed) {
457 ASSERT(static_cast<uint32_t>(entries_.length()) > entries_map_.occupancy());
458 HashMap::Entry* entry = entries_map_.Lookup(addr, ComputePointerHash(addr),
459 true);
460 if (entry->value != NULL) {
461 int entry_index =
462 static_cast<int>(reinterpret_cast<intptr_t>(entry->value));
463 EntryInfo& entry_info = entries_.at(entry_index);
464 entry_info.accessed = accessed;
465 if (FLAG_heap_profiler_trace_objects) {
466 PrintF("Update object size : %p with old size %d and new size %d\n",
467 addr,
468 entry_info.size,
469 size);
470 }
471 entry_info.size = size;
472 return entry_info.id;
473 }
474 entry->value = reinterpret_cast<void*>(entries_.length());
475 SnapshotObjectId id = next_id_;
476 next_id_ += kObjectIdStep;
477 entries_.Add(EntryInfo(id, addr, size, accessed));
478 ASSERT(static_cast<uint32_t>(entries_.length()) > entries_map_.occupancy());
479 return id;
480 }
481
482
483 void HeapObjectsMap::StopHeapObjectsTracking() {
484 time_intervals_.Clear();
485 }
486
487
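// Brings the map up to date: forces a GC that makes the heap iterable,
// records the current address and size of every live object, and then
// drops the entries of objects that were not seen (i.e. have died).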
488 void HeapObjectsMap::UpdateHeapObjectsMap() {
489 if (FLAG_heap_profiler_trace_objects) {
490 PrintF("Begin HeapObjectsMap::UpdateHeapObjectsMap. map has %d entries.\n",
491 entries_map_.occupancy());
492 }
493 heap_->CollectAllGarbage(Heap::kMakeHeapIterableMask,
494 "HeapObjectsMap::UpdateHeapObjectsMap");
495 HeapIterator iterator(heap_);
496 for (HeapObject* obj = iterator.next();
497 obj != NULL;
498 obj = iterator.next()) {
499 FindOrAddEntry(obj->address(), obj->Size());
500 if (FLAG_heap_profiler_trace_objects) {
501 PrintF("Update object : %p %6d. Next address is %p\n",
502 obj->address(),
503 obj->Size(),
504 obj->address() + obj->Size());
505 }
506 }
507 RemoveDeadEntries();
508 if (FLAG_heap_profiler_trace_objects) {
509 PrintF("End HeapObjectsMap::UpdateHeapObjectsMap. map has %d entries.\n",
510 entries_map_.occupancy());
511 }
512 }
513
514
515 namespace {
516
517
518 struct HeapObjectInfo {
519 HeapObjectInfo(HeapObject* obj, int expected_size)
520 : obj(obj),
521 expected_size(expected_size) {
522 }
523
524 HeapObject* obj;
525 int expected_size;
526
527 bool IsValid() const { return expected_size == obj->Size(); }
528
529 void Print() const {
530 if (expected_size == 0) {
531 PrintF("Untracked object : %p %6d. Next address is %p\n",
532 obj->address(),
533 obj->Size(),
534 obj->address() + obj->Size());
535 } else if (obj->Size() != expected_size) {
536 PrintF("Wrong size %6d: %p %6d. Next address is %p\n",
537 expected_size,
538 obj->address(),
539 obj->Size(),
540 obj->address() + obj->Size());
541 } else {
542 PrintF("Good object : %p %6d. Next address is %p\n",
543 obj->address(),
544 expected_size,
545 obj->address() + obj->Size());
546 }
547 }
548 };
549
550
551 static int comparator(const HeapObjectInfo* a, const HeapObjectInfo* b) {
552 if (a->obj < b->obj) return -1;
553 if (a->obj > b->obj) return 1;
554 return 0;
555 }
556
557
558 } // namespace
559
560
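// Diagnostic helper: walks the heap and counts objects that entries_map_
// does not know about. With FLAG_heap_profiler_trace_objects it also
// checks the recorded sizes and prints a report of suspicious objects;
// without the flag a stale size triggers the CHECK_EQ below.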
561 int HeapObjectsMap::FindUntrackedObjects() {
562 List<HeapObjectInfo> heap_objects(1000);
563
564 HeapIterator iterator(heap_);
565 int untracked = 0;
566 for (HeapObject* obj = iterator.next();
567 obj != NULL;
568 obj = iterator.next()) {
569 HashMap::Entry* entry = entries_map_.Lookup(
570 obj->address(), ComputePointerHash(obj->address()), false);
571 if (entry == NULL) {
572 ++untracked;
573 if (FLAG_heap_profiler_trace_objects) {
574 heap_objects.Add(HeapObjectInfo(obj, 0));
575 }
576 } else {
577 int entry_index = static_cast<int>(
578 reinterpret_cast<intptr_t>(entry->value));
579 EntryInfo& entry_info = entries_.at(entry_index);
580 if (FLAG_heap_profiler_trace_objects) {
581 heap_objects.Add(HeapObjectInfo(obj,
582 static_cast<int>(entry_info.size)));
583 if (obj->Size() != static_cast<int>(entry_info.size))
584 ++untracked;
585 } else {
586 CHECK_EQ(obj->Size(), static_cast<int>(entry_info.size));
587 }
588 }
589 }
590 if (FLAG_heap_profiler_trace_objects) {
591 PrintF("\nBegin HeapObjectsMap::FindUntrackedObjects. %d entries in map.\n",
592 entries_map_.occupancy());
593 heap_objects.Sort(comparator);
594 int last_printed_object = -1;
595 bool print_next_object = false;
596 for (int i = 0; i < heap_objects.length(); ++i) {
597 const HeapObjectInfo& object_info = heap_objects[i];
598 if (!object_info.IsValid()) {
599 ++untracked;
600 if (last_printed_object != i - 1) {
601 if (i > 0) {
602 PrintF("%d objects were skipped\n", i - 1 - last_printed_object);
603 heap_objects[i - 1].Print();
604 }
605 }
606 object_info.Print();
607 last_printed_object = i;
608 print_next_object = true;
609 } else if (print_next_object) {
610 object_info.Print();
611 print_next_object = false;
612 last_printed_object = i;
613 }
614 }
615 if (last_printed_object < heap_objects.length() - 1) {
616 PrintF("Last %d objects were skipped\n",
617 heap_objects.length() - 1 - last_printed_object);
618 }
619 PrintF("End HeapObjectsMap::FindUntrackedObjects. %d entries in map.\n\n",
620 entries_map_.occupancy());
621 }
622 return untracked;
623 }
624
625
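// Streams per-time-interval statistics (live object count and total size)
// to the embedder. Only intervals whose numbers have changed since the
// last push are reported, in chunks of roughly the stream's preferred
// size.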
626 SnapshotObjectId HeapObjectsMap::PushHeapObjectsStats(OutputStream* stream) {
627 UpdateHeapObjectsMap();
628 time_intervals_.Add(TimeInterval(next_id_));
629 int prefered_chunk_size = stream->GetChunkSize();
630 List<v8::HeapStatsUpdate> stats_buffer;
631 ASSERT(!entries_.is_empty());
632 EntryInfo* entry_info = &entries_.first();
633 EntryInfo* end_entry_info = &entries_.last() + 1;
634 for (int time_interval_index = 0;
635 time_interval_index < time_intervals_.length();
636 ++time_interval_index) {
637 TimeInterval& time_interval = time_intervals_[time_interval_index];
638 SnapshotObjectId time_interval_id = time_interval.id;
639 uint32_t entries_size = 0;
640 EntryInfo* start_entry_info = entry_info;
641 while (entry_info < end_entry_info && entry_info->id < time_interval_id) {
642 entries_size += entry_info->size;
643 ++entry_info;
644 }
645 uint32_t entries_count =
646 static_cast<uint32_t>(entry_info - start_entry_info);
647 if (time_interval.count != entries_count ||
648 time_interval.size != entries_size) {
649 stats_buffer.Add(v8::HeapStatsUpdate(
650 time_interval_index,
651 time_interval.count = entries_count,
652 time_interval.size = entries_size));
653 if (stats_buffer.length() >= prefered_chunk_size) {
654 OutputStream::WriteResult result = stream->WriteHeapStatsChunk(
655 &stats_buffer.first(), stats_buffer.length());
656 if (result == OutputStream::kAbort) return last_assigned_id();
657 stats_buffer.Clear();
658 }
659 }
660 }
661 ASSERT(entry_info == end_entry_info);
662 if (!stats_buffer.is_empty()) {
663 OutputStream::WriteResult result = stream->WriteHeapStatsChunk(
664 &stats_buffer.first(), stats_buffer.length());
665 if (result == OutputStream::kAbort) return last_assigned_id();
666 }
667 stream->EndOfStream();
668 return last_assigned_id();
669 }
670
671
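// Compacts entries_: entries that were marked as accessed since the last
// heap walk are kept and their entries_map_ slots are repointed to the new
// index; all other entries are dropped from both entries_ and
// entries_map_.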
672 void HeapObjectsMap::RemoveDeadEntries() {
673 ASSERT(entries_.length() > 0 &&
674 entries_.at(0).id == 0 &&
675 entries_.at(0).addr == NULL);
676 int first_free_entry = 1;
677 for (int i = 1; i < entries_.length(); ++i) {
678 EntryInfo& entry_info = entries_.at(i);
679 if (entry_info.accessed) {
680 if (first_free_entry != i) {
681 entries_.at(first_free_entry) = entry_info;
682 }
683 entries_.at(first_free_entry).accessed = false;
684 HashMap::Entry* entry = entries_map_.Lookup(
685 entry_info.addr, ComputePointerHash(entry_info.addr), false);
686 ASSERT(entry);
687 entry->value = reinterpret_cast<void*>(first_free_entry);
688 ++first_free_entry;
689 } else {
690 if (entry_info.addr) {
691 entries_map_.Remove(entry_info.addr,
692 ComputePointerHash(entry_info.addr));
693 }
694 }
695 }
696 entries_.Rewind(first_free_entry);
697 ASSERT(static_cast<uint32_t>(entries_.length()) - 1 ==
698 entries_map_.occupancy());
699 }
700
701
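// Derives a stable id for an embedder-described object from its hash, its
// label and, if available, its element count; the final shift keeps these
// ids in the even id space.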
702 SnapshotObjectId HeapObjectsMap::GenerateId(v8::RetainedObjectInfo* info) {
703 SnapshotObjectId id = static_cast<SnapshotObjectId>(info->GetHash());
704 const char* label = info->GetLabel();
705 id ^= StringHasher::HashSequentialString(label,
706 static_cast<int>(strlen(label)),
707 heap_->HashSeed());
708 intptr_t element_count = info->GetElementCount();
709 if (element_count != -1)
710 id ^= ComputeIntegerHash(static_cast<uint32_t>(element_count),
711 v8::internal::kZeroHashSeed);
712 return id << 1;
713 }
714
715
716 size_t HeapObjectsMap::GetUsedMemorySize() const {
717 return
718 sizeof(*this) +
719 sizeof(HashMap::Entry) * entries_map_.capacity() +
720 GetMemoryUsedByList(entries_) +
721 GetMemoryUsedByList(time_intervals_);
722 }
723
724
725 HeapEntriesMap::HeapEntriesMap()
726 : entries_(HeapThingsMatch) {
727 }
728
729
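// Maps arbitrary HeapThing pointers to snapshot entry indices; Map()
// returns HeapEntry::kNoEntry for things that have not been paired yet.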
730 int HeapEntriesMap::Map(HeapThing thing) {
731 HashMap::Entry* cache_entry = entries_.Lookup(thing, Hash(thing), false);
732 if (cache_entry == NULL) return HeapEntry::kNoEntry;
733 return static_cast<int>(reinterpret_cast<intptr_t>(cache_entry->value));
734 }
735
736
737 void HeapEntriesMap::Pair(HeapThing thing, int entry) {
738 HashMap::Entry* cache_entry = entries_.Lookup(thing, Hash(thing), true);
739 ASSERT(cache_entry->value == NULL);
740 cache_entry->value = reinterpret_cast<void*>(static_cast<intptr_t>(entry));
741 }
742
743
744 HeapObjectsSet::HeapObjectsSet()
745 : entries_(HeapEntriesMap::HeapThingsMatch) {
746 }
747
748
749 void HeapObjectsSet::Clear() {
750 entries_.Clear();
751 }
752
753
754 bool HeapObjectsSet::Contains(Object* obj) {
755 if (!obj->IsHeapObject()) return false;
756 HeapObject* object = HeapObject::cast(obj);
757 return entries_.Lookup(object, HeapEntriesMap::Hash(object), false) != NULL;
758 }
759
760
761 void HeapObjectsSet::Insert(Object* obj) {
762 if (!obj->IsHeapObject()) return;
763 HeapObject* object = HeapObject::cast(obj);
764 entries_.Lookup(object, HeapEntriesMap::Hash(object), true);
765 }
766
767
768 const char* HeapObjectsSet::GetTag(Object* obj) {
769 HeapObject* object = HeapObject::cast(obj);
770 HashMap::Entry* cache_entry =
771 entries_.Lookup(object, HeapEntriesMap::Hash(object), false);
772 return cache_entry != NULL
773 ? reinterpret_cast<const char*>(cache_entry->value)
774 : NULL;
775 }
776
777
778 void HeapObjectsSet::SetTag(Object* obj, const char* tag) {
779 if (!obj->IsHeapObject()) return;
780 HeapObject* object = HeapObject::cast(obj);
781 HashMap::Entry* cache_entry =
782 entries_.Lookup(object, HeapEntriesMap::Hash(object), true);
783 cache_entry->value = const_cast<char*>(tag);
784 }
785
786
787 HeapObject* const V8HeapExplorer::kInternalRootObject =
788 reinterpret_cast<HeapObject*>(
789 static_cast<intptr_t>(HeapObjectsMap::kInternalRootObjectId));
790 HeapObject* const V8HeapExplorer::kGcRootsObject =
791 reinterpret_cast<HeapObject*>(
792 static_cast<intptr_t>(HeapObjectsMap::kGcRootsObjectId));
793 HeapObject* const V8HeapExplorer::kFirstGcSubrootObject =
794 reinterpret_cast<HeapObject*>(
795 static_cast<intptr_t>(HeapObjectsMap::kGcRootsFirstSubrootId));
796 HeapObject* const V8HeapExplorer::kLastGcSubrootObject =
797 reinterpret_cast<HeapObject*>(
798 static_cast<intptr_t>(HeapObjectsMap::kFirstAvailableObjectId));
799
800
801 V8HeapExplorer::V8HeapExplorer(
802 HeapSnapshot* snapshot,
803 SnapshottingProgressReportingInterface* progress,
804 v8::HeapProfiler::ObjectNameResolver* resolver)
805 : heap_(snapshot->profiler()->heap_object_map()->heap()),
806 snapshot_(snapshot),
807 names_(snapshot_->profiler()->names()),
808 heap_object_map_(snapshot_->profiler()->heap_object_map()),
809 progress_(progress),
810 filler_(NULL),
811 global_object_name_resolver_(resolver) {
812 }
813
814
815 V8HeapExplorer::~V8HeapExplorer() {
816 }
817
818
819 HeapEntry* V8HeapExplorer::AllocateEntry(HeapThing ptr) {
820 return AddEntry(reinterpret_cast<HeapObject*>(ptr));
821 }
822
823
824 HeapEntry* V8HeapExplorer::AddEntry(HeapObject* object) {
825 if (object == kInternalRootObject) {
826 snapshot_->AddRootEntry();
827 return snapshot_->root();
828 } else if (object == kGcRootsObject) {
829 HeapEntry* entry = snapshot_->AddGcRootsEntry();
830 return entry;
831 } else if (object >= kFirstGcSubrootObject && object < kLastGcSubrootObject) {
832 HeapEntry* entry = snapshot_->AddGcSubrootEntry(GetGcSubrootOrder(object));
833 return entry;
834 } else if (object->IsJSFunction()) {
835 JSFunction* func = JSFunction::cast(object);
836 SharedFunctionInfo* shared = func->shared();
837 const char* name = shared->bound() ? "native_bind" :
838 names_->GetName(String::cast(shared->name()));
839 return AddEntry(object, HeapEntry::kClosure, name);
840 } else if (object->IsJSRegExp()) {
841 JSRegExp* re = JSRegExp::cast(object);
842 return AddEntry(object,
843 HeapEntry::kRegExp,
844 names_->GetName(re->Pattern()));
845 } else if (object->IsJSObject()) {
846 const char* name = names_->GetName(
847 GetConstructorName(JSObject::cast(object)));
848 if (object->IsJSGlobalObject()) {
849 const char* tag = objects_tags_.GetTag(object);
850 if (tag != NULL) {
851 name = names_->GetFormatted("%s / %s", name, tag);
852 }
853 }
854 return AddEntry(object, HeapEntry::kObject, name);
855 } else if (object->IsString()) {
856 String* string = String::cast(object);
857 if (string->IsConsString())
858 return AddEntry(object,
859 HeapEntry::kConsString,
860 "(concatenated string)");
861 if (string->IsSlicedString())
862 return AddEntry(object,
863 HeapEntry::kSlicedString,
864 "(sliced string)");
865 return AddEntry(object,
866 HeapEntry::kString,
867 names_->GetName(String::cast(object)));
868 } else if (object->IsCode()) {
869 return AddEntry(object, HeapEntry::kCode, "");
870 } else if (object->IsSharedFunctionInfo()) {
871 String* name = String::cast(SharedFunctionInfo::cast(object)->name());
872 return AddEntry(object,
873 HeapEntry::kCode,
874 names_->GetName(name));
875 } else if (object->IsScript()) {
876 Object* name = Script::cast(object)->name();
877 return AddEntry(object,
878 HeapEntry::kCode,
879 name->IsString()
880 ? names_->GetName(String::cast(name))
881 : "");
882 } else if (object->IsNativeContext()) {
883 return AddEntry(object, HeapEntry::kHidden, "system / NativeContext");
884 } else if (object->IsContext()) {
885 return AddEntry(object, HeapEntry::kObject, "system / Context");
886 } else if (object->IsFixedArray() ||
887 object->IsFixedDoubleArray() ||
888 object->IsByteArray() ||
889 object->IsExternalArray()) {
890 return AddEntry(object, HeapEntry::kArray, "");
891 } else if (object->IsHeapNumber()) {
892 return AddEntry(object, HeapEntry::kHeapNumber, "number");
893 }
894 return AddEntry(object, HeapEntry::kHidden, GetSystemEntryName(object));
895 }
896
897
898 HeapEntry* V8HeapExplorer::AddEntry(HeapObject* object,
899 HeapEntry::Type type,
900 const char* name) {
901 int object_size = object->Size();
902 SnapshotObjectId object_id =
903 heap_object_map_->FindOrAddEntry(object->address(), object_size);
904 return snapshot_->AddEntry(type, name, object_id, object_size);
905 }
906
907
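// Root visitor that creates one GC sub-root entry per synchronization tag,
// skipping tags for which no root pointers were reported.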
908 class GcSubrootsEnumerator : public ObjectVisitor {
909 public:
910 GcSubrootsEnumerator(
911 SnapshotFillerInterface* filler, V8HeapExplorer* explorer)
912 : filler_(filler),
913 explorer_(explorer),
914 previous_object_count_(0),
915 object_count_(0) {
916 }
917 void VisitPointers(Object** start, Object** end) {
918 object_count_ += end - start;
919 }
920 void Synchronize(VisitorSynchronization::SyncTag tag) {
921 // Skip empty subroots.
922 if (previous_object_count_ != object_count_) {
923 previous_object_count_ = object_count_;
924 filler_->AddEntry(V8HeapExplorer::GetNthGcSubrootObject(tag), explorer_);
925 }
926 }
927 private:
928 SnapshotFillerInterface* filler_;
929 V8HeapExplorer* explorer_;
930 intptr_t previous_object_count_;
931 intptr_t object_count_;
932 };
933
934
935 void V8HeapExplorer::AddRootEntries(SnapshotFillerInterface* filler) {
936 filler->AddEntry(kInternalRootObject, this);
937 filler->AddEntry(kGcRootsObject, this);
938 GcSubrootsEnumerator enumerator(filler, this);
939 heap_->IterateRoots(&enumerator, VISIT_ALL);
940 }
941
942
943 const char* V8HeapExplorer::GetSystemEntryName(HeapObject* object) {
944 switch (object->map()->instance_type()) {
945 case MAP_TYPE:
946 switch (Map::cast(object)->instance_type()) {
947 #define MAKE_STRING_MAP_CASE(instance_type, size, name, Name) \
948 case instance_type: return "system / Map (" #Name ")";
949 STRING_TYPE_LIST(MAKE_STRING_MAP_CASE)
950 #undef MAKE_STRING_MAP_CASE
951 default: return "system / Map";
952 }
953 case CELL_TYPE: return "system / Cell";
954 case PROPERTY_CELL_TYPE: return "system / PropertyCell";
955 case FOREIGN_TYPE: return "system / Foreign";
956 case ODDBALL_TYPE: return "system / Oddball";
957 #define MAKE_STRUCT_CASE(NAME, Name, name) \
958 case NAME##_TYPE: return "system / "#Name;
959 STRUCT_LIST(MAKE_STRUCT_CASE)
960 #undef MAKE_STRUCT_CASE
961 default: return "system";
962 }
963 }
964
965
966 int V8HeapExplorer::EstimateObjectsCount(HeapIterator* iterator) {
967 int objects_count = 0;
968 for (HeapObject* obj = iterator->next();
969 obj != NULL;
970 obj = iterator->next()) {
971 objects_count++;
972 }
973 return objects_count;
974 }
975
976
977 class IndexedReferencesExtractor : public ObjectVisitor {
978 public:
979 IndexedReferencesExtractor(V8HeapExplorer* generator,
980 HeapObject* parent_obj,
981 int parent)
982 : generator_(generator),
983 parent_obj_(parent_obj),
984 parent_(parent),
985 next_index_(0) {
986 }
987 void VisitCodeEntry(Address entry_address) {
988 Code* code = Code::cast(Code::GetObjectFromEntryAddress(entry_address));
989 generator_->SetInternalReference(parent_obj_, parent_, "code", code);
990 generator_->TagCodeObject(code);
991 }
992 void VisitPointers(Object** start, Object** end) {
993 for (Object** p = start; p < end; p++) {
994 if (CheckVisitedAndUnmark(p)) continue;
995 generator_->SetHiddenReference(parent_obj_, parent_, next_index_++, *p);
996 }
997 }
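// Fields that have already been reported elsewhere are temporarily tagged
// with kFailureTag so that VisitPointers() above skips them;
// CheckVisitedAndUnmark() below restores the ordinary heap object tag.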
998 static void MarkVisitedField(HeapObject* obj, int offset) {
999 if (offset < 0) return;
1000 Address field = obj->address() + offset;
1001 ASSERT(!Memory::Object_at(field)->IsFailure());
1002 ASSERT(Memory::Object_at(field)->IsHeapObject());
1003 *field |= kFailureTag;
1004 }
1005
1006 private:
1007 bool CheckVisitedAndUnmark(Object** field) {
1008 if ((*field)->IsFailure()) {
1009 intptr_t untagged = reinterpret_cast<intptr_t>(*field) & ~kFailureTagMask;
1010 *field = reinterpret_cast<Object*>(untagged | kHeapObjectTag);
1011 ASSERT((*field)->IsHeapObject());
1012 return true;
1013 }
1014 return false;
1015 }
1016 V8HeapExplorer* generator_;
1017 HeapObject* parent_obj_;
1018 int parent_;
1019 int next_index_;
1020 };
1021
1022
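// Dispatches to a type-specific extractor for well-known object kinds,
// records the map pointer, and finally lets IndexedReferencesExtractor
// report any remaining unvisited fields as hidden references.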
1023 void V8HeapExplorer::ExtractReferences(HeapObject* obj) {
1024 HeapEntry* heap_entry = GetEntry(obj);
1025 if (heap_entry == NULL) return; // No interest in this object.
1026 int entry = heap_entry->index();
1027
1028 if (obj->IsJSGlobalProxy()) {
1029 ExtractJSGlobalProxyReferences(entry, JSGlobalProxy::cast(obj));
1030 } else if (obj->IsJSObject()) {
1031 ExtractJSObjectReferences(entry, JSObject::cast(obj));
1032 } else if (obj->IsString()) {
1033 ExtractStringReferences(entry, String::cast(obj));
1034 } else if (obj->IsContext()) {
1035 ExtractContextReferences(entry, Context::cast(obj));
1036 } else if (obj->IsMap()) {
1037 ExtractMapReferences(entry, Map::cast(obj));
1038 } else if (obj->IsSharedFunctionInfo()) {
1039 ExtractSharedFunctionInfoReferences(entry, SharedFunctionInfo::cast(obj));
1040 } else if (obj->IsScript()) {
1041 ExtractScriptReferences(entry, Script::cast(obj));
1042 } else if (obj->IsAccessorPair()) {
1043 ExtractAccessorPairReferences(entry, AccessorPair::cast(obj));
1044 } else if (obj->IsCodeCache()) {
1045 ExtractCodeCacheReferences(entry, CodeCache::cast(obj));
1046 } else if (obj->IsCode()) {
1047 ExtractCodeReferences(entry, Code::cast(obj));
1048 } else if (obj->IsCell()) {
1049 ExtractCellReferences(entry, Cell::cast(obj));
1050 } else if (obj->IsPropertyCell()) {
1051 ExtractPropertyCellReferences(entry, PropertyCell::cast(obj));
1052 } else if (obj->IsAllocationSite()) {
1053 ExtractAllocationSiteReferences(entry, AllocationSite::cast(obj));
1054 }
1055 SetInternalReference(obj, entry, "map", obj->map(), HeapObject::kMapOffset);
1056
1057 // Extract unvisited fields as hidden references and restore tags
1058 // of visited fields.
1059 IndexedReferencesExtractor refs_extractor(this, obj, entry);
1060 obj->Iterate(&refs_extractor);
1061 }
1062
1063
1064 void V8HeapExplorer::ExtractJSGlobalProxyReferences(
1065 int entry, JSGlobalProxy* proxy) {
1066 SetInternalReference(proxy, entry,
1067 "native_context", proxy->native_context(),
1068 JSGlobalProxy::kNativeContextOffset);
1069 }
1070
1071
1072 void V8HeapExplorer::ExtractJSObjectReferences(
1073 int entry, JSObject* js_obj) {
1074 HeapObject* obj = js_obj;
1075 ExtractClosureReferences(js_obj, entry);
1076 ExtractPropertyReferences(js_obj, entry);
1077 ExtractElementReferences(js_obj, entry);
1078 ExtractInternalReferences(js_obj, entry);
1079 SetPropertyReference(
1080 obj, entry, heap_->proto_string(), js_obj->GetPrototype());
1081 if (obj->IsJSFunction()) {
1082 JSFunction* js_fun = JSFunction::cast(js_obj);
1083 Object* proto_or_map = js_fun->prototype_or_initial_map();
1084 if (!proto_or_map->IsTheHole()) {
1085 if (!proto_or_map->IsMap()) {
1086 SetPropertyReference(
1087 obj, entry,
1088 heap_->prototype_string(), proto_or_map,
1089 NULL,
1090 JSFunction::kPrototypeOrInitialMapOffset);
1091 } else {
1092 SetPropertyReference(
1093 obj, entry,
1094 heap_->prototype_string(), js_fun->prototype());
1095 SetInternalReference(
1096 obj, entry, "initial_map", proto_or_map,
1097 JSFunction::kPrototypeOrInitialMapOffset);
1098 }
1099 }
1100 SharedFunctionInfo* shared_info = js_fun->shared();
1101 // JSFunction has either bindings or literals and never both.
1102 bool bound = shared_info->bound();
1103 TagObject(js_fun->literals_or_bindings(),
1104 bound ? "(function bindings)" : "(function literals)");
1105 SetInternalReference(js_fun, entry,
1106 bound ? "bindings" : "literals",
1107 js_fun->literals_or_bindings(),
1108 JSFunction::kLiteralsOffset);
1109 TagObject(shared_info, "(shared function info)");
1110 SetInternalReference(js_fun, entry,
1111 "shared", shared_info,
1112 JSFunction::kSharedFunctionInfoOffset);
1113 TagObject(js_fun->context(), "(context)");
1114 SetInternalReference(js_fun, entry,
1115 "context", js_fun->context(),
1116 JSFunction::kContextOffset);
1117 for (int i = JSFunction::kNonWeakFieldsEndOffset;
1118 i < JSFunction::kSize;
1119 i += kPointerSize) {
1120 SetWeakReference(js_fun, entry, i, *HeapObject::RawField(js_fun, i), i);
1121 }
1122 } else if (obj->IsGlobalObject()) {
1123 GlobalObject* global_obj = GlobalObject::cast(obj);
1124 SetInternalReference(global_obj, entry,
1125 "builtins", global_obj->builtins(),
1126 GlobalObject::kBuiltinsOffset);
1127 SetInternalReference(global_obj, entry,
1128 "native_context", global_obj->native_context(),
1129 GlobalObject::kNativeContextOffset);
1130 SetInternalReference(global_obj, entry,
1131 "global_receiver", global_obj->global_receiver(),
1132 GlobalObject::kGlobalReceiverOffset);
1133 }
1134 TagObject(js_obj->properties(), "(object properties)");
1135 SetInternalReference(obj, entry,
1136 "properties", js_obj->properties(),
1137 JSObject::kPropertiesOffset);
1138 TagObject(js_obj->elements(), "(object elements)");
1139 SetInternalReference(obj, entry,
1140 "elements", js_obj->elements(),
1141 JSObject::kElementsOffset);
1142 }
1143
1144
1145 void V8HeapExplorer::ExtractStringReferences(int entry, String* string) {
1146 if (string->IsConsString()) {
1147 ConsString* cs = ConsString::cast(string);
1148 SetInternalReference(cs, entry, "first", cs->first(),
1149 ConsString::kFirstOffset);
1150 SetInternalReference(cs, entry, "second", cs->second(),
1151 ConsString::kSecondOffset);
1152 } else if (string->IsSlicedString()) {
1153 SlicedString* ss = SlicedString::cast(string);
1154 SetInternalReference(ss, entry, "parent", ss->parent(),
1155 SlicedString::kParentOffset);
1156 }
1157 }
1158
1159
1160 void V8HeapExplorer::ExtractContextReferences(int entry, Context* context) {
1161 if (context == context->declaration_context()) {
1162 ScopeInfo* scope_info = context->closure()->shared()->scope_info();
1163 // Add context allocated locals.
1164 int context_locals = scope_info->ContextLocalCount();
1165 for (int i = 0; i < context_locals; ++i) {
1166 String* local_name = scope_info->ContextLocalName(i);
1167 int idx = Context::MIN_CONTEXT_SLOTS + i;
1168 SetContextReference(context, entry, local_name, context->get(idx),
1169 Context::OffsetOfElementAt(idx));
1170 }
1171 if (scope_info->HasFunctionName()) {
1172 String* name = scope_info->FunctionName();
1173 VariableMode mode;
1174 int idx = scope_info->FunctionContextSlotIndex(name, &mode);
1175 if (idx >= 0) {
1176 SetContextReference(context, entry, name, context->get(idx),
1177 Context::OffsetOfElementAt(idx));
1178 }
1179 }
1180 }
1181
1182 #define EXTRACT_CONTEXT_FIELD(index, type, name) \
1183 SetInternalReference(context, entry, #name, context->get(Context::index), \
1184 FixedArray::OffsetOfElementAt(Context::index));
1185 EXTRACT_CONTEXT_FIELD(CLOSURE_INDEX, JSFunction, closure);
1186 EXTRACT_CONTEXT_FIELD(PREVIOUS_INDEX, Context, previous);
1187 EXTRACT_CONTEXT_FIELD(EXTENSION_INDEX, Object, extension);
1188 EXTRACT_CONTEXT_FIELD(GLOBAL_OBJECT_INDEX, GlobalObject, global);
1189 if (context->IsNativeContext()) {
1190 TagObject(context->jsfunction_result_caches(),
1191 "(context func. result caches)");
1192 TagObject(context->normalized_map_cache(), "(context norm. map cache)");
1193 TagObject(context->runtime_context(), "(runtime context)");
1194 TagObject(context->embedder_data(), "(context data)");
1195 NATIVE_CONTEXT_FIELDS(EXTRACT_CONTEXT_FIELD);
1196 #undef EXTRACT_CONTEXT_FIELD
1197 for (int i = Context::FIRST_WEAK_SLOT;
1198 i < Context::NATIVE_CONTEXT_SLOTS;
1199 ++i) {
1200 SetWeakReference(context, entry, i, context->get(i),
1201 FixedArray::OffsetOfElementAt(i));
1202 }
1203 }
1204 }
1205
1206
1207 void V8HeapExplorer::ExtractMapReferences(int entry, Map* map) {
1208 if (map->HasTransitionArray()) {
1209 TransitionArray* transitions = map->transitions();
1210 int transitions_entry = GetEntry(transitions)->index();
1211 Object* back_pointer = transitions->back_pointer_storage();
1212 TagObject(back_pointer, "(back pointer)");
1213 SetInternalReference(transitions, transitions_entry,
1214 "back_pointer", back_pointer);
1215 TagObject(transitions, "(transition array)");
1216 SetInternalReference(map, entry,
1217 "transitions", transitions,
1218 Map::kTransitionsOrBackPointerOffset);
1219 } else {
1220 Object* back_pointer = map->GetBackPointer();
1221 TagObject(back_pointer, "(back pointer)");
1222 SetInternalReference(map, entry,
1223 "back_pointer", back_pointer,
1224 Map::kTransitionsOrBackPointerOffset);
1225 }
1226 DescriptorArray* descriptors = map->instance_descriptors();
1227 TagObject(descriptors, "(map descriptors)");
1228 SetInternalReference(map, entry,
1229 "descriptors", descriptors,
1230 Map::kDescriptorsOffset);
1231
1232 SetInternalReference(map, entry,
1233 "code_cache", map->code_cache(),
1234 Map::kCodeCacheOffset);
1235 SetInternalReference(map, entry,
1236 "prototype", map->prototype(), Map::kPrototypeOffset);
1237 SetInternalReference(map, entry,
1238 "constructor", map->constructor(),
1239 Map::kConstructorOffset);
1240 TagObject(map->dependent_code(), "(dependent code)");
1241 SetInternalReference(map, entry,
1242 "dependent_code", map->dependent_code(),
1243 Map::kDependentCodeOffset);
1244 }
1245
1246
1247 void V8HeapExplorer::ExtractSharedFunctionInfoReferences(
1248 int entry, SharedFunctionInfo* shared) {
1249 HeapObject* obj = shared;
1250 String* shared_name = shared->DebugName();
1251 const char* name = NULL;
1252 if (shared_name != *heap_->isolate()->factory()->empty_string()) {
1253 name = names_->GetName(shared_name);
1254 TagObject(shared->code(), names_->GetFormatted("(code for %s)", name));
1255 } else {
1256 TagObject(shared->code(), names_->GetFormatted("(%s code)",
1257 Code::Kind2String(shared->code()->kind())));
1258 }
1259
1260 SetInternalReference(obj, entry,
1261 "name", shared->name(),
1262 SharedFunctionInfo::kNameOffset);
1263 SetInternalReference(obj, entry,
1264 "code", shared->code(),
1265 SharedFunctionInfo::kCodeOffset);
1266 TagObject(shared->scope_info(), "(function scope info)");
1267 SetInternalReference(obj, entry,
1268 "scope_info", shared->scope_info(),
1269 SharedFunctionInfo::kScopeInfoOffset);
1270 SetInternalReference(obj, entry,
1271 "instance_class_name", shared->instance_class_name(),
1272 SharedFunctionInfo::kInstanceClassNameOffset);
1273 SetInternalReference(obj, entry,
1274 "script", shared->script(),
1275 SharedFunctionInfo::kScriptOffset);
1276 const char* construct_stub_name = name ?
1277 names_->GetFormatted("(construct stub code for %s)", name) :
1278 "(construct stub code)";
1279 TagObject(shared->construct_stub(), construct_stub_name);
1280 SetInternalReference(obj, entry,
1281 "construct_stub", shared->construct_stub(),
1282 SharedFunctionInfo::kConstructStubOffset);
1283 SetInternalReference(obj, entry,
1284 "function_data", shared->function_data(),
1285 SharedFunctionInfo::kFunctionDataOffset);
1286 SetInternalReference(obj, entry,
1287 "debug_info", shared->debug_info(),
1288 SharedFunctionInfo::kDebugInfoOffset);
1289 SetInternalReference(obj, entry,
1290 "inferred_name", shared->inferred_name(),
1291 SharedFunctionInfo::kInferredNameOffset);
1292 SetInternalReference(obj, entry,
1293 "optimized_code_map", shared->optimized_code_map(),
1294 SharedFunctionInfo::kOptimizedCodeMapOffset);
1295 SetWeakReference(obj, entry,
1296 1, shared->initial_map(),
1297 SharedFunctionInfo::kInitialMapOffset);
1298 }
1299
1300
1301 void V8HeapExplorer::ExtractScriptReferences(int entry, Script* script) {
1302 HeapObject* obj = script;
1303 SetInternalReference(obj, entry,
1304 "source", script->source(),
1305 Script::kSourceOffset);
1306 SetInternalReference(obj, entry,
1307 "name", script->name(),
1308 Script::kNameOffset);
1309 SetInternalReference(obj, entry,
1310 "data", script->data(),
1311 Script::kDataOffset);
1312 SetInternalReference(obj, entry,
1313 "context_data", script->context_data(),
1314 Script::kContextOffset);
1315 TagObject(script->line_ends(), "(script line ends)");
1316 SetInternalReference(obj, entry,
1317 "line_ends", script->line_ends(),
1318 Script::kLineEndsOffset);
1319 }
1320
1321
1322 void V8HeapExplorer::ExtractAccessorPairReferences(
1323 int entry, AccessorPair* accessors) {
1324 SetInternalReference(accessors, entry, "getter", accessors->getter(),
1325 AccessorPair::kGetterOffset);
1326 SetInternalReference(accessors, entry, "setter", accessors->setter(),
1327 AccessorPair::kSetterOffset);
1328 }
1329
1330
1331 void V8HeapExplorer::ExtractCodeCacheReferences(
1332 int entry, CodeCache* code_cache) {
1333 TagObject(code_cache->default_cache(), "(default code cache)");
1334 SetInternalReference(code_cache, entry,
1335 "default_cache", code_cache->default_cache(),
1336 CodeCache::kDefaultCacheOffset);
1337 TagObject(code_cache->normal_type_cache(), "(code type cache)");
1338 SetInternalReference(code_cache, entry,
1339 "type_cache", code_cache->normal_type_cache(),
1340 CodeCache::kNormalTypeCacheOffset);
1341 }
1342
1343
1344 void V8HeapExplorer::TagCodeObject(Code* code, const char* external_name) {
1345 TagObject(code, names_->GetFormatted("(%s code)", external_name));
1346 }
1347
1348
1349 void V8HeapExplorer::TagCodeObject(Code* code) {
1350 if (code->kind() == Code::STUB) {
1351 TagObject(code, names_->GetFormatted(
1352 "(%s code)", CodeStub::MajorName(
1353 static_cast<CodeStub::Major>(code->major_key()), true)));
1354 }
1355 }
1356
1357
1358 void V8HeapExplorer::ExtractCodeReferences(int entry, Code* code) {
1359 TagCodeObject(code);
1360 TagObject(code->relocation_info(), "(code relocation info)");
1361 SetInternalReference(code, entry,
1362 "relocation_info", code->relocation_info(),
1363 Code::kRelocationInfoOffset);
1364 SetInternalReference(code, entry,
1365 "handler_table", code->handler_table(),
1366 Code::kHandlerTableOffset);
1367 TagObject(code->deoptimization_data(), "(code deopt data)");
1368 SetInternalReference(code, entry,
1369 "deoptimization_data", code->deoptimization_data(),
1370 Code::kDeoptimizationDataOffset);
1371 if (code->kind() == Code::FUNCTION) {
1372 SetInternalReference(code, entry,
1373 "type_feedback_info", code->type_feedback_info(),
1374 Code::kTypeFeedbackInfoOffset);
1375 }
1376 SetInternalReference(code, entry,
1377 "gc_metadata", code->gc_metadata(),
1378 Code::kGCMetadataOffset);
1379 }
1380
1381
1382 void V8HeapExplorer::ExtractCellReferences(int entry, Cell* cell) {
1383 SetInternalReference(cell, entry, "value", cell->value(), Cell::kValueOffset);
1384 }
1385
1386
1387 void V8HeapExplorer::ExtractPropertyCellReferences(int entry,
1388 PropertyCell* cell) {
1389 ExtractCellReferences(entry, cell);
1390 SetInternalReference(cell, entry, "type", cell->type(),
1391 PropertyCell::kTypeOffset);
1392 SetInternalReference(cell, entry, "dependent_code", cell->dependent_code(),
1393 PropertyCell::kDependentCodeOffset);
1394 }
1395
1396
1397 void V8HeapExplorer::ExtractAllocationSiteReferences(int entry,
1398 AllocationSite* site) {
1399 SetInternalReference(site, entry, "transition_info", site->transition_info(),
1400 AllocationSite::kTransitionInfoOffset);
1401 SetInternalReference(site, entry, "nested_site", site->nested_site(),
1402 AllocationSite::kNestedSiteOffset);
1403 SetInternalReference(site, entry, "memento_found_count",
1404 site->memento_found_count(),
1405 AllocationSite::kMementoFoundCountOffset);
1406 SetInternalReference(site, entry, "memento_create_count",
1407 site->memento_create_count(),
1408 AllocationSite::kMementoCreateCountOffset);
1409 SetInternalReference(site, entry, "pretenure_decision",
1410 site->pretenure_decision(),
1411 AllocationSite::kPretenureDecisionOffset);
1412 SetInternalReference(site, entry, "dependent_code", site->dependent_code(),
1413 AllocationSite::kDependentCodeOffset);
1414 }
1415
1416
1417 void V8HeapExplorer::ExtractClosureReferences(JSObject* js_obj, int entry) {
1418 if (!js_obj->IsJSFunction()) return;
1419
1420 JSFunction* func = JSFunction::cast(js_obj);
1421 if (func->shared()->bound()) {
1422 FixedArray* bindings = func->function_bindings();
1423 SetNativeBindReference(js_obj, entry, "bound_this",
1424 bindings->get(JSFunction::kBoundThisIndex));
1425 SetNativeBindReference(js_obj, entry, "bound_function",
1426 bindings->get(JSFunction::kBoundFunctionIndex));
1427 for (int i = JSFunction::kBoundArgumentsStartIndex;
1428 i < bindings->length(); i++) {
1429 const char* reference_name = names_->GetFormatted(
1430 "bound_argument_%d",
1431 i - JSFunction::kBoundArgumentsStartIndex);
1432 SetNativeBindReference(js_obj, entry, reference_name,
1433 bindings->get(i));
1434 }
1435 }
1436 }
1437
1438
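// Fast-mode objects are walked via the map's descriptor array (fields,
// constants and accessor pairs); dictionary-mode objects are walked via
// the property dictionary, unwrapping PropertyCells. Hidden properties are
// tagged and reported as internal references instead of property
// references.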
1439 void V8HeapExplorer::ExtractPropertyReferences(JSObject* js_obj, int entry) {
1440 if (js_obj->HasFastProperties()) {
1441 DescriptorArray* descs = js_obj->map()->instance_descriptors();
1442 int real_size = js_obj->map()->NumberOfOwnDescriptors();
1443 for (int i = 0; i < real_size; i++) {
1444 switch (descs->GetType(i)) {
1445 case FIELD: {
1446 int index = descs->GetFieldIndex(i);
1447
1448 Name* k = descs->GetKey(i);
1449 if (index < js_obj->map()->inobject_properties()) {
1450 Object* value = js_obj->InObjectPropertyAt(index);
1451 if (k != heap_->hidden_string()) {
1452 SetPropertyReference(
1453 js_obj, entry,
1454 k, value,
1455 NULL,
1456 js_obj->GetInObjectPropertyOffset(index));
1457 } else {
1458 TagObject(value, "(hidden properties)");
1459 SetInternalReference(
1460 js_obj, entry,
1461 "hidden_properties", value,
1462 js_obj->GetInObjectPropertyOffset(index));
1463 }
1464 } else {
1465 Object* value = js_obj->RawFastPropertyAt(index);
1466 if (k != heap_->hidden_string()) {
1467 SetPropertyReference(js_obj, entry, k, value);
1468 } else {
1469 TagObject(value, "(hidden properties)");
1470 SetInternalReference(js_obj, entry, "hidden_properties", value);
1471 }
1472 }
1473 break;
1474 }
1475 case CONSTANT:
1476 SetPropertyReference(
1477 js_obj, entry,
1478 descs->GetKey(i), descs->GetConstant(i));
1479 break;
1480 case CALLBACKS:
1481 ExtractAccessorPairProperty(
1482 js_obj, entry,
1483 descs->GetKey(i), descs->GetValue(i));
1484 break;
1485 case NORMAL: // only in slow mode
1486 case HANDLER: // only in lookup results, not in descriptors
1487 case INTERCEPTOR: // only in lookup results, not in descriptors
1488 break;
1489 case TRANSITION:
1490 case NONEXISTENT:
1491 UNREACHABLE();
1492 break;
1493 }
1494 }
1495 } else {
1496 NameDictionary* dictionary = js_obj->property_dictionary();
1497 int length = dictionary->Capacity();
1498 for (int i = 0; i < length; ++i) {
1499 Object* k = dictionary->KeyAt(i);
1500 if (dictionary->IsKey(k)) {
1501 Object* target = dictionary->ValueAt(i);
1502 // We assume that global objects can only have slow properties.
1503 Object* value = target->IsPropertyCell()
1504 ? PropertyCell::cast(target)->value()
1505 : target;
1506 if (k == heap_->hidden_string()) {
1507 TagObject(value, "(hidden properties)");
1508 SetInternalReference(js_obj, entry, "hidden_properties", value);
1509 continue;
1510 }
1511 if (ExtractAccessorPairProperty(js_obj, entry, k, value)) continue;
1512 SetPropertyReference(js_obj, entry, String::cast(k), value);
1513 }
1514 }
1515 }
1516 }
1517
1518
1519 bool V8HeapExplorer::ExtractAccessorPairProperty(
1520 JSObject* js_obj, int entry, Object* key, Object* callback_obj) {
1521 if (!callback_obj->IsAccessorPair()) return false;
1522 AccessorPair* accessors = AccessorPair::cast(callback_obj);
1523 Object* getter = accessors->getter();
1524 if (!getter->IsOddball()) {
1525 SetPropertyReference(js_obj, entry, String::cast(key), getter, "get %s");
1526 }
1527 Object* setter = accessors->setter();
1528 if (!setter->IsOddball()) {
1529 SetPropertyReference(js_obj, entry, String::cast(key), setter, "set %s");
1530 }
1531 return true;
1532 }
1533
1534
1535 void V8HeapExplorer::ExtractElementReferences(JSObject* js_obj, int entry) {
1536 if (js_obj->HasFastObjectElements()) {
1537 FixedArray* elements = FixedArray::cast(js_obj->elements());
1538 int length = js_obj->IsJSArray() ?
1539 Smi::cast(JSArray::cast(js_obj)->length())->value() :
1540 elements->length();
1541 for (int i = 0; i < length; ++i) {
1542 if (!elements->get(i)->IsTheHole()) {
1543 SetElementReference(js_obj, entry, i, elements->get(i));
1544 }
1545 }
1546 } else if (js_obj->HasDictionaryElements()) {
1547 SeededNumberDictionary* dictionary = js_obj->element_dictionary();
1548 int length = dictionary->Capacity();
1549 for (int i = 0; i < length; ++i) {
1550 Object* k = dictionary->KeyAt(i);
1551 if (dictionary->IsKey(k)) {
1552 ASSERT(k->IsNumber());
1553 uint32_t index = static_cast<uint32_t>(k->Number());
1554 SetElementReference(js_obj, entry, index, dictionary->ValueAt(i));
1555 }
1556 }
1557 }
1558 }
1559
1560
void V8HeapExplorer::ExtractInternalReferences(JSObject* js_obj, int entry) {
1562 int length = js_obj->GetInternalFieldCount();
1563 for (int i = 0; i < length; ++i) {
1564 Object* o = js_obj->GetInternalField(i);
1565 SetInternalReference(
1566 js_obj, entry, i, o, js_obj->GetInternalFieldOffset(i));
1567 }
1568 }
1569
1570
String* V8HeapExplorer::GetConstructorName(JSObject* object) {
1572 Heap* heap = object->GetHeap();
1573 if (object->IsJSFunction()) return heap->closure_string();
1574 String* constructor_name = object->constructor_name();
1575 if (constructor_name == heap->Object_string()) {
1576 // Look up an immediate "constructor" property, if it is a function,
1577 // return its name. This is for instances of binding objects, which
1578 // have prototype constructor type "Object".
1579 Object* constructor_prop = NULL;
1580 LookupResult result(heap->isolate());
1581 object->LocalLookupRealNamedProperty(heap->constructor_string(), &result);
1582 if (!result.IsFound()) return object->constructor_name();
1583
1584 constructor_prop = result.GetLazyValue();
1585 if (constructor_prop->IsJSFunction()) {
1586 Object* maybe_name =
1587 JSFunction::cast(constructor_prop)->shared()->name();
1588 if (maybe_name->IsString()) {
1589 String* name = String::cast(maybe_name);
1590 if (name->length() > 0) return name;
1591 }
1592 }
1593 }
1594 return object->constructor_name();
1595 }
1596
1597
HeapEntry* V8HeapExplorer::GetEntry(Object* obj) {
1599 if (!obj->IsHeapObject()) return NULL;
1600 return filler_->FindOrAddEntry(obj, this);
1601 }
1602
1603
1604 class RootsReferencesExtractor : public ObjectVisitor {
1605 private:
1606 struct IndexTag {
    IndexTag(int index, VisitorSynchronization::SyncTag tag)
1608 : index(index), tag(tag) { }
1609 int index;
1610 VisitorSynchronization::SyncTag tag;
1611 };
1612
1613 public:
  explicit RootsReferencesExtractor(Heap* heap)
1615 : collecting_all_references_(false),
1616 previous_reference_count_(0),
1617 heap_(heap) {
1618 }
1619
  void VisitPointers(Object** start, Object** end) {
1621 if (collecting_all_references_) {
1622 for (Object** p = start; p < end; p++) all_references_.Add(*p);
1623 } else {
1624 for (Object** p = start; p < end; p++) strong_references_.Add(*p);
1625 }
1626 }
1627
  void SetCollectingAllReferences() { collecting_all_references_ = true; }
1629
  void FillReferences(V8HeapExplorer* explorer) {
1631 ASSERT(strong_references_.length() <= all_references_.length());
1632 Builtins* builtins = heap_->isolate()->builtins();
1633 for (int i = 0; i < reference_tags_.length(); ++i) {
1634 explorer->SetGcRootsReference(reference_tags_[i].tag);
1635 }
1636 int strong_index = 0, all_index = 0, tags_index = 0, builtin_index = 0;
1637 while (all_index < all_references_.length()) {
1638 if (strong_index < strong_references_.length() &&
1639 strong_references_[strong_index] == all_references_[all_index]) {
1640 explorer->SetGcSubrootReference(reference_tags_[tags_index].tag,
1641 false,
1642 all_references_[all_index]);
1643 ++strong_index;
1644 } else {
1645 explorer->SetGcSubrootReference(reference_tags_[tags_index].tag,
1646 true,
1647 all_references_[all_index]);
1648 }
1649 if (reference_tags_[tags_index].tag ==
1650 VisitorSynchronization::kBuiltins) {
1651 ASSERT(all_references_[all_index]->IsCode());
1652 explorer->TagCodeObject(Code::cast(all_references_[all_index]),
1653 builtins->name(builtin_index++));
1654 }
1655 ++all_index;
1656 if (reference_tags_[tags_index].index == all_index) ++tags_index;
1657 }
1658 }
1659
  void Synchronize(VisitorSynchronization::SyncTag tag) {
1661 if (collecting_all_references_ &&
1662 previous_reference_count_ != all_references_.length()) {
1663 previous_reference_count_ = all_references_.length();
1664 reference_tags_.Add(IndexTag(previous_reference_count_, tag));
1665 }
1666 }
1667
1668 private:
1669 bool collecting_all_references_;
1670 List<Object*> strong_references_;
1671 List<Object*> all_references_;
1672 int previous_reference_count_;
1673 List<IndexTag> reference_tags_;
1674 Heap* heap_;
1675 };
1676
1677
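// Note on the root passes below (sketch of the intent, inferred from the
// code): the roots are visited twice through RootsReferencesExtractor --
// first with VISIT_ONLY_STRONG, then with VISIT_ALL. References collected
// only by the VISIT_ALL pass are later emitted as weak gc_subroot edges in
// FillReferences(), while references seen by both passes become regular
// (element) edges.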
bool V8HeapExplorer::IterateAndExtractReferences(
    SnapshotFillerInterface* filler) {
  HeapIterator iterator(heap_, HeapIterator::kFilterUnreachable);

  filler_ = filler;
  bool interrupted = false;

  // Heap iteration with filtering must be finished in any case.
  for (HeapObject* obj = iterator.next();
       obj != NULL;
       obj = iterator.next(), progress_->ProgressStep()) {
    if (!interrupted) {
      ExtractReferences(obj);
      if (!progress_->ProgressReport(false)) interrupted = true;
    }
  }
  if (interrupted) {
    filler_ = NULL;
    return false;
  }

  SetRootGcRootsReference();
  RootsReferencesExtractor extractor(heap_);
  heap_->IterateRoots(&extractor, VISIT_ONLY_STRONG);
  extractor.SetCollectingAllReferences();
  heap_->IterateRoots(&extractor, VISIT_ALL);
  extractor.FillReferences(this);
  filler_ = NULL;
  return progress_->ProgressReport(true);
}


bool V8HeapExplorer::IsEssentialObject(Object* object) {
1711 return object->IsHeapObject()
1712 && !object->IsOddball()
1713 && object != heap_->empty_byte_array()
1714 && object != heap_->empty_fixed_array()
1715 && object != heap_->empty_descriptor_array()
1716 && object != heap_->fixed_array_map()
1717 && object != heap_->cell_map()
1718 && object != heap_->global_property_cell_map()
1719 && object != heap_->shared_function_info_map()
1720 && object != heap_->free_space_map()
1721 && object != heap_->one_pointer_filler_map()
1722 && object != heap_->two_pointer_filler_map();
1723 }
1724
1725
void V8HeapExplorer::SetContextReference(HeapObject* parent_obj,
1727 int parent_entry,
1728 String* reference_name,
1729 Object* child_obj,
1730 int field_offset) {
1731 ASSERT(parent_entry == GetEntry(parent_obj)->index());
1732 HeapEntry* child_entry = GetEntry(child_obj);
1733 if (child_entry != NULL) {
1734 filler_->SetNamedReference(HeapGraphEdge::kContextVariable,
1735 parent_entry,
1736 names_->GetName(reference_name),
1737 child_entry);
1738 IndexedReferencesExtractor::MarkVisitedField(parent_obj, field_offset);
1739 }
1740 }
1741
1742
void V8HeapExplorer::SetNativeBindReference(HeapObject* parent_obj,
1744 int parent_entry,
1745 const char* reference_name,
1746 Object* child_obj) {
1747 ASSERT(parent_entry == GetEntry(parent_obj)->index());
1748 HeapEntry* child_entry = GetEntry(child_obj);
1749 if (child_entry != NULL) {
1750 filler_->SetNamedReference(HeapGraphEdge::kShortcut,
1751 parent_entry,
1752 reference_name,
1753 child_entry);
1754 }
1755 }
1756
1757
void V8HeapExplorer::SetElementReference(HeapObject* parent_obj,
1759 int parent_entry,
1760 int index,
1761 Object* child_obj) {
1762 ASSERT(parent_entry == GetEntry(parent_obj)->index());
1763 HeapEntry* child_entry = GetEntry(child_obj);
1764 if (child_entry != NULL) {
1765 filler_->SetIndexedReference(HeapGraphEdge::kElement,
1766 parent_entry,
1767 index,
1768 child_entry);
1769 }
1770 }
1771
1772
void V8HeapExplorer::SetInternalReference(HeapObject* parent_obj,
1774 int parent_entry,
1775 const char* reference_name,
1776 Object* child_obj,
1777 int field_offset) {
1778 ASSERT(parent_entry == GetEntry(parent_obj)->index());
1779 HeapEntry* child_entry = GetEntry(child_obj);
1780 if (child_entry == NULL) return;
1781 if (IsEssentialObject(child_obj)) {
1782 filler_->SetNamedReference(HeapGraphEdge::kInternal,
1783 parent_entry,
1784 reference_name,
1785 child_entry);
1786 }
1787 IndexedReferencesExtractor::MarkVisitedField(parent_obj, field_offset);
1788 }
1789
1790
void V8HeapExplorer::SetInternalReference(HeapObject* parent_obj,
1792 int parent_entry,
1793 int index,
1794 Object* child_obj,
1795 int field_offset) {
1796 ASSERT(parent_entry == GetEntry(parent_obj)->index());
1797 HeapEntry* child_entry = GetEntry(child_obj);
1798 if (child_entry == NULL) return;
1799 if (IsEssentialObject(child_obj)) {
1800 filler_->SetNamedReference(HeapGraphEdge::kInternal,
1801 parent_entry,
1802 names_->GetName(index),
1803 child_entry);
1804 }
1805 IndexedReferencesExtractor::MarkVisitedField(parent_obj, field_offset);
1806 }
1807
1808
void V8HeapExplorer::SetHiddenReference(HeapObject* parent_obj,
1810 int parent_entry,
1811 int index,
1812 Object* child_obj) {
1813 ASSERT(parent_entry == GetEntry(parent_obj)->index());
1814 HeapEntry* child_entry = GetEntry(child_obj);
1815 if (child_entry != NULL && IsEssentialObject(child_obj)) {
1816 filler_->SetIndexedReference(HeapGraphEdge::kHidden,
1817 parent_entry,
1818 index,
1819 child_entry);
1820 }
1821 }
1822
1823
void V8HeapExplorer::SetWeakReference(HeapObject* parent_obj,
1825 int parent_entry,
1826 int index,
1827 Object* child_obj,
1828 int field_offset) {
1829 ASSERT(parent_entry == GetEntry(parent_obj)->index());
1830 HeapEntry* child_entry = GetEntry(child_obj);
1831 if (child_entry == NULL) return;
1832 if (IsEssentialObject(child_obj)) {
1833 filler_->SetIndexedReference(HeapGraphEdge::kWeak,
1834 parent_entry,
1835 index,
1836 child_entry);
1837 }
1838 IndexedReferencesExtractor::MarkVisitedField(parent_obj, field_offset);
1839 }
1840
1841
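// Worked example (illustrative): when called from ExtractAccessorPairProperty
// with name_format_string == "get %s" and reference_name == "bar", the edge
// below is named "get bar". When name_format_string is NULL the plain
// property name is used, and an empty non-symbol name downgrades the edge
// from kProperty to kInternal.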
void V8HeapExplorer::SetPropertyReference(HeapObject* parent_obj,
                                          int parent_entry,
                                          Name* reference_name,
                                          Object* child_obj,
                                          const char* name_format_string,
                                          int field_offset) {
  ASSERT(parent_entry == GetEntry(parent_obj)->index());
  HeapEntry* child_entry = GetEntry(child_obj);
  if (child_entry != NULL) {
    HeapGraphEdge::Type type =
        reference_name->IsSymbol() || String::cast(reference_name)->length() > 0
            ? HeapGraphEdge::kProperty : HeapGraphEdge::kInternal;
    const char* name = name_format_string != NULL && reference_name->IsString()
        ? names_->GetFormatted(
              name_format_string,
              *String::cast(reference_name)->ToCString(
                  DISALLOW_NULLS, ROBUST_STRING_TRAVERSAL))
        : names_->GetName(reference_name);

    filler_->SetNamedReference(type,
                               parent_entry,
                               name,
                               child_entry);
    IndexedReferencesExtractor::MarkVisitedField(parent_obj, field_offset);
  }
}


void V8HeapExplorer::SetRootGcRootsReference() {
1871 filler_->SetIndexedAutoIndexReference(
1872 HeapGraphEdge::kElement,
1873 snapshot_->root()->index(),
1874 snapshot_->gc_roots());
1875 }
1876
1877
void V8HeapExplorer::SetUserGlobalReference(Object* child_obj) {
1879 HeapEntry* child_entry = GetEntry(child_obj);
1880 ASSERT(child_entry != NULL);
1881 filler_->SetNamedAutoIndexReference(
1882 HeapGraphEdge::kShortcut,
1883 snapshot_->root()->index(),
1884 child_entry);
1885 }
1886
1887
void V8HeapExplorer::SetGcRootsReference(VisitorSynchronization::SyncTag tag) {
1889 filler_->SetIndexedAutoIndexReference(
1890 HeapGraphEdge::kElement,
1891 snapshot_->gc_roots()->index(),
1892 snapshot_->gc_subroot(tag));
1893 }
1894
1895
void V8HeapExplorer::SetGcSubrootReference(
1897 VisitorSynchronization::SyncTag tag, bool is_weak, Object* child_obj) {
1898 HeapEntry* child_entry = GetEntry(child_obj);
1899 if (child_entry != NULL) {
1900 const char* name = GetStrongGcSubrootName(child_obj);
1901 if (name != NULL) {
1902 filler_->SetNamedReference(
1903 HeapGraphEdge::kInternal,
1904 snapshot_->gc_subroot(tag)->index(),
1905 name,
1906 child_entry);
1907 } else {
1908 filler_->SetIndexedAutoIndexReference(
1909 is_weak ? HeapGraphEdge::kWeak : HeapGraphEdge::kElement,
1910 snapshot_->gc_subroot(tag)->index(),
1911 child_entry);
1912 }
1913
1914 // Add a shortcut to JS global object reference at snapshot root.
1915 if (child_obj->IsNativeContext()) {
1916 Context* context = Context::cast(child_obj);
1917 GlobalObject* global = context->global_object();
1918 if (global->IsJSGlobalObject()) {
1919 bool is_debug_object = false;
1920 #ifdef ENABLE_DEBUGGER_SUPPORT
1921 is_debug_object = heap_->isolate()->debug()->IsDebugGlobal(global);
1922 #endif
1923 if (!is_debug_object && !user_roots_.Contains(global)) {
1924 user_roots_.Insert(global);
1925 SetUserGlobalReference(global);
1926 }
1927 }
1928 }
1929 }
1930 }
1931
1932
const char* V8HeapExplorer::GetStrongGcSubrootName(Object* object) {
1934 if (strong_gc_subroot_names_.is_empty()) {
1935 #define NAME_ENTRY(name) strong_gc_subroot_names_.SetTag(heap_->name(), #name);
1936 #define ROOT_NAME(type, name, camel_name) NAME_ENTRY(name)
1937 STRONG_ROOT_LIST(ROOT_NAME)
1938 #undef ROOT_NAME
1939 #define STRUCT_MAP_NAME(NAME, Name, name) NAME_ENTRY(name##_map)
1940 STRUCT_LIST(STRUCT_MAP_NAME)
1941 #undef STRUCT_MAP_NAME
1942 #define STRING_NAME(name, str) NAME_ENTRY(name)
1943 INTERNALIZED_STRING_LIST(STRING_NAME)
1944 #undef STRING_NAME
1945 #undef NAME_ENTRY
1946 CHECK(!strong_gc_subroot_names_.is_empty());
1947 }
1948 return strong_gc_subroot_names_.GetTag(object);
1949 }
1950
1951
void V8HeapExplorer::TagObject(Object* obj, const char* tag) {
1953 if (IsEssentialObject(obj)) {
1954 HeapEntry* entry = GetEntry(obj);
1955 if (entry->name()[0] == '\0') {
1956 entry->set_name(tag);
1957 }
1958 }
1959 }
1960
1961
1962 class GlobalObjectsEnumerator : public ObjectVisitor {
1963 public:
  virtual void VisitPointers(Object** start, Object** end) {
1965 for (Object** p = start; p < end; p++) {
1966 if ((*p)->IsNativeContext()) {
1967 Context* context = Context::cast(*p);
1968 JSObject* proxy = context->global_proxy();
1969 if (proxy->IsJSGlobalProxy()) {
1970 Object* global = proxy->map()->prototype();
1971 if (global->IsJSGlobalObject()) {
1972 objects_.Add(Handle<JSGlobalObject>(JSGlobalObject::cast(global)));
1973 }
1974 }
1975 }
1976 }
1977 }
  int count() { return objects_.length(); }
  Handle<JSGlobalObject>& at(int i) { return objects_[i]; }
1980
1981 private:
1982 List<Handle<JSGlobalObject> > objects_;
1983 };
1984
1985
1986 // Modifies heap. Must not be run during heap traversal.
void V8HeapExplorer::TagGlobalObjects() {
1988 Isolate* isolate = heap_->isolate();
1989 HandleScope scope(isolate);
1990 GlobalObjectsEnumerator enumerator;
1991 isolate->global_handles()->IterateAllRoots(&enumerator);
1992 const char** urls = NewArray<const char*>(enumerator.count());
1993 for (int i = 0, l = enumerator.count(); i < l; ++i) {
1994 if (global_object_name_resolver_) {
1995 HandleScope scope(isolate);
1996 Handle<JSGlobalObject> global_obj = enumerator.at(i);
1997 urls[i] = global_object_name_resolver_->GetName(
1998 Utils::ToLocal(Handle<JSObject>::cast(global_obj)));
1999 } else {
2000 urls[i] = NULL;
2001 }
2002 }
2003
2004 DisallowHeapAllocation no_allocation;
2005 for (int i = 0, l = enumerator.count(); i < l; ++i) {
2006 objects_tags_.SetTag(*enumerator.at(i), urls[i]);
2007 }
2008
2009 DeleteArray(urls);
2010 }
2011
2012
class GlobalHandlesExtractor : public ObjectVisitor {
 public:
  explicit GlobalHandlesExtractor(NativeObjectsExplorer* explorer)
      : explorer_(explorer) {}
  virtual ~GlobalHandlesExtractor() {}
  virtual void VisitPointers(Object** start, Object** end) {
    UNREACHABLE();
  }
  virtual void VisitEmbedderReference(Object** p, uint16_t class_id) {
    explorer_->VisitSubtreeWrapper(p, class_id);
  }
 private:
  NativeObjectsExplorer* explorer_;
};


class BasicHeapEntriesAllocator : public HeapEntriesAllocator {
 public:
  BasicHeapEntriesAllocator(
      HeapSnapshot* snapshot,
      HeapEntry::Type entries_type)
      : snapshot_(snapshot),
        names_(snapshot_->profiler()->names()),
        heap_object_map_(snapshot_->profiler()->heap_object_map()),
        entries_type_(entries_type) {
  }
  virtual HeapEntry* AllocateEntry(HeapThing ptr);
 private:
  HeapSnapshot* snapshot_;
  StringsStorage* names_;
  HeapObjectsMap* heap_object_map_;
  HeapEntry::Type entries_type_;
};


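// Worked example (illustrative): for a RetainedObjectInfo with label "Window",
// GetElementCount() == 3 and GetSizeInBytes() == 256, AllocateEntry() below
// produces an entry named "Window / 3 entries" with a self size of 256. If the
// embedder reports -1 for either value, the plain label and a size of 0 are
// used instead.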
HeapEntry* BasicHeapEntriesAllocator::AllocateEntry(HeapThing ptr) {
  v8::RetainedObjectInfo* info = reinterpret_cast<v8::RetainedObjectInfo*>(ptr);
  intptr_t elements = info->GetElementCount();
  intptr_t size = info->GetSizeInBytes();
  const char* name = elements != -1
      ? names_->GetFormatted(
            "%s / %" V8_PTR_PREFIX "d entries", info->GetLabel(), elements)
      : names_->GetCopy(info->GetLabel());
  return snapshot_->AddEntry(
      entries_type_,
      name,
      heap_object_map_->GenerateId(info),
      size != -1 ? static_cast<int>(size) : 0);
}


NativeObjectsExplorer::NativeObjectsExplorer(
2065 HeapSnapshot* snapshot,
2066 SnapshottingProgressReportingInterface* progress)
2067 : isolate_(snapshot->profiler()->heap_object_map()->heap()->isolate()),
2068 snapshot_(snapshot),
2069 names_(snapshot_->profiler()->names()),
2070 progress_(progress),
2071 embedder_queried_(false),
2072 objects_by_info_(RetainedInfosMatch),
2073 native_groups_(StringsMatch),
2074 filler_(NULL) {
2075 synthetic_entries_allocator_ =
2076 new BasicHeapEntriesAllocator(snapshot, HeapEntry::kSynthetic);
2077 native_entries_allocator_ =
2078 new BasicHeapEntriesAllocator(snapshot, HeapEntry::kNative);
2079 }
2080
2081
NativeObjectsExplorer::~NativeObjectsExplorer() {
2083 for (HashMap::Entry* p = objects_by_info_.Start();
2084 p != NULL;
2085 p = objects_by_info_.Next(p)) {
2086 v8::RetainedObjectInfo* info =
2087 reinterpret_cast<v8::RetainedObjectInfo*>(p->key);
2088 info->Dispose();
2089 List<HeapObject*>* objects =
2090 reinterpret_cast<List<HeapObject*>* >(p->value);
2091 delete objects;
2092 }
2093 for (HashMap::Entry* p = native_groups_.Start();
2094 p != NULL;
2095 p = native_groups_.Next(p)) {
2096 v8::RetainedObjectInfo* info =
2097 reinterpret_cast<v8::RetainedObjectInfo*>(p->value);
2098 info->Dispose();
2099 }
2100 delete synthetic_entries_allocator_;
2101 delete native_entries_allocator_;
2102 }
2103
2104
int NativeObjectsExplorer::EstimateObjectsCount() {
2106 FillRetainedObjects();
2107 return objects_by_info_.occupancy();
2108 }
2109
2110
void NativeObjectsExplorer::FillRetainedObjects() {
2112 if (embedder_queried_) return;
2113 Isolate* isolate = isolate_;
2114 const GCType major_gc_type = kGCTypeMarkSweepCompact;
2115 // Record objects that are joined into ObjectGroups.
2116 isolate->heap()->CallGCPrologueCallbacks(
2117 major_gc_type, kGCCallbackFlagConstructRetainedObjectInfos);
2118 List<ObjectGroup*>* groups = isolate->global_handles()->object_groups();
2119 for (int i = 0; i < groups->length(); ++i) {
2120 ObjectGroup* group = groups->at(i);
2121 if (group->info == NULL) continue;
2122 List<HeapObject*>* list = GetListMaybeDisposeInfo(group->info);
2123 for (size_t j = 0; j < group->length; ++j) {
2124 HeapObject* obj = HeapObject::cast(*group->objects[j]);
2125 list->Add(obj);
2126 in_groups_.Insert(obj);
2127 }
2128 group->info = NULL; // Acquire info object ownership.
2129 }
2130 isolate->global_handles()->RemoveObjectGroups();
2131 isolate->heap()->CallGCEpilogueCallbacks(major_gc_type);
2132 // Record objects that are not in ObjectGroups, but have class ID.
2133 GlobalHandlesExtractor extractor(this);
2134 isolate->global_handles()->IterateAllRootsWithClassIds(&extractor);
2135 embedder_queried_ = true;
2136 }
2137
2138
void NativeObjectsExplorer::FillImplicitReferences() {
2140 Isolate* isolate = isolate_;
2141 List<ImplicitRefGroup*>* groups =
2142 isolate->global_handles()->implicit_ref_groups();
2143 for (int i = 0; i < groups->length(); ++i) {
2144 ImplicitRefGroup* group = groups->at(i);
2145 HeapObject* parent = *group->parent;
2146 int parent_entry =
2147 filler_->FindOrAddEntry(parent, native_entries_allocator_)->index();
2148 ASSERT(parent_entry != HeapEntry::kNoEntry);
2149 Object*** children = group->children;
2150 for (size_t j = 0; j < group->length; ++j) {
2151 Object* child = *children[j];
2152 HeapEntry* child_entry =
2153 filler_->FindOrAddEntry(child, native_entries_allocator_);
2154 filler_->SetNamedReference(
2155 HeapGraphEdge::kInternal,
2156 parent_entry,
2157 "native",
2158 child_entry);
2159 }
2160 }
2161 isolate->global_handles()->RemoveImplicitRefGroups();
2162 }
2163
List<HeapObject*>* NativeObjectsExplorer::GetListMaybeDisposeInfo(
2165 v8::RetainedObjectInfo* info) {
2166 HashMap::Entry* entry =
2167 objects_by_info_.Lookup(info, InfoHash(info), true);
2168 if (entry->value != NULL) {
2169 info->Dispose();
2170 } else {
2171 entry->value = new List<HeapObject*>(4);
2172 }
2173 return reinterpret_cast<List<HeapObject*>* >(entry->value);
2174 }
2175
2176
bool NativeObjectsExplorer::IterateAndExtractReferences(
2178 SnapshotFillerInterface* filler) {
2179 filler_ = filler;
2180 FillRetainedObjects();
2181 FillImplicitReferences();
2182 if (EstimateObjectsCount() > 0) {
2183 for (HashMap::Entry* p = objects_by_info_.Start();
2184 p != NULL;
2185 p = objects_by_info_.Next(p)) {
2186 v8::RetainedObjectInfo* info =
2187 reinterpret_cast<v8::RetainedObjectInfo*>(p->key);
2188 SetNativeRootReference(info);
2189 List<HeapObject*>* objects =
2190 reinterpret_cast<List<HeapObject*>* >(p->value);
2191 for (int i = 0; i < objects->length(); ++i) {
2192 SetWrapperNativeReferences(objects->at(i), info);
2193 }
2194 }
2195 SetRootNativeRootsReference();
2196 }
2197 filler_ = NULL;
2198 return true;
2199 }
2200
2201
class NativeGroupRetainedObjectInfo : public v8::RetainedObjectInfo {
 public:
  explicit NativeGroupRetainedObjectInfo(const char* label)
      : disposed_(false),
        hash_(reinterpret_cast<intptr_t>(label)),
        label_(label) {
  }

  virtual ~NativeGroupRetainedObjectInfo() {}
  virtual void Dispose() {
    CHECK(!disposed_);
    disposed_ = true;
    delete this;
  }
  virtual bool IsEquivalent(RetainedObjectInfo* other) {
    return hash_ == other->GetHash() && !strcmp(label_, other->GetLabel());
  }
  virtual intptr_t GetHash() { return hash_; }
  virtual const char* GetLabel() { return label_; }

 private:
  bool disposed_;
  intptr_t hash_;
  const char* label_;
};


NativeGroupRetainedObjectInfo* NativeObjectsExplorer::FindOrAddGroupInfo(
2230 const char* label) {
2231 const char* label_copy = names_->GetCopy(label);
2232 uint32_t hash = StringHasher::HashSequentialString(
2233 label_copy,
2234 static_cast<int>(strlen(label_copy)),
2235 isolate_->heap()->HashSeed());
2236 HashMap::Entry* entry = native_groups_.Lookup(const_cast<char*>(label_copy),
2237 hash, true);
2238 if (entry->value == NULL) {
2239 entry->value = new NativeGroupRetainedObjectInfo(label);
2240 }
2241 return static_cast<NativeGroupRetainedObjectInfo*>(entry->value);
2242 }
2243
2244
void NativeObjectsExplorer::SetNativeRootReference(
2246 v8::RetainedObjectInfo* info) {
2247 HeapEntry* child_entry =
2248 filler_->FindOrAddEntry(info, native_entries_allocator_);
2249 ASSERT(child_entry != NULL);
2250 NativeGroupRetainedObjectInfo* group_info =
2251 FindOrAddGroupInfo(info->GetGroupLabel());
2252 HeapEntry* group_entry =
2253 filler_->FindOrAddEntry(group_info, synthetic_entries_allocator_);
2254 filler_->SetNamedAutoIndexReference(
2255 HeapGraphEdge::kInternal,
2256 group_entry->index(),
2257 child_entry);
2258 }
2259
2260
void NativeObjectsExplorer::SetWrapperNativeReferences(
2262 HeapObject* wrapper, v8::RetainedObjectInfo* info) {
2263 HeapEntry* wrapper_entry = filler_->FindEntry(wrapper);
2264 ASSERT(wrapper_entry != NULL);
2265 HeapEntry* info_entry =
2266 filler_->FindOrAddEntry(info, native_entries_allocator_);
2267 ASSERT(info_entry != NULL);
2268 filler_->SetNamedReference(HeapGraphEdge::kInternal,
2269 wrapper_entry->index(),
2270 "native",
2271 info_entry);
2272 filler_->SetIndexedAutoIndexReference(HeapGraphEdge::kElement,
2273 info_entry->index(),
2274 wrapper_entry);
2275 }
2276
2277
void NativeObjectsExplorer::SetRootNativeRootsReference() {
2279 for (HashMap::Entry* entry = native_groups_.Start();
2280 entry;
2281 entry = native_groups_.Next(entry)) {
2282 NativeGroupRetainedObjectInfo* group_info =
2283 static_cast<NativeGroupRetainedObjectInfo*>(entry->value);
2284 HeapEntry* group_entry =
2285 filler_->FindOrAddEntry(group_info, native_entries_allocator_);
2286 ASSERT(group_entry != NULL);
2287 filler_->SetIndexedAutoIndexReference(
2288 HeapGraphEdge::kElement,
2289 snapshot_->root()->index(),
2290 group_entry);
2291 }
2292 }
2293
2294
void NativeObjectsExplorer::VisitSubtreeWrapper(Object** p, uint16_t class_id) {
2296 if (in_groups_.Contains(*p)) return;
2297 Isolate* isolate = isolate_;
2298 v8::RetainedObjectInfo* info =
2299 isolate->heap_profiler()->ExecuteWrapperClassCallback(class_id, p);
2300 if (info == NULL) return;
2301 GetListMaybeDisposeInfo(info)->Add(HeapObject::cast(*p));
2302 }
2303
2304
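// Note (sketch of the contract, inferred from the code below): SnapshotFiller
// bridges the explorers and the snapshot. FindOrAddEntry() maps a HeapThing to
// a HeapEntry exactly once, and the *AutoIndex* variants number a parent's
// children starting from 1 (children_count() + 1), so for example the third
// element edge added to a parent gets index 3.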
class SnapshotFiller : public SnapshotFillerInterface {
 public:
  explicit SnapshotFiller(HeapSnapshot* snapshot, HeapEntriesMap* entries)
      : snapshot_(snapshot),
        names_(snapshot->profiler()->names()),
        entries_(entries) { }
  HeapEntry* AddEntry(HeapThing ptr, HeapEntriesAllocator* allocator) {
    HeapEntry* entry = allocator->AllocateEntry(ptr);
    entries_->Pair(ptr, entry->index());
    return entry;
  }
  HeapEntry* FindEntry(HeapThing ptr) {
    int index = entries_->Map(ptr);
    return index != HeapEntry::kNoEntry ? &snapshot_->entries()[index] : NULL;
  }
  HeapEntry* FindOrAddEntry(HeapThing ptr, HeapEntriesAllocator* allocator) {
    HeapEntry* entry = FindEntry(ptr);
    return entry != NULL ? entry : AddEntry(ptr, allocator);
  }
  void SetIndexedReference(HeapGraphEdge::Type type,
                           int parent,
                           int index,
                           HeapEntry* child_entry) {
    HeapEntry* parent_entry = &snapshot_->entries()[parent];
    parent_entry->SetIndexedReference(type, index, child_entry);
  }
  void SetIndexedAutoIndexReference(HeapGraphEdge::Type type,
                                    int parent,
                                    HeapEntry* child_entry) {
    HeapEntry* parent_entry = &snapshot_->entries()[parent];
    int index = parent_entry->children_count() + 1;
    parent_entry->SetIndexedReference(type, index, child_entry);
  }
  void SetNamedReference(HeapGraphEdge::Type type,
                         int parent,
                         const char* reference_name,
                         HeapEntry* child_entry) {
    HeapEntry* parent_entry = &snapshot_->entries()[parent];
    parent_entry->SetNamedReference(type, reference_name, child_entry);
  }
  void SetNamedAutoIndexReference(HeapGraphEdge::Type type,
                                  int parent,
                                  HeapEntry* child_entry) {
    HeapEntry* parent_entry = &snapshot_->entries()[parent];
    int index = parent_entry->children_count() + 1;
    parent_entry->SetNamedReference(
        type,
        names_->GetName(index),
        child_entry);
  }

 private:
  HeapSnapshot* snapshot_;
  StringsStorage* names_;
  HeapEntriesMap* entries_;
};


HeapSnapshotGenerator::HeapSnapshotGenerator(
2364 HeapSnapshot* snapshot,
2365 v8::ActivityControl* control,
2366 v8::HeapProfiler::ObjectNameResolver* resolver,
2367 Heap* heap)
2368 : snapshot_(snapshot),
2369 control_(control),
2370 v8_heap_explorer_(snapshot_, this, resolver),
2371 dom_explorer_(snapshot_, this),
2372 heap_(heap) {
2373 }
2374
2375
bool HeapSnapshotGenerator::GenerateSnapshot() {
2377 v8_heap_explorer_.TagGlobalObjects();
2378
2379 // TODO(1562) Profiler assumes that any object that is in the heap after
2380 // full GC is reachable from the root when computing dominators.
2381 // This is not true for weakly reachable objects.
2382 // As a temporary solution we call GC twice.
2383 heap_->CollectAllGarbage(
2384 Heap::kMakeHeapIterableMask,
2385 "HeapSnapshotGenerator::GenerateSnapshot");
2386 heap_->CollectAllGarbage(
2387 Heap::kMakeHeapIterableMask,
2388 "HeapSnapshotGenerator::GenerateSnapshot");
2389
2390 #ifdef VERIFY_HEAP
2391 Heap* debug_heap = heap_;
2392 CHECK(!debug_heap->old_data_space()->was_swept_conservatively());
2393 CHECK(!debug_heap->old_pointer_space()->was_swept_conservatively());
2394 CHECK(!debug_heap->code_space()->was_swept_conservatively());
2395 CHECK(!debug_heap->cell_space()->was_swept_conservatively());
2396 CHECK(!debug_heap->property_cell_space()->
2397 was_swept_conservatively());
2398 CHECK(!debug_heap->map_space()->was_swept_conservatively());
2399 #endif
2400
2401 // The following code uses heap iterators, so we want the heap to be
2402 // stable. It should follow TagGlobalObjects as that can allocate.
2403 DisallowHeapAllocation no_alloc;
2404
2405 #ifdef VERIFY_HEAP
2406 debug_heap->Verify();
2407 #endif
2408
2409 SetProgressTotal(1); // 1 pass.
2410
2411 #ifdef VERIFY_HEAP
2412 debug_heap->Verify();
2413 #endif
2414
2415 if (!FillReferences()) return false;
2416
2417 snapshot_->FillChildren();
2418 snapshot_->RememberLastJSObjectId();
2419
2420 progress_counter_ = progress_total_;
2421 if (!ProgressReport(true)) return false;
2422 return true;
2423 }
2424
2425
void HeapSnapshotGenerator::ProgressStep() {
2427 ++progress_counter_;
2428 }
2429
2430
bool HeapSnapshotGenerator::ProgressReport(bool force) {
2432 const int kProgressReportGranularity = 10000;
2433 if (control_ != NULL
2434 && (force || progress_counter_ % kProgressReportGranularity == 0)) {
2435 return
2436 control_->ReportProgressValue(progress_counter_, progress_total_) ==
2437 v8::ActivityControl::kContinue;
2438 }
2439 return true;
2440 }
2441
2442
void HeapSnapshotGenerator::SetProgressTotal(int iterations_count) {
2444 if (control_ == NULL) return;
2445 HeapIterator iterator(heap_, HeapIterator::kFilterUnreachable);
2446 progress_total_ = iterations_count * (
2447 v8_heap_explorer_.EstimateObjectsCount(&iterator) +
2448 dom_explorer_.EstimateObjectsCount());
2449 progress_counter_ = 0;
2450 }
2451
2452
bool HeapSnapshotGenerator::FillReferences() {
2454 SnapshotFiller filler(snapshot_, &entries_);
2455 v8_heap_explorer_.AddRootEntries(&filler);
2456 return v8_heap_explorer_.IterateAndExtractReferences(&filler)
2457 && dom_explorer_.IterateAndExtractReferences(&filler);
2458 }
2459
2460
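// Rationale (illustrative): a 4-byte unsigned value is at most 4294967295,
// which is 10 digits, and a 4-byte signed value needs at most 11 characters
// including the minus sign (-2147483648). An 8-byte value needs at most 20
// characters either way (UINT64_MAX is 18446744073709551615).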
template<int bytes> struct MaxDecimalDigitsIn;
template<> struct MaxDecimalDigitsIn<4> {
  static const int kSigned = 11;
  static const int kUnsigned = 10;
};
template<> struct MaxDecimalDigitsIn<8> {
  static const int kSigned = 20;
  static const int kUnsigned = 20;
};


class OutputStreamWriter {
 public:
  explicit OutputStreamWriter(v8::OutputStream* stream)
      : stream_(stream),
        chunk_size_(stream->GetChunkSize()),
        chunk_(chunk_size_),
        chunk_pos_(0),
        aborted_(false) {
    ASSERT(chunk_size_ > 0);
  }
  bool aborted() { return aborted_; }
  void AddCharacter(char c) {
    ASSERT(c != '\0');
    ASSERT(chunk_pos_ < chunk_size_);
    chunk_[chunk_pos_++] = c;
    MaybeWriteChunk();
  }
  void AddString(const char* s) {
    AddSubstring(s, StrLength(s));
  }
  void AddSubstring(const char* s, int n) {
    if (n <= 0) return;
    ASSERT(static_cast<size_t>(n) <= strlen(s));
    const char* s_end = s + n;
    while (s < s_end) {
      int s_chunk_size = Min(
          chunk_size_ - chunk_pos_, static_cast<int>(s_end - s));
      ASSERT(s_chunk_size > 0);
      OS::MemCopy(chunk_.start() + chunk_pos_, s, s_chunk_size);
      s += s_chunk_size;
      chunk_pos_ += s_chunk_size;
      MaybeWriteChunk();
    }
  }
  void AddNumber(unsigned n) { AddNumberImpl<unsigned>(n, "%u"); }
  void Finalize() {
    if (aborted_) return;
    ASSERT(chunk_pos_ < chunk_size_);
    if (chunk_pos_ != 0) {
      WriteChunk();
    }
    stream_->EndOfStream();
  }

 private:
  template<typename T>
  void AddNumberImpl(T n, const char* format) {
    // Buffer for the longest value plus trailing \0
    static const int kMaxNumberSize =
        MaxDecimalDigitsIn<sizeof(T)>::kUnsigned + 1;
    if (chunk_size_ - chunk_pos_ >= kMaxNumberSize) {
      int result = OS::SNPrintF(
          chunk_.SubVector(chunk_pos_, chunk_size_), format, n);
      ASSERT(result != -1);
      chunk_pos_ += result;
      MaybeWriteChunk();
    } else {
      EmbeddedVector<char, kMaxNumberSize> buffer;
      int result = OS::SNPrintF(buffer, format, n);
      USE(result);
      ASSERT(result != -1);
      AddString(buffer.start());
    }
  }
  void MaybeWriteChunk() {
    ASSERT(chunk_pos_ <= chunk_size_);
    if (chunk_pos_ == chunk_size_) {
      WriteChunk();
    }
  }
  void WriteChunk() {
    if (aborted_) return;
    if (stream_->WriteAsciiChunk(chunk_.start(), chunk_pos_) ==
        v8::OutputStream::kAbort) aborted_ = true;
    chunk_pos_ = 0;
  }

  v8::OutputStream* stream_;
  int chunk_size_;
  ScopedVector<char> chunk_;
  int chunk_pos_;
  bool aborted_;
};


// type, name|index, to_node.
const int HeapSnapshotJSONSerializer::kEdgeFieldsCount = 3;
// type, name, id, self_size, edge_count.
const int HeapSnapshotJSONSerializer::kNodeFieldsCount = 5;

void HeapSnapshotJSONSerializer::Serialize(v8::OutputStream* stream) {
  if (AllocationTracker* allocation_tracker =
      snapshot_->profiler()->allocation_tracker()) {
    allocation_tracker->PrepareForSerialization();
  }
  ASSERT(writer_ == NULL);
  writer_ = new OutputStreamWriter(stream);
  SerializeImpl();
  delete writer_;
  writer_ = NULL;
}


void HeapSnapshotJSONSerializer::SerializeImpl() {
  ASSERT(0 == snapshot_->root()->index());
  writer_->AddCharacter('{');
  writer_->AddString("\"snapshot\":{");
  SerializeSnapshot();
  if (writer_->aborted()) return;
  writer_->AddString("},\n");
  writer_->AddString("\"nodes\":[");
  SerializeNodes();
  if (writer_->aborted()) return;
  writer_->AddString("],\n");
  writer_->AddString("\"edges\":[");
  SerializeEdges();
  if (writer_->aborted()) return;
  writer_->AddString("],\n");

  writer_->AddString("\"trace_function_infos\":[");
  SerializeTraceNodeInfos();
  if (writer_->aborted()) return;
  writer_->AddString("],\n");
  writer_->AddString("\"trace_tree\":[");
  SerializeTraceTree();
  if (writer_->aborted()) return;
  writer_->AddString("],\n");

  writer_->AddString("\"strings\":[");
  SerializeStrings();
  if (writer_->aborted()) return;
  writer_->AddCharacter(']');
  writer_->AddCharacter('}');
  writer_->Finalize();
}


int HeapSnapshotJSONSerializer::GetStringId(const char* s) {
  HashMap::Entry* cache_entry = strings_.Lookup(
      const_cast<char*>(s), StringHash(s), true);
  if (cache_entry->value == NULL) {
    cache_entry->value = reinterpret_cast<void*>(next_string_id_++);
  }
  return static_cast<int>(reinterpret_cast<intptr_t>(cache_entry->value));
}


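// Worked example (illustrative): utoa(1234, buffer, 2) writes the characters
// '1', '2', '3', '4' into buffer[2..5] and returns 6, the position just past
// the last digit. A value of 0 still produces one digit ('0').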
static int utoa(unsigned value, const Vector<char>& buffer, int buffer_pos) {
  int number_of_digits = 0;
  unsigned t = value;
  do {
    ++number_of_digits;
  } while (t /= 10);

  buffer_pos += number_of_digits;
  int result = buffer_pos;
  do {
    int last_digit = value % 10;
    buffer[--buffer_pos] = '0' + last_digit;
    value /= 10;
  } while (value);
  return result;
}


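// Worked example (illustrative): a property edge whose name got string id 7
// and whose target node sits at entry index 3 is emitted as "2,7,15\n" --
// edge type 2 (property), name-or-index 7, and to_node 15, i.e. the target's
// offset in the flat "nodes" array (entry index times kNodeFieldsCount),
// assuming entry_index() is defined that way in the header.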
void HeapSnapshotJSONSerializer::SerializeEdge(HeapGraphEdge* edge,
                                               bool first_edge) {
  // The buffer needs space for 3 unsigned ints, 3 commas, \n and \0
  static const int kBufferSize =
      MaxDecimalDigitsIn<sizeof(unsigned)>::kUnsigned * 3 + 3 + 2;  // NOLINT
  EmbeddedVector<char, kBufferSize> buffer;
  int edge_name_or_index = edge->type() == HeapGraphEdge::kElement
      || edge->type() == HeapGraphEdge::kHidden
      || edge->type() == HeapGraphEdge::kWeak
      ? edge->index() : GetStringId(edge->name());
  int buffer_pos = 0;
  if (!first_edge) {
    buffer[buffer_pos++] = ',';
  }
  buffer_pos = utoa(edge->type(), buffer, buffer_pos);
  buffer[buffer_pos++] = ',';
  buffer_pos = utoa(edge_name_or_index, buffer, buffer_pos);
  buffer[buffer_pos++] = ',';
  buffer_pos = utoa(entry_index(edge->to()), buffer, buffer_pos);
  buffer[buffer_pos++] = '\n';
  buffer[buffer_pos++] = '\0';
  writer_->AddString(buffer.start());
}


void HeapSnapshotJSONSerializer::SerializeEdges() {
  List<HeapGraphEdge*>& edges = snapshot_->children();
  for (int i = 0; i < edges.length(); ++i) {
    ASSERT(i == 0 ||
           edges[i - 1]->from()->index() <= edges[i]->from()->index());
    SerializeEdge(edges[i], i == 0);
    if (writer_->aborted()) return;
  }
}


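// Worked example (illustrative): an object node whose name got string id 15,
// with snapshot object id 42, self size 24 bytes and 2 outgoing edges is
// emitted as ",3,15,42,24,2\n" (the leading comma is dropped for the very
// first node). The field order matches "node_fields" in the meta section:
// type, name, id, self_size, edge_count.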
void HeapSnapshotJSONSerializer::SerializeNode(HeapEntry* entry) {
  // The buffer needs space for 5 unsigned ints, 5 commas, \n and \0
  static const int kBufferSize =
      5 * MaxDecimalDigitsIn<sizeof(unsigned)>::kUnsigned  // NOLINT
      + 5 + 1 + 1;
  EmbeddedVector<char, kBufferSize> buffer;
  int buffer_pos = 0;
  if (entry_index(entry) != 0) {
    buffer[buffer_pos++] = ',';
  }
  buffer_pos = utoa(entry->type(), buffer, buffer_pos);
  buffer[buffer_pos++] = ',';
  buffer_pos = utoa(GetStringId(entry->name()), buffer, buffer_pos);
  buffer[buffer_pos++] = ',';
  buffer_pos = utoa(entry->id(), buffer, buffer_pos);
  buffer[buffer_pos++] = ',';
  buffer_pos = utoa(entry->self_size(), buffer, buffer_pos);
  buffer[buffer_pos++] = ',';
  buffer_pos = utoa(entry->children_count(), buffer, buffer_pos);
  buffer[buffer_pos++] = '\n';
  buffer[buffer_pos++] = '\0';
  writer_->AddString(buffer.start());
}


void HeapSnapshotJSONSerializer::SerializeNodes() {
  List<HeapEntry>& entries = snapshot_->entries();
  for (int i = 0; i < entries.length(); ++i) {
    SerializeNode(&entries[i]);
    if (writer_->aborted()) return;
  }
}


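// Sketch of the output this produces (illustrative, abbreviated): together
// with SerializeImpl() above, the "snapshot" section ends up looking like
//   "snapshot":{"title":"...","uid":1,"meta":{...},
//               "node_count":123,"edge_count":456,"trace_function_count":0}
// where "meta" describes the field layout used by the flat "nodes" and
// "edges" arrays.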
void HeapSnapshotJSONSerializer::SerializeSnapshot() {
2708 writer_->AddString("\"title\":\"");
2709 writer_->AddString(snapshot_->title());
2710 writer_->AddString("\"");
2711 writer_->AddString(",\"uid\":");
2712 writer_->AddNumber(snapshot_->uid());
2713 writer_->AddString(",\"meta\":");
2714 // The object describing node serialization layout.
2715 // We use a set of macros to improve readability.
2716 #define JSON_A(s) "[" s "]"
2717 #define JSON_O(s) "{" s "}"
2718 #define JSON_S(s) "\"" s "\""
2719 writer_->AddString(JSON_O(
2720 JSON_S("node_fields") ":" JSON_A(
2721 JSON_S("type") ","
2722 JSON_S("name") ","
2723 JSON_S("id") ","
2724 JSON_S("self_size") ","
2725 JSON_S("edge_count")) ","
2726 JSON_S("node_types") ":" JSON_A(
2727 JSON_A(
2728 JSON_S("hidden") ","
2729 JSON_S("array") ","
2730 JSON_S("string") ","
2731 JSON_S("object") ","
2732 JSON_S("code") ","
2733 JSON_S("closure") ","
2734 JSON_S("regexp") ","
2735 JSON_S("number") ","
2736 JSON_S("native") ","
2737 JSON_S("synthetic") ","
2738 JSON_S("concatenated string") ","
2739 JSON_S("sliced string")) ","
2740 JSON_S("string") ","
2741 JSON_S("number") ","
2742 JSON_S("number") ","
2743 JSON_S("number") ","
2744 JSON_S("number") ","
2745 JSON_S("number")) ","
2746 JSON_S("edge_fields") ":" JSON_A(
2747 JSON_S("type") ","
2748 JSON_S("name_or_index") ","
2749 JSON_S("to_node")) ","
2750 JSON_S("edge_types") ":" JSON_A(
2751 JSON_A(
2752 JSON_S("context") ","
2753 JSON_S("element") ","
2754 JSON_S("property") ","
2755 JSON_S("internal") ","
2756 JSON_S("hidden") ","
2757 JSON_S("shortcut") ","
2758 JSON_S("weak")) ","
2759 JSON_S("string_or_number") ","
2760 JSON_S("node")) ","
2761 JSON_S("trace_function_info_fields") ":" JSON_A(
2762 JSON_S("function_id") ","
2763 JSON_S("name") ","
2764 JSON_S("script_name") ","
2765 JSON_S("script_id") ","
2766 JSON_S("line") ","
2767 JSON_S("column")) ","
2768 JSON_S("trace_node_fields") ":" JSON_A(
2769 JSON_S("id") ","
2770 JSON_S("function_id") ","
2771 JSON_S("count") ","
2772 JSON_S("size") ","
2773 JSON_S("children"))));
2774 #undef JSON_S
2775 #undef JSON_O
2776 #undef JSON_A
2777 writer_->AddString(",\"node_count\":");
2778 writer_->AddNumber(snapshot_->entries().length());
2779 writer_->AddString(",\"edge_count\":");
2780 writer_->AddNumber(snapshot_->edges().length());
2781 writer_->AddString(",\"trace_function_count\":");
2782 uint32_t count = 0;
2783 AllocationTracker* tracker = snapshot_->profiler()->allocation_tracker();
2784 if (tracker) {
2785 count = tracker->id_to_function_info()->occupancy();
2786 }
2787 writer_->AddNumber(count);
2788 }
2789
2790
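// Worked example (illustrative): WriteUChar(writer, 0x1F) emits the six
// characters "\u001F"; only the low 16 bits of the code point are written.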
static void WriteUChar(OutputStreamWriter* w, unibrow::uchar u) {
  static const char hex_chars[] = "0123456789ABCDEF";
  w->AddString("\\u");
  w->AddCharacter(hex_chars[(u >> 12) & 0xf]);
  w->AddCharacter(hex_chars[(u >> 8) & 0xf]);
  w->AddCharacter(hex_chars[(u >> 4) & 0xf]);
  w->AddCharacter(hex_chars[u & 0xf]);
}


void HeapSnapshotJSONSerializer::SerializeTraceTree() {
  AllocationTracker* tracker = snapshot_->profiler()->allocation_tracker();
  if (!tracker) return;
  AllocationTraceTree* traces = tracker->trace_tree();
  SerializeTraceNode(traces->root());
}


void HeapSnapshotJSONSerializer::SerializeTraceNode(AllocationTraceNode* node) {
2810 // The buffer needs space for 4 unsigned ints, 4 commas, [ and \0
2811 const int kBufferSize =
2812 4 * MaxDecimalDigitsIn<sizeof(unsigned)>::kUnsigned // NOLINT
2813 + 4 + 1 + 1;
2814 EmbeddedVector<char, kBufferSize> buffer;
2815 int buffer_pos = 0;
2816 buffer_pos = utoa(node->id(), buffer, buffer_pos);
2817 buffer[buffer_pos++] = ',';
2818 buffer_pos = utoa(node->function_id(), buffer, buffer_pos);
2819 buffer[buffer_pos++] = ',';
2820 buffer_pos = utoa(node->allocation_count(), buffer, buffer_pos);
2821 buffer[buffer_pos++] = ',';
2822 buffer_pos = utoa(node->allocation_size(), buffer, buffer_pos);
2823 buffer[buffer_pos++] = ',';
2824 buffer[buffer_pos++] = '[';
2825 buffer[buffer_pos++] = '\0';
2826 writer_->AddString(buffer.start());
2827
2828 Vector<AllocationTraceNode*> children = node->children();
2829 for (int i = 0; i < children.length(); i++) {
2830 if (i > 0) {
2831 writer_->AddCharacter(',');
2832 }
2833 SerializeTraceNode(children[i]);
2834 }
2835 writer_->AddCharacter(']');
2836 }
2837
2838
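// Worked examples (illustrative): a missing position (-1) is written as "0",
// position 0 as "1", and position 41 as "42", matching the 1-based convention
// noted below.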
// 0-based position is converted to 1-based during the serialization.
static int SerializePosition(int position, const Vector<char>& buffer,
                             int buffer_pos) {
  if (position == -1) {
    buffer[buffer_pos++] = '0';
  } else {
    ASSERT(position >= 0);
    buffer_pos = utoa(static_cast<unsigned>(position + 1), buffer, buffer_pos);
  }
  return buffer_pos;
}


void HeapSnapshotJSONSerializer::SerializeTraceNodeInfos() {
2853 AllocationTracker* tracker = snapshot_->profiler()->allocation_tracker();
2854 if (!tracker) return;
2855 // The buffer needs space for 6 unsigned ints, 6 commas, \n and \0
2856 const int kBufferSize =
2857 6 * MaxDecimalDigitsIn<sizeof(unsigned)>::kUnsigned // NOLINT
2858 + 6 + 1 + 1;
2859 EmbeddedVector<char, kBufferSize> buffer;
2860 HashMap* id_to_function_info = tracker->id_to_function_info();
2861 bool first_entry = true;
2862 for (HashMap::Entry* p = id_to_function_info->Start();
2863 p != NULL;
2864 p = id_to_function_info->Next(p)) {
2865 SnapshotObjectId id =
2866 static_cast<SnapshotObjectId>(reinterpret_cast<intptr_t>(p->key));
2867 AllocationTracker::FunctionInfo* info =
2868 reinterpret_cast<AllocationTracker::FunctionInfo* >(p->value);
2869 int buffer_pos = 0;
2870 if (first_entry) {
2871 first_entry = false;
2872 } else {
2873 buffer[buffer_pos++] = ',';
2874 }
2875 buffer_pos = utoa(id, buffer, buffer_pos);
2876 buffer[buffer_pos++] = ',';
2877 buffer_pos = utoa(GetStringId(info->name), buffer, buffer_pos);
2878 buffer[buffer_pos++] = ',';
2879 buffer_pos = utoa(GetStringId(info->script_name), buffer, buffer_pos);
2880 buffer[buffer_pos++] = ',';
2881 // The cast is safe because script id is a non-negative Smi.
2882 buffer_pos = utoa(static_cast<unsigned>(info->script_id), buffer,
2883 buffer_pos);
2884 buffer[buffer_pos++] = ',';
2885 buffer_pos = SerializePosition(info->line, buffer, buffer_pos);
2886 buffer[buffer_pos++] = ',';
2887 buffer_pos = SerializePosition(info->column, buffer, buffer_pos);
2888 buffer[buffer_pos++] = '\n';
2889 buffer[buffer_pos++] = '\0';
2890 writer_->AddString(buffer.start());
2891 }
2892 }
2893
2894
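// Worked examples (illustrative): a quote character is emitted as \" and a
// newline as \n; a control byte such as 0x01 becomes \u0001; multi-byte UTF-8
// sequences are decoded and re-emitted as \uXXXX escapes, and bytes that do
// not form a valid sequence degrade to '?'.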
void HeapSnapshotJSONSerializer::SerializeString(const unsigned char* s) {
2896 writer_->AddCharacter('\n');
2897 writer_->AddCharacter('\"');
2898 for ( ; *s != '\0'; ++s) {
2899 switch (*s) {
2900 case '\b':
2901 writer_->AddString("\\b");
2902 continue;
2903 case '\f':
2904 writer_->AddString("\\f");
2905 continue;
2906 case '\n':
2907 writer_->AddString("\\n");
2908 continue;
2909 case '\r':
2910 writer_->AddString("\\r");
2911 continue;
2912 case '\t':
2913 writer_->AddString("\\t");
2914 continue;
2915 case '\"':
2916 case '\\':
2917 writer_->AddCharacter('\\');
2918 writer_->AddCharacter(*s);
2919 continue;
2920 default:
2921 if (*s > 31 && *s < 128) {
2922 writer_->AddCharacter(*s);
2923 } else if (*s <= 31) {
2924 // Special character with no dedicated literal.
2925 WriteUChar(writer_, *s);
2926 } else {
2927 // Convert UTF-8 into \u UTF-16 literal.
2928 unsigned length = 1, cursor = 0;
2929 for ( ; length <= 4 && *(s + length) != '\0'; ++length) { }
2930 unibrow::uchar c = unibrow::Utf8::CalculateValue(s, length, &cursor);
2931 if (c != unibrow::Utf8::kBadChar) {
2932 WriteUChar(writer_, c);
2933 ASSERT(cursor != 0);
2934 s += cursor - 1;
2935 } else {
2936 writer_->AddCharacter('?');
2937 }
2938 }
2939 }
2940 }
2941 writer_->AddCharacter('\"');
2942 }
2943
2944
void HeapSnapshotJSONSerializer::SerializeStrings() {
  ScopedVector<const unsigned char*> sorted_strings(
      strings_.occupancy() + 1);
  for (HashMap::Entry* entry = strings_.Start();
       entry != NULL;
       entry = strings_.Next(entry)) {
    int index = static_cast<int>(reinterpret_cast<uintptr_t>(entry->value));
    sorted_strings[index] = reinterpret_cast<const unsigned char*>(entry->key);
  }
  writer_->AddString("\"<dummy>\"");
  for (int i = 1; i < sorted_strings.length(); ++i) {
    writer_->AddCharacter(',');
    SerializeString(sorted_strings[i]);
    if (writer_->aborted()) return;
  }
}


} }  // namespace v8::internal