1 // Copyright 2010 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are
4 // met:
5 //
6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided
11 // with the distribution.
12 // * Neither the name of Google Inc. nor the names of its
13 // contributors may be used to endorse or promote products derived
14 // from this software without specific prior written permission.
15 //
16 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27
28 #ifdef ENABLE_LOGGING_AND_PROFILING
29
30 #include "v8.h"
31 #include "global-handles.h"
32 #include "heap-profiler.h"
33 #include "scopeinfo.h"
34 #include "unicode.h"
35 #include "zone-inl.h"
36
37 #include "profile-generator-inl.h"
38
39 namespace v8 {
40 namespace internal {
41
42
// Maps security-token objects to small integer ids. Both lists grow in
// lock-step: token_locations_[i] holds the global handle location for
// token i, token_removed_[i] records whether its weak callback fired.
TokenEnumerator::TokenEnumerator()
    : token_locations_(4),  // initial capacity hint for both lists
      token_removed_(4) {
}
47
48
// Releases every global handle still owned by the enumerator. Handles
// whose weak callback already ran (token_removed_[i] == true) were
// disposed in TokenRemovedCallback; touching them again would be invalid.
TokenEnumerator::~TokenEnumerator() {
  Isolate* isolate = Isolate::Current();
  for (int i = 0; i < token_locations_.length(); ++i) {
    if (!token_removed_[i]) {
      // Clear weakness first so Destroy doesn't race the weak callback.
      isolate->global_handles()->ClearWeakness(token_locations_[i]);
      isolate->global_handles()->Destroy(token_locations_[i]);
    }
  }
}
58
59
// Returns a stable small-integer id for |token|, allocating a new slot
// (backed by a weak global handle) on first sight. NULL maps to
// kNoSecurityToken. Linear scan: the token list is expected to be tiny.
int TokenEnumerator::GetTokenId(Object* token) {
  Isolate* isolate = Isolate::Current();
  if (token == NULL) return TokenEnumerator::kNoSecurityToken;
  for (int i = 0; i < token_locations_.length(); ++i) {
    // Only live (not-yet-collected) entries may match; a removed slot's
    // location must not be dereferenced.
    if (*token_locations_[i] == token && !token_removed_[i]) return i;
  }
  Handle<Object> handle = isolate->global_handles()->Create(token);
  // handle.location() points to a memory cell holding a pointer
  // to a token object in the V8's heap.
  isolate->global_handles()->MakeWeak(handle.location(), this,
                                      TokenRemovedCallback);
  token_locations_.Add(handle.location());
  token_removed_.Add(false);
  // The new token's id is its index in the list.
  return token_locations_.length() - 1;
}
75
76
// Weak-handle callback: invoked by the GC when a token object becomes
// unreachable. |parameter| is the owning TokenEnumerator (set in
// MakeWeak). Marks the slot dead, then disposes the handle.
void TokenEnumerator::TokenRemovedCallback(v8::Persistent<v8::Value> handle,
                                           void* parameter) {
  reinterpret_cast<TokenEnumerator*>(parameter)->TokenRemoved(
      Utils::OpenHandle(*handle).location());
  handle.Dispose();
}
83
84
TokenRemoved(Object ** token_location)85 void TokenEnumerator::TokenRemoved(Object** token_location) {
86 for (int i = 0; i < token_locations_.length(); ++i) {
87 if (token_locations_[i] == token_location && !token_removed_[i]) {
88 token_removed_[i] = true;
89 return;
90 }
91 }
92 }
93
94
// Deduplicating store of heap-allocated C strings, keyed by content
// (StringsMatch comparator) in a HashMap.
StringsStorage::StringsStorage()
    : names_(StringsMatch) {
}
98
99
~StringsStorage()100 StringsStorage::~StringsStorage() {
101 for (HashMap::Entry* p = names_.Start();
102 p != NULL;
103 p = names_.Next(p)) {
104 DeleteArray(reinterpret_cast<const char*>(p->value));
105 }
106 }
107
108
// Returns a canonical, storage-owned copy of |src|. The temporary copy
// is handed to AddOrDisposeString, which either keeps it (new string)
// or frees it (duplicate).
const char* StringsStorage::GetCopy(const char* src) {
  int len = static_cast<int>(strlen(src));
  Vector<char> dst = Vector<char>::New(len + 1);
  OS::StrNCpy(dst, src, len);
  dst[len] = '\0';  // StrNCpy does not guarantee termination at len.
  uint32_t hash = HashSequentialString(dst.start(), len);
  return AddOrDisposeString(dst.start(), hash);
}
117
118
// printf-style convenience wrapper over GetVFormatted; returns a
// storage-owned (deduplicated) formatted string.
const char* StringsStorage::GetFormatted(const char* format, ...) {
  va_list args;
  va_start(args, format);
  const char* result = GetVFormatted(format, args);
  va_end(args);
  return result;
}
126
127
// Takes ownership of heap string |str|. If an equal string is already
// stored, |str| is freed and the existing copy returned; otherwise |str|
// itself becomes the stored canonical copy.
const char* StringsStorage::AddOrDisposeString(char* str, uint32_t hash) {
  HashMap::Entry* cache_entry = names_.Lookup(str, hash, true);
  if (cache_entry->value == NULL) {
    // New entry added.
    cache_entry->value = str;
  } else {
    // Duplicate: discard the caller's buffer.
    DeleteArray(str);
  }
  return reinterpret_cast<const char*>(cache_entry->value);
}
138
139
// Formats into a fixed 1KB buffer and stores the result.
// NOTE(review): on VSNPrintF failure/truncation (-1) this returns the
// raw |format| pointer, which is NOT storage-owned — callers appear to
// rely on format strings being static literals; verify before reuse.
const char* StringsStorage::GetVFormatted(const char* format, va_list args) {
  Vector<char> str = Vector<char>::New(1024);
  int len = OS::VSNPrintF(str, format, args);
  if (len == -1) {
    DeleteArray(str.start());
    return format;
  }
  uint32_t hash = HashSequentialString(str.start(), len);
  return AddOrDisposeString(str.start(), hash);
}
150
151
// Returns a storage-owned flat copy of a V8 heap string, or "" when the
// object is not a string. Detach() transfers the SmartPointer's buffer
// to AddOrDisposeString, which assumes ownership.
const char* StringsStorage::GetName(String* name) {
  if (name->IsString()) {
    return AddOrDisposeString(
        name->ToCString(DISALLOW_NULLS, ROBUST_STRING_TRAVERSAL).Detach(),
        name->Hash());
  }
  return "";
}
160
161
// Returns the storage-owned decimal representation of |index|.
const char* StringsStorage::GetName(int index) {
  return GetFormatted("%d", index);
}
165
166
167 const char* const CodeEntry::kEmptyNamePrefix = "";
168
169
// Copies the identifying fields from |source|; pointers are shared, not
// duplicated (both entries then alias the same StringsStorage strings).
void CodeEntry::CopyData(const CodeEntry& source) {
  tag_ = source.tag_;
  name_prefix_ = source.name_prefix_;
  name_ = source.name_;
  resource_name_ = source.resource_name_;
  line_number_ = source.line_number_;
}
177
178
// Hash identifying "the same function" for call-tree purposes, mirroring
// the equality in IsSameAs: entries with a shared_id_ hash by that id,
// others by the identity of their interned name/resource pointers plus
// line number (pointer identity suffices: strings are deduplicated).
uint32_t CodeEntry::GetCallUid() const {
  uint32_t hash = ComputeIntegerHash(tag_);
  if (shared_id_ != 0) {
    hash ^= ComputeIntegerHash(
        static_cast<uint32_t>(shared_id_));
  } else {
    hash ^= ComputeIntegerHash(
        static_cast<uint32_t>(reinterpret_cast<uintptr_t>(name_prefix_)));
    hash ^= ComputeIntegerHash(
        static_cast<uint32_t>(reinterpret_cast<uintptr_t>(name_)));
    hash ^= ComputeIntegerHash(
        static_cast<uint32_t>(reinterpret_cast<uintptr_t>(resource_name_)));
    hash ^= ComputeIntegerHash(line_number_);
  }
  return hash;
}
195
196
IsSameAs(CodeEntry * entry) const197 bool CodeEntry::IsSameAs(CodeEntry* entry) const {
198 return this == entry
199 || (tag_ == entry->tag_
200 && shared_id_ == entry->shared_id_
201 && (shared_id_ != 0
202 || (name_prefix_ == entry->name_prefix_
203 && name_ == entry->name_
204 && resource_name_ == entry->resource_name_
205 && line_number_ == entry->line_number_)));
206 }
207
208
FindChild(CodeEntry * entry)209 ProfileNode* ProfileNode::FindChild(CodeEntry* entry) {
210 HashMap::Entry* map_entry =
211 children_.Lookup(entry, CodeEntryHash(entry), false);
212 return map_entry != NULL ?
213 reinterpret_cast<ProfileNode*>(map_entry->value) : NULL;
214 }
215
216
// Returns the child node for |entry|, creating it on first use. New
// children are registered both in the hash map (for lookup) and in
// children_list_ (for ordered iteration).
ProfileNode* ProfileNode::FindOrAddChild(CodeEntry* entry) {
  HashMap::Entry* map_entry =
      children_.Lookup(entry, CodeEntryHash(entry), true);
  if (map_entry->value == NULL) {
    // New node added.
    ProfileNode* new_node = new ProfileNode(tree_, entry);
    map_entry->value = new_node;
    children_list_.Add(new_node);
  }
  return reinterpret_cast<ProfileNode*>(map_entry->value);
}
228
229
// Converts this node's self tick count to milliseconds using the
// tree-wide sampling-rate scale.
double ProfileNode::GetSelfMillis() const {
  return tree_->TicksToMillis(self_ticks_);
}
233
234
// Converts this node's total (self + descendants) tick count to
// milliseconds using the tree-wide sampling-rate scale.
double ProfileNode::GetTotalMillis() const {
  return tree_->TicksToMillis(total_ticks_);
}
238
239
// Debug dump of this subtree: ticks, indented name, token id, and
// source position when known; children follow with indent + 2.
// Note: child order comes from hash-map iteration, so it is unspecified.
void ProfileNode::Print(int indent) {
  OS::Print("%5u %5u %*c %s%s [%d]",
            total_ticks_, self_ticks_,
            indent, ' ',  // "%*c" emits |indent| spaces.
            entry_->name_prefix(),
            entry_->name(),
            entry_->security_token_id());
  if (entry_->resource_name()[0] != '\0')
    OS::Print(" %s:%d", entry_->resource_name(), entry_->line_number());
  OS::Print("\n");
  for (HashMap::Entry* p = children_.Start();
       p != NULL;
       p = children_.Next(p)) {
    reinterpret_cast<ProfileNode*>(p->value)->Print(indent + 2);
  }
}
256
257
// TraverseDepthFirst callback that deletes every node post-order:
// children are destroyed before their parent, so no dangling pointers.
class DeleteNodesCallback {
 public:
  void BeforeTraversingChild(ProfileNode*, ProfileNode*) { }

  // Post-order hook: safe to delete, all children are already gone.
  void AfterAllChildrenTraversed(ProfileNode* node) {
    delete node;
  }

  void AfterChildTraversed(ProfileNode*, ProfileNode*) { }
};
268
269
// The tree owns a synthetic "(root)" entry so the root node never
// aliases a real code entry; root_ itself is heap-allocated and freed
// by the destructor's traversal.
ProfileTree::ProfileTree()
    : root_entry_(Logger::FUNCTION_TAG,
                  "",        // no name prefix
                  "(root)",
                  "",        // no resource name
                  0,
                  TokenEnumerator::kNoSecurityToken),
      root_(new ProfileNode(this, &root_entry_)) {
}
279
280
~ProfileTree()281 ProfileTree::~ProfileTree() {
282 DeleteNodesCallback cb;
283 TraverseDepthFirst(&cb);
284 }
285
286
AddPathFromEnd(const Vector<CodeEntry * > & path)287 void ProfileTree::AddPathFromEnd(const Vector<CodeEntry*>& path) {
288 ProfileNode* node = root_;
289 for (CodeEntry** entry = path.start() + path.length() - 1;
290 entry != path.start() - 1;
291 --entry) {
292 if (*entry != NULL) {
293 node = node->FindOrAddChild(*entry);
294 }
295 }
296 node->IncrementSelfTicks();
297 }
298
299
AddPathFromStart(const Vector<CodeEntry * > & path)300 void ProfileTree::AddPathFromStart(const Vector<CodeEntry*>& path) {
301 ProfileNode* node = root_;
302 for (CodeEntry** entry = path.start();
303 entry != path.start() + path.length();
304 ++entry) {
305 if (*entry != NULL) {
306 node = node->FindOrAddChild(*entry);
307 }
308 }
309 node->IncrementSelfTicks();
310 }
311
312
// (source, clone) node pair used by FilteredCloneCallback's explicit
// traversal stack.
struct NodesPair {
  NodesPair(ProfileNode* src, ProfileNode* dst)
      : src(src), dst(dst) { }
  ProfileNode* src;  // node in the tree being cloned
  ProfileNode* dst;  // corresponding node in the clone
};
319
320
// TraverseDepthFirst callback that copies a profile tree while dropping
// nodes whose security token is not visible to security_token_id_.
// Ticks of filtered-out subtrees are folded into the nearest accepted
// ancestor, so totals are preserved. stack_ mirrors the current
// traversal path in the destination tree; its top is the clone node that
// accepted children should be attached to.
class FilteredCloneCallback {
 public:
  FilteredCloneCallback(ProfileNode* dst_root, int security_token_id)
      : stack_(10),
        security_token_id_(security_token_id) {
    // Sentinel pair: src == NULL can never match a real child in
    // AfterChildTraversed, so the root pair is never popped.
    stack_.Add(NodesPair(NULL, dst_root));
  }

  void BeforeTraversingChild(ProfileNode* parent, ProfileNode* child) {
    if (IsTokenAcceptable(child->entry()->security_token_id(),
                          parent->entry()->security_token_id())) {
      ProfileNode* clone = stack_.last().dst->FindOrAddChild(child->entry());
      clone->IncreaseSelfTicks(child->self_ticks());
      // Push so this child's own children attach under |clone|.
      stack_.Add(NodesPair(child, clone));
    } else {
      // Attribute ticks to parent node.
      stack_.last().dst->IncreaseSelfTicks(child->self_ticks());
    }
  }

  void AfterAllChildrenTraversed(ProfileNode* parent) { }

  void AfterChildTraversed(ProfileNode*, ProfileNode* child) {
    // Pop only if |child| was accepted (and hence pushed) above.
    if (stack_.last().src == child) {
      stack_.RemoveLast();
    }
  }

 private:
  // A child is visible if it carries no token, the filter's own token,
  // or inherits from a parent that is itself visible.
  bool IsTokenAcceptable(int token, int parent_token) {
    if (token == TokenEnumerator::kNoSecurityToken
        || token == security_token_id_) return true;
    if (token == TokenEnumerator::kInheritsSecurityToken) {
      ASSERT(parent_token != TokenEnumerator::kInheritsSecurityToken);
      return parent_token == TokenEnumerator::kNoSecurityToken
          || parent_token == security_token_id_;
    }
    return false;
  }

  List<NodesPair> stack_;   // traversal path: (src node, its clone)
  int security_token_id_;   // token the cloned profile is filtered for
};
364
// Populates this (empty) tree with a token-filtered copy of |src|,
// carrying over the tick-to-ms scale and recomputing subtree totals.
void ProfileTree::FilteredClone(ProfileTree* src, int security_token_id) {
  ms_to_ticks_scale_ = src->ms_to_ticks_scale_;
  FilteredCloneCallback cb(root_, security_token_id);
  src->TraverseDepthFirst(&cb);
  CalculateTotalTicks();
}
371
372
SetTickRatePerMs(double ticks_per_ms)373 void ProfileTree::SetTickRatePerMs(double ticks_per_ms) {
374 ms_to_ticks_scale_ = ticks_per_ms > 0 ? 1.0 / ticks_per_ms : 1.0;
375 }
376
377
// Cursor over one node's children, used as a stack frame by the
// iterative TraverseDepthFirst below.
class Position {
 public:
  explicit Position(ProfileNode* node)
      : node(node), child_idx_(0) { }
  // Child currently being visited; only valid if has_current_child().
  INLINE(ProfileNode* current_child()) {
    return node->children()->at(child_idx_);
  }
  INLINE(bool has_current_child()) {
    return child_idx_ < node->children()->length();
  }
  INLINE(void next_child()) { ++child_idx_; }

  ProfileNode* node;
 private:
  int child_idx_;  // index of the next child to descend into
};
394
395
396 // Non-recursive implementation of a depth-first post-order tree traversal.
// Iterative depth-first post-order walk over the whole tree. The
// Callback sees BeforeTraversingChild (pre-order, per edge),
// AfterAllChildrenTraversed (post-order, per node) and
// AfterChildTraversed (per edge, after the child's subtree is done).
template <typename Callback>
void ProfileTree::TraverseDepthFirst(Callback* callback) {
  List<Position> stack(10);
  stack.Add(Position(root_));
  while (stack.length() > 0) {
    // NOTE: |current| references stack storage; stack.Add below may
    // reallocate, so |current| must not be used after a push — the loop
    // re-reads stack.last() on the next iteration instead.
    Position& current = stack.last();
    if (current.has_current_child()) {
      callback->BeforeTraversingChild(current.node, current.current_child());
      stack.Add(Position(current.current_child()));
    } else {
      callback->AfterAllChildrenTraversed(current.node);
      if (stack.length() > 1) {
        Position& parent = stack[stack.length() - 2];
        callback->AfterChildTraversed(parent.node, current.node);
        parent.next_child();  // advance past the finished child
      }
      // Remove child from the stack.
      stack.RemoveLast();
    }
  }
}
418
419
// Post-order accumulator: after this traversal each node's total_ticks
// equals its self ticks plus the totals of all its children.
class CalculateTotalTicksCallback {
 public:
  void BeforeTraversingChild(ProfileNode*, ProfileNode*) { }

  // Seed the node's total with its own self ticks (children already
  // folded in via AfterChildTraversed).
  void AfterAllChildrenTraversed(ProfileNode* node) {
    node->IncreaseTotalTicks(node->self_ticks());
  }

  void AfterChildTraversed(ProfileNode* parent, ProfileNode* child) {
    parent->IncreaseTotalTicks(child->total_ticks());
  }
};
432
433
// Recomputes total_ticks for every node in the tree (see callback above).
void ProfileTree::CalculateTotalTicks() {
  CalculateTotalTicksCallback cb;
  TraverseDepthFirst(&cb);
}
438
439
// One-line debug summary of the root node's tick counts and times.
void ProfileTree::ShortPrint() {
  OS::Print("root: %u %u %.2fms %.2fms\n",
            root_->total_ticks(), root_->self_ticks(),
            root_->GetTotalMillis(), root_->GetSelfMillis());
}
445
446
// Feeds one sampled stack into both views: top-down consumes the path
// caller-first (from the end), bottom-up callee-first (from the start).
void CpuProfile::AddPath(const Vector<CodeEntry*>& path) {
  top_down_.AddPathFromEnd(path);
  bottom_up_.AddPathFromStart(path);
}
451
452
// Finalizes subtree totals in both trees after sampling stops.
void CpuProfile::CalculateTotalTicks() {
  top_down_.CalculateTotalTicks();
  bottom_up_.CalculateTotalTicks();
}
457
458
// Propagates the measured ticks-per-ms rate to both trees so tick
// counts can be reported as milliseconds.
void CpuProfile::SetActualSamplingRate(double actual_sampling_rate) {
  top_down_.SetTickRatePerMs(actual_sampling_rate);
  bottom_up_.SetTickRatePerMs(actual_sampling_rate);
}
463
464
// Returns a new heap-allocated profile (same title/uid) containing only
// nodes visible to |security_token_id|; caller owns the result.
CpuProfile* CpuProfile::FilteredClone(int security_token_id) {
  ASSERT(security_token_id != TokenEnumerator::kNoSecurityToken);
  CpuProfile* clone = new CpuProfile(title_, uid_);
  clone->top_down_.FilteredClone(&top_down_, security_token_id);
  clone->bottom_up_.FilteredClone(&bottom_up_, security_token_id);
  return clone;
}
472
473
// Two-line debug summary: root stats of each tree.
void CpuProfile::ShortPrint() {
  OS::Print("top down ");
  top_down_.ShortPrint();
  OS::Print("bottom up ");
  bottom_up_.ShortPrint();
}
480
481
// Full debug dump of both trees.
void CpuProfile::Print() {
  OS::Print("[Top down]:\n");
  top_down_.Print();
  OS::Print("[Bottom up]:\n");
  bottom_up_.Print();
}
488
489
// Sentinel entry value marking "shared function id" tree nodes (see
// GetSharedId), plus the splay-tree config's null key/value constants.
CodeEntry* const CodeMap::kSharedFunctionCodeEntry = NULL;
const CodeMap::CodeTreeConfig::Key CodeMap::CodeTreeConfig::kNoKey = NULL;
const CodeMap::CodeTreeConfig::Value CodeMap::CodeTreeConfig::kNoValue =
    CodeMap::CodeEntryInfo(NULL, 0);
494
495
// Maps an arbitrary instruction address to the code entry whose range
// [start, start + size) contains it, or NULL if the address falls in a
// gap. Uses greatest-key-<=-addr lookup then a range check.
CodeEntry* CodeMap::FindEntry(Address addr) {
  CodeTree::Locator locator;
  if (tree_.FindGreatestLessThan(addr, &locator)) {
    // locator.key() <= addr. Need to check that addr is within entry.
    const CodeEntryInfo& entry = locator.value();
    if (addr < (locator.key() + entry.size))
      return entry.entry;
  }
  return NULL;
}
506
507
// Returns a stable id for a SharedFunctionInfo address, allocating the
// next id on first sight. Ids live in the same tree as code entries:
// the sentinel kSharedFunctionCodeEntry marks id nodes, and the 'size'
// field is repurposed to store the id.
int CodeMap::GetSharedId(Address addr) {
  CodeTree::Locator locator;
  // For shared function entries, 'size' field is used to store their IDs.
  if (tree_.Find(addr, &locator)) {
    const CodeEntryInfo& entry = locator.value();
    ASSERT(entry.entry == kSharedFunctionCodeEntry);
    return entry.size;
  } else {
    tree_.Insert(addr, &locator);
    int id = next_shared_id_++;
    locator.set_value(CodeEntryInfo(kSharedFunctionCodeEntry, id));
    return id;
  }
}
522
523
// Per-node visitor for CodeMap::Print: address, size (or shared id for
// sentinel nodes — NOTE(review): value.entry->name() would be a NULL
// deref for those; assumes no sentinel nodes when printing), and name.
void CodeMap::CodeTreePrinter::Call(
    const Address& key, const CodeMap::CodeEntryInfo& value) {
  OS::Print("%p %5d %s\n", key, value.size, value.entry->name());
}
528
529
// Debug dump of every (address, size, name) triple in the code tree.
void CodeMap::Print() {
  CodeTreePrinter printer;
  tree_.ForEach(&printer);
}
534
535
// The binary semaphore (initial count 1) acts as a mutex guarding
// current_profiles_ against the sampler thread (AddPathToCurrentProfiles).
CpuProfilesCollection::CpuProfilesCollection()
    : profiles_uids_(UidsMatch),
      current_profiles_semaphore_(OS::CreateSemaphore(1)) {
  // Create list of unabridged profiles.
  profiles_by_token_.Add(new List<CpuProfile*>());
}
542
543
// List<T>::Iterate helper: frees one owned CodeEntry.
static void DeleteCodeEntry(CodeEntry** entry_ptr) {
  delete *entry_ptr;
}
547
// List<T>::Iterate helper: frees one owned CpuProfile.
static void DeleteCpuProfile(CpuProfile** profile_ptr) {
  delete *profile_ptr;
}
551
// List<T>::Iterate helper: frees one per-token profile list and its
// profiles. Slots may be NULL (lists are created lazily in
// GetProfilesList).
static void DeleteProfilesList(List<CpuProfile*>** list_ptr) {
  if (*list_ptr != NULL) {
    (*list_ptr)->Iterate(DeleteCpuProfile);
    delete *list_ptr;
  }
}
558
// Frees everything the collection owns: the lock, in-flight profiles,
// detached clones, all per-token lists (with their profiles), and the
// code entries shared by all profiles (freed last, as profiles refer to
// them).
CpuProfilesCollection::~CpuProfilesCollection() {
  delete current_profiles_semaphore_;
  current_profiles_.Iterate(DeleteCpuProfile);
  detached_profiles_.Iterate(DeleteCpuProfile);
  profiles_by_token_.Iterate(DeleteProfilesList);
  code_entries_.Iterate(DeleteCodeEntry);
}
566
567
// Begins recording a profile titled |title|. Fails (returns false) when
// the simultaneous-profile limit is reached or a profile with the same
// title is already running. The semaphore serializes access to
// current_profiles_ against the sampler thread; every exit path must
// Signal() what it Wait()ed.
bool CpuProfilesCollection::StartProfiling(const char* title, unsigned uid) {
  ASSERT(uid > 0);
  current_profiles_semaphore_->Wait();
  if (current_profiles_.length() >= kMaxSimultaneousProfiles) {
    current_profiles_semaphore_->Signal();
    return false;
  }
  for (int i = 0; i < current_profiles_.length(); ++i) {
    if (strcmp(current_profiles_[i]->title(), title) == 0) {
      // Ignore attempts to start profile with the same title.
      current_profiles_semaphore_->Signal();
      return false;
    }
  }
  current_profiles_.Add(new CpuProfile(title, uid));
  current_profiles_semaphore_->Signal();
  return true;
}
586
587
// Heap-string overload: interns the title, then delegates.
bool CpuProfilesCollection::StartProfiling(String* title, unsigned uid) {
  return StartProfiling(GetName(title), uid);
}
591
592
// Stops the most recently started profile matching |title| (an empty
// title matches any), finalizes its tick totals and sampling rate,
// files it in the unabridged list + uid index, and returns the view
// filtered for |security_token_id|. Returns NULL if nothing matched.
CpuProfile* CpuProfilesCollection::StopProfiling(int security_token_id,
                                                 const char* title,
                                                 double actual_sampling_rate) {
  const int title_len = StrLength(title);
  CpuProfile* profile = NULL;
  current_profiles_semaphore_->Wait();
  // Scan newest-first so nested same-title sessions stop LIFO.
  for (int i = current_profiles_.length() - 1; i >= 0; --i) {
    if (title_len == 0 || strcmp(current_profiles_[i]->title(), title) == 0) {
      profile = current_profiles_.Remove(i);
      break;
    }
  }
  current_profiles_semaphore_->Signal();

  if (profile != NULL) {
    profile->CalculateTotalTicks();
    profile->SetActualSamplingRate(actual_sampling_rate);
    List<CpuProfile*>* unabridged_list =
        profiles_by_token_[TokenToIndex(TokenEnumerator::kNoSecurityToken)];
    unabridged_list->Add(profile);
    // Map uid -> index in the unabridged list (stored as the entry value).
    HashMap::Entry* entry =
        profiles_uids_.Lookup(reinterpret_cast<void*>(profile->uid()),
                              static_cast<uint32_t>(profile->uid()),
                              true);
    ASSERT(entry->value == NULL);
    entry->value = reinterpret_cast<void*>(unabridged_list->length() - 1);
    return GetProfile(security_token_id, profile->uid());
  }
  return NULL;
}
623
624
// Returns the profile with |uid| as seen by |security_token_id|.
// The unabridged profile is returned directly for kNoSecurityToken;
// otherwise a filtered clone is created lazily and cached in the
// per-token list at the same index.
CpuProfile* CpuProfilesCollection::GetProfile(int security_token_id,
                                              unsigned uid) {
  int index = GetProfileIndex(uid);
  if (index < 0) return NULL;
  List<CpuProfile*>* unabridged_list =
      profiles_by_token_[TokenToIndex(TokenEnumerator::kNoSecurityToken)];
  if (security_token_id == TokenEnumerator::kNoSecurityToken) {
    return unabridged_list->at(index);
  }
  List<CpuProfile*>* list = GetProfilesList(security_token_id);
  if (list->at(index) == NULL) {
    // Cache the clone so repeated queries don't re-filter.
    (*list)[index] =
        unabridged_list->at(index)->FilteredClone(security_token_id);
  }
  return list->at(index);
}
641
642
GetProfileIndex(unsigned uid)643 int CpuProfilesCollection::GetProfileIndex(unsigned uid) {
644 HashMap::Entry* entry = profiles_uids_.Lookup(reinterpret_cast<void*>(uid),
645 static_cast<uint32_t>(uid),
646 false);
647 return entry != NULL ?
648 static_cast<int>(reinterpret_cast<intptr_t>(entry->value)) : -1;
649 }
650
651
IsLastProfile(const char * title)652 bool CpuProfilesCollection::IsLastProfile(const char* title) {
653 // Called from VM thread, and only it can mutate the list,
654 // so no locking is needed here.
655 if (current_profiles_.length() != 1) return false;
656 return StrLength(title) == 0
657 || strcmp(current_profiles_[0]->title(), title) == 0;
658 }
659
660
// Removes a completed profile from the collection. If the uid is no
// longer indexed, the profile must already be a detached clone — just
// drop it from that list. Otherwise remove it at |index| from every
// per-token list, shift the uid->index map down to compensate, and
// keep still-referenced filtered clones alive in detached_profiles_.
void CpuProfilesCollection::RemoveProfile(CpuProfile* profile) {
  // Called from VM thread for a completed profile.
  unsigned uid = profile->uid();
  int index = GetProfileIndex(uid);
  if (index < 0) {
    detached_profiles_.RemoveElement(profile);
    return;
  }
  profiles_uids_.Remove(reinterpret_cast<void*>(uid),
                        static_cast<uint32_t>(uid));
  // Decrement all indexes above the deleted one.
  for (HashMap::Entry* p = profiles_uids_.Start();
       p != NULL;
       p = profiles_uids_.Next(p)) {
    intptr_t p_index = reinterpret_cast<intptr_t>(p->value);
    if (p_index > index) {
      p->value = reinterpret_cast<void*>(p_index - 1);
    }
  }
  for (int i = 0; i < profiles_by_token_.length(); ++i) {
    List<CpuProfile*>* list = profiles_by_token_[i];
    // Lazily-created lists may be NULL or shorter than |index|.
    if (list != NULL && index < list->length()) {
      // Move all filtered clones into detached_profiles_,
      // so we can know that they are still in use.
      CpuProfile* cloned_profile = list->Remove(index);
      if (cloned_profile != NULL && cloned_profile != profile) {
        detached_profiles_.Add(cloned_profile);
      }
    }
  }
}
692
693
// Maps a security token id to its slot in profiles_by_token_; shifts by
// one so kNoSecurityToken (-1) lands in slot 0.
int CpuProfilesCollection::TokenToIndex(int security_token_id) {
  ASSERT(TokenEnumerator::kNoSecurityToken == -1);
  return security_token_id + 1;  // kNoSecurityToken -> 0, 0 -> 1, ...
}
698
699
// Returns the lazily-created per-token list, padded with NULLs to the
// length of the unabridged list so indices line up across all lists
// (GetProfile fills the NULL slots with filtered clones on demand).
List<CpuProfile*>* CpuProfilesCollection::GetProfilesList(
    int security_token_id) {
  const int index = TokenToIndex(security_token_id);
  // Grow the outer list with NULLs up to and including |index|.
  const int lists_to_add = index - profiles_by_token_.length() + 1;
  if (lists_to_add > 0) profiles_by_token_.AddBlock(NULL, lists_to_add);
  List<CpuProfile*>* unabridged_list =
      profiles_by_token_[TokenToIndex(TokenEnumerator::kNoSecurityToken)];
  const int current_count = unabridged_list->length();
  if (profiles_by_token_[index] == NULL) {
    profiles_by_token_[index] = new List<CpuProfile*>(current_count);
  }
  List<CpuProfile*>* list = profiles_by_token_[index];
  // Pad with NULL placeholders so list->length() matches the master list.
  const int profiles_to_add = current_count - list->length();
  if (profiles_to_add > 0) list->AddBlock(NULL, profiles_to_add);
  return list;
}
716
717
// Returns all finished profiles as visible to |security_token_id|: the
// unabridged list itself for kNoSecurityToken, otherwise the per-token
// list with every missing slot eagerly filled by a filtered clone.
List<CpuProfile*>* CpuProfilesCollection::Profiles(int security_token_id) {
  List<CpuProfile*>* unabridged_list =
      profiles_by_token_[TokenToIndex(TokenEnumerator::kNoSecurityToken)];
  if (security_token_id == TokenEnumerator::kNoSecurityToken) {
    return unabridged_list;
  }
  List<CpuProfile*>* list = GetProfilesList(security_token_id);
  const int current_count = unabridged_list->length();
  for (int i = 0; i < current_count; ++i) {
    if (list->at(i) == NULL) {
      (*list)[i] = unabridged_list->at(i)->FilteredClone(security_token_id);
    }
  }
  return list;
}
733
734
// Creates a collection-owned entry for a named function with a known
// source position; strings are interned via the StringsStorage base.
CodeEntry* CpuProfilesCollection::NewCodeEntry(Logger::LogEventsAndTags tag,
                                               String* name,
                                               String* resource_name,
                                               int line_number) {
  CodeEntry* entry = new CodeEntry(tag,
                                   CodeEntry::kEmptyNamePrefix,
                                   GetFunctionName(name),
                                   GetName(resource_name),
                                   line_number,
                                   TokenEnumerator::kNoSecurityToken);
  code_entries_.Add(entry);  // collection retains ownership
  return entry;
}
748
749
// Creates a collection-owned entry for a C-string-named function with
// no source position.
CodeEntry* CpuProfilesCollection::NewCodeEntry(Logger::LogEventsAndTags tag,
                                               const char* name) {
  CodeEntry* entry = new CodeEntry(tag,
                                   CodeEntry::kEmptyNamePrefix,
                                   GetFunctionName(name),
                                   "",
                                   v8::CpuProfileNode::kNoLineNumberInfo,
                                   TokenEnumerator::kNoSecurityToken);
  code_entries_.Add(entry);  // collection retains ownership
  return entry;
}
761
762
// Creates a collection-owned entry with an explicit name prefix (e.g.
// "get "/"set "); such entries inherit their security token from the
// caller's context.
CodeEntry* CpuProfilesCollection::NewCodeEntry(Logger::LogEventsAndTags tag,
                                               const char* name_prefix,
                                               String* name) {
  CodeEntry* entry = new CodeEntry(tag,
                                   name_prefix,
                                   GetName(name),
                                   "",
                                   v8::CpuProfileNode::kNoLineNumberInfo,
                                   TokenEnumerator::kInheritsSecurityToken);
  code_entries_.Add(entry);  // collection retains ownership
  return entry;
}
775
776
// Creates a collection-owned entry identified only by argument count
// (used for argument-count-keyed stubs); inherits the security token.
CodeEntry* CpuProfilesCollection::NewCodeEntry(Logger::LogEventsAndTags tag,
                                               int args_count) {
  CodeEntry* entry = new CodeEntry(tag,
                                   "args_count: ",
                                   GetName(args_count),
                                   "",
                                   v8::CpuProfileNode::kNoLineNumberInfo,
                                   TokenEnumerator::kInheritsSecurityToken);
  code_entries_.Add(entry);  // collection retains ownership
  return entry;
}
788
789
// Called from the sampler for every tick: records the sampled stack in
// every profile currently being recorded, under the list lock.
void CpuProfilesCollection::AddPathToCurrentProfiles(
    const Vector<CodeEntry*>& path) {
  // As starting / stopping profiles is rare relatively to this
  // method, we don't bother minimizing the duration of lock holding,
  // e.g. copying contents of the list to a local vector.
  current_profiles_semaphore_->Wait();
  for (int i = 0; i < current_profiles_.length(); ++i) {
    current_profiles_[i]->AddPath(path);
  }
  current_profiles_semaphore_->Signal();
}
801
802
// Called once per sampler tick; after roughly kWallTimeQueryIntervalMs
// worth of ticks, re-measures the real rate against wall-clock time.
// NOTE(review): countdown appears to be unsigned — assumes it is always
// reset to a positive value before reaching 0; verify initialization.
void SampleRateCalculator::Tick() {
  if (--wall_time_query_countdown_ == 0)
    UpdateMeasurements(OS::TimeCurrentMillis());
}
807
808
// Folds one wall-clock measurement into the running-average tick rate
// (incremental mean: avg += (sample - avg) / n). The first call only
// seeds last_wall_time_ since no interval exists yet.
void SampleRateCalculator::UpdateMeasurements(double current_time) {
  if (measurements_count_++ != 0) {
    // Expected ticks for the interval divided by the actual elapsed ms.
    const double measured_ticks_per_ms =
        (kWallTimeQueryIntervalMs * ticks_per_ms_) /
        (current_time - last_wall_time_);
    // Update the average value.
    ticks_per_ms_ +=
        (measured_ticks_per_ms - ticks_per_ms_) / measurements_count_;
    // Update the externally accessible result.
    result_ = static_cast<AtomicWord>(ticks_per_ms_ * kResultScale);
  }
  last_wall_time_ = current_time;
  // Schedule the next measurement one interval's worth of ticks away.
  wall_time_query_countdown_ =
      static_cast<unsigned>(kWallTimeQueryIntervalMs * ticks_per_ms_);
}
824
825
// Synthetic entry names shown in profiles for frames that do not map to
// user code.
const char* const ProfileGenerator::kAnonymousFunctionName =
    "(anonymous function)";
const char* const ProfileGenerator::kProgramEntryName =
    "(program)";
const char* const ProfileGenerator::kGarbageCollectorEntryName =
    "(garbage collector)";
832
833
// Pre-creates the two synthetic entries used when a tick cannot be
// attributed to user code; both are owned by |profiles|.
ProfileGenerator::ProfileGenerator(CpuProfilesCollection* profiles)
    : profiles_(profiles),
      program_entry_(
          profiles->NewCodeEntry(Logger::FUNCTION_TAG, kProgramEntryName)),
      gc_entry_(
          profiles->NewCodeEntry(Logger::BUILTIN_TAG,
                                 kGarbageCollectorEntryName)) {
}
842
843
// Symbolizes one tick sample into a CodeEntry path and feeds it to all
// running profiles. Slot layout: [0] = pc's entry, [1] = optional
// callback/top-of-stack entry, then one slot per walked stack frame,
// finally an optional VM-state entry; unused slots stay NULL and are
// skipped by the trees.
void ProfileGenerator::RecordTickSample(const TickSample& sample) {
  // Allocate space for stack frames + pc + function + vm-state.
  ScopedVector<CodeEntry*> entries(sample.frames_count + 3);
  // As actual number of decoded code entries may vary, initialize
  // entries vector with NULL values.
  CodeEntry** entry = entries.start();
  memset(entry, 0, entries.length() * sizeof(*entry));
  if (sample.pc != NULL) {
    *entry++ = code_map_.FindEntry(sample.pc);

    if (sample.has_external_callback) {
      // Don't use PC when in external callback code, as it can point
      // inside callback's code, and we will erroneously report
      // that a callback calls itself.
      *(entries.start()) = NULL;
      *entry++ = code_map_.FindEntry(sample.external_callback);
    } else if (sample.tos != NULL) {
      // Find out, if top of stack was pointing inside a JS function
      // meaning that we have encountered a frameless invocation.
      *entry = code_map_.FindEntry(sample.tos);
      if (*entry != NULL && !(*entry)->is_js_function()) {
        *entry = NULL;  // non-JS code at TOS: not a frameless call
      }
      entry++;
    }

    // Symbolize each captured return address.
    for (const Address *stack_pos = sample.stack,
           *stack_end = stack_pos + sample.frames_count;
         stack_pos != stack_end;
         ++stack_pos) {
      *entry++ = code_map_.FindEntry(*stack_pos);
    }
  }

  if (FLAG_prof_browser_mode) {
    bool no_symbolized_entries = true;
    for (CodeEntry** e = entries.start(); e != entry; ++e) {
      if (*e != NULL) {
        no_symbolized_entries = false;
        break;
      }
    }
    // If no frames were symbolized, put the VM state entry in.
    if (no_symbolized_entries) {
      *entry++ = EntryForVMState(sample.state);
    }
  }

  profiles_->AddPathToCurrentProfiles(entries);
}
894
895
// Initializes a named edge (property, context variable, internal or
// shortcut reference). child_index_ is the edge's position in its
// owner's children array — From() relies on it.
void HeapGraphEdge::Init(
    int child_index, Type type, const char* name, HeapEntry* to) {
  ASSERT(type == kContextVariable
         || type == kProperty
         || type == kInternal
         || type == kShortcut);
  child_index_ = child_index;
  type_ = type;
  name_ = name;
  to_ = to;
}
907
908
// Initializes an indexed edge (array element or hidden reference); the
// index_ union member is used instead of name_.
void HeapGraphEdge::Init(int child_index, Type type, int index, HeapEntry* to) {
  ASSERT(type == kElement || type == kHidden);
  child_index_ = child_index;
  type_ = type;
  index_ = index;
  to_ = to;
}
916
917
// Convenience overload: an indexed edge defaults to kElement.
void HeapGraphEdge::Init(int child_index, int index, HeapEntry* to) {
  Init(child_index, kElement, index, to);
}
921
922
// Recovers the owning entry from this edge by pointer arithmetic:
// stepping back child_index_ edges reaches the start of the owner's
// children array, and one HeapEntry before that is the owner itself —
// assumes edges are laid out immediately after their owning HeapEntry
// (see children_arr() usage below); verify against HeapEntry layout.
HeapEntry* HeapGraphEdge::From() {
  return reinterpret_cast<HeapEntry*>(this - child_index_) - 1;
}
926
927
// Manual initializer (entries live in raw snapshot memory, so no
// constructor runs). Counts are fixed up-front because children and
// retainers are stored inline after the entry.
void HeapEntry::Init(HeapSnapshot* snapshot,
                     Type type,
                     const char* name,
                     uint64_t id,
                     int self_size,
                     int children_count,
                     int retainers_count) {
  snapshot_ = snapshot;
  type_ = type;
  painted_ = kUnpainted;
  name_ = name;
  self_size_ = self_size;
  retained_size_ = 0;
  children_count_ = children_count;
  retainers_count_ = retainers_count;
  dominator_ = NULL;

  // Store the 64-bit id through a union: the Id field may be narrower
  // than uint64_t, and this conversion avoids a direct cast.
  union {
    uint64_t set_id;
    Id stored_id;
  } id_adaptor = {id};
  id_ = id_adaptor.stored_id;
}
951
952
SetNamedReference(HeapGraphEdge::Type type,int child_index,const char * name,HeapEntry * entry,int retainer_index)953 void HeapEntry::SetNamedReference(HeapGraphEdge::Type type,
954 int child_index,
955 const char* name,
956 HeapEntry* entry,
957 int retainer_index) {
958 children_arr()[child_index].Init(child_index, type, name, entry);
959 entry->retainers_arr()[retainer_index] = children_arr() + child_index;
960 }
961
962
SetIndexedReference(HeapGraphEdge::Type type,int child_index,int index,HeapEntry * entry,int retainer_index)963 void HeapEntry::SetIndexedReference(HeapGraphEdge::Type type,
964 int child_index,
965 int index,
966 HeapEntry* entry,
967 int retainer_index) {
968 children_arr()[child_index].Init(child_index, type, index, entry);
969 entry->retainers_arr()[retainer_index] = children_arr() + child_index;
970 }
971
972
SetUnidirElementReference(int child_index,int index,HeapEntry * entry)973 void HeapEntry::SetUnidirElementReference(
974 int child_index, int index, HeapEntry* entry) {
975 children_arr()[child_index].Init(child_index, index, entry);
976 }
977
978
// Returns the retained size of this entry. One bit of retained_size_
// (kExactRetainedSizeTag) records whether the exact value has already been
// computed; the tag bit is masked out of the returned value.
int HeapEntry::RetainedSize(bool exact) {
  // Tag bit unset => only the approximate value is cached; compute on demand.
  if (exact && (retained_size_ & kExactRetainedSizeTag) == 0) {
    CalculateExactRetainedSize();
  }
  return retained_size_ & (~kExactRetainedSizeTag);
}
985
986
// Applies |visitor| to this entry and to every entry reachable from it,
// painting each visited entry with the "reachable" color. Shortcut edges
// are not followed (see also CalculateExactRetainedSize, which skips them
// the same way). Uses an explicit worklist instead of recursion to cope
// with deep object graphs.
template<class Visitor>
void HeapEntry::ApplyAndPaintAllReachable(Visitor* visitor) {
  List<HeapEntry*> list(10);
  list.Add(this);
  this->paint_reachable();
  visitor->Apply(this);
  while (!list.is_empty()) {
    HeapEntry* entry = list.RemoveLast();
    Vector<HeapGraphEdge> children = entry->children();
    for (int i = 0; i < children.length(); ++i) {
      if (children[i].type() == HeapGraphEdge::kShortcut) continue;
      HeapEntry* child = children[i].to();
      // Paint-before-push doubles as the "already visited" check.
      if (!child->painted_reachable()) {
        list.Add(child);
        child->paint_reachable();
        visitor->Apply(child);
      }
    }
  }
}
1007
1008
// No-op visitor for ApplyAndPaintAllReachable, used when only the painting
// side effect is wanted.
class NullClass {
 public:
  void Apply(HeapEntry* entry) { }
};
1013
PaintAllReachable()1014 void HeapEntry::PaintAllReachable() {
1015 NullClass null;
1016 ApplyAndPaintAllReachable(&null);
1017 }
1018
1019
// Debug dump: prints this entry and its subtree (up to |max_depth| levels)
// to stdout. Each edge is prefixed with a marker encoding its type:
// '#' context variable, '$' internal/hidden, '^' shortcut, bare name for
// properties, bare index for elements.
void HeapEntry::Print(int max_depth, int indent) {
  OS::Print("%6d %6d [%llu] ", self_size(), RetainedSize(false), id());
  if (type() != kString) {
    OS::Print("%s %.40s\n", TypeAsString(), name_);
  } else {
    // Strings are quoted, truncated to ~40 characters, with embedded
    // newlines escaped so the dump stays one line per entry.
    OS::Print("\"");
    const char* c = name_;
    while (*c && (c - name_) <= 40) {
      if (*c != '\n')
        OS::Print("%c", *c);
      else
        OS::Print("\\n");
      ++c;
    }
    OS::Print("\"\n");
  }
  if (--max_depth == 0) return;
  Vector<HeapGraphEdge> ch = children();
  for (int i = 0; i < ch.length(); ++i) {
    HeapGraphEdge& edge = ch[i];
    switch (edge.type()) {
      case HeapGraphEdge::kContextVariable:
        OS::Print(" %*c #%s: ", indent, ' ', edge.name());
        break;
      case HeapGraphEdge::kElement:
        OS::Print(" %*c %d: ", indent, ' ', edge.index());
        break;
      case HeapGraphEdge::kInternal:
        OS::Print(" %*c $%s: ", indent, ' ', edge.name());
        break;
      case HeapGraphEdge::kProperty:
        OS::Print(" %*c %s: ", indent, ' ', edge.name());
        break;
      case HeapGraphEdge::kHidden:
        OS::Print(" %*c $%d: ", indent, ' ', edge.index());
        break;
      case HeapGraphEdge::kShortcut:
        OS::Print(" %*c ^%s: ", indent, ' ', edge.name());
        break;
      default:
        OS::Print("!!! unknown edge type: %d ", edge.type());
    }
    edge.to()->Print(max_depth, indent + 2);
  }
}
1065
1066
// Human-readable label for this entry's type, used by the debug printer.
const char* HeapEntry::TypeAsString() {
  switch (type()) {
    case kHidden: return "/hidden/";
    case kObject: return "/object/";
    case kClosure: return "/closure/";
    case kString: return "/string/";
    case kCode: return "/code/";
    case kArray: return "/array/";
    case kRegExp: return "/regexp/";
    case kHeapNumber: return "/number/";
    case kNative: return "/native/";
    default: return "???";
  }
}
1081
1082
EntriesSize(int entries_count,int children_count,int retainers_count)1083 int HeapEntry::EntriesSize(int entries_count,
1084 int children_count,
1085 int retainers_count) {
1086 return sizeof(HeapEntry) * entries_count // NOLINT
1087 + sizeof(HeapGraphEdge) * children_count // NOLINT
1088 + sizeof(HeapGraphEdge*) * retainers_count; // NOLINT
1089 }
1090
1091
1092 class RetainedSizeCalculator {
1093 public:
RetainedSizeCalculator()1094 RetainedSizeCalculator()
1095 : retained_size_(0) {
1096 }
1097
reained_size() const1098 int reained_size() const { return retained_size_; }
1099
Apply(HeapEntry ** entry_ptr)1100 void Apply(HeapEntry** entry_ptr) {
1101 if ((*entry_ptr)->painted_reachable()) {
1102 retained_size_ += (*entry_ptr)->self_size();
1103 }
1104 }
1105
1106 private:
1107 int retained_size_;
1108 };
1109
// Computes the exact retained size of this entry and caches it in
// retained_size_ with kExactRetainedSizeTag set.
void HeapEntry::CalculateExactRetainedSize() {
  // To calculate retained size, first we paint all reachable nodes in
  // one color, then we paint (or re-paint) all nodes reachable from
  // other nodes with a different color. Then we sum up self sizes of
  // nodes painted with the first color.
  snapshot()->ClearPaint();
  PaintAllReachable();

  // Second pass: starting from the snapshot root (unless this entry IS
  // the root), re-paint everything reachable without going through this
  // entry. Shortcut edges are skipped, matching PaintAllReachable.
  List<HeapEntry*> list(10);
  HeapEntry* root = snapshot()->root();
  if (this != root) {
    list.Add(root);
    root->paint_reachable_from_others();
  }
  while (!list.is_empty()) {
    HeapEntry* curr = list.RemoveLast();
    Vector<HeapGraphEdge> children = curr->children();
    for (int i = 0; i < children.length(); ++i) {
      if (children[i].type() == HeapGraphEdge::kShortcut) continue;
      HeapEntry* child = children[i].to();
      // Never descend through |this|: nodes only reachable via it keep
      // the first color and therefore count towards its retained size.
      if (child != this && child->not_painted_reachable_from_others()) {
        list.Add(child);
        child->paint_reachable_from_others();
      }
    }
  }

  // Sum self sizes of nodes that kept the first color, then tag the
  // cached value as exact.
  RetainedSizeCalculator ret_size_calc;
  snapshot()->IterateEntries(&ret_size_calc);
  retained_size_ = ret_size_calc.reained_size();
  ASSERT((retained_size_ & kExactRetainedSizeTag) == 0);
  retained_size_ |= kExactRetainedSizeTag;
}
1143
1144
// It is very important to keep objects that form a heap snapshot
// as small as possible.
namespace {  // Avoid littering the global namespace.

// Expected sizes of the snapshot node/edge structs, selected by pointer
// width (4 = 32-bit, 8 = 64-bit builds). The HeapSnapshot constructor
// STATIC_ASSERTs against these to catch accidental struct growth.
template <size_t ptr_size> struct SnapshotSizeConstants;

template <> struct SnapshotSizeConstants<4> {
  static const int kExpectedHeapGraphEdgeSize = 12;
  static const int kExpectedHeapEntrySize = 36;
};

template <> struct SnapshotSizeConstants<8> {
  static const int kExpectedHeapGraphEdgeSize = 24;
  static const int kExpectedHeapEntrySize = 48;
};

}  // namespace
1162
// Creates an empty snapshot; entry storage is allocated later via
// AllocateEntries once counts are known.
HeapSnapshot::HeapSnapshot(HeapSnapshotsCollection* collection,
                           HeapSnapshot::Type type,
                           const char* title,
                           unsigned uid)
    : collection_(collection),
      type_(type),
      title_(title),
      uid_(uid),
      root_entry_(NULL),
      gc_roots_entry_(NULL),
      natives_root_entry_(NULL),
      raw_entries_(NULL),
      entries_sorted_(false) {
  // Guard against accidental growth of the per-node structs; snapshots
  // may contain millions of them (see SnapshotSizeConstants above).
  STATIC_ASSERT(
      sizeof(HeapGraphEdge) ==
      SnapshotSizeConstants<sizeof(void*)>::kExpectedHeapGraphEdgeSize);  // NOLINT
  STATIC_ASSERT(
      sizeof(HeapEntry) ==
      SnapshotSizeConstants<sizeof(void*)>::kExpectedHeapEntrySize);  // NOLINT
}
1183
HeapSnapshot::~HeapSnapshot() {
  // All entries, edges and retainer pointers live inside this one buffer.
  DeleteArray(raw_entries_);
}
1187
1188
// Unregisters this snapshot from its collection, then frees it. Callers
// must not use the pointer afterwards.
void HeapSnapshot::Delete() {
  collection_->RemoveSnapshot(this);
  delete this;
}
1193
1194
AllocateEntries(int entries_count,int children_count,int retainers_count)1195 void HeapSnapshot::AllocateEntries(int entries_count,
1196 int children_count,
1197 int retainers_count) {
1198 ASSERT(raw_entries_ == NULL);
1199 raw_entries_ = NewArray<char>(
1200 HeapEntry::EntriesSize(entries_count, children_count, retainers_count));
1201 #ifdef DEBUG
1202 raw_entries_size_ =
1203 HeapEntry::EntriesSize(entries_count, children_count, retainers_count);
1204 #endif
1205 }
1206
1207
// Iteration callback: resets the paint of a single entry.
static void HeapEntryClearPaint(HeapEntry** entry_ptr) {
  (*entry_ptr)->clear_paint();
}
1211
// Resets the paint of every entry, preparing for a new traversal.
void HeapSnapshot::ClearPaint() {
  entries_.Iterate(HeapEntryClearPaint);
}
1215
1216
AddRootEntry(int children_count)1217 HeapEntry* HeapSnapshot::AddRootEntry(int children_count) {
1218 ASSERT(root_entry_ == NULL);
1219 return (root_entry_ = AddEntry(HeapEntry::kObject,
1220 "",
1221 HeapObjectsMap::kInternalRootObjectId,
1222 0,
1223 children_count,
1224 0));
1225 }
1226
1227
AddGcRootsEntry(int children_count,int retainers_count)1228 HeapEntry* HeapSnapshot::AddGcRootsEntry(int children_count,
1229 int retainers_count) {
1230 ASSERT(gc_roots_entry_ == NULL);
1231 return (gc_roots_entry_ = AddEntry(HeapEntry::kObject,
1232 "(GC roots)",
1233 HeapObjectsMap::kGcRootsObjectId,
1234 0,
1235 children_count,
1236 retainers_count));
1237 }
1238
1239
AddNativesRootEntry(int children_count,int retainers_count)1240 HeapEntry* HeapSnapshot::AddNativesRootEntry(int children_count,
1241 int retainers_count) {
1242 ASSERT(natives_root_entry_ == NULL);
1243 return (natives_root_entry_ = AddEntry(
1244 HeapEntry::kObject,
1245 "(Native objects)",
1246 HeapObjectsMap::kNativesRootObjectId,
1247 0,
1248 children_count,
1249 retainers_count));
1250 }
1251
1252
AddEntry(HeapEntry::Type type,const char * name,uint64_t id,int size,int children_count,int retainers_count)1253 HeapEntry* HeapSnapshot::AddEntry(HeapEntry::Type type,
1254 const char* name,
1255 uint64_t id,
1256 int size,
1257 int children_count,
1258 int retainers_count) {
1259 HeapEntry* entry = GetNextEntryToInit();
1260 entry->Init(this, type, name, id, size, children_count, retainers_count);
1261 return entry;
1262 }
1263
1264
SetDominatorsToSelf()1265 void HeapSnapshot::SetDominatorsToSelf() {
1266 for (int i = 0; i < entries_.length(); ++i) {
1267 HeapEntry* entry = entries_[i];
1268 if (entry->dominator() == NULL) entry->set_dominator(entry);
1269 }
1270 }
1271
1272
// Returns a pointer to the next uninitialized HeapEntry slot inside the
// raw buffer. Entries are variable-sized (each is followed by its own
// edge/retainer arrays — see HeapEntry::EntriesSize), so the next slot
// begins EntrySize() bytes past the previous entry.
HeapEntry* HeapSnapshot::GetNextEntryToInit() {
  if (entries_.length() > 0) {
    HeapEntry* last_entry = entries_.last();
    entries_.Add(reinterpret_cast<HeapEntry*>(
        reinterpret_cast<char*>(last_entry) + last_entry->EntrySize()));
  } else {
    // First entry starts at the beginning of the buffer.
    entries_.Add(reinterpret_cast<HeapEntry*>(raw_entries_));
  }
  // raw_entries_size_ exists only in DEBUG builds, like ASSERT itself.
  ASSERT(reinterpret_cast<char*>(entries_.last()) <
         (raw_entries_ + raw_entries_size_));
  return entries_.last();
}
1285
1286
GetEntryById(uint64_t id)1287 HeapEntry* HeapSnapshot::GetEntryById(uint64_t id) {
1288 List<HeapEntry*>* entries_by_id = GetSortedEntriesList();
1289
1290 // Perform a binary search by id.
1291 int low = 0;
1292 int high = entries_by_id->length() - 1;
1293 while (low <= high) {
1294 int mid =
1295 (static_cast<unsigned int>(low) + static_cast<unsigned int>(high)) >> 1;
1296 uint64_t mid_id = entries_by_id->at(mid)->id();
1297 if (mid_id > id)
1298 high = mid - 1;
1299 else if (mid_id < id)
1300 low = mid + 1;
1301 else
1302 return entries_by_id->at(mid);
1303 }
1304 return NULL;
1305 }
1306
1307
// Three-way comparator ordering pointed-to entries by ascending id.
template<class T>
static int SortByIds(const T* entry1_ptr,
                     const T* entry2_ptr) {
  if ((*entry1_ptr)->id() < (*entry2_ptr)->id()) return -1;
  if ((*entry2_ptr)->id() < (*entry1_ptr)->id()) return 1;
  return 0;
}
1314
// Returns the entries list sorted by id, sorting lazily on first use and
// memoizing via entries_sorted_.
List<HeapEntry*>* HeapSnapshot::GetSortedEntriesList() {
  if (!entries_sorted_) {
    entries_.Sort(SortByIds);
    entries_sorted_ = true;
  }
  return &entries_;
}
1322
1323
// Debug dump of the whole snapshot, starting at the root entry.
void HeapSnapshot::Print(int max_depth) {
  root()->Print(max_depth, 0);
}
1327
1328
// Ids are split by parity: V8 heap objects get odd ids (FindObject starts
// at kFirstAvailableObjectId and steps by 2), while embedder-provided
// native objects get even ids (HeapObjectsMap::GenerateId shifts its hash
// left by one). The special ids below are odd, matching heap objects.
const uint64_t HeapObjectsMap::kInternalRootObjectId = 1;
const uint64_t HeapObjectsMap::kGcRootsObjectId = 3;
const uint64_t HeapObjectsMap::kNativesRootObjectId = 5;
// Increase kFirstAvailableObjectId if new 'special' objects appear.
const uint64_t HeapObjectsMap::kFirstAvailableObjectId = 7;
1336
// Starts in initial-fill mode: until the first snapshot completes, every
// FindObject call assigns a fresh id without a prior lookup.
HeapObjectsMap::HeapObjectsMap()
    : initial_fill_mode_(true),
      next_id_(kFirstAvailableObjectId),
      entries_map_(AddressesMatch),
      entries_(new List<EntryInfo>()) { }
1342
1343
HeapObjectsMap::~HeapObjectsMap() {
  // entries_ is heap-allocated so RemoveDeadEntries can swap it wholesale.
  delete entries_;
}
1347
1348
// Called after a snapshot completes: leaves initial-fill mode and drops
// mappings for objects that were not seen during generation.
void HeapObjectsMap::SnapshotGenerationFinished() {
  initial_fill_mode_ = false;
  RemoveDeadEntries();
}
1353
1354
FindObject(Address addr)1355 uint64_t HeapObjectsMap::FindObject(Address addr) {
1356 if (!initial_fill_mode_) {
1357 uint64_t existing = FindEntry(addr);
1358 if (existing != 0) return existing;
1359 }
1360 uint64_t id = next_id_;
1361 next_id_ += 2;
1362 AddEntry(addr, id);
1363 return id;
1364 }
1365
1366
MoveObject(Address from,Address to)1367 void HeapObjectsMap::MoveObject(Address from, Address to) {
1368 if (from == to) return;
1369 HashMap::Entry* entry = entries_map_.Lookup(from, AddressHash(from), false);
1370 if (entry != NULL) {
1371 void* value = entry->value;
1372 entries_map_.Remove(from, AddressHash(from));
1373 entry = entries_map_.Lookup(to, AddressHash(to), true);
1374 // We can have an entry at the new location, it is OK, as GC can overwrite
1375 // dead objects with alive objects being moved.
1376 entry->value = value;
1377 }
1378 }
1379
1380
AddEntry(Address addr,uint64_t id)1381 void HeapObjectsMap::AddEntry(Address addr, uint64_t id) {
1382 HashMap::Entry* entry = entries_map_.Lookup(addr, AddressHash(addr), true);
1383 ASSERT(entry->value == NULL);
1384 entry->value = reinterpret_cast<void*>(entries_->length());
1385 entries_->Add(EntryInfo(id));
1386 }
1387
1388
FindEntry(Address addr)1389 uint64_t HeapObjectsMap::FindEntry(Address addr) {
1390 HashMap::Entry* entry = entries_map_.Lookup(addr, AddressHash(addr), false);
1391 if (entry != NULL) {
1392 int entry_index =
1393 static_cast<int>(reinterpret_cast<intptr_t>(entry->value));
1394 EntryInfo& entry_info = entries_->at(entry_index);
1395 entry_info.accessed = true;
1396 return entry_info.id;
1397 } else {
1398 return 0;
1399 }
1400 }
1401
1402
// Compacts the address -> EntryInfo mapping: entries that were looked up
// since the last sweep (accessed == true, set by FindEntry) survive with
// the flag reset; the rest are treated as dead and removed.
void HeapObjectsMap::RemoveDeadEntries() {
  List<EntryInfo>* new_entries = new List<EntryInfo>();
  List<void*> dead_entries;
  for (HashMap::Entry* entry = entries_map_.Start();
       entry != NULL;
       entry = entries_map_.Next(entry)) {
    int entry_index =
        static_cast<int>(reinterpret_cast<intptr_t>(entry->value));
    EntryInfo& entry_info = entries_->at(entry_index);
    if (entry_info.accessed) {
      // Re-point the hash map at the entry's index in the compacted list.
      entry->value = reinterpret_cast<void*>(new_entries->length());
      new_entries->Add(EntryInfo(entry_info.id, false));
    } else {
      // Collect keys now; removing during iteration would disturb the walk.
      dead_entries.Add(entry->key);
    }
  }
  for (int i = 0; i < dead_entries.length(); ++i) {
    void* raw_entry = dead_entries[i];
    entries_map_.Remove(
        raw_entry, AddressHash(reinterpret_cast<Address>(raw_entry)));
  }
  // Swap in the compacted EntryInfo list.
  delete entries_;
  entries_ = new_entries;
}
1427
1428
GenerateId(v8::RetainedObjectInfo * info)1429 uint64_t HeapObjectsMap::GenerateId(v8::RetainedObjectInfo* info) {
1430 uint64_t id = static_cast<uint64_t>(info->GetHash());
1431 const char* label = info->GetLabel();
1432 id ^= HashSequentialString(label, static_cast<int>(strlen(label)));
1433 intptr_t element_count = info->GetElementCount();
1434 if (element_count != -1)
1435 id ^= ComputeIntegerHash(static_cast<uint32_t>(element_count));
1436 return id << 1;
1437 }
1438
1439
// Object tracking stays off until the first snapshot is requested
// (see NewSnapshot).
HeapSnapshotsCollection::HeapSnapshotsCollection()
    : is_tracking_objects_(false),
      snapshots_uids_(HeapSnapshotsMatch),
      token_enumerator_(new TokenEnumerator()) {
}
1445
1446
// Iteration callback used by the collection destructor.
static void DeleteHeapSnapshot(HeapSnapshot** snapshot_ptr) {
  delete *snapshot_ptr;
}
1450
1451
// The collection owns both the token enumerator and all registered
// snapshots.
HeapSnapshotsCollection::~HeapSnapshotsCollection() {
  delete token_enumerator_;
  snapshots_.Iterate(DeleteHeapSnapshot);
}
1456
1457
// Creates a new (not yet registered) snapshot; it is added to the
// collection later by SnapshotGenerationFinished.
HeapSnapshot* HeapSnapshotsCollection::NewSnapshot(HeapSnapshot::Type type,
                                                   const char* name,
                                                   unsigned uid) {
  is_tracking_objects_ = true;  // Start watching for heap objects moves.
  return new HeapSnapshot(this, type, name, uid);
}
1464
1465
// Finalizes id bookkeeping and, when generation succeeded (|snapshot| is
// non-NULL), registers the snapshot in both the list and the uid map.
void HeapSnapshotsCollection::SnapshotGenerationFinished(
    HeapSnapshot* snapshot) {
  ids_.SnapshotGenerationFinished();
  if (snapshot != NULL) {
    snapshots_.Add(snapshot);
    HashMap::Entry* entry =
        snapshots_uids_.Lookup(reinterpret_cast<void*>(snapshot->uid()),
                               static_cast<uint32_t>(snapshot->uid()),
                               true);
    ASSERT(entry->value == NULL);
    entry->value = snapshot;
  }
}
1479
1480
GetSnapshot(unsigned uid)1481 HeapSnapshot* HeapSnapshotsCollection::GetSnapshot(unsigned uid) {
1482 HashMap::Entry* entry = snapshots_uids_.Lookup(reinterpret_cast<void*>(uid),
1483 static_cast<uint32_t>(uid),
1484 false);
1485 return entry != NULL ? reinterpret_cast<HeapSnapshot*>(entry->value) : NULL;
1486 }
1487
1488
RemoveSnapshot(HeapSnapshot * snapshot)1489 void HeapSnapshotsCollection::RemoveSnapshot(HeapSnapshot* snapshot) {
1490 snapshots_.RemoveElement(snapshot);
1491 unsigned uid = snapshot->uid();
1492 snapshots_uids_.Remove(reinterpret_cast<void*>(uid),
1493 static_cast<uint32_t>(uid));
1494 }
1495
1496
// Sentinel stored in the entries map before real HeapEntry objects are
// allocated; deliberately not a dereferenceable pointer.
HeapEntry *const HeapEntriesMap::kHeapEntryPlaceholder =
    reinterpret_cast<HeapEntry*>(1);
1499
// Running totals are accumulated by CountReference and later used to size
// the snapshot's entry buffer.
HeapEntriesMap::HeapEntriesMap()
    : entries_(HeapThingsMatch),
      entries_count_(0),
      total_children_count_(0),
      total_retainers_count_(0) {
}
1506
1507
HeapEntriesMap::~HeapEntriesMap() {
  // The map owns its heap-allocated EntryInfo values (created in Pair).
  for (HashMap::Entry* p = entries_.Start(); p != NULL; p = entries_.Next(p)) {
    delete reinterpret_cast<EntryInfo*>(p->value);
  }
}
1513
1514
// Second pass of snapshot building: with children/retainers counts now
// known, asks each registered allocator to create the real HeapEntry,
// then resets the per-entry counters to zero.
void HeapEntriesMap::AllocateEntries() {
  for (HashMap::Entry* p = entries_.Start();
       p != NULL;
       p = entries_.Next(p)) {
    EntryInfo* entry_info = reinterpret_cast<EntryInfo*>(p->value);
    entry_info->entry = entry_info->allocator->AllocateEntry(
        p->key,
        entry_info->children_count,
        entry_info->retainers_count);
    ASSERT(entry_info->entry != NULL);
    ASSERT(entry_info->entry != kHeapEntryPlaceholder);
    entry_info->children_count = 0;
    entry_info->retainers_count = 0;
  }
}
1530
1531
Map(HeapThing thing)1532 HeapEntry* HeapEntriesMap::Map(HeapThing thing) {
1533 HashMap::Entry* cache_entry = entries_.Lookup(thing, Hash(thing), false);
1534 if (cache_entry != NULL) {
1535 EntryInfo* entry_info = reinterpret_cast<EntryInfo*>(cache_entry->value);
1536 return entry_info->entry;
1537 } else {
1538 return NULL;
1539 }
1540 }
1541
1542
Pair(HeapThing thing,HeapEntriesAllocator * allocator,HeapEntry * entry)1543 void HeapEntriesMap::Pair(
1544 HeapThing thing, HeapEntriesAllocator* allocator, HeapEntry* entry) {
1545 HashMap::Entry* cache_entry = entries_.Lookup(thing, Hash(thing), true);
1546 ASSERT(cache_entry->value == NULL);
1547 cache_entry->value = new EntryInfo(entry, allocator);
1548 ++entries_count_;
1549 }
1550
1551
// Counts one from->to reference: bumps |from|'s children counter and
// |to|'s retainers counter, plus the map-wide totals. The optional out
// parameters report the pre-increment counters, which callers can use as
// insertion indices.
void HeapEntriesMap::CountReference(HeapThing from, HeapThing to,
                                    int* prev_children_count,
                                    int* prev_retainers_count) {
  HashMap::Entry* from_cache_entry = entries_.Lookup(from, Hash(from), false);
  HashMap::Entry* to_cache_entry = entries_.Lookup(to, Hash(to), false);
  // Both ends must already be paired.
  ASSERT(from_cache_entry != NULL);
  ASSERT(to_cache_entry != NULL);
  EntryInfo* from_entry_info =
      reinterpret_cast<EntryInfo*>(from_cache_entry->value);
  EntryInfo* to_entry_info =
      reinterpret_cast<EntryInfo*>(to_cache_entry->value);
  if (prev_children_count)
    *prev_children_count = from_entry_info->children_count;
  if (prev_retainers_count)
    *prev_retainers_count = to_entry_info->retainers_count;
  ++from_entry_info->children_count;
  ++to_entry_info->retainers_count;
  ++total_children_count_;
  ++total_retainers_count_;
}
1572
1573
// A set of heap objects, implemented on top of the shared HashMap with
// HeapEntriesMap's hashing/matching.
HeapObjectsSet::HeapObjectsSet()
    : entries_(HeapEntriesMap::HeapThingsMatch) {
}
1577
1578
// Removes all elements from the set.
void HeapObjectsSet::Clear() {
  entries_.Clear();
}
1582
1583
Contains(Object * obj)1584 bool HeapObjectsSet::Contains(Object* obj) {
1585 if (!obj->IsHeapObject()) return false;
1586 HeapObject* object = HeapObject::cast(obj);
1587 HashMap::Entry* cache_entry =
1588 entries_.Lookup(object, HeapEntriesMap::Hash(object), false);
1589 return cache_entry != NULL;
1590 }
1591
1592
Insert(Object * obj)1593 void HeapObjectsSet::Insert(Object* obj) {
1594 if (!obj->IsHeapObject()) return;
1595 HeapObject* object = HeapObject::cast(obj);
1596 HashMap::Entry* cache_entry =
1597 entries_.Lookup(object, HeapEntriesMap::Hash(object), true);
1598 if (cache_entry->value == NULL) {
1599 cache_entry->value = HeapEntriesMap::kHeapEntryPlaceholder;
1600 }
1601 }
1602
1603
// Fake HeapObject addresses used as map keys for the two synthetic
// snapshot roots; the pointer values reuse the reserved special object
// ids, which can never collide with real heap addresses.
HeapObject *const V8HeapExplorer::kInternalRootObject =
    reinterpret_cast<HeapObject*>(
        static_cast<intptr_t>(HeapObjectsMap::kInternalRootObjectId));
HeapObject *const V8HeapExplorer::kGcRootsObject =
    reinterpret_cast<HeapObject*>(
        static_cast<intptr_t>(HeapObjectsMap::kGcRootsObjectId));
1610
1611
// The filler is supplied later by the snapshot generator; until then it
// stays NULL.
V8HeapExplorer::V8HeapExplorer(
    HeapSnapshot* snapshot,
    SnapshottingProgressReportingInterface* progress)
    : snapshot_(snapshot),
      collection_(snapshot_->collection()),
      progress_(progress),
      filler_(NULL) {
}
1620
1621
// The explorer owns none of the objects it references.
V8HeapExplorer::~V8HeapExplorer() {
}
1624
1625
AllocateEntry(HeapThing ptr,int children_count,int retainers_count)1626 HeapEntry* V8HeapExplorer::AllocateEntry(
1627 HeapThing ptr, int children_count, int retainers_count) {
1628 return AddEntry(
1629 reinterpret_cast<HeapObject*>(ptr), children_count, retainers_count);
1630 }
1631
1632
// Creates a snapshot entry for |object|, dispatching on its runtime type
// to choose the entry type and display name. Order matters: the synthetic
// roots are matched by address first, then the most specific heap object
// predicates (IsJSFunction before the general IsJSObject, etc.).
HeapEntry* V8HeapExplorer::AddEntry(HeapObject* object,
                                    int children_count,
                                    int retainers_count) {
  if (object == kInternalRootObject) {
    ASSERT(retainers_count == 0);
    return snapshot_->AddRootEntry(children_count);
  } else if (object == kGcRootsObject) {
    return snapshot_->AddGcRootsEntry(children_count, retainers_count);
  } else if (object->IsJSFunction()) {
    // Closures are named after their shared function info's name.
    JSFunction* func = JSFunction::cast(object);
    SharedFunctionInfo* shared = func->shared();
    return AddEntry(object,
                    HeapEntry::kClosure,
                    collection_->names()->GetName(String::cast(shared->name())),
                    children_count,
                    retainers_count);
  } else if (object->IsJSRegExp()) {
    // RegExps are named after their pattern source.
    JSRegExp* re = JSRegExp::cast(object);
    return AddEntry(object,
                    HeapEntry::kRegExp,
                    collection_->names()->GetName(re->Pattern()),
                    children_count,
                    retainers_count);
  } else if (object->IsJSObject()) {
    // Plain objects are named after their constructor.
    return AddEntry(object,
                    HeapEntry::kObject,
                    collection_->names()->GetName(
                        GetConstructorNameForHeapProfile(
                            JSObject::cast(object))),
                    children_count,
                    retainers_count);
  } else if (object->IsString()) {
    return AddEntry(object,
                    HeapEntry::kString,
                    collection_->names()->GetName(String::cast(object)),
                    children_count,
                    retainers_count);
  } else if (object->IsCode()) {
    return AddEntry(object,
                    HeapEntry::kCode,
                    "",
                    children_count,
                    retainers_count);
  } else if (object->IsSharedFunctionInfo()) {
    SharedFunctionInfo* shared = SharedFunctionInfo::cast(object);
    return AddEntry(object,
                    HeapEntry::kCode,
                    collection_->names()->GetName(String::cast(shared->name())),
                    children_count,
                    retainers_count);
  } else if (object->IsScript()) {
    // A script's name may be absent (e.g. eval'd code); fall back to "".
    Script* script = Script::cast(object);
    return AddEntry(object,
                    HeapEntry::kCode,
                    script->name()->IsString() ?
                        collection_->names()->GetName(
                            String::cast(script->name()))
                        : "",
                    children_count,
                    retainers_count);
  } else if (object->IsFixedArray() || object->IsByteArray()) {
    return AddEntry(object,
                    HeapEntry::kArray,
                    "",
                    children_count,
                    retainers_count);
  } else if (object->IsHeapNumber()) {
    return AddEntry(object,
                    HeapEntry::kHeapNumber,
                    "number",
                    children_count,
                    retainers_count);
  }
  // Everything else is a VM-internal object, labeled "system / ...".
  return AddEntry(object,
                  HeapEntry::kHidden,
                  GetSystemEntryName(object),
                  children_count,
                  retainers_count);
}
1712
1713
AddEntry(HeapObject * object,HeapEntry::Type type,const char * name,int children_count,int retainers_count)1714 HeapEntry* V8HeapExplorer::AddEntry(HeapObject* object,
1715 HeapEntry::Type type,
1716 const char* name,
1717 int children_count,
1718 int retainers_count) {
1719 return snapshot_->AddEntry(type,
1720 name,
1721 collection_->GetObjectId(object->address()),
1722 object->Size(),
1723 children_count,
1724 retainers_count);
1725 }
1726
1727
// Registers the two synthetic root objects with the filler.
// NOTE(review): the internal root is added before the GC roots entry —
// presumably downstream code depends on this ordering; confirm before
// reordering.
void V8HeapExplorer::AddRootEntries(SnapshotFillerInterface* filler) {
  filler->AddEntry(kInternalRootObject, this);
  filler->AddEntry(kGcRootsObject, this);
}
1732
1733
// Label for VM-internal objects, derived from the instance type. Struct
// types are expanded via the STRUCT_LIST macro; anything unmatched is a
// generic "system" entry.
const char* V8HeapExplorer::GetSystemEntryName(HeapObject* object) {
  switch (object->map()->instance_type()) {
    case MAP_TYPE: return "system / Map";
    case JS_GLOBAL_PROPERTY_CELL_TYPE: return "system / JSGlobalPropertyCell";
    case PROXY_TYPE: return "system / Proxy";
    case ODDBALL_TYPE: return "system / Oddball";
#define MAKE_STRUCT_CASE(NAME, Name, name) \
    case NAME##_TYPE: return "system / "#Name;
  STRUCT_LIST(MAKE_STRUCT_CASE)
#undef MAKE_STRUCT_CASE
    default: return "system";
  }
}
1747
1748
EstimateObjectsCount()1749 int V8HeapExplorer::EstimateObjectsCount() {
1750 HeapIterator iterator(HeapIterator::kFilterUnreachable);
1751 int objects_count = 0;
1752 for (HeapObject* obj = iterator.next();
1753 obj != NULL;
1754 obj = iterator.next(), ++objects_count) {}
1755 return objects_count;
1756 }
1757
1758
// Visitor that reports every not-yet-reported pointer field of an object
// as a hidden (indexed) reference. Fields already reported under a proper
// name are tagged in place by MarkVisitedField (their pointer gets the
// failure tag bit set — a bit pattern real heap pointers never carry);
// CheckVisitedAndUnmark detects the tag, restores the original pointer,
// and tells VisitPointers to skip that field.
class IndexedReferencesExtractor : public ObjectVisitor {
 public:
  IndexedReferencesExtractor(V8HeapExplorer* generator,
                             HeapObject* parent_obj,
                             HeapEntry* parent_entry)
      : generator_(generator),
        parent_obj_(parent_obj),
        parent_(parent_entry),
        next_index_(1) {
  }
  // Reports each untagged pointer as a hidden reference with a running
  // 1-based index.
  void VisitPointers(Object** start, Object** end) {
    for (Object** p = start; p < end; p++) {
      if (CheckVisitedAndUnmark(p)) continue;
      generator_->SetHiddenReference(parent_obj_, parent_, next_index_++, *p);
    }
  }
  // Tags the pointer stored at |obj|+|offset| so the indexed pass skips
  // it; negative offsets mean "no such field" and are ignored.
  static void MarkVisitedField(HeapObject* obj, int offset) {
    if (offset < 0) return;
    Address field = obj->address() + offset;
    ASSERT(!Memory::Object_at(field)->IsFailure());
    ASSERT(Memory::Object_at(field)->IsHeapObject());
    *field |= kFailureTag;
  }
 private:
  // Returns true (and restores the pointer) if the field was tagged by
  // MarkVisitedField.
  bool CheckVisitedAndUnmark(Object** field) {
    if ((*field)->IsFailure()) {
      intptr_t untagged = reinterpret_cast<intptr_t>(*field) & ~kFailureTagMask;
      *field = reinterpret_cast<Object*>(untagged | kHeapObjectTag);
      ASSERT((*field)->IsHeapObject());
      return true;
    }
    return false;
  }
  V8HeapExplorer* generator_;
  HeapObject* parent_obj_;   // Object whose fields are being scanned.
  HeapEntry* parent_;        // Snapshot entry of that object.
  int next_index_;           // Next hidden-reference index (1-based).
};
1797
1798
// Records all outgoing references of |obj| in the snapshot. Named
// references (properties, internal fields, etc.) are extracted first and
// their fields tagged; the IndexedReferencesExtractor then reports every
// remaining pointer field as a hidden reference.
void V8HeapExplorer::ExtractReferences(HeapObject* obj) {
  HeapEntry* entry = GetEntry(obj);
  if (entry == NULL) return;  // No interest in this object.

  if (obj->IsJSGlobalProxy()) {
    // We need to reference JS global objects from snapshot's root.
    // We use JSGlobalProxy because this is what embedder (e.g. browser)
    // uses for the global object.
    JSGlobalProxy* proxy = JSGlobalProxy::cast(obj);
    SetRootShortcutReference(proxy->map()->prototype());
    SetInternalReference(obj, entry, "map", obj->map(), HeapObject::kMapOffset);
    IndexedReferencesExtractor refs_extractor(this, obj, entry);
    obj->Iterate(&refs_extractor);
  } else if (obj->IsJSObject()) {
    // JS objects: closure/property/element/internal references, plus the
    // prototype and, for functions, shared info / context / literals.
    JSObject* js_obj = JSObject::cast(obj);
    ExtractClosureReferences(js_obj, entry);
    ExtractPropertyReferences(js_obj, entry);
    ExtractElementReferences(js_obj, entry);
    ExtractInternalReferences(js_obj, entry);
    SetPropertyReference(
        obj, entry, HEAP->Proto_symbol(), js_obj->GetPrototype());
    if (obj->IsJSFunction()) {
      JSFunction* js_fun = JSFunction::cast(js_obj);
      Object* proto_or_map = js_fun->prototype_or_initial_map();
      if (!proto_or_map->IsTheHole()) {
        // The slot holds either the function prototype directly or an
        // initial map from which the prototype is reachable.
        if (!proto_or_map->IsMap()) {
          SetPropertyReference(
              obj, entry,
              HEAP->prototype_symbol(), proto_or_map,
              JSFunction::kPrototypeOrInitialMapOffset);
        } else {
          SetPropertyReference(
              obj, entry,
              HEAP->prototype_symbol(), js_fun->prototype());
        }
      }
      SetInternalReference(js_fun, entry,
                           "shared", js_fun->shared(),
                           JSFunction::kSharedFunctionInfoOffset);
      SetInternalReference(js_fun, entry,
                           "context", js_fun->unchecked_context(),
                           JSFunction::kContextOffset);
      SetInternalReference(js_fun, entry,
                           "literals", js_fun->literals(),
                           JSFunction::kLiteralsOffset);
    }
    SetInternalReference(obj, entry,
                         "properties", js_obj->properties(),
                         JSObject::kPropertiesOffset);
    SetInternalReference(obj, entry,
                         "elements", js_obj->elements(),
                         JSObject::kElementsOffset);
    SetInternalReference(obj, entry, "map", obj->map(), HeapObject::kMapOffset);
    IndexedReferencesExtractor refs_extractor(this, obj, entry);
    obj->Iterate(&refs_extractor);
  } else if (obj->IsString()) {
    // Only cons strings have internal object references (their halves).
    if (obj->IsConsString()) {
      ConsString* cs = ConsString::cast(obj);
      SetInternalReference(obj, entry, 1, cs->first());
      SetInternalReference(obj, entry, 2, cs->second());
    }
  } else if (obj->IsMap()) {
    Map* map = Map::cast(obj);
    SetInternalReference(obj, entry,
                         "prototype", map->prototype(), Map::kPrototypeOffset);
    SetInternalReference(obj, entry,
                         "constructor", map->constructor(),
                         Map::kConstructorOffset);
    SetInternalReference(obj, entry,
                         "descriptors", map->instance_descriptors(),
                         Map::kInstanceDescriptorsOffset);
    SetInternalReference(obj, entry,
                         "code_cache", map->code_cache(),
                         Map::kCodeCacheOffset);
    SetInternalReference(obj, entry, "map", obj->map(), HeapObject::kMapOffset);
    IndexedReferencesExtractor refs_extractor(this, obj, entry);
    obj->Iterate(&refs_extractor);
  } else if (obj->IsSharedFunctionInfo()) {
    SharedFunctionInfo* shared = SharedFunctionInfo::cast(obj);
    SetInternalReference(obj, entry,
                         "name", shared->name(),
                         SharedFunctionInfo::kNameOffset);
    SetInternalReference(obj, entry,
                         "code", shared->unchecked_code(),
                         SharedFunctionInfo::kCodeOffset);
    SetInternalReference(obj, entry,
                         "instance_class_name", shared->instance_class_name(),
                         SharedFunctionInfo::kInstanceClassNameOffset);
    SetInternalReference(obj, entry,
                         "script", shared->script(),
                         SharedFunctionInfo::kScriptOffset);
    SetInternalReference(obj, entry, "map", obj->map(), HeapObject::kMapOffset);
    IndexedReferencesExtractor refs_extractor(this, obj, entry);
    obj->Iterate(&refs_extractor);
  } else {
    // All remaining object kinds: just the map plus hidden references.
    SetInternalReference(obj, entry, "map", obj->map(), HeapObject::kMapOffset);
    IndexedReferencesExtractor refs_extractor(this, obj, entry);
    obj->Iterate(&refs_extractor);
  }
}
1899
1900
// Records kContextVariable edges from a function object to the local
// variables captured in its context.
void V8HeapExplorer::ExtractClosureReferences(JSObject* js_obj,
                                              HeapEntry* entry) {
  if (js_obj->IsJSFunction()) {
    HandleScope hs;
    JSFunction* func = JSFunction::cast(js_obj);
    Context* context = func->context();
    ZoneScope zscope(DELETE_ON_EXIT);
    SerializedScopeInfo* serialized_scope_info =
        context->closure()->shared()->scope_info();
    ScopeInfo<ZoneListAllocationPolicy> zone_scope_info(serialized_scope_info);
    int locals_number = zone_scope_info.NumberOfLocals();
    for (int i = 0; i < locals_number; ++i) {
      String* local_name = *zone_scope_info.LocalName(i);
      int idx = serialized_scope_info->ContextSlotIndex(local_name, NULL);
      // Only locals that actually occupy a context slot are recorded;
      // a negative index means the local is not context-allocated.
      if (idx >= 0 && idx < context->length()) {
        SetClosureReference(js_obj, entry, local_name, context->get(idx));
      }
    }
  }
}
1921
1922
// Records references from an object to its named properties, handling
// both fast (descriptor-array) and slow (dictionary) property storage.
void V8HeapExplorer::ExtractPropertyReferences(JSObject* js_obj,
                                               HeapEntry* entry) {
  if (js_obj->HasFastProperties()) {
    DescriptorArray* descs = js_obj->map()->instance_descriptors();
    for (int i = 0; i < descs->number_of_descriptors(); i++) {
      switch (descs->GetType(i)) {
        case FIELD: {
          int index = descs->GetFieldIndex(i);
          if (index < js_obj->map()->inobject_properties()) {
            // In-object field: the exact byte offset is known, so it is
            // passed along to be marked as an already-visited field.
            SetPropertyReference(
                js_obj, entry,
                descs->GetKey(i), js_obj->InObjectPropertyAt(index),
                js_obj->GetInObjectPropertyOffset(index));
          } else {
            // Field stored in the out-of-line properties backing store.
            SetPropertyReference(
                js_obj, entry,
                descs->GetKey(i), js_obj->FastPropertyAt(index));
          }
          break;
        }
        case CONSTANT_FUNCTION:
          SetPropertyReference(
              js_obj, entry,
              descs->GetKey(i), descs->GetConstantFunction(i));
          break;
        default: ;  // Other descriptor kinds carry no extractable reference.
      }
    }
  } else {
    StringDictionary* dictionary = js_obj->property_dictionary();
    int length = dictionary->Capacity();
    for (int i = 0; i < length; ++i) {
      Object* k = dictionary->KeyAt(i);
      if (dictionary->IsKey(k)) {
        Object* target = dictionary->ValueAt(i);
        SetPropertyReference(
            js_obj, entry, String::cast(k), target);
        // We assume that global objects can only have slow properties.
        if (target->IsJSGlobalPropertyCell()) {
          // Also add a shortcut edge straight to the cell's value so the
          // property cell indirection can be bypassed in the graph.
          SetPropertyShortcutReference(js_obj,
                                       entry,
                                       String::cast(k),
                                       JSGlobalPropertyCell::cast(
                                           target)->value());
        }
      }
    }
  }
}
1972
1973
// Records kElement edges from an object to its indexed elements, handling
// both fast (FixedArray) and slow (NumberDictionary) element storage.
void V8HeapExplorer::ExtractElementReferences(JSObject* js_obj,
                                              HeapEntry* entry) {
  if (js_obj->HasFastElements()) {
    FixedArray* elements = FixedArray::cast(js_obj->elements());
    // For JSArrays only elements below the JS-visible length are reported;
    // the backing store may be longer than the array.
    int length = js_obj->IsJSArray() ?
        Smi::cast(JSArray::cast(js_obj)->length())->value() :
        elements->length();
    for (int i = 0; i < length; ++i) {
      if (!elements->get(i)->IsTheHole()) {
        SetElementReference(js_obj, entry, i, elements->get(i));
      }
    }
  } else if (js_obj->HasDictionaryElements()) {
    NumberDictionary* dictionary = js_obj->element_dictionary();
    int length = dictionary->Capacity();
    for (int i = 0; i < length; ++i) {
      Object* k = dictionary->KeyAt(i);
      if (dictionary->IsKey(k)) {
        ASSERT(k->IsNumber());
        uint32_t index = static_cast<uint32_t>(k->Number());
        SetElementReference(js_obj, entry, index, dictionary->ValueAt(i));
      }
    }
  }
}
1999
2000
ExtractInternalReferences(JSObject * js_obj,HeapEntry * entry)2001 void V8HeapExplorer::ExtractInternalReferences(JSObject* js_obj,
2002 HeapEntry* entry) {
2003 int length = js_obj->GetInternalFieldCount();
2004 for (int i = 0; i < length; ++i) {
2005 Object* o = js_obj->GetInternalField(i);
2006 SetInternalReference(
2007 js_obj, entry, i, o, js_obj->GetInternalFieldOffset(i));
2008 }
2009 }
2010
2011
GetEntry(Object * obj)2012 HeapEntry* V8HeapExplorer::GetEntry(Object* obj) {
2013 if (!obj->IsHeapObject()) return NULL;
2014 return filler_->FindOrAddEntry(obj, this);
2015 }
2016
2017
// Visitor that forwards every root pointer encountered during root
// iteration to the explorer as a GC-roots reference.
class RootsReferencesExtractor : public ObjectVisitor {
 public:
  explicit RootsReferencesExtractor(V8HeapExplorer* explorer)
      : explorer_(explorer) {
  }
  void VisitPointers(Object** start, Object** end) {
    for (Object** p = start; p < end; p++) explorer_->SetGcRootsReference(*p);
  }
 private:
  V8HeapExplorer* explorer_;  // Not owned.
};
2029
2030
// Walks all reachable heap objects, extracting their outgoing references
// into |filler|, then reports the GC roots. Returns false if the embedder
// aborted via a progress callback.
bool V8HeapExplorer::IterateAndExtractReferences(
    SnapshotFillerInterface* filler) {
  filler_ = filler;
  HeapIterator iterator(HeapIterator::kFilterUnreachable);
  bool interrupted = false;
  // Heap iteration with filtering must be finished in any case.
  for (HeapObject* obj = iterator.next();
       obj != NULL;
       obj = iterator.next(), progress_->ProgressStep()) {
    // After an interruption the loop keeps draining the iterator but
    // stops extracting references.
    if (!interrupted) {
      ExtractReferences(obj);
      if (!progress_->ProgressReport(false)) interrupted = true;
    }
  }
  if (interrupted) {
    filler_ = NULL;
    return false;
  }
  SetRootGcRootsReference();
  RootsReferencesExtractor extractor(this);
  HEAP->IterateRoots(&extractor, VISIT_ALL);
  filler_ = NULL;
  return progress_->ProgressReport(false);
}
2055
2056
SetClosureReference(HeapObject * parent_obj,HeapEntry * parent_entry,String * reference_name,Object * child_obj)2057 void V8HeapExplorer::SetClosureReference(HeapObject* parent_obj,
2058 HeapEntry* parent_entry,
2059 String* reference_name,
2060 Object* child_obj) {
2061 HeapEntry* child_entry = GetEntry(child_obj);
2062 if (child_entry != NULL) {
2063 filler_->SetNamedReference(HeapGraphEdge::kContextVariable,
2064 parent_obj,
2065 parent_entry,
2066 collection_->names()->GetName(reference_name),
2067 child_obj,
2068 child_entry);
2069 }
2070 }
2071
2072
SetElementReference(HeapObject * parent_obj,HeapEntry * parent_entry,int index,Object * child_obj)2073 void V8HeapExplorer::SetElementReference(HeapObject* parent_obj,
2074 HeapEntry* parent_entry,
2075 int index,
2076 Object* child_obj) {
2077 HeapEntry* child_entry = GetEntry(child_obj);
2078 if (child_entry != NULL) {
2079 filler_->SetIndexedReference(HeapGraphEdge::kElement,
2080 parent_obj,
2081 parent_entry,
2082 index,
2083 child_obj,
2084 child_entry);
2085 }
2086 }
2087
2088
SetInternalReference(HeapObject * parent_obj,HeapEntry * parent_entry,const char * reference_name,Object * child_obj,int field_offset)2089 void V8HeapExplorer::SetInternalReference(HeapObject* parent_obj,
2090 HeapEntry* parent_entry,
2091 const char* reference_name,
2092 Object* child_obj,
2093 int field_offset) {
2094 HeapEntry* child_entry = GetEntry(child_obj);
2095 if (child_entry != NULL) {
2096 filler_->SetNamedReference(HeapGraphEdge::kInternal,
2097 parent_obj,
2098 parent_entry,
2099 reference_name,
2100 child_obj,
2101 child_entry);
2102 IndexedReferencesExtractor::MarkVisitedField(parent_obj, field_offset);
2103 }
2104 }
2105
2106
SetInternalReference(HeapObject * parent_obj,HeapEntry * parent_entry,int index,Object * child_obj,int field_offset)2107 void V8HeapExplorer::SetInternalReference(HeapObject* parent_obj,
2108 HeapEntry* parent_entry,
2109 int index,
2110 Object* child_obj,
2111 int field_offset) {
2112 HeapEntry* child_entry = GetEntry(child_obj);
2113 if (child_entry != NULL) {
2114 filler_->SetNamedReference(HeapGraphEdge::kInternal,
2115 parent_obj,
2116 parent_entry,
2117 collection_->names()->GetName(index),
2118 child_obj,
2119 child_entry);
2120 IndexedReferencesExtractor::MarkVisitedField(parent_obj, field_offset);
2121 }
2122 }
2123
2124
SetHiddenReference(HeapObject * parent_obj,HeapEntry * parent_entry,int index,Object * child_obj)2125 void V8HeapExplorer::SetHiddenReference(HeapObject* parent_obj,
2126 HeapEntry* parent_entry,
2127 int index,
2128 Object* child_obj) {
2129 HeapEntry* child_entry = GetEntry(child_obj);
2130 if (child_entry != NULL) {
2131 filler_->SetIndexedReference(HeapGraphEdge::kHidden,
2132 parent_obj,
2133 parent_entry,
2134 index,
2135 child_obj,
2136 child_entry);
2137 }
2138 }
2139
2140
SetPropertyReference(HeapObject * parent_obj,HeapEntry * parent_entry,String * reference_name,Object * child_obj,int field_offset)2141 void V8HeapExplorer::SetPropertyReference(HeapObject* parent_obj,
2142 HeapEntry* parent_entry,
2143 String* reference_name,
2144 Object* child_obj,
2145 int field_offset) {
2146 HeapEntry* child_entry = GetEntry(child_obj);
2147 if (child_entry != NULL) {
2148 HeapGraphEdge::Type type = reference_name->length() > 0 ?
2149 HeapGraphEdge::kProperty : HeapGraphEdge::kInternal;
2150 filler_->SetNamedReference(type,
2151 parent_obj,
2152 parent_entry,
2153 collection_->names()->GetName(reference_name),
2154 child_obj,
2155 child_entry);
2156 IndexedReferencesExtractor::MarkVisitedField(parent_obj, field_offset);
2157 }
2158 }
2159
2160
SetPropertyShortcutReference(HeapObject * parent_obj,HeapEntry * parent_entry,String * reference_name,Object * child_obj)2161 void V8HeapExplorer::SetPropertyShortcutReference(HeapObject* parent_obj,
2162 HeapEntry* parent_entry,
2163 String* reference_name,
2164 Object* child_obj) {
2165 HeapEntry* child_entry = GetEntry(child_obj);
2166 if (child_entry != NULL) {
2167 filler_->SetNamedReference(HeapGraphEdge::kShortcut,
2168 parent_obj,
2169 parent_entry,
2170 collection_->names()->GetName(reference_name),
2171 child_obj,
2172 child_entry);
2173 }
2174 }
2175
2176
// Links the synthetic GC-roots entry under the snapshot's root entry.
void V8HeapExplorer::SetRootGcRootsReference() {
  filler_->SetIndexedAutoIndexReference(
      HeapGraphEdge::kElement,
      kInternalRootObject, snapshot_->root(),
      kGcRootsObject, snapshot_->gc_roots());
}
2183
2184
// Adds a named auto-indexed kShortcut edge from the snapshot root
// directly to |child_obj|, which is expected to already be (or become)
// a heap entry — GetEntry must not return NULL here.
void V8HeapExplorer::SetRootShortcutReference(Object* child_obj) {
  HeapEntry* child_entry = GetEntry(child_obj);
  ASSERT(child_entry != NULL);
  filler_->SetNamedAutoIndexReference(
      HeapGraphEdge::kShortcut,
      kInternalRootObject, snapshot_->root(),
      child_obj, child_entry);
}
2193
2194
SetGcRootsReference(Object * child_obj)2195 void V8HeapExplorer::SetGcRootsReference(Object* child_obj) {
2196 HeapEntry* child_entry = GetEntry(child_obj);
2197 if (child_entry != NULL) {
2198 filler_->SetIndexedAutoIndexReference(
2199 HeapGraphEdge::kElement,
2200 kGcRootsObject, snapshot_->gc_roots(),
2201 child_obj, child_entry);
2202 }
2203 }
2204
2205
// Visitor over global handles that forwards each wrapper carrying an
// embedder class id to the explorer. Plain pointer visits are not
// expected on the code path that uses this visitor.
class GlobalHandlesExtractor : public ObjectVisitor {
 public:
  explicit GlobalHandlesExtractor(NativeObjectsExplorer* explorer)
      : explorer_(explorer) {}
  virtual ~GlobalHandlesExtractor() {}
  virtual void VisitPointers(Object** start, Object** end) {
    UNREACHABLE();  // Only class-id handles are iterated; see caller.
  }
  virtual void VisitEmbedderReference(Object** p, uint16_t class_id) {
    explorer_->VisitSubtreeWrapper(p, class_id);
  }
 private:
  NativeObjectsExplorer* explorer_;  // Not owned.
};
2220
// Synthetic HeapThing standing for the root of all native (embedder)
// objects; its pointer value encodes the reserved natives-root object id.
HeapThing const NativeObjectsExplorer::kNativesRootObject =
    reinterpret_cast<HeapThing>(
        static_cast<intptr_t>(HeapObjectsMap::kNativesRootObjectId));
2224
2225
// The embedder is queried lazily (and only once) for retained native
// objects; see FillRetainedObjects and the embedder_queried_ flag.
NativeObjectsExplorer::NativeObjectsExplorer(
    HeapSnapshot* snapshot, SnapshottingProgressReportingInterface* progress)
    : snapshot_(snapshot),
      collection_(snapshot_->collection()),
      progress_(progress),
      embedder_queried_(false),
      objects_by_info_(RetainedInfosMatch),
      filler_(NULL) {
}
2235
2236
// Releases the embedder-provided RetainedObjectInfo objects (whose
// ownership was acquired in FillRetainedObjects) and deletes the
// per-info object lists.
NativeObjectsExplorer::~NativeObjectsExplorer() {
  for (HashMap::Entry* p = objects_by_info_.Start();
       p != NULL;
       p = objects_by_info_.Next(p)) {
    v8::RetainedObjectInfo* info =
        reinterpret_cast<v8::RetainedObjectInfo*>(p->key);
    info->Dispose();
    List<HeapObject*>* objects =
        reinterpret_cast<List<HeapObject*>* >(p->value);
    delete objects;
  }
}
2249
2250
// Creates a snapshot entry either for the synthetic natives root or for
// an embedder-described native object (labelled, sized, with an id
// generated from the info object).
HeapEntry* NativeObjectsExplorer::AllocateEntry(
    HeapThing ptr, int children_count, int retainers_count) {
  if (ptr == kNativesRootObject) {
    return snapshot_->AddNativesRootEntry(children_count, retainers_count);
  } else {
    v8::RetainedObjectInfo* info =
        reinterpret_cast<v8::RetainedObjectInfo*>(ptr);
    // -1 means the embedder did not provide the corresponding value.
    intptr_t elements = info->GetElementCount();
    intptr_t size = info->GetSizeInBytes();
    return snapshot_->AddEntry(
        HeapEntry::kNative,
        elements != -1 ?
            collection_->names()->GetFormatted(
                "%s / %" V8_PTR_PREFIX "d entries",
                info->GetLabel(),
                info->GetElementCount()) :
            collection_->names()->GetCopy(info->GetLabel()),
        HeapObjectsMap::GenerateId(info),
        size != -1 ? static_cast<int>(size) : 0,
        children_count,
        retainers_count);
  }
}
2274
2275
AddRootEntries(SnapshotFillerInterface * filler)2276 void NativeObjectsExplorer::AddRootEntries(SnapshotFillerInterface* filler) {
2277 if (EstimateObjectsCount() <= 0) return;
2278 filler->AddEntry(kNativesRootObject, this);
2279 }
2280
2281
// Returns the number of distinct retained-object infos reported by the
// embedder; triggers the (cached) embedder query on first call.
int NativeObjectsExplorer::EstimateObjectsCount() {
  FillRetainedObjects();
  return objects_by_info_.occupancy();
}
2286
2287
// Queries the embedder (at most once) for native objects retained by the
// heap: first objects joined into ObjectGroups, then individual global
// handles that carry a wrapper class id.
void NativeObjectsExplorer::FillRetainedObjects() {
  if (embedder_queried_) return;  // Results are cached across calls.
  Isolate* isolate = Isolate::Current();
  // Record objects that are joined into ObjectGroups.
  isolate->heap()->CallGlobalGCPrologueCallback();
  List<ObjectGroup*>* groups = isolate->global_handles()->object_groups();
  for (int i = 0; i < groups->length(); ++i) {
    ObjectGroup* group = groups->at(i);
    if (group->info_ == NULL) continue;  // Group carries no native info.
    List<HeapObject*>* list = GetListMaybeDisposeInfo(group->info_);
    for (size_t j = 0; j < group->length_; ++j) {
      HeapObject* obj = HeapObject::cast(*group->objects_[j]);
      list->Add(obj);
      in_groups_.Insert(obj);
    }
    group->info_ = NULL;  // Acquire info object ownership.
  }
  isolate->global_handles()->RemoveObjectGroups();
  isolate->heap()->CallGlobalGCEpilogueCallback();
  // Record objects that are not in ObjectGroups, but have class ID.
  GlobalHandlesExtractor extractor(this);
  isolate->global_handles()->IterateAllRootsWithClassIds(&extractor);
  embedder_queried_ = true;
}
2312
2313
GetListMaybeDisposeInfo(v8::RetainedObjectInfo * info)2314 List<HeapObject*>* NativeObjectsExplorer::GetListMaybeDisposeInfo(
2315 v8::RetainedObjectInfo* info) {
2316 HashMap::Entry* entry =
2317 objects_by_info_.Lookup(info, InfoHash(info), true);
2318 if (entry->value != NULL) {
2319 info->Dispose();
2320 } else {
2321 entry->value = new List<HeapObject*>(4);
2322 }
2323 return reinterpret_cast<List<HeapObject*>* >(entry->value);
2324 }
2325
2326
// Emits edges for all embedder-reported native objects: natives root ->
// per-info entries -> wrapped objects, plus wrapper <-> native links.
// Always succeeds (returns true); there is no abort point here.
bool NativeObjectsExplorer::IterateAndExtractReferences(
    SnapshotFillerInterface* filler) {
  if (EstimateObjectsCount() <= 0) return true;  // Nothing native to report.
  filler_ = filler;
  FillRetainedObjects();
  for (HashMap::Entry* p = objects_by_info_.Start();
       p != NULL;
       p = objects_by_info_.Next(p)) {
    v8::RetainedObjectInfo* info =
        reinterpret_cast<v8::RetainedObjectInfo*>(p->key);
    SetNativeRootReference(info);
    List<HeapObject*>* objects =
        reinterpret_cast<List<HeapObject*>* >(p->value);
    for (int i = 0; i < objects->length(); ++i) {
      SetWrapperNativeReferences(objects->at(i), info);
    }
  }
  SetRootNativesRootReference();
  filler_ = NULL;
  return true;
}
2348
2349
// Adds an auto-indexed kElement edge from the natives root to the entry
// representing |info|, creating that entry if necessary.
void NativeObjectsExplorer::SetNativeRootReference(
    v8::RetainedObjectInfo* info) {
  HeapEntry* child_entry = filler_->FindOrAddEntry(info, this);
  ASSERT(child_entry != NULL);
  filler_->SetIndexedAutoIndexReference(
      HeapGraphEdge::kElement,
      kNativesRootObject, snapshot_->natives_root(),
      info, child_entry);
}
2359
2360
// Cross-links a JS wrapper object and its native counterpart: a named
// "native" edge from the wrapper to the info entry, and an auto-indexed
// edge back from the info entry to the wrapper.
void NativeObjectsExplorer::SetWrapperNativeReferences(
    HeapObject* wrapper, v8::RetainedObjectInfo* info) {
  HeapEntry* wrapper_entry = filler_->FindEntry(wrapper);
  ASSERT(wrapper_entry != NULL);
  HeapEntry* info_entry = filler_->FindOrAddEntry(info, this);
  ASSERT(info_entry != NULL);
  filler_->SetNamedReference(HeapGraphEdge::kInternal,
                             wrapper, wrapper_entry,
                             "native",
                             info, info_entry);
  filler_->SetIndexedAutoIndexReference(HeapGraphEdge::kElement,
                                        info, info_entry,
                                        wrapper, wrapper_entry);
}
2375
2376
// Links the synthetic natives root entry under the snapshot's root entry.
void NativeObjectsExplorer::SetRootNativesRootReference() {
  filler_->SetIndexedAutoIndexReference(
      HeapGraphEdge::kElement,
      V8HeapExplorer::kInternalRootObject, snapshot_->root(),
      kNativesRootObject, snapshot_->natives_root());
}
2383
2384
VisitSubtreeWrapper(Object ** p,uint16_t class_id)2385 void NativeObjectsExplorer::VisitSubtreeWrapper(Object** p, uint16_t class_id) {
2386 if (in_groups_.Contains(*p)) return;
2387 Isolate* isolate = Isolate::Current();
2388 v8::RetainedObjectInfo* info =
2389 isolate->heap_profiler()->ExecuteWrapperClassCallback(class_id, p);
2390 if (info == NULL) return;
2391 GetListMaybeDisposeInfo(info)->Add(HeapObject::cast(*p));
2392 }
2393
2394
// Sets up a generator for |snapshot|. |control| may be NULL, in which
// case no progress is reported and generation cannot be aborted (see
// ProgressReport / SetProgressTotal).
HeapSnapshotGenerator::HeapSnapshotGenerator(HeapSnapshot* snapshot,
                                             v8::ActivityControl* control)
    : snapshot_(snapshot),
      control_(control),
      v8_heap_explorer_(snapshot_, this),
      dom_explorer_(snapshot_, this) {
}
2402
2403
// First-pass filler: builds no real entries or edges, only counts them so
// that exact array sizes can be allocated before the snapshot is filled.
class SnapshotCounter : public SnapshotFillerInterface {
 public:
  explicit SnapshotCounter(HeapEntriesMap* entries) : entries_(entries) { }
  HeapEntry* AddEntry(HeapThing ptr, HeapEntriesAllocator* allocator) {
    // Register the thing with a shared placeholder; real entries are
    // allocated later, between the two passes (see GenerateSnapshot).
    entries_->Pair(ptr, allocator, HeapEntriesMap::kHeapEntryPlaceholder);
    return HeapEntriesMap::kHeapEntryPlaceholder;
  }
  HeapEntry* FindEntry(HeapThing ptr) {
    return entries_->Map(ptr);
  }
  HeapEntry* FindOrAddEntry(HeapThing ptr, HeapEntriesAllocator* allocator) {
    HeapEntry* entry = FindEntry(ptr);
    return entry != NULL ? entry : AddEntry(ptr, allocator);
  }
  // All Set*Reference variants just bump the per-pair reference counters.
  void SetIndexedReference(HeapGraphEdge::Type,
                           HeapThing parent_ptr,
                           HeapEntry*,
                           int,
                           HeapThing child_ptr,
                           HeapEntry*) {
    entries_->CountReference(parent_ptr, child_ptr);
  }
  void SetIndexedAutoIndexReference(HeapGraphEdge::Type,
                                    HeapThing parent_ptr,
                                    HeapEntry*,
                                    HeapThing child_ptr,
                                    HeapEntry*) {
    entries_->CountReference(parent_ptr, child_ptr);
  }
  void SetNamedReference(HeapGraphEdge::Type,
                         HeapThing parent_ptr,
                         HeapEntry*,
                         const char*,
                         HeapThing child_ptr,
                         HeapEntry*) {
    entries_->CountReference(parent_ptr, child_ptr);
  }
  void SetNamedAutoIndexReference(HeapGraphEdge::Type,
                                  HeapThing parent_ptr,
                                  HeapEntry*,
                                  HeapThing child_ptr,
                                  HeapEntry*) {
    entries_->CountReference(parent_ptr, child_ptr);
  }
 private:
  HeapEntriesMap* entries_;  // Not owned.
};
2451
2452
// Second-pass filler: all entries already exist by now, so this writes
// real edges into them, using the reference counts gathered during the
// first pass to assign child/retainer slots.
class SnapshotFiller : public SnapshotFillerInterface {
 public:
  explicit SnapshotFiller(HeapSnapshot* snapshot, HeapEntriesMap* entries)
      : snapshot_(snapshot),
        collection_(snapshot->collection()),
        entries_(entries) { }
  HeapEntry* AddEntry(HeapThing ptr, HeapEntriesAllocator* allocator) {
    UNREACHABLE();  // Every entry was added during the counting pass.
    return NULL;
  }
  HeapEntry* FindEntry(HeapThing ptr) {
    return entries_->Map(ptr);
  }
  HeapEntry* FindOrAddEntry(HeapThing ptr, HeapEntriesAllocator* allocator) {
    HeapEntry* entry = FindEntry(ptr);
    return entry != NULL ? entry : AddEntry(ptr, allocator);
  }
  void SetIndexedReference(HeapGraphEdge::Type type,
                           HeapThing parent_ptr,
                           HeapEntry* parent_entry,
                           int index,
                           HeapThing child_ptr,
                           HeapEntry* child_entry) {
    int child_index, retainer_index;
    entries_->CountReference(
        parent_ptr, child_ptr, &child_index, &retainer_index);
    parent_entry->SetIndexedReference(
        type, child_index, index, child_entry, retainer_index);
  }
  void SetIndexedAutoIndexReference(HeapGraphEdge::Type type,
                                    HeapThing parent_ptr,
                                    HeapEntry* parent_entry,
                                    HeapThing child_ptr,
                                    HeapEntry* child_entry) {
    int child_index, retainer_index;
    entries_->CountReference(
        parent_ptr, child_ptr, &child_index, &retainer_index);
    // Auto-generated element indexes are 1-based, hence child_index + 1.
    parent_entry->SetIndexedReference(
        type, child_index, child_index + 1, child_entry, retainer_index);
  }
  void SetNamedReference(HeapGraphEdge::Type type,
                         HeapThing parent_ptr,
                         HeapEntry* parent_entry,
                         const char* reference_name,
                         HeapThing child_ptr,
                         HeapEntry* child_entry) {
    int child_index, retainer_index;
    entries_->CountReference(
        parent_ptr, child_ptr, &child_index, &retainer_index);
    parent_entry->SetNamedReference(
        type, child_index, reference_name, child_entry, retainer_index);
  }
  void SetNamedAutoIndexReference(HeapGraphEdge::Type type,
                                  HeapThing parent_ptr,
                                  HeapEntry* parent_entry,
                                  HeapThing child_ptr,
                                  HeapEntry* child_entry) {
    int child_index, retainer_index;
    entries_->CountReference(
        parent_ptr, child_ptr, &child_index, &retainer_index);
    // Auto-generated names are the 1-based child index, stringified.
    parent_entry->SetNamedReference(type,
                                    child_index,
                                    collection_->names()->GetName(child_index + 1),
                                    child_entry,
                                    retainer_index);
  }
 private:
  HeapSnapshot* snapshot_;
  HeapSnapshotsCollection* collection_;
  HeapEntriesMap* entries_;  // Not owned.
};
2524
2525
// Drives the whole snapshot pipeline: count, allocate, fill, then compute
// dominators and retained sizes. Returns false if the embedder aborted
// through the activity control callback at any stage.
bool HeapSnapshotGenerator::GenerateSnapshot() {
  AssertNoAllocation no_alloc;  // The heap must not change while snapshotting.

  SetProgressTotal(4);  // 2 passes + dominators + sizes.

  // Pass 1. Iterate heap contents to count entries and references.
  if (!CountEntriesAndReferences()) return false;

  // Allocate and fill entries in the snapshot, allocate references.
  snapshot_->AllocateEntries(entries_.entries_count(),
                             entries_.total_children_count(),
                             entries_.total_retainers_count());
  entries_.AllocateEntries();

  // Pass 2. Fill references.
  if (!FillReferences()) return false;

  if (!SetEntriesDominators()) return false;
  if (!ApproximateRetainedSizes()) return false;

  progress_counter_ = progress_total_;
  if (!ProgressReport(true)) return false;
  return true;
}
2550
2551
// Advances the progress counter by one unit of work.
void HeapSnapshotGenerator::ProgressStep() {
  ++progress_counter_;
}
2555
2556
ProgressReport(bool force)2557 bool HeapSnapshotGenerator::ProgressReport(bool force) {
2558 const int kProgressReportGranularity = 10000;
2559 if (control_ != NULL
2560 && (force || progress_counter_ % kProgressReportGranularity == 0)) {
2561 return
2562 control_->ReportProgressValue(progress_counter_, progress_total_) ==
2563 v8::ActivityControl::kContinue;
2564 }
2565 return true;
2566 }
2567
2568
SetProgressTotal(int iterations_count)2569 void HeapSnapshotGenerator::SetProgressTotal(int iterations_count) {
2570 if (control_ == NULL) return;
2571 progress_total_ = (
2572 v8_heap_explorer_.EstimateObjectsCount() +
2573 dom_explorer_.EstimateObjectsCount()) * iterations_count;
2574 progress_counter_ = 0;
2575 }
2576
2577
// Pass 1: run both explorers against a counting filler so the snapshot
// arrays can be sized exactly. Returns false on embedder abort.
bool HeapSnapshotGenerator::CountEntriesAndReferences() {
  SnapshotCounter counter(&entries_);
  v8_heap_explorer_.AddRootEntries(&counter);
  dom_explorer_.AddRootEntries(&counter);
  return
      v8_heap_explorer_.IterateAndExtractReferences(&counter) &&
      dom_explorer_.IterateAndExtractReferences(&counter);
}
2586
2587
// Pass 2: run both explorers again, this time writing real edges into
// the pre-allocated entries. Returns false on embedder abort.
bool HeapSnapshotGenerator::FillReferences() {
  SnapshotFiller filler(snapshot_, &entries_);
  return
      v8_heap_explorer_.IterateAndExtractReferences(&filler) &&
      dom_explorer_.IterateAndExtractReferences(&filler);
}
2594
2595
// Fills |entries| with snapshot nodes in postorder via an iterative DFS
// from the root (so the root ends up at the highest index). Shortcut
// edges are ignored so the ordering reflects real retention. Entries not
// reached from the root are truncated away.
void HeapSnapshotGenerator::FillReversePostorderIndexes(
    Vector<HeapEntry*>* entries) {
  snapshot_->ClearPaint();
  int current_entry = 0;
  List<HeapEntry*> nodes_to_visit;
  nodes_to_visit.Add(snapshot_->root());
  snapshot_->root()->paint_reachable();
  while (!nodes_to_visit.is_empty()) {
    HeapEntry* entry = nodes_to_visit.last();
    Vector<HeapGraphEdge> children = entry->children();
    bool has_new_edges = false;
    for (int i = 0; i < children.length(); ++i) {
      if (children[i].type() == HeapGraphEdge::kShortcut) continue;
      HeapEntry* child = children[i].to();
      // Painting marks a node as scheduled/visited so it is pushed once.
      if (!child->painted_reachable()) {
        nodes_to_visit.Add(child);
        child->paint_reachable();
        has_new_edges = true;
      }
    }
    // A node is emitted only once all of its children have been emitted.
    if (!has_new_edges) {
      entry->set_ordered_index(current_entry);
      (*entries)[current_entry++] = entry;
      nodes_to_visit.RemoveLast();
    }
  }
  entries->Truncate(current_entry);
}
2624
2625
// Finds the nearest common dominator of two entries by walking their
// dominator chains until the "fingers" meet. Relies on ordered_index()
// being the postorder number assigned by FillReversePostorderIndexes:
// a dominator always has a higher index, so the smaller finger advances.
static int Intersect(int i1, int i2, const Vector<HeapEntry*>& dominators) {
  int finger1 = i1, finger2 = i2;
  while (finger1 != finger2) {
    while (finger1 < finger2) finger1 = dominators[finger1]->ordered_index();
    while (finger2 < finger1) finger2 = dominators[finger2]->ordered_index();
  }
  return finger1;
}
2634
2635 // The algorithm is based on the article:
2636 // K. Cooper, T. Harvey and K. Kennedy "A Simple, Fast Dominance Algorithm"
2637 // Softw. Pract. Exper. 4 (2001), pp. 1-10.
// Iteratively computes the immediate dominator of every entry until a
// fixed point is reached. |entries| must be in the order produced by
// FillReversePostorderIndexes (root at the highest index). Returns false
// if the embedder aborted via a progress callback.
bool HeapSnapshotGenerator::BuildDominatorTree(
    const Vector<HeapEntry*>& entries,
    Vector<HeapEntry*>* dominators) {
  if (entries.length() == 0) return true;
  const int entries_length = entries.length(), root_index = entries_length - 1;
  // The root dominates itself; all other dominators start out unknown.
  for (int i = 0; i < root_index; ++i) (*dominators)[i] = NULL;
  (*dominators)[root_index] = entries[root_index];
  int changed = 1;
  const int base_progress_counter = progress_counter_;
  while (changed != 0) {
    changed = 0;
    // Process entries in reverse postorder, i.e. from the root downwards.
    for (int i = root_index - 1; i >= 0; --i) {
      HeapEntry* new_idom = NULL;
      Vector<HeapGraphEdge*> rets = entries[i]->retainers();
      int j = 0;
      // The first already-processed retainer seeds the candidate idom;
      // shortcut edges are excluded from dominance computation.
      for (; j < rets.length(); ++j) {
        if (rets[j]->type() == HeapGraphEdge::kShortcut) continue;
        HeapEntry* ret = rets[j]->From();
        if (dominators->at(ret->ordered_index()) != NULL) {
          new_idom = ret;
          break;
        }
      }
      // Intersect the candidate with every remaining processed retainer.
      for (++j; j < rets.length(); ++j) {
        if (rets[j]->type() == HeapGraphEdge::kShortcut) continue;
        HeapEntry* ret = rets[j]->From();
        if (dominators->at(ret->ordered_index()) != NULL) {
          new_idom = entries[Intersect(ret->ordered_index(),
                                       new_idom->ordered_index(),
                                       *dominators)];
        }
      }
      if (new_idom != NULL && dominators->at(i) != new_idom) {
        (*dominators)[i] = new_idom;
        ++changed;
      }
    }
    // Progress is approximated by how many entries stayed stable.
    int remaining = entries_length - changed;
    if (remaining < 0) remaining = 0;
    progress_counter_ = base_progress_counter + remaining;
    if (!ProgressReport(true)) return false;
  }
  return true;
}
2682
2683
// Computes and stores the dominator of every snapshot entry. Returns
// false if dominator computation was aborted by the embedder.
bool HeapSnapshotGenerator::SetEntriesDominators() {
  // This array is used for maintaining reverse postorder of nodes.
  ScopedVector<HeapEntry*> ordered_entries(snapshot_->entries()->length());
  FillReversePostorderIndexes(&ordered_entries);
  ScopedVector<HeapEntry*> dominators(ordered_entries.length());
  if (!BuildDominatorTree(ordered_entries, &dominators)) return false;
  for (int i = 0; i < ordered_entries.length(); ++i) {
    ASSERT(dominators[i] != NULL);
    ordered_entries[i]->set_dominator(dominators[i]);
  }
  return true;
}
2696
2697
// Computes approximate retained sizes: each entry starts at its self size
// and then contributes that size to every node on its dominator chain.
// Returns false if the embedder aborted via a progress callback.
bool HeapSnapshotGenerator::ApproximateRetainedSizes() {
  // As for the dominators tree we only know parent nodes, not
  // children, to sum up total sizes we "bubble" node's self size
  // adding it to all of its parents.
  for (int i = 0; i < snapshot_->entries()->length(); ++i) {
    HeapEntry* entry = snapshot_->entries()->at(i);
    entry->set_retained_size(entry->self_size());
  }
  for (int i = 0;
       i < snapshot_->entries()->length();
       ++i, ProgressStep()) {
    HeapEntry* entry = snapshot_->entries()->at(i);
    int entry_size = entry->self_size();
    // Walk up the dominator chain; the loop stops at the root, which is
    // its own dominator. Note that |entry| is reused as the cursor here.
    for (HeapEntry* dominator = entry->dominator();
         dominator != entry;
         entry = dominator, dominator = entry->dominator()) {
      dominator->add_retained_size(entry_size);
    }
    if (!ProgressReport()) return false;
  }
  return true;
}
2720
2721
// Buffers serialized output and forwards it to a v8::OutputStream in
// fixed-size chunks. Once the stream returns kAbort, all further output
// is silently dropped and aborted() reports true.
class OutputStreamWriter {
 public:
  explicit OutputStreamWriter(v8::OutputStream* stream)
      : stream_(stream),
        chunk_size_(stream->GetChunkSize()),
        chunk_(chunk_size_),
        chunk_pos_(0),
        aborted_(false) {
    ASSERT(chunk_size_ > 0);
  }
  // True once the underlying stream has requested an abort.
  bool aborted() { return aborted_; }
  // Appends a single character; '\0' is not allowed (output is
  // treated as NUL-free ASCII text).
  void AddCharacter(char c) {
    ASSERT(c != '\0');
    ASSERT(chunk_pos_ < chunk_size_);
    chunk_[chunk_pos_++] = c;
    MaybeWriteChunk();
  }
  // Appends a NUL-terminated string.
  void AddString(const char* s) {
    AddSubstring(s, StrLength(s));
  }
  // Appends the first n characters of s, flushing full chunks as it goes.
  void AddSubstring(const char* s, int n) {
    if (n <= 0) return;
    ASSERT(static_cast<size_t>(n) <= strlen(s));
    const char* s_end = s + n;
    while (s < s_end) {
      // Copy as much as fits into the current chunk.
      int s_chunk_size = Min(
          chunk_size_ - chunk_pos_, static_cast<int>(s_end - s));
      ASSERT(s_chunk_size > 0);
      memcpy(chunk_.start() + chunk_pos_, s, s_chunk_size);
      s += s_chunk_size;
      chunk_pos_ += s_chunk_size;
      MaybeWriteChunk();
    }
  }
  void AddNumber(int n) { AddNumberImpl<int>(n, "%d"); }
  void AddNumber(unsigned n) { AddNumberImpl<unsigned>(n, "%u"); }
  void AddNumber(uint64_t n) { AddNumberImpl<uint64_t>(n, "%llu"); }
  // Flushes any buffered remainder and signals end-of-stream. No output
  // methods may be called afterwards.
  void Finalize() {
    if (aborted_) return;
    // MaybeWriteChunk() flushes whenever the chunk becomes exactly full,
    // so chunk_pos_ is always strictly below chunk_size_ here.
    ASSERT(chunk_pos_ < chunk_size_);
    if (chunk_pos_ != 0) {
      WriteChunk();
    }
    stream_->EndOfStream();
  }

 private:
  // Formats n with the given printf-style format and appends the result.
  template<typename T>
  void AddNumberImpl(T n, const char* format) {
    ScopedVector<char> buffer(32);  // Enough for a 64-bit decimal + NUL.
    int result = OS::SNPrintF(buffer, format, n);
    USE(result);
    ASSERT(result != -1);
    AddString(buffer.start());
  }
  // Flushes the buffer iff it is exactly full, resetting the position.
  void MaybeWriteChunk() {
    ASSERT(chunk_pos_ <= chunk_size_);
    if (chunk_pos_ == chunk_size_) {
      WriteChunk();
      chunk_pos_ = 0;
    }
  }
  // Hands the buffered bytes to the stream; latches abort on kAbort.
  void WriteChunk() {
    if (aborted_) return;
    if (stream_->WriteAsciiChunk(chunk_.start(), chunk_pos_) ==
        v8::OutputStream::kAbort) aborted_ = true;
  }

  v8::OutputStream* stream_;  // Not owned; never deleted here.
  int chunk_size_;
  ScopedVector<char> chunk_;
  int chunk_pos_;  // Next free index in chunk_; invariant: <= chunk_size_.
  bool aborted_;
};
2796
// Writes the whole snapshot as JSON to the given stream. May be called
// only once per serializer instance (writer_ must still be NULL).
void HeapSnapshotJSONSerializer::Serialize(v8::OutputStream* stream) {
  ASSERT(writer_ == NULL);
  writer_ = new OutputStreamWriter(stream);

  // Since nodes graph is cyclic, we need the first pass to enumerate
  // them. Strings can be serialized in one pass.
  EnumerateNodes();
  SerializeImpl();

  // The writer is only alive for the duration of one serialization.
  delete writer_;
  writer_ = NULL;
}
2809
2810
SerializeImpl()2811 void HeapSnapshotJSONSerializer::SerializeImpl() {
2812 writer_->AddCharacter('{');
2813 writer_->AddString("\"snapshot\":{");
2814 SerializeSnapshot();
2815 if (writer_->aborted()) return;
2816 writer_->AddString("},\n");
2817 writer_->AddString("\"nodes\":[");
2818 SerializeNodes();
2819 if (writer_->aborted()) return;
2820 writer_->AddString("],\n");
2821 writer_->AddString("\"strings\":[");
2822 SerializeStrings();
2823 if (writer_->aborted()) return;
2824 writer_->AddCharacter(']');
2825 writer_->AddCharacter('}');
2826 writer_->Finalize();
2827 }
2828
2829
2830 class HeapSnapshotJSONSerializerEnumerator {
2831 public:
HeapSnapshotJSONSerializerEnumerator(HeapSnapshotJSONSerializer * s)2832 explicit HeapSnapshotJSONSerializerEnumerator(HeapSnapshotJSONSerializer* s)
2833 : s_(s) {
2834 }
Apply(HeapEntry ** entry)2835 void Apply(HeapEntry** entry) {
2836 s_->GetNodeId(*entry);
2837 }
2838 private:
2839 HeapSnapshotJSONSerializer* s_;
2840 };
2841
EnumerateNodes()2842 void HeapSnapshotJSONSerializer::EnumerateNodes() {
2843 GetNodeId(snapshot_->root()); // Make sure root gets the first id.
2844 HeapSnapshotJSONSerializerEnumerator iter(this);
2845 snapshot_->IterateEntries(&iter);
2846 }
2847
2848
GetNodeId(HeapEntry * entry)2849 int HeapSnapshotJSONSerializer::GetNodeId(HeapEntry* entry) {
2850 HashMap::Entry* cache_entry = nodes_.Lookup(entry, ObjectHash(entry), true);
2851 if (cache_entry->value == NULL) {
2852 cache_entry->value = reinterpret_cast<void*>(next_node_id_++);
2853 }
2854 return static_cast<int>(reinterpret_cast<intptr_t>(cache_entry->value));
2855 }
2856
2857
GetStringId(const char * s)2858 int HeapSnapshotJSONSerializer::GetStringId(const char* s) {
2859 HashMap::Entry* cache_entry = strings_.Lookup(
2860 const_cast<char*>(s), ObjectHash(s), true);
2861 if (cache_entry->value == NULL) {
2862 cache_entry->value = reinterpret_cast<void*>(next_string_id_++);
2863 }
2864 return static_cast<int>(reinterpret_cast<intptr_t>(cache_entry->value));
2865 }
2866
2867
SerializeEdge(HeapGraphEdge * edge)2868 void HeapSnapshotJSONSerializer::SerializeEdge(HeapGraphEdge* edge) {
2869 writer_->AddCharacter(',');
2870 writer_->AddNumber(edge->type());
2871 writer_->AddCharacter(',');
2872 if (edge->type() == HeapGraphEdge::kElement
2873 || edge->type() == HeapGraphEdge::kHidden) {
2874 writer_->AddNumber(edge->index());
2875 } else {
2876 writer_->AddNumber(GetStringId(edge->name()));
2877 }
2878 writer_->AddCharacter(',');
2879 writer_->AddNumber(GetNodeId(edge->to()));
2880 }
2881
2882
SerializeNode(HeapEntry * entry)2883 void HeapSnapshotJSONSerializer::SerializeNode(HeapEntry* entry) {
2884 writer_->AddCharacter('\n');
2885 writer_->AddCharacter(',');
2886 writer_->AddNumber(entry->type());
2887 writer_->AddCharacter(',');
2888 writer_->AddNumber(GetStringId(entry->name()));
2889 writer_->AddCharacter(',');
2890 writer_->AddNumber(entry->id());
2891 writer_->AddCharacter(',');
2892 writer_->AddNumber(entry->self_size());
2893 writer_->AddCharacter(',');
2894 writer_->AddNumber(entry->RetainedSize(false));
2895 writer_->AddCharacter(',');
2896 writer_->AddNumber(GetNodeId(entry->dominator()));
2897 Vector<HeapGraphEdge> children = entry->children();
2898 writer_->AddCharacter(',');
2899 writer_->AddNumber(children.length());
2900 for (int i = 0; i < children.length(); ++i) {
2901 SerializeEdge(&children[i]);
2902 if (writer_->aborted()) return;
2903 }
2904 }
2905
2906
// Writes the "nodes" array: element zero is a meta-object describing the
// layout of node and edge records, followed by all node records in id
// order (ids are rewritten below to array positions first).
void HeapSnapshotJSONSerializer::SerializeNodes() {
  // The first (zero) item of nodes array is an object describing node
  // serialization layout. We use a set of macros to improve
  // readability.
#define JSON_A(s) "["s"]"
#define JSON_O(s) "{"s"}"
#define JSON_S(s) "\""s"\""
  writer_->AddString(JSON_O(
      JSON_S("fields") ":" JSON_A(
          JSON_S("type")
          "," JSON_S("name")
          "," JSON_S("id")
          "," JSON_S("self_size")
          "," JSON_S("retained_size")
          "," JSON_S("dominator")
          "," JSON_S("children_count")
          "," JSON_S("children"))
      "," JSON_S("types") ":" JSON_A(
          JSON_A(
              JSON_S("hidden")
              "," JSON_S("array")
              "," JSON_S("string")
              "," JSON_S("object")
              "," JSON_S("code")
              "," JSON_S("closure")
              "," JSON_S("regexp")
              "," JSON_S("number")
              "," JSON_S("native"))
          "," JSON_S("string")
          "," JSON_S("number")
          "," JSON_S("number")
          "," JSON_S("number")
          "," JSON_S("number")
          "," JSON_S("number")
          "," JSON_O(
              JSON_S("fields") ":" JSON_A(
                  JSON_S("type")
                  "," JSON_S("name_or_index")
                  "," JSON_S("to_node"))
              "," JSON_S("types") ":" JSON_A(
                  JSON_A(
                      JSON_S("context")
                      "," JSON_S("element")
                      "," JSON_S("property")
                      "," JSON_S("internal")
                      "," JSON_S("hidden")
                      "," JSON_S("shortcut"))
                  "," JSON_S("string_or_number")
                  "," JSON_S("node"))))));
#undef JSON_S
#undef JSON_O
#undef JSON_A

  const int node_fields_count = 7;
  // type,name,id,self_size,retained_size,dominator,children_count.
  const int edge_fields_count = 3;  // type,name|index,to_node.
  List<HashMap::Entry*> sorted_nodes;
  SortHashMap(&nodes_, &sorted_nodes);
  // Rewrite node ids, so they refer to actual array positions.
  // A node occupies node_fields_count slots plus edge_fields_count per
  // child edge, so position(i) = position(i-1) + record_size(i-1).
  if (sorted_nodes.length() > 1) {
    // Nodes start from array index 1.
    int prev_value = 1;
    sorted_nodes[0]->value = reinterpret_cast<void*>(prev_value);
    for (int i = 1; i < sorted_nodes.length(); ++i) {
      HeapEntry* prev_heap_entry =
          reinterpret_cast<HeapEntry*>(sorted_nodes[i-1]->key);
      prev_value += node_fields_count +
          prev_heap_entry->children().length() * edge_fields_count;
      sorted_nodes[i]->value = reinterpret_cast<void*>(prev_value);
    }
  }
  // Emit the records in (rewritten) id order; abort propagates up.
  for (int i = 0; i < sorted_nodes.length(); ++i) {
    SerializeNode(reinterpret_cast<HeapEntry*>(sorted_nodes[i]->key));
    if (writer_->aborted()) return;
  }
}
2983
2984
SerializeSnapshot()2985 void HeapSnapshotJSONSerializer::SerializeSnapshot() {
2986 writer_->AddString("\"title\":\"");
2987 writer_->AddString(snapshot_->title());
2988 writer_->AddString("\"");
2989 writer_->AddString(",\"uid\":");
2990 writer_->AddNumber(snapshot_->uid());
2991 }
2992
2993
WriteUChar(OutputStreamWriter * w,unibrow::uchar u)2994 static void WriteUChar(OutputStreamWriter* w, unibrow::uchar u) {
2995 static const char hex_chars[] = "0123456789ABCDEF";
2996 w->AddString("\\u");
2997 w->AddCharacter(hex_chars[(u >> 12) & 0xf]);
2998 w->AddCharacter(hex_chars[(u >> 8) & 0xf]);
2999 w->AddCharacter(hex_chars[(u >> 4) & 0xf]);
3000 w->AddCharacter(hex_chars[u & 0xf]);
3001 }
3002
// Writes s (NUL-terminated, UTF-8) as a JSON string literal on its own
// line: control characters get escape sequences, printable ASCII passes
// through, and multi-byte UTF-8 sequences become \uXXXX escapes.
void HeapSnapshotJSONSerializer::SerializeString(const unsigned char* s) {
  writer_->AddCharacter('\n');
  writer_->AddCharacter('\"');
  for ( ; *s != '\0'; ++s) {
    switch (*s) {
      case '\b':
        writer_->AddString("\\b");
        continue;
      case '\f':
        writer_->AddString("\\f");
        continue;
      case '\n':
        writer_->AddString("\\n");
        continue;
      case '\r':
        writer_->AddString("\\r");
        continue;
      case '\t':
        writer_->AddString("\\t");
        continue;
      case '\"':
      case '\\':
        // Quote and backslash get a simple backslash escape.
        writer_->AddCharacter('\\');
        writer_->AddCharacter(*s);
        continue;
      default:
        if (*s > 31 && *s < 128) {
          // Printable ASCII passes through verbatim.
          writer_->AddCharacter(*s);
        } else if (*s <= 31) {
          // Special character with no dedicated literal.
          WriteUChar(writer_, *s);
        } else {
          // Convert UTF-8 into \u UTF-16 literal.
          unsigned length = 1, cursor = 0;
          // Extent of the multi-byte sequence: at most 4 bytes, bounded
          // by the terminating NUL.
          for ( ; length <= 4 && *(s + length) != '\0'; ++length) { }
          unibrow::uchar c = unibrow::Utf8::CalculateValue(s, length, &cursor);
          if (c != unibrow::Utf8::kBadChar) {
            WriteUChar(writer_, c);
            // Skip the bytes the decoder consumed; the loop's ++s
            // already accounts for one of them.
            ASSERT(cursor != 0);
            s += cursor - 1;
          } else {
            // Undecodable byte sequence: emit a placeholder instead.
            writer_->AddCharacter('?');
          }
        }
    }
  }
  writer_->AddCharacter('\"');
}
3051
3052
SerializeStrings()3053 void HeapSnapshotJSONSerializer::SerializeStrings() {
3054 List<HashMap::Entry*> sorted_strings;
3055 SortHashMap(&strings_, &sorted_strings);
3056 writer_->AddString("\"<dummy>\"");
3057 for (int i = 0; i < sorted_strings.length(); ++i) {
3058 writer_->AddCharacter(',');
3059 SerializeString(
3060 reinterpret_cast<const unsigned char*>(sorted_strings[i]->key));
3061 if (writer_->aborted()) return;
3062 }
3063 }
3064
3065
// Three-way comparator for hash map entry pointers: orders entries by
// the integral value stored in their value slot, ascending.
template<typename T>
inline static int SortUsingEntryValue(const T* x, const T* y) {
  const uintptr_t lhs = reinterpret_cast<uintptr_t>((*x)->value);
  const uintptr_t rhs = reinterpret_cast<uintptr_t>((*y)->value);
  if (lhs < rhs) return -1;
  return lhs > rhs ? 1 : 0;
}
3078
3079
SortHashMap(HashMap * map,List<HashMap::Entry * > * sorted_entries)3080 void HeapSnapshotJSONSerializer::SortHashMap(
3081 HashMap* map, List<HashMap::Entry*>* sorted_entries) {
3082 for (HashMap::Entry* p = map->Start(); p != NULL; p = map->Next(p))
3083 sorted_entries->Add(p);
3084 sorted_entries->Sort(SortUsingEntryValue);
3085 }
3086
3087
GetConstructorNameForHeapProfile(JSObject * object)3088 String* GetConstructorNameForHeapProfile(JSObject* object) {
3089 if (object->IsJSFunction()) return HEAP->closure_symbol();
3090 return object->constructor_name();
3091 }
3092
3093 } } // namespace v8::internal
3094
3095 #endif // ENABLE_LOGGING_AND_PROFILING
3096