1 // Copyright 2015 the V8 project authors. All rights reserved.
2 //
3 // Use of this source code is governed by a BSD-style license that can be
4 // found in the LICENSE file.
5
6 #include "src/heap/object-stats.h"
7
8 #include <unordered_set>
9
10 #include "src/base/bits.h"
11 #include "src/codegen/assembler-inl.h"
12 #include "src/codegen/compilation-cache.h"
13 #include "src/common/globals.h"
14 #include "src/execution/isolate.h"
15 #include "src/heap/combined-heap.h"
16 #include "src/heap/heap-inl.h"
17 #include "src/heap/mark-compact.h"
18 #include "src/logging/counters.h"
19 #include "src/objects/compilation-cache-table-inl.h"
20 #include "src/objects/heap-object.h"
21 #include "src/objects/js-array-inl.h"
22 #include "src/objects/js-collection-inl.h"
23 #include "src/objects/literal-objects-inl.h"
24 #include "src/objects/slots.h"
25 #include "src/objects/templates.h"
26 #include "src/utils/memcopy.h"
27 #include "src/utils/ostreams.h"
28
29 namespace v8 {
30 namespace internal {
31
// Serializes ObjectStats::CheckpointObjectStats() across threads/isolates.
static base::LazyMutex object_stats_mutex = LAZY_MUTEX_INITIALIZER;
33
34 class FieldStatsCollector : public ObjectVisitor {
35 public:
FieldStatsCollector(size_t * tagged_fields_count,size_t * embedder_fields_count,size_t * inobject_smi_fields_count,size_t * unboxed_double_fields_count,size_t * boxed_double_fields_count,size_t * string_data_count,size_t * raw_fields_count)36 FieldStatsCollector(size_t* tagged_fields_count,
37 size_t* embedder_fields_count,
38 size_t* inobject_smi_fields_count,
39 size_t* unboxed_double_fields_count,
40 size_t* boxed_double_fields_count,
41 size_t* string_data_count, size_t* raw_fields_count)
42 : tagged_fields_count_(tagged_fields_count),
43 embedder_fields_count_(embedder_fields_count),
44 inobject_smi_fields_count_(inobject_smi_fields_count),
45 unboxed_double_fields_count_(unboxed_double_fields_count),
46 boxed_double_fields_count_(boxed_double_fields_count),
47 string_data_count_(string_data_count),
48 raw_fields_count_(raw_fields_count) {}
49
RecordStats(HeapObject host)50 void RecordStats(HeapObject host) {
51 size_t old_pointer_fields_count = *tagged_fields_count_;
52 host.Iterate(this);
53 size_t tagged_fields_count_in_object =
54 *tagged_fields_count_ - old_pointer_fields_count;
55
56 int object_size_in_words = host.Size() / kTaggedSize;
57 DCHECK_LE(tagged_fields_count_in_object, object_size_in_words);
58 size_t raw_fields_count_in_object =
59 object_size_in_words - tagged_fields_count_in_object;
60
61 if (host.IsJSObject()) {
62 JSObjectFieldStats field_stats = GetInobjectFieldStats(host.map());
63 // Embedder fields are already included into pointer words.
64 DCHECK_LE(field_stats.embedded_fields_count_,
65 tagged_fields_count_in_object);
66 tagged_fields_count_in_object -= field_stats.embedded_fields_count_;
67 *tagged_fields_count_ -= field_stats.embedded_fields_count_;
68 *embedder_fields_count_ += field_stats.embedded_fields_count_;
69
70 // Smi fields are also included into pointer words.
71 DCHECK_LE(
72 field_stats.unboxed_double_fields_count_ * kDoubleSize / kTaggedSize,
73 raw_fields_count_in_object);
74 tagged_fields_count_in_object -= field_stats.smi_fields_count_;
75 *tagged_fields_count_ -= field_stats.smi_fields_count_;
76 *inobject_smi_fields_count_ += field_stats.smi_fields_count_;
77
78 // The rest are data words.
79 DCHECK_LE(
80 field_stats.unboxed_double_fields_count_ * kDoubleSize / kTaggedSize,
81 raw_fields_count_in_object);
82 raw_fields_count_in_object -=
83 field_stats.unboxed_double_fields_count_ * kDoubleSize / kTaggedSize;
84 *unboxed_double_fields_count_ += field_stats.unboxed_double_fields_count_;
85 } else if (host.IsHeapNumber()) {
86 DCHECK_LE(kDoubleSize / kTaggedSize, raw_fields_count_in_object);
87 raw_fields_count_in_object -= kDoubleSize / kTaggedSize;
88 *boxed_double_fields_count_ += 1;
89 } else if (host.IsSeqString()) {
90 int string_data = SeqString::cast(host).synchronized_length() *
91 (String::cast(host).IsOneByteRepresentation() ? 1 : 2) /
92 kTaggedSize;
93 DCHECK_LE(string_data, raw_fields_count_in_object);
94 raw_fields_count_in_object -= string_data;
95 *string_data_count_ += string_data;
96 }
97 *raw_fields_count_ += raw_fields_count_in_object;
98 }
99
VisitPointers(HeapObject host,ObjectSlot start,ObjectSlot end)100 void VisitPointers(HeapObject host, ObjectSlot start,
101 ObjectSlot end) override {
102 *tagged_fields_count_ += (end - start);
103 }
VisitPointers(HeapObject host,MaybeObjectSlot start,MaybeObjectSlot end)104 void VisitPointers(HeapObject host, MaybeObjectSlot start,
105 MaybeObjectSlot end) override {
106 *tagged_fields_count_ += (end - start);
107 }
108
VisitCodeTarget(Code host,RelocInfo * rinfo)109 void VisitCodeTarget(Code host, RelocInfo* rinfo) override {
110 // Code target is most likely encoded as a relative 32-bit offset and not
111 // as a full tagged value, so there's nothing to count.
112 }
113
VisitEmbeddedPointer(Code host,RelocInfo * rinfo)114 void VisitEmbeddedPointer(Code host, RelocInfo* rinfo) override {
115 *tagged_fields_count_ += 1;
116 }
117
118 private:
119 struct JSObjectFieldStats {
JSObjectFieldStatsv8::internal::FieldStatsCollector::JSObjectFieldStats120 JSObjectFieldStats()
121 : embedded_fields_count_(0),
122 smi_fields_count_(0),
123 unboxed_double_fields_count_(0) {}
124
125 unsigned embedded_fields_count_ : kDescriptorIndexBitCount;
126 unsigned smi_fields_count_ : kDescriptorIndexBitCount;
127 unsigned unboxed_double_fields_count_ : kDescriptorIndexBitCount;
128 };
129 std::unordered_map<Map, JSObjectFieldStats, Object::Hasher>
130 field_stats_cache_;
131
132 JSObjectFieldStats GetInobjectFieldStats(Map map);
133
134 size_t* const tagged_fields_count_;
135 size_t* const embedder_fields_count_;
136 size_t* const inobject_smi_fields_count_;
137 size_t* const unboxed_double_fields_count_;
138 size_t* const boxed_double_fields_count_;
139 size_t* const string_data_count_;
140 size_t* const raw_fields_count_;
141 };
142
143 FieldStatsCollector::JSObjectFieldStats
GetInobjectFieldStats(Map map)144 FieldStatsCollector::GetInobjectFieldStats(Map map) {
145 auto iter = field_stats_cache_.find(map);
146 if (iter != field_stats_cache_.end()) {
147 return iter->second;
148 }
149 // Iterate descriptor array and calculate stats.
150 JSObjectFieldStats stats;
151 stats.embedded_fields_count_ = JSObject::GetEmbedderFieldCount(map);
152 if (!map.is_dictionary_map()) {
153 DescriptorArray descriptors = map.instance_descriptors(kRelaxedLoad);
154 for (InternalIndex descriptor : map.IterateOwnDescriptors()) {
155 PropertyDetails details = descriptors.GetDetails(descriptor);
156 if (details.location() == kField) {
157 FieldIndex index = FieldIndex::ForDescriptor(map, descriptor);
158 // Stop on first out-of-object field.
159 if (!index.is_inobject()) break;
160 if (details.representation().IsDouble() &&
161 map.IsUnboxedDoubleField(index)) {
162 ++stats.unboxed_double_fields_count_;
163 }
164 if (details.representation().IsSmi()) {
165 ++stats.smi_fields_count_;
166 }
167 }
168 }
169 }
170 field_stats_cache_.insert(std::make_pair(map, stats));
171 return stats;
172 }
173
ClearObjectStats(bool clear_last_time_stats)174 void ObjectStats::ClearObjectStats(bool clear_last_time_stats) {
175 memset(object_counts_, 0, sizeof(object_counts_));
176 memset(object_sizes_, 0, sizeof(object_sizes_));
177 memset(over_allocated_, 0, sizeof(over_allocated_));
178 memset(size_histogram_, 0, sizeof(size_histogram_));
179 memset(over_allocated_histogram_, 0, sizeof(over_allocated_histogram_));
180 if (clear_last_time_stats) {
181 memset(object_counts_last_time_, 0, sizeof(object_counts_last_time_));
182 memset(object_sizes_last_time_, 0, sizeof(object_sizes_last_time_));
183 }
184 tagged_fields_count_ = 0;
185 embedder_fields_count_ = 0;
186 inobject_smi_fields_count_ = 0;
187 unboxed_double_fields_count_ = 0;
188 boxed_double_fields_count_ = 0;
189 string_data_count_ = 0;
190 raw_fields_count_ = 0;
191 }
192
193 // Tell the compiler to never inline this: occasionally, the optimizer will
194 // decide to inline this and unroll the loop, making the compiled code more than
195 // 100KB larger.
PrintJSONArray(size_t * array,const int len)196 V8_NOINLINE static void PrintJSONArray(size_t* array, const int len) {
197 PrintF("[ ");
198 for (int i = 0; i < len; i++) {
199 PrintF("%zu", array[i]);
200 if (i != (len - 1)) PrintF(", ");
201 }
202 PrintF(" ]");
203 }
204
// Streams |array| (length |len|) into |stream| via the project's generic
// collection printer. V8_NOINLINE for the same code-size reason as
// PrintJSONArray.
V8_NOINLINE static void DumpJSONArray(std::stringstream& stream, size_t* array,
                                      const int len) {
  stream << PrintCollection(Vector<size_t>(array, len));
}
209
// Prints the JSON prefix shared by all object-stats records: the isolate
// address (a stable id for this process), the GC count as record id, and the
// caller-provided key. Callers emit the surrounding "{ " / " }" themselves.
void ObjectStats::PrintKeyAndId(const char* key, int gc_count) {
  PrintF("\"isolate\": \"%p\", \"id\": %d, \"key\": \"%s\", ",
         reinterpret_cast<void*>(isolate()), gc_count, key);
}
214
PrintInstanceTypeJSON(const char * key,int gc_count,const char * name,int index)215 void ObjectStats::PrintInstanceTypeJSON(const char* key, int gc_count,
216 const char* name, int index) {
217 PrintF("{ ");
218 PrintKeyAndId(key, gc_count);
219 PrintF("\"type\": \"instance_type_data\", ");
220 PrintF("\"instance_type\": %d, ", index);
221 PrintF("\"instance_type_name\": \"%s\", ", name);
222 PrintF("\"overall\": %zu, ", object_sizes_[index]);
223 PrintF("\"count\": %zu, ", object_counts_[index]);
224 PrintF("\"over_allocated\": %zu, ", over_allocated_[index]);
225 PrintF("\"histogram\": ");
226 PrintJSONArray(size_histogram_[index], kNumberOfBuckets);
227 PrintF(",");
228 PrintF("\"over_allocated_histogram\": ");
229 PrintJSONArray(over_allocated_histogram_[index], kNumberOfBuckets);
230 PrintF(" }\n");
231 }
232
// Dumps all collected statistics to stdout as one JSON object per line:
// a gc_descriptor record, a field_data record, the histogram bucket sizes,
// and one instance_type_data record per regular and virtual instance type.
void ObjectStats::PrintJSON(const char* key) {
  double time = isolate()->time_millis_since_init();
  int gc_count = heap()->gc_count();

  // gc_descriptor
  PrintF("{ ");
  PrintKeyAndId(key, gc_count);
  PrintF("\"type\": \"gc_descriptor\", \"time\": %f }\n", time);
  // field_data: counters are in field units; convert to bytes here.
  PrintF("{ ");
  PrintKeyAndId(key, gc_count);
  PrintF("\"type\": \"field_data\"");
  PrintF(", \"tagged_fields\": %zu", tagged_fields_count_ * kTaggedSize);
  PrintF(", \"embedder_fields\": %zu",
         embedder_fields_count_ * kEmbedderDataSlotSize);
  PrintF(", \"inobject_smi_fields\": %zu",
         inobject_smi_fields_count_ * kTaggedSize);
  PrintF(", \"unboxed_double_fields\": %zu",
         unboxed_double_fields_count_ * kDoubleSize);
  PrintF(", \"boxed_double_fields\": %zu",
         boxed_double_fields_count_ * kDoubleSize);
  PrintF(", \"string_data\": %zu", string_data_count_ * kTaggedSize);
  PrintF(", \"other_raw_fields\": %zu", raw_fields_count_ * kSystemPointerSize);
  PrintF(" }\n");
  // bucket_sizes: upper byte bounds of the histogram buckets (powers of two).
  PrintF("{ ");
  PrintKeyAndId(key, gc_count);
  PrintF("\"type\": \"bucket_sizes\", \"sizes\": [ ");
  for (int i = 0; i < kNumberOfBuckets; i++) {
    PrintF("%d", 1 << (kFirstBucketShift + i));
    if (i != (kNumberOfBuckets - 1)) PrintF(", ");
  }
  PrintF(" ] }\n");

// One instance_type_data record per regular instance type...
#define INSTANCE_TYPE_WRAPPER(name) \
  PrintInstanceTypeJSON(key, gc_count, #name, name);

// ...and per virtual type, offset past the regular type range.
#define VIRTUAL_INSTANCE_TYPE_WRAPPER(name) \
  PrintInstanceTypeJSON(key, gc_count, #name, FIRST_VIRTUAL_TYPE + name);

  INSTANCE_TYPE_LIST(INSTANCE_TYPE_WRAPPER)
  VIRTUAL_INSTANCE_TYPE_LIST(VIRTUAL_INSTANCE_TYPE_WRAPPER)

#undef INSTANCE_TYPE_WRAPPER
#undef VIRTUAL_INSTANCE_TYPE_WRAPPER
}
279
DumpInstanceTypeData(std::stringstream & stream,const char * name,int index)280 void ObjectStats::DumpInstanceTypeData(std::stringstream& stream,
281 const char* name, int index) {
282 stream << "\"" << name << "\":{";
283 stream << "\"type\":" << static_cast<int>(index) << ",";
284 stream << "\"overall\":" << object_sizes_[index] << ",";
285 stream << "\"count\":" << object_counts_[index] << ",";
286 stream << "\"over_allocated\":" << over_allocated_[index] << ",";
287 stream << "\"histogram\":";
288 DumpJSONArray(stream, size_histogram_[index], kNumberOfBuckets);
289 stream << ",\"over_allocated_histogram\":";
290 DumpJSONArray(stream, over_allocated_histogram_[index], kNumberOfBuckets);
291 stream << "},";
292 }
293
// Serializes all collected statistics into |stream| as a single JSON object
// (same data as PrintJSON, but as one document instead of one record per
// line). The byte sizes are derived from field-unit counters here.
void ObjectStats::Dump(std::stringstream& stream) {
  double time = isolate()->time_millis_since_init();
  int gc_count = heap()->gc_count();

  stream << "{";
  stream << "\"isolate\":\"" << reinterpret_cast<void*>(isolate()) << "\",";
  stream << "\"id\":" << gc_count << ",";
  stream << "\"time\":" << time << ",";

  // field_data
  stream << "\"field_data\":{";
  stream << "\"tagged_fields\":" << (tagged_fields_count_ * kTaggedSize);
  stream << ",\"embedder_fields\":"
         << (embedder_fields_count_ * kEmbedderDataSlotSize);
  stream << ",\"inobject_smi_fields\": "
         << (inobject_smi_fields_count_ * kTaggedSize);
  stream << ",\"unboxed_double_fields\": "
         << (unboxed_double_fields_count_ * kDoubleSize);
  stream << ",\"boxed_double_fields\": "
         << (boxed_double_fields_count_ * kDoubleSize);
  stream << ",\"string_data\": " << (string_data_count_ * kTaggedSize);
  stream << ",\"other_raw_fields\":"
         << (raw_fields_count_ * kSystemPointerSize);
  stream << "}, ";

  // Histogram bucket upper bounds in bytes (powers of two).
  stream << "\"bucket_sizes\":[";
  for (int i = 0; i < kNumberOfBuckets; i++) {
    stream << (1 << (kFirstBucketShift + i));
    if (i != (kNumberOfBuckets - 1)) stream << ",";
  }
  stream << "],";
  stream << "\"type_data\":{";

// One entry per regular and per (offset) virtual instance type; each entry
// ends with a comma, so a sentinel "END" entry closes the dictionary.
#define INSTANCE_TYPE_WRAPPER(name) DumpInstanceTypeData(stream, #name, name);

#define VIRTUAL_INSTANCE_TYPE_WRAPPER(name) \
  DumpInstanceTypeData(stream, #name, FIRST_VIRTUAL_TYPE + name);

  INSTANCE_TYPE_LIST(INSTANCE_TYPE_WRAPPER);
  VIRTUAL_INSTANCE_TYPE_LIST(VIRTUAL_INSTANCE_TYPE_WRAPPER)
  stream << "\"END\":{}}}";

#undef INSTANCE_TYPE_WRAPPER
#undef VIRTUAL_INSTANCE_TYPE_WRAPPER
}
339
// Snapshots the current counts/sizes into the *_last_time_ arrays (under a
// process-wide mutex) and resets the live counters for the next cycle.
void ObjectStats::CheckpointObjectStats() {
  base::MutexGuard lock_guard(object_stats_mutex.Pointer());
  MemCopy(object_counts_last_time_, object_counts_, sizeof(object_counts_));
  MemCopy(object_sizes_last_time_, object_sizes_, sizeof(object_sizes_));
  ClearObjectStats();
}
346
namespace {

// Returns floor(log2(size)) as the bit index of the highest set bit.
// Requires size > 0 (the DCHECK below), since CountLeadingZeros(0) would
// make the result meaningless.
int Log2ForSize(size_t size) {
  DCHECK_GT(size, 0);
  return kSizetSize * 8 - 1 - base::bits::CountLeadingZeros(size);
}

}  // namespace
355
HistogramIndexFromSize(size_t size)356 int ObjectStats::HistogramIndexFromSize(size_t size) {
357 if (size == 0) return 0;
358 return Min(Max(Log2ForSize(size) + 1 - kFirstBucketShift, 0),
359 kLastValueBucketIndex);
360 }
361
RecordObjectStats(InstanceType type,size_t size,size_t over_allocated)362 void ObjectStats::RecordObjectStats(InstanceType type, size_t size,
363 size_t over_allocated) {
364 DCHECK_LE(type, LAST_TYPE);
365 object_counts_[type]++;
366 object_sizes_[type] += size;
367 size_histogram_[type][HistogramIndexFromSize(size)]++;
368 over_allocated_[type] += over_allocated;
369 over_allocated_histogram_[type][HistogramIndexFromSize(size)]++;
370 }
371
RecordVirtualObjectStats(VirtualInstanceType type,size_t size,size_t over_allocated)372 void ObjectStats::RecordVirtualObjectStats(VirtualInstanceType type,
373 size_t size, size_t over_allocated) {
374 DCHECK_LE(type, LAST_VIRTUAL_TYPE);
375 object_counts_[FIRST_VIRTUAL_TYPE + type]++;
376 object_sizes_[FIRST_VIRTUAL_TYPE + type] += size;
377 size_histogram_[FIRST_VIRTUAL_TYPE + type][HistogramIndexFromSize(size)]++;
378 over_allocated_[FIRST_VIRTUAL_TYPE + type] += over_allocated;
379 over_allocated_histogram_[FIRST_VIRTUAL_TYPE + type]
380 [HistogramIndexFromSize(size)]++;
381 }
382
// The isolate that owns heap(); used for ids and timestamps in the output.
Isolate* ObjectStats::isolate() { return heap()->isolate(); }
384
// Walks the heap and feeds per-object data into an ObjectStats instance.
// Collection runs in two phases (see CollectStatistics): phase 1 records
// "virtual" sub-object types, phase 2 records per-instance-type totals and
// field stats.
class ObjectStatsCollectorImpl {
 public:
  enum Phase {
    kPhase1,
    kPhase2,
  };
  static const int kNumberOfPhases = kPhase2 + 1;

  ObjectStatsCollectorImpl(Heap* heap, ObjectStats* stats);

  void CollectGlobalStatistics();

  enum class CollectFieldStats { kNo, kYes };
  void CollectStatistics(HeapObject obj, Phase phase,
                         CollectFieldStats collect_field_stats);

 private:
  // Whether COW fixed arrays should be filtered out or recorded anyway.
  enum CowMode {
    kCheckCow,
    kIgnoreCow,
  };

  Isolate* isolate() { return heap_->isolate(); }

  bool RecordVirtualObjectStats(HeapObject parent, HeapObject obj,
                                ObjectStats::VirtualInstanceType type,
                                size_t size, size_t over_allocated,
                                CowMode check_cow_array = kCheckCow);
  void RecordExternalResourceStats(Address resource,
                                   ObjectStats::VirtualInstanceType type,
                                   size_t size);
  // Gets size from |ob| and assumes no over allocating.
  bool RecordSimpleVirtualObjectStats(HeapObject parent, HeapObject obj,
                                      ObjectStats::VirtualInstanceType type);
  // For HashTable it is possible to compute over allocated memory.
  template <typename Derived, typename Shape>
  void RecordHashTableVirtualObjectStats(HeapObject parent,
                                         HashTable<Derived, Shape> hash_table,
                                         ObjectStats::VirtualInstanceType type);

  bool SameLiveness(HeapObject obj1, HeapObject obj2);
  bool CanRecordFixedArray(FixedArrayBase array);
  bool IsCowArray(FixedArrayBase array);

  // Blocklist for objects that should not be recorded using
  // VirtualObjectStats and RecordSimpleVirtualObjectStats. For recording those
  // objects dispatch to the low level ObjectStats::RecordObjectStats manually.
  bool ShouldRecordObject(HeapObject object, CowMode check_cow_array);

  void RecordObjectStats(
      HeapObject obj, InstanceType type, size_t size,
      size_t over_allocated = ObjectStats::kNoOverAllocation);

  // Specific recursion into constant pool or embedded code objects. Records
  // FixedArrays and Tuple2.
  void RecordVirtualObjectsForConstantPoolOrEmbeddedObjects(
      HeapObject parent, HeapObject object,
      ObjectStats::VirtualInstanceType type);

  // Details.
  void RecordVirtualAllocationSiteDetails(AllocationSite site);
  void RecordVirtualBytecodeArrayDetails(BytecodeArray bytecode);
  void RecordVirtualCodeDetails(Code code);
  void RecordVirtualContext(Context context);
  void RecordVirtualFeedbackVectorDetails(FeedbackVector vector);
  void RecordVirtualFixedArrayDetails(FixedArray array);
  void RecordVirtualFunctionTemplateInfoDetails(FunctionTemplateInfo fti);
  void RecordVirtualJSGlobalObjectDetails(JSGlobalObject object);
  void RecordVirtualJSObjectDetails(JSObject object);
  void RecordVirtualMapDetails(Map map);
  void RecordVirtualScriptDetails(Script script);
  void RecordVirtualExternalStringDetails(ExternalString script);
  void RecordVirtualSharedFunctionInfoDetails(SharedFunctionInfo info);

  void RecordVirtualArrayBoilerplateDescription(
      ArrayBoilerplateDescription description);
  Heap* heap_;
  ObjectStats* stats_;
  MarkCompactCollector::NonAtomicMarkingState* marking_state_;
  // Objects already recorded via the virtual-stats path; prevents
  // double-counting across the detail recorders.
  std::unordered_set<HeapObject, Object::Hasher> virtual_objects_;
  // External resource addresses already recorded.
  std::unordered_set<Address> external_resources_;
  FieldStatsCollector field_stats_collector_;
};
468
// Wires the field-stats collector directly to the counters owned by |stats|
// so FieldStatsCollector::RecordStats() updates them in place.
ObjectStatsCollectorImpl::ObjectStatsCollectorImpl(Heap* heap,
                                                   ObjectStats* stats)
    : heap_(heap),
      stats_(stats),
      marking_state_(
          heap->mark_compact_collector()->non_atomic_marking_state()),
      field_stats_collector_(
          &stats->tagged_fields_count_, &stats->embedder_fields_count_,
          &stats->inobject_smi_fields_count_,
          &stats->unboxed_double_fields_count_,
          &stats->boxed_double_fields_count_, &stats->string_data_count_,
          &stats->raw_fields_count_) {}
481
ShouldRecordObject(HeapObject obj,CowMode check_cow_array)482 bool ObjectStatsCollectorImpl::ShouldRecordObject(HeapObject obj,
483 CowMode check_cow_array) {
484 if (obj.IsFixedArrayExact()) {
485 FixedArray fixed_array = FixedArray::cast(obj);
486 bool cow_check = check_cow_array == kIgnoreCow || !IsCowArray(fixed_array);
487 return CanRecordFixedArray(fixed_array) && cow_check;
488 }
489 if (obj == ReadOnlyRoots(heap_).empty_property_array()) return false;
490 return true;
491 }
492
493 template <typename Derived, typename Shape>
RecordHashTableVirtualObjectStats(HeapObject parent,HashTable<Derived,Shape> hash_table,ObjectStats::VirtualInstanceType type)494 void ObjectStatsCollectorImpl::RecordHashTableVirtualObjectStats(
495 HeapObject parent, HashTable<Derived, Shape> hash_table,
496 ObjectStats::VirtualInstanceType type) {
497 size_t over_allocated =
498 (hash_table.Capacity() -
499 (hash_table.NumberOfElements() + hash_table.NumberOfDeletedElements())) *
500 HashTable<Derived, Shape>::kEntrySize * kTaggedSize;
501 RecordVirtualObjectStats(parent, hash_table, type, hash_table.Size(),
502 over_allocated);
503 }
504
// Records |obj| with its self-reported size, assuming no over-allocation,
// and with the default COW-array filtering (kCheckCow).
bool ObjectStatsCollectorImpl::RecordSimpleVirtualObjectStats(
    HeapObject parent, HeapObject obj, ObjectStats::VirtualInstanceType type) {
  return RecordVirtualObjectStats(parent, obj, type, obj.Size(),
                                  ObjectStats::kNoOverAllocation, kCheckCow);
}
510
RecordVirtualObjectStats(HeapObject parent,HeapObject obj,ObjectStats::VirtualInstanceType type,size_t size,size_t over_allocated,CowMode check_cow_array)511 bool ObjectStatsCollectorImpl::RecordVirtualObjectStats(
512 HeapObject parent, HeapObject obj, ObjectStats::VirtualInstanceType type,
513 size_t size, size_t over_allocated, CowMode check_cow_array) {
514 CHECK_LT(over_allocated, size);
515 if (!SameLiveness(parent, obj) || !ShouldRecordObject(obj, check_cow_array)) {
516 return false;
517 }
518
519 if (virtual_objects_.find(obj) == virtual_objects_.end()) {
520 virtual_objects_.insert(obj);
521 stats_->RecordVirtualObjectStats(type, size, over_allocated);
522 return true;
523 }
524 return false;
525 }
526
RecordExternalResourceStats(Address resource,ObjectStats::VirtualInstanceType type,size_t size)527 void ObjectStatsCollectorImpl::RecordExternalResourceStats(
528 Address resource, ObjectStats::VirtualInstanceType type, size_t size) {
529 if (external_resources_.find(resource) == external_resources_.end()) {
530 external_resources_.insert(resource);
531 stats_->RecordVirtualObjectStats(type, size, 0);
532 }
533 }
534
// Records the boilerplate object graph hanging off an AllocationSite: the
// boilerplate itself, its properties backing store, and its elements. Runs
// before the generic JSObject pass so boilerplates get their own types.
void ObjectStatsCollectorImpl::RecordVirtualAllocationSiteDetails(
    AllocationSite site) {
  if (!site.PointsToLiteral()) return;
  JSObject boilerplate = site.boilerplate();
  if (boilerplate.IsJSArray()) {
    RecordSimpleVirtualObjectStats(site, boilerplate,
                                   ObjectStats::JS_ARRAY_BOILERPLATE_TYPE);
    // Array boilerplates cannot have properties.
  } else {
    RecordVirtualObjectStats(
        site, boilerplate, ObjectStats::JS_OBJECT_BOILERPLATE_TYPE,
        boilerplate.Size(), ObjectStats::kNoOverAllocation);
    if (boilerplate.HasFastProperties()) {
      // We'll mis-classify the empty_property_array here. Given that there is a
      // single instance, this is negligible.
      PropertyArray properties = boilerplate.property_array();
      RecordSimpleVirtualObjectStats(
          site, properties, ObjectStats::BOILERPLATE_PROPERTY_ARRAY_TYPE);
    } else {
      NameDictionary properties = boilerplate.property_dictionary();
      RecordSimpleVirtualObjectStats(
          site, properties, ObjectStats::BOILERPLATE_PROPERTY_DICTIONARY_TYPE);
    }
  }
  FixedArrayBase elements = boilerplate.elements();
  RecordSimpleVirtualObjectStats(site, elements,
                                 ObjectStats::BOILERPLATE_ELEMENTS_TYPE);
}
563
// Records the CallHandlerInfo objects reachable from a FunctionTemplateInfo
// (its call code and its instance call handler, when present).
void ObjectStatsCollectorImpl::RecordVirtualFunctionTemplateInfoDetails(
    FunctionTemplateInfo fti) {
  // named_property_handler and indexed_property_handler are recorded as
  // INTERCEPTOR_INFO_TYPE.
  HeapObject call_code = fti.call_code(kAcquireLoad);
  if (!call_code.IsUndefined(isolate())) {
    RecordSimpleVirtualObjectStats(
        fti, CallHandlerInfo::cast(call_code),
        ObjectStats::FUNCTION_TEMPLATE_INFO_ENTRIES_TYPE);
  }
  if (!fti.GetInstanceCallHandler().IsUndefined(isolate())) {
    RecordSimpleVirtualObjectStats(
        fti, CallHandlerInfo::cast(fti.GetInstanceCallHandler()),
        ObjectStats::FUNCTION_TEMPLATE_INFO_ENTRIES_TYPE);
  }
}
580
// Records the global object's backing stores under dedicated GLOBAL_* types
// (the object itself is handled by the generic per-type pass).
void ObjectStatsCollectorImpl::RecordVirtualJSGlobalObjectDetails(
    JSGlobalObject object) {
  // Properties.
  GlobalDictionary properties = object.global_dictionary();
  RecordHashTableVirtualObjectStats(object, properties,
                                    ObjectStats::GLOBAL_PROPERTIES_TYPE);
  // Elements.
  FixedArrayBase elements = object.elements();
  RecordSimpleVirtualObjectStats(object, elements,
                                 ObjectStats::GLOBAL_ELEMENTS_TYPE);
}
592
// Records a JSObject's backing stores (properties, elements, collection
// tables), classifying them by object kind and computing over-allocation
// where it is cheap to derive.
void ObjectStatsCollectorImpl::RecordVirtualJSObjectDetails(JSObject object) {
  // JSGlobalObject is recorded separately.
  if (object.IsJSGlobalObject()) return;

  // Uncompiled JSFunction has a separate type.
  if (object.IsJSFunction() && !JSFunction::cast(object).is_compiled()) {
    RecordSimpleVirtualObjectStats(HeapObject(), object,
                                   ObjectStats::JS_UNCOMPILED_FUNCTION_TYPE);
  }

  // Properties.
  if (object.HasFastProperties()) {
    PropertyArray properties = object.property_array();
    if (properties != ReadOnlyRoots(heap_).empty_property_array()) {
      size_t over_allocated = object.map().UnusedPropertyFields() * kTaggedSize;
      RecordVirtualObjectStats(object, properties,
                               object.map().is_prototype_map()
                                   ? ObjectStats::PROTOTYPE_PROPERTY_ARRAY_TYPE
                                   : ObjectStats::OBJECT_PROPERTY_ARRAY_TYPE,
                               properties.Size(), over_allocated);
    }
  } else {
    NameDictionary properties = object.property_dictionary();
    RecordHashTableVirtualObjectStats(
        object, properties,
        object.map().is_prototype_map()
            ? ObjectStats::PROTOTYPE_PROPERTY_DICTIONARY_TYPE
            : ObjectStats::OBJECT_PROPERTY_DICTIONARY_TYPE);
  }

  // Elements.
  FixedArrayBase elements = object.elements();
  if (object.HasDictionaryElements()) {
    RecordHashTableVirtualObjectStats(
        object, NumberDictionary::cast(elements),
        object.IsJSArray() ? ObjectStats::ARRAY_DICTIONARY_ELEMENTS_TYPE
                           : ObjectStats::OBJECT_DICTIONARY_ELEMENTS_TYPE);
  } else if (object.IsJSArray()) {
    if (elements != ReadOnlyRoots(heap_).empty_fixed_array()) {
      // NOTE(review): assumes any elements store other than the canonical
      // empty array has length > 0 (the division below) — verify.
      size_t element_size =
          (elements.Size() - FixedArrayBase::kHeaderSize) / elements.length();
      // length() is a Number; truncated to uint32 here.
      uint32_t length = JSArray::cast(object).length().Number();
      size_t over_allocated = (elements.length() - length) * element_size;
      RecordVirtualObjectStats(object, elements,
                               ObjectStats::ARRAY_ELEMENTS_TYPE,
                               elements.Size(), over_allocated);
    }
  } else {
    RecordSimpleVirtualObjectStats(object, elements,
                                   ObjectStats::OBJECT_ELEMENTS_TYPE);
  }

  // JSCollections.
  if (object.IsJSCollection()) {
    // TODO(bmeurer): Properly compute over-allocation here.
    RecordSimpleVirtualObjectStats(
        object, FixedArray::cast(JSCollection::cast(object).table()),
        ObjectStats::JS_COLLECTION_TABLE_TYPE);
  }
}
653
// Classifies a feedback vector slot for stats purposes: cleared slots are
// "other"; call/load/store slots additionally distinguish the unused state
// (slot still holds the uninitialized sentinel symbol).
static ObjectStats::VirtualInstanceType GetFeedbackSlotType(
    MaybeObject maybe_obj, FeedbackSlotKind kind, Isolate* isolate) {
  if (maybe_obj->IsCleared())
    return ObjectStats::FEEDBACK_VECTOR_SLOT_OTHER_TYPE;
  Object obj = maybe_obj->GetHeapObjectOrSmi();
  switch (kind) {
    case FeedbackSlotKind::kCall:
      if (obj == *isolate->factory()->uninitialized_symbol()) {
        return ObjectStats::FEEDBACK_VECTOR_SLOT_CALL_UNUSED_TYPE;
      }
      return ObjectStats::FEEDBACK_VECTOR_SLOT_CALL_TYPE;

    case FeedbackSlotKind::kLoadProperty:
    case FeedbackSlotKind::kLoadGlobalInsideTypeof:
    case FeedbackSlotKind::kLoadGlobalNotInsideTypeof:
    case FeedbackSlotKind::kLoadKeyed:
    case FeedbackSlotKind::kHasKeyed:
      if (obj == *isolate->factory()->uninitialized_symbol()) {
        return ObjectStats::FEEDBACK_VECTOR_SLOT_LOAD_UNUSED_TYPE;
      }
      return ObjectStats::FEEDBACK_VECTOR_SLOT_LOAD_TYPE;

    case FeedbackSlotKind::kStoreNamedSloppy:
    case FeedbackSlotKind::kStoreNamedStrict:
    case FeedbackSlotKind::kStoreOwnNamed:
    case FeedbackSlotKind::kStoreGlobalSloppy:
    case FeedbackSlotKind::kStoreGlobalStrict:
    case FeedbackSlotKind::kStoreKeyedSloppy:
    case FeedbackSlotKind::kStoreKeyedStrict:
      if (obj == *isolate->factory()->uninitialized_symbol()) {
        return ObjectStats::FEEDBACK_VECTOR_SLOT_STORE_UNUSED_TYPE;
      }
      return ObjectStats::FEEDBACK_VECTOR_SLOT_STORE_TYPE;

    // Binary/compare-op slots are mapped to the ..._ENUM_TYPE bucket.
    case FeedbackSlotKind::kBinaryOp:
    case FeedbackSlotKind::kCompareOp:
      return ObjectStats::FEEDBACK_VECTOR_SLOT_ENUM_TYPE;

    default:
      return ObjectStats::FEEDBACK_VECTOR_SLOT_OTHER_TYPE;
  }
}
696
// Splits a feedback vector into its header, its per-slot entries (typed via
// GetFeedbackSlotType), and the helper objects the slots own, recording each
// part separately instead of the vector as a whole.
void ObjectStatsCollectorImpl::RecordVirtualFeedbackVectorDetails(
    FeedbackVector vector) {
  if (virtual_objects_.find(vector) != virtual_objects_.end()) return;
  // Manually insert the feedback vector into the virtual object list, since
  // we're logging its component parts separately.
  virtual_objects_.insert(vector);

  // Accumulates the bytes attributed below; must equal vector.Size() at the
  // end (CHECK below).
  size_t calculated_size = 0;

  // Log the feedback vector's header (fixed fields).
  size_t header_size = vector.slots_start().address() - vector.address();
  stats_->RecordVirtualObjectStats(ObjectStats::FEEDBACK_VECTOR_HEADER_TYPE,
                                   header_size, ObjectStats::kNoOverAllocation);
  calculated_size += header_size;

  // Iterate over the feedback slots and log each one.
  if (!vector.shared_function_info().HasFeedbackMetadata()) return;

  FeedbackMetadataIterator it(vector.metadata());
  while (it.HasNext()) {
    FeedbackSlot slot = it.Next();
    // Log the entry (or entries) taken up by this slot.
    size_t slot_size = it.entry_size() * kTaggedSize;
    stats_->RecordVirtualObjectStats(
        GetFeedbackSlotType(vector.Get(slot), it.kind(), heap_->isolate()),
        slot_size, ObjectStats::kNoOverAllocation);
    calculated_size += slot_size;

    // Log the monomorphic/polymorphic helper objects that this slot owns.
    for (int i = 0; i < it.entry_size(); i++) {
      MaybeObject raw_object = vector.Get(slot.WithOffset(i));
      HeapObject object;
      if (raw_object->GetHeapObject(&object)) {
        if (object.IsCell() || object.IsWeakFixedArray()) {
          RecordSimpleVirtualObjectStats(
              vector, object, ObjectStats::FEEDBACK_VECTOR_ENTRY_TYPE);
        }
      }
    }
  }

  CHECK_EQ(calculated_size, vector.Size());
}
740
RecordVirtualFixedArrayDetails(FixedArray array)741 void ObjectStatsCollectorImpl::RecordVirtualFixedArrayDetails(
742 FixedArray array) {
743 if (IsCowArray(array)) {
744 RecordVirtualObjectStats(HeapObject(), array, ObjectStats::COW_ARRAY_TYPE,
745 array.Size(), ObjectStats::kNoOverAllocation,
746 kIgnoreCow);
747 }
748 }
749
// Per-object entry point. Phase 1 dispatches to the type-specific virtual
// recorders (order of the if-chain matters: more specific checks first,
// FixedArrayExact last because it matches too eagerly). Phase 2 records the
// generic per-instance-type stats and, optionally, field stats.
void ObjectStatsCollectorImpl::CollectStatistics(
    HeapObject obj, Phase phase, CollectFieldStats collect_field_stats) {
  Map map = obj.map();
  switch (phase) {
    case kPhase1:
      if (obj.IsFeedbackVector()) {
        RecordVirtualFeedbackVectorDetails(FeedbackVector::cast(obj));
      } else if (obj.IsMap()) {
        RecordVirtualMapDetails(Map::cast(obj));
      } else if (obj.IsBytecodeArray()) {
        RecordVirtualBytecodeArrayDetails(BytecodeArray::cast(obj));
      } else if (obj.IsCode()) {
        RecordVirtualCodeDetails(Code::cast(obj));
      } else if (obj.IsFunctionTemplateInfo()) {
        RecordVirtualFunctionTemplateInfoDetails(
            FunctionTemplateInfo::cast(obj));
      } else if (obj.IsJSGlobalObject()) {
        RecordVirtualJSGlobalObjectDetails(JSGlobalObject::cast(obj));
      } else if (obj.IsJSObject()) {
        // This phase needs to come after RecordVirtualAllocationSiteDetails
        // to properly split among boilerplates.
        RecordVirtualJSObjectDetails(JSObject::cast(obj));
      } else if (obj.IsSharedFunctionInfo()) {
        RecordVirtualSharedFunctionInfoDetails(SharedFunctionInfo::cast(obj));
      } else if (obj.IsContext()) {
        RecordVirtualContext(Context::cast(obj));
      } else if (obj.IsScript()) {
        RecordVirtualScriptDetails(Script::cast(obj));
      } else if (obj.IsArrayBoilerplateDescription()) {
        RecordVirtualArrayBoilerplateDescription(
            ArrayBoilerplateDescription::cast(obj));
      } else if (obj.IsFixedArrayExact()) {
        // Has to go last as it triggers too eagerly.
        RecordVirtualFixedArrayDetails(FixedArray::cast(obj));
      }
      break;
    case kPhase2:
      if (obj.IsExternalString()) {
        // This has to be in Phase2 to avoid conflicting with recording Script
        // sources. We still want to run RecordObjectStats after though.
        RecordVirtualExternalStringDetails(ExternalString::cast(obj));
      }
      size_t over_allocated = ObjectStats::kNoOverAllocation;
      if (obj.IsJSObject()) {
        // Unused in-object property space counts as over-allocation.
        over_allocated = map.instance_size() - map.UsedInstanceSize();
      }
      RecordObjectStats(obj, map.instance_type(), obj.Size(), over_allocated);
      if (collect_field_stats == CollectFieldStats::kYes) {
        field_stats_collector_.RecordStats(obj);
      }
      break;
  }
}
803
// Records one-off global data structures: the allocation-site list and the
// heap's root caches. Runs before the general heap iteration so that
// boilerplates can be disambiguated from regular JS objects.
void ObjectStatsCollectorImpl::CollectGlobalStatistics() {
  // Iterate boilerplates first to disambiguate them from regular JS objects.
  Object list = heap_->allocation_sites_list();
  while (list.IsAllocationSite()) {
    AllocationSite site = AllocationSite::cast(list);
    RecordVirtualAllocationSiteDetails(site);
    list = site.weak_next();
  }

  // FixedArray.
  RecordSimpleVirtualObjectStats(HeapObject(), heap_->serialized_objects(),
                                 ObjectStats::SERIALIZED_OBJECTS_TYPE);
  RecordSimpleVirtualObjectStats(HeapObject(), heap_->number_string_cache(),
                                 ObjectStats::NUMBER_STRING_CACHE_TYPE);
  RecordSimpleVirtualObjectStats(
      HeapObject(), heap_->single_character_string_cache(),
      ObjectStats::SINGLE_CHARACTER_STRING_CACHE_TYPE);
  RecordSimpleVirtualObjectStats(HeapObject(), heap_->string_split_cache(),
                                 ObjectStats::STRING_SPLIT_CACHE_TYPE);
  RecordSimpleVirtualObjectStats(HeapObject(), heap_->regexp_multiple_cache(),
                                 ObjectStats::REGEXP_MULTIPLE_CACHE_TYPE);

  // WeakArrayList.
  RecordSimpleVirtualObjectStats(HeapObject(),
                                 WeakArrayList::cast(heap_->script_list()),
                                 ObjectStats::SCRIPT_LIST_TYPE);
}
831
RecordObjectStats(HeapObject obj,InstanceType type,size_t size,size_t over_allocated)832 void ObjectStatsCollectorImpl::RecordObjectStats(HeapObject obj,
833 InstanceType type, size_t size,
834 size_t over_allocated) {
835 if (virtual_objects_.find(obj) == virtual_objects_.end()) {
836 stats_->RecordObjectStats(type, size, over_allocated);
837 }
838 }
839
CanRecordFixedArray(FixedArrayBase array)840 bool ObjectStatsCollectorImpl::CanRecordFixedArray(FixedArrayBase array) {
841 ReadOnlyRoots roots(heap_);
842 return array != roots.empty_fixed_array() &&
843 array != roots.empty_slow_element_dictionary() &&
844 array != roots.empty_property_dictionary();
845 }
846
IsCowArray(FixedArrayBase array)847 bool ObjectStatsCollectorImpl::IsCowArray(FixedArrayBase array) {
848 return array.map() == ReadOnlyRoots(heap_).fixed_cow_array_map();
849 }
850
SameLiveness(HeapObject obj1,HeapObject obj2)851 bool ObjectStatsCollectorImpl::SameLiveness(HeapObject obj1, HeapObject obj2) {
852 return obj1.is_null() || obj2.is_null() ||
853 marking_state_->Color(obj1) == marking_state_->Color(obj2);
854 }
855
// Classifies a Map into one of several virtual map categories (prototype,
// deprecated, dictionary, stable, ...) and attributes its owned descriptor
// array, enum caches, and prototype-users list to map-related buckets.
void ObjectStatsCollectorImpl::RecordVirtualMapDetails(Map map) {
  // TODO(mlippautz): map->dependent_code(): DEPENDENT_CODE_TYPE.

  // For Map we want to distinguish between various different states
  // to get a better picture of what's going on in MapSpace. This
  // method computes the virtual instance type to use for a given map,
  // using MAP_TYPE for regular maps that aren't special in any way.
  if (map.is_prototype_map()) {
    if (map.is_dictionary_map()) {
      RecordSimpleVirtualObjectStats(
          HeapObject(), map, ObjectStats::MAP_PROTOTYPE_DICTIONARY_TYPE);
    } else if (map.is_abandoned_prototype_map()) {
      RecordSimpleVirtualObjectStats(HeapObject(), map,
                                     ObjectStats::MAP_ABANDONED_PROTOTYPE_TYPE);
    } else {
      RecordSimpleVirtualObjectStats(HeapObject(), map,
                                     ObjectStats::MAP_PROTOTYPE_TYPE);
    }
  } else if (map.is_deprecated()) {
    RecordSimpleVirtualObjectStats(HeapObject(), map,
                                   ObjectStats::MAP_DEPRECATED_TYPE);
  } else if (map.is_dictionary_map()) {
    RecordSimpleVirtualObjectStats(HeapObject(), map,
                                   ObjectStats::MAP_DICTIONARY_TYPE);
  } else if (map.is_stable()) {
    RecordSimpleVirtualObjectStats(HeapObject(), map,
                                   ObjectStats::MAP_STABLE_TYPE);
  } else {
    // This will be logged as MAP_TYPE in Phase2.
  }

  // Only maps that own their descriptors get the descriptor array attributed;
  // non-owning maps share it with the owning map.
  DescriptorArray array = map.instance_descriptors(kRelaxedLoad);
  if (map.owns_descriptors() &&
      array != ReadOnlyRoots(heap_).empty_descriptor_array()) {
    // Generally DescriptorArrays have their own instance type already
    // (DESCRIPTOR_ARRAY_TYPE), but we'd like to be able to tell which
    // of those are for (abandoned) prototypes, and which of those are
    // owned by deprecated maps.
    if (map.is_prototype_map()) {
      RecordSimpleVirtualObjectStats(
          map, array, ObjectStats::PROTOTYPE_DESCRIPTOR_ARRAY_TYPE);
    } else if (map.is_deprecated()) {
      RecordSimpleVirtualObjectStats(
          map, array, ObjectStats::DEPRECATED_DESCRIPTOR_ARRAY_TYPE);
    }

    // Enum caches hang off the descriptor array but get their own buckets.
    EnumCache enum_cache = array.enum_cache();
    RecordSimpleVirtualObjectStats(array, enum_cache.keys(),
                                   ObjectStats::ENUM_KEYS_CACHE_TYPE);
    RecordSimpleVirtualObjectStats(array, enum_cache.indices(),
                                   ObjectStats::ENUM_INDICES_CACHE_TYPE);
  }

  if (map.is_prototype_map()) {
    // Attribute the prototype-users list to the prototype map that owns it.
    if (map.prototype_info().IsPrototypeInfo()) {
      PrototypeInfo info = PrototypeInfo::cast(map.prototype_info());
      Object users = info.prototype_users();
      if (users.IsWeakFixedArray()) {
        RecordSimpleVirtualObjectStats(map, WeakArrayList::cast(users),
                                       ObjectStats::PROTOTYPE_USERS_TYPE);
      }
    }
  }
}
920
// Attributes a Script's shared-function-info list and its source string to
// script-specific categories, distinguishing external (off-heap) sources
// from on-heap ones, and one-byte from two-byte representations.
void ObjectStatsCollectorImpl::RecordVirtualScriptDetails(Script script) {
  RecordSimpleVirtualObjectStats(
      script, script.shared_function_infos(),
      ObjectStats::SCRIPT_SHARED_FUNCTION_INFOS_TYPE);

  // Log the size of external source code.
  Object raw_source = script.source();
  if (raw_source.IsExternalString()) {
    // The contents of external strings aren't on the heap, so we have to record
    // them manually. The on-heap String object is recorded independently in
    // the normal pass.
    ExternalString string = ExternalString::cast(raw_source);
    Address resource = string.resource_as_address();
    size_t off_heap_size = string.ExternalPayloadSize();
    RecordExternalResourceStats(
        resource,
        string.IsOneByteRepresentation()
            ? ObjectStats::SCRIPT_SOURCE_EXTERNAL_ONE_BYTE_TYPE
            : ObjectStats::SCRIPT_SOURCE_EXTERNAL_TWO_BYTE_TYPE,
        off_heap_size);
  } else if (raw_source.IsString()) {
    String source = String::cast(raw_source);
    RecordSimpleVirtualObjectStats(
        script, source,
        source.IsOneByteRepresentation()
            ? ObjectStats::SCRIPT_SOURCE_NON_EXTERNAL_ONE_BYTE_TYPE
            : ObjectStats::SCRIPT_SOURCE_NON_EXTERNAL_TWO_BYTE_TYPE);
  }
}
950
RecordVirtualExternalStringDetails(ExternalString string)951 void ObjectStatsCollectorImpl::RecordVirtualExternalStringDetails(
952 ExternalString string) {
953 // Track the external string resource size in a separate category.
954
955 Address resource = string.resource_as_address();
956 size_t off_heap_size = string.ExternalPayloadSize();
957 RecordExternalResourceStats(
958 resource,
959 string.IsOneByteRepresentation()
960 ? ObjectStats::STRING_EXTERNAL_RESOURCE_ONE_BYTE_TYPE
961 : ObjectStats::STRING_EXTERNAL_RESOURCE_TWO_BYTE_TYPE,
962 off_heap_size);
963 }
964
RecordVirtualSharedFunctionInfoDetails(SharedFunctionInfo info)965 void ObjectStatsCollectorImpl::RecordVirtualSharedFunctionInfoDetails(
966 SharedFunctionInfo info) {
967 // Uncompiled SharedFunctionInfo gets its own category.
968 if (!info.is_compiled()) {
969 RecordSimpleVirtualObjectStats(
970 HeapObject(), info, ObjectStats::UNCOMPILED_SHARED_FUNCTION_INFO_TYPE);
971 }
972 }
973
RecordVirtualArrayBoilerplateDescription(ArrayBoilerplateDescription description)974 void ObjectStatsCollectorImpl::RecordVirtualArrayBoilerplateDescription(
975 ArrayBoilerplateDescription description) {
976 RecordVirtualObjectsForConstantPoolOrEmbeddedObjects(
977 description, description.constant_elements(),
978 ObjectStats::ARRAY_BOILERPLATE_DESCRIPTION_ELEMENTS_TYPE);
979 }
980
981 void ObjectStatsCollectorImpl::
RecordVirtualObjectsForConstantPoolOrEmbeddedObjects(HeapObject parent,HeapObject object,ObjectStats::VirtualInstanceType type)982 RecordVirtualObjectsForConstantPoolOrEmbeddedObjects(
983 HeapObject parent, HeapObject object,
984 ObjectStats::VirtualInstanceType type) {
985 if (!RecordSimpleVirtualObjectStats(parent, object, type)) return;
986 if (object.IsFixedArrayExact()) {
987 FixedArray array = FixedArray::cast(object);
988 for (int i = 0; i < array.length(); i++) {
989 Object entry = array.get(i);
990 if (!entry.IsHeapObject()) continue;
991 RecordVirtualObjectsForConstantPoolOrEmbeddedObjects(
992 array, HeapObject::cast(entry), type);
993 }
994 }
995 }
996
// Attributes a BytecodeArray's side tables (constant pool, handler table,
// source position table) and any FixedArrays reachable from the constant
// pool to bytecode-specific categories.
void ObjectStatsCollectorImpl::RecordVirtualBytecodeArrayDetails(
    BytecodeArray bytecode) {
  RecordSimpleVirtualObjectStats(
      bytecode, bytecode.constant_pool(),
      ObjectStats::BYTECODE_ARRAY_CONSTANT_POOL_TYPE);
  // FixedArrays on constant pool are used for holding descriptor information.
  // They are shared with optimized code.
  FixedArray constant_pool = FixedArray::cast(bytecode.constant_pool());
  for (int i = 0; i < constant_pool.length(); i++) {
    Object entry = constant_pool.get(i);
    if (entry.IsFixedArrayExact()) {
      RecordVirtualObjectsForConstantPoolOrEmbeddedObjects(
          constant_pool, HeapObject::cast(entry),
          ObjectStats::EMBEDDED_OBJECT_TYPE);
    }
  }
  RecordSimpleVirtualObjectStats(
      bytecode, bytecode.handler_table(),
      ObjectStats::BYTECODE_ARRAY_HANDLER_TABLE_TYPE);
  if (bytecode.HasSourcePositionTable()) {
    RecordSimpleVirtualObjectStats(bytecode, bytecode.SourcePositionTable(),
                                   ObjectStats::SOURCE_POSITION_TABLE_TYPE);
  }
}
1021
namespace {

// Maps a CodeKind to the ObjectStats virtual instance type of the same name,
// so each kind of Code object is reported in its own bucket. The switch is
// generated from CODE_KIND_LIST; a missing kind is a compile-time error.
ObjectStats::VirtualInstanceType CodeKindToVirtualInstanceType(CodeKind kind) {
  switch (kind) {
#define CODE_KIND_CASE(type) \
  case CodeKind::type:       \
    return ObjectStats::type;
    CODE_KIND_LIST(CODE_KIND_CASE)
#undef CODE_KIND_CASE
  }
  UNREACHABLE();
}

}  // namespace
1036
// Buckets a Code object by its CodeKind and attributes its metadata
// (deoptimization data, relocation info, source positions, deopt literals)
// and any FixedArrays embedded via relocations to code-specific categories.
void ObjectStatsCollectorImpl::RecordVirtualCodeDetails(Code code) {
  RecordSimpleVirtualObjectStats(HeapObject(), code,
                                 CodeKindToVirtualInstanceType(code.kind()));
  RecordSimpleVirtualObjectStats(code, code.deoptimization_data(),
                                 ObjectStats::DEOPTIMIZATION_DATA_TYPE);
  RecordSimpleVirtualObjectStats(code, code.relocation_info(),
                                 ObjectStats::RELOC_INFO_TYPE);
  Object source_position_table = code.source_position_table();
  if (source_position_table.IsHeapObject()) {
    RecordSimpleVirtualObjectStats(code,
                                   HeapObject::cast(source_position_table),
                                   ObjectStats::SOURCE_POSITION_TABLE_TYPE);
  }
  if (CodeKindIsOptimizedJSFunction(code.kind())) {
    // Optimized code additionally carries a literal array in its deopt data;
    // record it in its own category.
    DeoptimizationData input_data =
        DeoptimizationData::cast(code.deoptimization_data());
    if (input_data.length() > 0) {
      RecordSimpleVirtualObjectStats(code.deoptimization_data(),
                                     input_data.LiteralArray(),
                                     ObjectStats::OPTIMIZED_CODE_LITERALS_TYPE);
    }
  }
  // Walk embedded-object relocations; plain FixedArrays found there (and
  // their contents) are attributed to the code object.
  int const mode_mask = RelocInfo::EmbeddedObjectModeMask();
  for (RelocIterator it(code, mode_mask); !it.done(); it.next()) {
    DCHECK(RelocInfo::IsEmbeddedObjectMode(it.rinfo()->rmode()));
    Object target = it.rinfo()->target_object();
    if (target.IsFixedArrayExact()) {
      RecordVirtualObjectsForConstantPoolOrEmbeddedObjects(
          code, HeapObject::cast(target), ObjectStats::EMBEDDED_OBJECT_TYPE);
    }
  }
}
1069
RecordVirtualContext(Context context)1070 void ObjectStatsCollectorImpl::RecordVirtualContext(Context context) {
1071 if (context.IsNativeContext()) {
1072 RecordObjectStats(context, NATIVE_CONTEXT_TYPE, context.Size());
1073 RecordSimpleVirtualObjectStats(context, context.retained_maps(),
1074 ObjectStats::RETAINED_MAPS_TYPE);
1075
1076 } else if (context.IsFunctionContext()) {
1077 RecordObjectStats(context, FUNCTION_CONTEXT_TYPE, context.Size());
1078 } else {
1079 RecordSimpleVirtualObjectStats(HeapObject(), context,
1080 ObjectStats::OTHER_CONTEXT_TYPE);
1081 }
1082 }
1083
// Visits each heap object in a given phase and routes it to either the live
// or the dead collector based on its mark bit; field statistics are only
// collected for live (black) objects.
class ObjectStatsVisitor {
 public:
  ObjectStatsVisitor(Heap* heap, ObjectStatsCollectorImpl* live_collector,
                     ObjectStatsCollectorImpl* dead_collector,
                     ObjectStatsCollectorImpl::Phase phase)
      : live_collector_(live_collector),
        dead_collector_(dead_collector),
        marking_state_(
            heap->mark_compact_collector()->non_atomic_marking_state()),
        phase_(phase) {}

  // Returns true to continue iteration.
  bool Visit(HeapObject obj, int size) {
    if (marking_state_->IsBlack(obj)) {
      live_collector_->CollectStatistics(
          obj, phase_, ObjectStatsCollectorImpl::CollectFieldStats::kYes);
    } else {
      // Non-black must mean white here; grey is asserted not to occur.
      DCHECK(!marking_state_->IsGrey(obj));
      dead_collector_->CollectStatistics(
          obj, phase_, ObjectStatsCollectorImpl::CollectFieldStats::kNo);
    }
    return true;
  }

 private:
  ObjectStatsCollectorImpl* live_collector_;
  ObjectStatsCollectorImpl* dead_collector_;
  MarkCompactCollector::NonAtomicMarkingState* marking_state_;
  ObjectStatsCollectorImpl::Phase phase_;
};
1113
1114 namespace {
1115
IterateHeap(Heap * heap,ObjectStatsVisitor * visitor)1116 void IterateHeap(Heap* heap, ObjectStatsVisitor* visitor) {
1117 CombinedHeapObjectIterator iterator(heap);
1118 for (HeapObject obj = iterator.Next(); !obj.is_null();
1119 obj = iterator.Next()) {
1120 visitor->Visit(obj, obj.Size());
1121 }
1122 }
1123
1124 } // namespace
1125
Collect()1126 void ObjectStatsCollector::Collect() {
1127 ObjectStatsCollectorImpl live_collector(heap_, live_);
1128 ObjectStatsCollectorImpl dead_collector(heap_, dead_);
1129 live_collector.CollectGlobalStatistics();
1130 for (int i = 0; i < ObjectStatsCollectorImpl::kNumberOfPhases; i++) {
1131 ObjectStatsVisitor visitor(heap_, &live_collector, &dead_collector,
1132 static_cast<ObjectStatsCollectorImpl::Phase>(i));
1133 IterateHeap(heap_, &visitor);
1134 }
1135 }
1136
1137 } // namespace internal
1138 } // namespace v8
1139