// Copyright 2015 the V8 project authors. All rights reserved.
//
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/heap/object-stats.h"

#include <unordered_set>

#include "src/base/bits.h"
#include "src/codegen/assembler-inl.h"
#include "src/codegen/compilation-cache.h"
#include "src/common/globals.h"
#include "src/execution/isolate.h"
#include "src/heap/combined-heap.h"
#include "src/heap/heap-inl.h"
#include "src/heap/mark-compact.h"
#include "src/logging/counters.h"
#include "src/objects/compilation-cache-table-inl.h"
#include "src/objects/heap-object.h"
#include "src/objects/js-array-inl.h"
#include "src/objects/js-collection-inl.h"
#include "src/objects/literal-objects-inl.h"
#include "src/objects/slots.h"
#include "src/objects/templates.h"
#include "src/objects/visitors.h"
#include "src/utils/memcopy.h"
#include "src/utils/ostreams.h"

namespace v8 {
namespace internal {

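// Serializes ObjectStats::CheckpointObjectStats() below; the mutex is
// process-wide, so checkpoints from different isolates presumably do not
// interleave.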
static base::LazyMutex object_stats_mutex = LAZY_MUTEX_INITIALIZER;

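// Heap visitor that attributes every word of a visited object to one of the
// field categories tracked by ObjectStats: tagged pointers, embedder fields,
// in-object Smi fields, boxed-double payloads, sequential string payloads,
// and other raw data.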
class FieldStatsCollector : public ObjectVisitorWithCageBases {
 public:
  FieldStatsCollector(Heap* heap, size_t* tagged_fields_count,
                      size_t* embedder_fields_count,
                      size_t* inobject_smi_fields_count,
                      size_t* boxed_double_fields_count,
                      size_t* string_data_count, size_t* raw_fields_count)
      : ObjectVisitorWithCageBases(heap),
        tagged_fields_count_(tagged_fields_count),
        embedder_fields_count_(embedder_fields_count),
        inobject_smi_fields_count_(inobject_smi_fields_count),
        boxed_double_fields_count_(boxed_double_fields_count),
        string_data_count_(string_data_count),
        raw_fields_count_(raw_fields_count) {}

  void RecordStats(HeapObject host) {
    size_t old_pointer_fields_count = *tagged_fields_count_;
    host.Iterate(cage_base(), this);
    size_t tagged_fields_count_in_object =
        *tagged_fields_count_ - old_pointer_fields_count;

    int object_size_in_words = host.Size(cage_base()) / kTaggedSize;
    DCHECK_LE(tagged_fields_count_in_object, object_size_in_words);
    size_t raw_fields_count_in_object =
        object_size_in_words - tagged_fields_count_in_object;

    if (host.IsJSObject(cage_base())) {
      JSObjectFieldStats field_stats = GetInobjectFieldStats(host.map());
      // Embedder fields are already included in the pointer words.
      DCHECK_LE(field_stats.embedded_fields_count_,
                tagged_fields_count_in_object);
      tagged_fields_count_in_object -= field_stats.embedded_fields_count_;
      *tagged_fields_count_ -= field_stats.embedded_fields_count_;
      *embedder_fields_count_ += field_stats.embedded_fields_count_;

      // Smi fields are also included in the pointer words.
      tagged_fields_count_in_object -= field_stats.smi_fields_count_;
      *tagged_fields_count_ -= field_stats.smi_fields_count_;
      *inobject_smi_fields_count_ += field_stats.smi_fields_count_;
    } else if (host.IsHeapNumber(cage_base())) {
      DCHECK_LE(kDoubleSize / kTaggedSize, raw_fields_count_in_object);
      raw_fields_count_in_object -= kDoubleSize / kTaggedSize;
      *boxed_double_fields_count_ += 1;
    } else if (host.IsSeqString(cage_base())) {
      int string_data = SeqString::cast(host).length(kAcquireLoad) *
                        (String::cast(host).IsOneByteRepresentation() ? 1 : 2) /
                        kTaggedSize;
      DCHECK_LE(string_data, raw_fields_count_in_object);
      raw_fields_count_in_object -= string_data;
      *string_data_count_ += string_data;
    }
    *raw_fields_count_ += raw_fields_count_in_object;
  }

  void VisitPointers(HeapObject host, ObjectSlot start,
                     ObjectSlot end) override {
    *tagged_fields_count_ += (end - start);
  }
  void VisitPointers(HeapObject host, MaybeObjectSlot start,
                     MaybeObjectSlot end) override {
    *tagged_fields_count_ += (end - start);
  }

  V8_INLINE void VisitCodePointer(HeapObject host,
                                  CodeObjectSlot slot) override {
    CHECK(V8_EXTERNAL_CODE_SPACE_BOOL);
    *tagged_fields_count_ += 1;
  }

  void VisitCodeTarget(Code host, RelocInfo* rinfo) override {
    // Code target is most likely encoded as a relative 32-bit offset and not
    // as a full tagged value, so there's nothing to count.
  }

  void VisitEmbeddedPointer(Code host, RelocInfo* rinfo) override {
    *tagged_fields_count_ += 1;
  }

  void VisitMapPointer(HeapObject host) override {
    // Just do nothing, but avoid the inherited UNREACHABLE implementation.
  }

 private:
  struct JSObjectFieldStats {
    JSObjectFieldStats() : embedded_fields_count_(0), smi_fields_count_(0) {}

    unsigned embedded_fields_count_ : kDescriptorIndexBitCount;
    unsigned smi_fields_count_ : kDescriptorIndexBitCount;
  };
  std::unordered_map<Map, JSObjectFieldStats, Object::Hasher>
      field_stats_cache_;

  JSObjectFieldStats GetInobjectFieldStats(Map map);

  size_t* const tagged_fields_count_;
  size_t* const embedder_fields_count_;
  size_t* const inobject_smi_fields_count_;
  size_t* const boxed_double_fields_count_;
  size_t* const string_data_count_;
  size_t* const raw_fields_count_;
};

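// Computes how many in-object fields of a map are embedder fields or
// Smi-represented fields; the result is memoized per map in
// field_stats_cache_, since many objects share a map.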
FieldStatsCollector::JSObjectFieldStats
FieldStatsCollector::GetInobjectFieldStats(Map map) {
  auto iter = field_stats_cache_.find(map);
  if (iter != field_stats_cache_.end()) {
    return iter->second;
  }
  // Iterate descriptor array and calculate stats.
  JSObjectFieldStats stats;
  stats.embedded_fields_count_ = JSObject::GetEmbedderFieldCount(map);
  if (!map.is_dictionary_map()) {
    DescriptorArray descriptors = map.instance_descriptors();
    for (InternalIndex descriptor : map.IterateOwnDescriptors()) {
      PropertyDetails details = descriptors.GetDetails(descriptor);
      if (details.location() == PropertyLocation::kField) {
        FieldIndex index = FieldIndex::ForDescriptor(map, descriptor);
        // Stop on first out-of-object field.
        if (!index.is_inobject()) break;
        if (details.representation().IsSmi()) {
          ++stats.smi_fields_count_;
        }
      }
    }
  }
  field_stats_cache_.insert(std::make_pair(map, stats));
  return stats;
}

void ObjectStats::ClearObjectStats(bool clear_last_time_stats) {
  memset(object_counts_, 0, sizeof(object_counts_));
  memset(object_sizes_, 0, sizeof(object_sizes_));
  memset(over_allocated_, 0, sizeof(over_allocated_));
  memset(size_histogram_, 0, sizeof(size_histogram_));
  memset(over_allocated_histogram_, 0, sizeof(over_allocated_histogram_));
  if (clear_last_time_stats) {
    memset(object_counts_last_time_, 0, sizeof(object_counts_last_time_));
    memset(object_sizes_last_time_, 0, sizeof(object_sizes_last_time_));
  }
  tagged_fields_count_ = 0;
  embedder_fields_count_ = 0;
  inobject_smi_fields_count_ = 0;
  boxed_double_fields_count_ = 0;
  string_data_count_ = 0;
  raw_fields_count_ = 0;
}

// Tell the compiler to never inline this: occasionally, the optimizer will
// decide to inline this and unroll the loop, making the compiled code more than
// 100KB larger.
V8_NOINLINE static void PrintJSONArray(size_t* array, const int len) {
  PrintF("[ ");
  for (int i = 0; i < len; i++) {
    PrintF("%zu", array[i]);
    if (i != (len - 1)) PrintF(", ");
  }
  PrintF(" ]");
}

V8_NOINLINE static void DumpJSONArray(std::stringstream& stream, size_t* array,
                                      const int len) {
  stream << PrintCollection(base::Vector<size_t>(array, len));
}

void ObjectStats::PrintKeyAndId(const char* key, int gc_count) {
  PrintF("\"isolate\": \"%p\", \"id\": %d, \"key\": \"%s\", ",
         reinterpret_cast<void*>(isolate()), gc_count, key);
}

void ObjectStats::PrintInstanceTypeJSON(const char* key, int gc_count,
                                        const char* name, int index) {
  PrintF("{ ");
  PrintKeyAndId(key, gc_count);
  PrintF("\"type\": \"instance_type_data\", ");
  PrintF("\"instance_type\": %d, ", index);
  PrintF("\"instance_type_name\": \"%s\", ", name);
  PrintF("\"overall\": %zu, ", object_sizes_[index]);
  PrintF("\"count\": %zu, ", object_counts_[index]);
  PrintF("\"over_allocated\": %zu, ", over_allocated_[index]);
  PrintF("\"histogram\": ");
  PrintJSONArray(size_histogram_[index], kNumberOfBuckets);
  PrintF(",");
  PrintF("\"over_allocated_histogram\": ");
  PrintJSONArray(over_allocated_histogram_[index], kNumberOfBuckets);
  PrintF(" }\n");
}

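// Emits one JSON object per line to stdout: a gc_descriptor line, a
// field_data line, a bucket_sizes line, and one instance_type_data line per
// (virtual) instance type. For example (values illustrative):
//   { "isolate": "0x...", "id": 4, "key": "live", "type": "gc_descriptor", "time": 1234.5 }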
void ObjectStats::PrintJSON(const char* key) {
  double time = isolate()->time_millis_since_init();
  int gc_count = heap()->gc_count();

  // gc_descriptor
  PrintF("{ ");
  PrintKeyAndId(key, gc_count);
  PrintF("\"type\": \"gc_descriptor\", \"time\": %f }\n", time);
  // field_data
  PrintF("{ ");
  PrintKeyAndId(key, gc_count);
  PrintF("\"type\": \"field_data\"");
  PrintF(", \"tagged_fields\": %zu", tagged_fields_count_ * kTaggedSize);
  PrintF(", \"embedder_fields\": %zu",
         embedder_fields_count_ * kEmbedderDataSlotSize);
  PrintF(", \"inobject_smi_fields\": %zu",
         inobject_smi_fields_count_ * kTaggedSize);
  PrintF(", \"boxed_double_fields\": %zu",
         boxed_double_fields_count_ * kDoubleSize);
  PrintF(", \"string_data\": %zu", string_data_count_ * kTaggedSize);
  PrintF(", \"other_raw_fields\": %zu", raw_fields_count_ * kSystemPointerSize);
  PrintF(" }\n");
  // bucket_sizes
  PrintF("{ ");
  PrintKeyAndId(key, gc_count);
  PrintF("\"type\": \"bucket_sizes\", \"sizes\": [ ");
  for (int i = 0; i < kNumberOfBuckets; i++) {
    PrintF("%d", 1 << (kFirstBucketShift + i));
    if (i != (kNumberOfBuckets - 1)) PrintF(", ");
  }
  PrintF(" ] }\n");

#define INSTANCE_TYPE_WRAPPER(name) \
  PrintInstanceTypeJSON(key, gc_count, #name, name);

#define VIRTUAL_INSTANCE_TYPE_WRAPPER(name) \
  PrintInstanceTypeJSON(key, gc_count, #name, FIRST_VIRTUAL_TYPE + name);

  INSTANCE_TYPE_LIST(INSTANCE_TYPE_WRAPPER)
  VIRTUAL_INSTANCE_TYPE_LIST(VIRTUAL_INSTANCE_TYPE_WRAPPER)

#undef INSTANCE_TYPE_WRAPPER
#undef VIRTUAL_INSTANCE_TYPE_WRAPPER
}

void ObjectStats::DumpInstanceTypeData(std::stringstream& stream,
                                       const char* name, int index) {
  stream << "\"" << name << "\":{";
  stream << "\"type\":" << static_cast<int>(index) << ",";
  stream << "\"overall\":" << object_sizes_[index] << ",";
  stream << "\"count\":" << object_counts_[index] << ",";
  stream << "\"over_allocated\":" << over_allocated_[index] << ",";
  stream << "\"histogram\":";
  DumpJSONArray(stream, size_histogram_[index], kNumberOfBuckets);
  stream << ",\"over_allocated_histogram\":";
  DumpJSONArray(stream, over_allocated_histogram_[index], kNumberOfBuckets);
  stream << "},";
}

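// Writes the same data as PrintJSON, but as a single JSON document into
// |stream|.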
void ObjectStats::Dump(std::stringstream& stream) {
  double time = isolate()->time_millis_since_init();
  int gc_count = heap()->gc_count();

  stream << "{";
  stream << "\"isolate\":\"" << reinterpret_cast<void*>(isolate()) << "\",";
  stream << "\"id\":" << gc_count << ",";
  stream << "\"time\":" << time << ",";

  // field_data
  stream << "\"field_data\":{";
  stream << "\"tagged_fields\":" << (tagged_fields_count_ * kTaggedSize);
  stream << ",\"embedder_fields\":"
         << (embedder_fields_count_ * kEmbedderDataSlotSize);
  stream << ",\"inobject_smi_fields\":"
         << (inobject_smi_fields_count_ * kTaggedSize);
  stream << ",\"boxed_double_fields\":"
         << (boxed_double_fields_count_ * kDoubleSize);
  stream << ",\"string_data\":" << (string_data_count_ * kTaggedSize);
  stream << ",\"other_raw_fields\":"
         << (raw_fields_count_ * kSystemPointerSize);
  stream << "},";

  stream << "\"bucket_sizes\":[";
  for (int i = 0; i < kNumberOfBuckets; i++) {
    stream << (1 << (kFirstBucketShift + i));
    if (i != (kNumberOfBuckets - 1)) stream << ",";
  }
  stream << "],";
  stream << "\"type_data\":{";

#define INSTANCE_TYPE_WRAPPER(name) DumpInstanceTypeData(stream, #name, name);

#define VIRTUAL_INSTANCE_TYPE_WRAPPER(name) \
  DumpInstanceTypeData(stream, #name, FIRST_VIRTUAL_TYPE + name);

  INSTANCE_TYPE_LIST(INSTANCE_TYPE_WRAPPER)
  VIRTUAL_INSTANCE_TYPE_LIST(VIRTUAL_INSTANCE_TYPE_WRAPPER)
  stream << "\"END\":{}}}";

#undef INSTANCE_TYPE_WRAPPER
#undef VIRTUAL_INSTANCE_TYPE_WRAPPER
}

void ObjectStats::CheckpointObjectStats() {
  base::MutexGuard lock_guard(object_stats_mutex.Pointer());
  MemCopy(object_counts_last_time_, object_counts_, sizeof(object_counts_));
  MemCopy(object_sizes_last_time_, object_sizes_, sizeof(object_sizes_));
  ClearObjectStats();
}

namespace {

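// Returns floor(log2(size)), i.e. the index of the highest set bit.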
int Log2ForSize(size_t size) {
  DCHECK_GT(size, 0);
  return kSizetSize * 8 - 1 - base::bits::CountLeadingZeros(size);
}

}  // namespace

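// Buckets object sizes by power of two. As an illustration, assuming
// kFirstBucketShift == 5 (see object-stats.h for the actual constants),
// sizes below 32 bytes land in bucket 0, a 48-byte object has
// Log2ForSize(48) == 5 and lands in bucket 1, and so on up to
// kLastValueBucketIndex.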
int ObjectStats::HistogramIndexFromSize(size_t size) {
  if (size == 0) return 0;
  return std::min({std::max(Log2ForSize(size) + 1 - kFirstBucketShift, 0),
                   kLastValueBucketIndex});
}

void ObjectStats::RecordObjectStats(InstanceType type, size_t size,
                                    size_t over_allocated) {
  DCHECK_LE(type, LAST_TYPE);
  object_counts_[type]++;
  object_sizes_[type] += size;
  size_histogram_[type][HistogramIndexFromSize(size)]++;
  over_allocated_[type] += over_allocated;
  over_allocated_histogram_[type][HistogramIndexFromSize(size)]++;
}

void ObjectStats::RecordVirtualObjectStats(VirtualInstanceType type,
                                           size_t size, size_t over_allocated) {
  DCHECK_LE(type, LAST_VIRTUAL_TYPE);
  object_counts_[FIRST_VIRTUAL_TYPE + type]++;
  object_sizes_[FIRST_VIRTUAL_TYPE + type] += size;
  size_histogram_[FIRST_VIRTUAL_TYPE + type][HistogramIndexFromSize(size)]++;
  over_allocated_[FIRST_VIRTUAL_TYPE + type] += over_allocated;
  over_allocated_histogram_[FIRST_VIRTUAL_TYPE + type]
                           [HistogramIndexFromSize(size)]++;
}

Isolate* ObjectStats::isolate() { return heap()->isolate(); }

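// Heap-walking helper that produces the actual statistics. Collection runs in
// two phases: kPhase1 records "virtual" object stats for component parts
// (property arrays, constant pools, boilerplates, ...) under their own
// virtual instance types, and kPhase2 records regular per-instance-type stats
// for everything not already claimed as a virtual object in kPhase1.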
class ObjectStatsCollectorImpl {
 public:
  enum Phase {
    kPhase1,
    kPhase2,
  };
  static const int kNumberOfPhases = kPhase2 + 1;

  ObjectStatsCollectorImpl(Heap* heap, ObjectStats* stats);

  void CollectGlobalStatistics();

  enum class CollectFieldStats { kNo, kYes };
  void CollectStatistics(HeapObject obj, Phase phase,
                         CollectFieldStats collect_field_stats);

 private:
  enum CowMode {
    kCheckCow,
    kIgnoreCow,
  };

  Isolate* isolate() { return heap_->isolate(); }

  bool RecordVirtualObjectStats(HeapObject parent, HeapObject obj,
                                ObjectStats::VirtualInstanceType type,
                                size_t size, size_t over_allocated,
                                CowMode check_cow_array = kCheckCow);
  void RecordExternalResourceStats(Address resource,
                                   ObjectStats::VirtualInstanceType type,
                                   size_t size);
  // Gets size from |obj| and assumes no over-allocation.
  bool RecordSimpleVirtualObjectStats(HeapObject parent, HeapObject obj,
                                      ObjectStats::VirtualInstanceType type);
  // For HashTable it is possible to compute over-allocated memory.
  template <typename Derived, typename Shape>
  void RecordHashTableVirtualObjectStats(HeapObject parent,
                                         HashTable<Derived, Shape> hash_table,
                                         ObjectStats::VirtualInstanceType type);

  bool SameLiveness(HeapObject obj1, HeapObject obj2);
  bool CanRecordFixedArray(FixedArrayBase array);
  bool IsCowArray(FixedArrayBase array);

  // Blocklist for objects that should not be recorded using
  // VirtualObjectStats and RecordSimpleVirtualObjectStats. For recording
  // those objects, dispatch to the low-level ObjectStats::RecordObjectStats
  // manually.
  bool ShouldRecordObject(HeapObject object, CowMode check_cow_array);

  void RecordObjectStats(
      HeapObject obj, InstanceType type, size_t size,
      size_t over_allocated = ObjectStats::kNoOverAllocation);

  // Specific recursion into constant pool or embedded code objects. Records
  // FixedArrays and Tuple2.
  void RecordVirtualObjectsForConstantPoolOrEmbeddedObjects(
      HeapObject parent, HeapObject object,
      ObjectStats::VirtualInstanceType type);

  // Details.
  void RecordVirtualAllocationSiteDetails(AllocationSite site);
  void RecordVirtualBytecodeArrayDetails(BytecodeArray bytecode);
  void RecordVirtualCodeDetails(Code code);
  void RecordVirtualContext(Context context);
  void RecordVirtualFeedbackVectorDetails(FeedbackVector vector);
  void RecordVirtualFixedArrayDetails(FixedArray array);
  void RecordVirtualFunctionTemplateInfoDetails(FunctionTemplateInfo fti);
  void RecordVirtualJSGlobalObjectDetails(JSGlobalObject object);
  void RecordVirtualJSObjectDetails(JSObject object);
  void RecordVirtualMapDetails(Map map);
  void RecordVirtualScriptDetails(Script script);
  void RecordVirtualExternalStringDetails(ExternalString string);
  void RecordVirtualSharedFunctionInfoDetails(SharedFunctionInfo info);

  void RecordVirtualArrayBoilerplateDescription(
      ArrayBoilerplateDescription description);

  PtrComprCageBase cage_base() const {
    return field_stats_collector_.cage_base();
  }

  Heap* heap_;
  ObjectStats* stats_;
  MarkCompactCollector::NonAtomicMarkingState* marking_state_;
  std::unordered_set<HeapObject, Object::Hasher> virtual_objects_;
  std::unordered_set<Address> external_resources_;
  FieldStatsCollector field_stats_collector_;
};

ObjectStatsCollectorImpl::ObjectStatsCollectorImpl(Heap* heap,
                                                   ObjectStats* stats)
    : heap_(heap),
      stats_(stats),
      marking_state_(
          heap->mark_compact_collector()->non_atomic_marking_state()),
      field_stats_collector_(
          heap_, &stats->tagged_fields_count_, &stats->embedder_fields_count_,
          &stats->inobject_smi_fields_count_,
          &stats->boxed_double_fields_count_, &stats->string_data_count_,
          &stats->raw_fields_count_) {}

bool ObjectStatsCollectorImpl::ShouldRecordObject(HeapObject obj,
                                                  CowMode check_cow_array) {
  if (obj.IsFixedArrayExact()) {
    FixedArray fixed_array = FixedArray::cast(obj);
    bool cow_check = check_cow_array == kIgnoreCow || !IsCowArray(fixed_array);
    return CanRecordFixedArray(fixed_array) && cow_check;
  }
  if (obj == ReadOnlyRoots(heap_).empty_property_array()) return false;
  return true;
}

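// Over-allocation for a hash table is the memory behind unused entries: for
// example, a table with capacity 16 holding 5 live and 1 deleted entry
// over-allocates 10 * kEntrySize * kTaggedSize bytes.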
template <typename Derived, typename Shape>
void ObjectStatsCollectorImpl::RecordHashTableVirtualObjectStats(
    HeapObject parent, HashTable<Derived, Shape> hash_table,
    ObjectStats::VirtualInstanceType type) {
  size_t over_allocated =
      (hash_table.Capacity() -
       (hash_table.NumberOfElements() + hash_table.NumberOfDeletedElements())) *
      HashTable<Derived, Shape>::kEntrySize * kTaggedSize;
  RecordVirtualObjectStats(parent, hash_table, type, hash_table.Size(),
                           over_allocated);
}

bool ObjectStatsCollectorImpl::RecordSimpleVirtualObjectStats(
    HeapObject parent, HeapObject obj, ObjectStats::VirtualInstanceType type) {
  return RecordVirtualObjectStats(parent, obj, type, obj.Size(cage_base()),
                                  ObjectStats::kNoOverAllocation, kCheckCow);
}

bool ObjectStatsCollectorImpl::RecordVirtualObjectStats(
    HeapObject parent, HeapObject obj, ObjectStats::VirtualInstanceType type,
    size_t size, size_t over_allocated, CowMode check_cow_array) {
  CHECK_LT(over_allocated, size);
  if (!SameLiveness(parent, obj) || !ShouldRecordObject(obj, check_cow_array)) {
    return false;
  }

  if (virtual_objects_.find(obj) == virtual_objects_.end()) {
    virtual_objects_.insert(obj);
    stats_->RecordVirtualObjectStats(type, size, over_allocated);
    return true;
  }
  return false;
}

void ObjectStatsCollectorImpl::RecordExternalResourceStats(
    Address resource, ObjectStats::VirtualInstanceType type, size_t size) {
  if (external_resources_.find(resource) == external_resources_.end()) {
    external_resources_.insert(resource);
    stats_->RecordVirtualObjectStats(type, size, 0);
  }
}

void ObjectStatsCollectorImpl::RecordVirtualAllocationSiteDetails(
    AllocationSite site) {
  if (!site.PointsToLiteral()) return;
  JSObject boilerplate = site.boilerplate();
  if (boilerplate.IsJSArray()) {
    RecordSimpleVirtualObjectStats(site, boilerplate,
                                   ObjectStats::JS_ARRAY_BOILERPLATE_TYPE);
    // Array boilerplates cannot have properties.
  } else {
    RecordVirtualObjectStats(
        site, boilerplate, ObjectStats::JS_OBJECT_BOILERPLATE_TYPE,
        boilerplate.Size(), ObjectStats::kNoOverAllocation);
    if (boilerplate.HasFastProperties()) {
      // We'll mis-classify the empty_property_array here. Given that there
      // is a single instance, this is negligible.
      PropertyArray properties = boilerplate.property_array();
      RecordSimpleVirtualObjectStats(
          site, properties, ObjectStats::BOILERPLATE_PROPERTY_ARRAY_TYPE);
    } else {
      NameDictionary properties = boilerplate.property_dictionary();
      RecordSimpleVirtualObjectStats(
          site, properties, ObjectStats::BOILERPLATE_PROPERTY_DICTIONARY_TYPE);
    }
  }
  FixedArrayBase elements = boilerplate.elements();
  RecordSimpleVirtualObjectStats(site, elements,
                                 ObjectStats::BOILERPLATE_ELEMENTS_TYPE);
}

void ObjectStatsCollectorImpl::RecordVirtualFunctionTemplateInfoDetails(
    FunctionTemplateInfo fti) {
  // named_property_handler and indexed_property_handler are recorded as
  // INTERCEPTOR_INFO_TYPE.
  HeapObject call_code = fti.call_code(kAcquireLoad);
  if (!call_code.IsUndefined(isolate())) {
    RecordSimpleVirtualObjectStats(
        fti, CallHandlerInfo::cast(call_code),
        ObjectStats::FUNCTION_TEMPLATE_INFO_ENTRIES_TYPE);
  }
  if (!fti.GetInstanceCallHandler().IsUndefined(isolate())) {
    RecordSimpleVirtualObjectStats(
        fti, CallHandlerInfo::cast(fti.GetInstanceCallHandler()),
        ObjectStats::FUNCTION_TEMPLATE_INFO_ENTRIES_TYPE);
  }
}

void ObjectStatsCollectorImpl::RecordVirtualJSGlobalObjectDetails(
    JSGlobalObject object) {
  // Properties.
  GlobalDictionary properties = object.global_dictionary(kAcquireLoad);
  RecordHashTableVirtualObjectStats(object, properties,
                                    ObjectStats::GLOBAL_PROPERTIES_TYPE);
  // Elements.
  FixedArrayBase elements = object.elements();
  RecordSimpleVirtualObjectStats(object, elements,
                                 ObjectStats::GLOBAL_ELEMENTS_TYPE);
}

void ObjectStatsCollectorImpl::RecordVirtualJSObjectDetails(JSObject object) {
  // JSGlobalObject is recorded separately.
  if (object.IsJSGlobalObject()) return;

  // Uncompiled JSFunction has a separate type.
  if (object.IsJSFunction() && !JSFunction::cast(object).is_compiled()) {
    RecordSimpleVirtualObjectStats(HeapObject(), object,
                                   ObjectStats::JS_UNCOMPILED_FUNCTION_TYPE);
  }

  // Properties.
  if (object.HasFastProperties()) {
    PropertyArray properties = object.property_array();
    if (properties != ReadOnlyRoots(heap_).empty_property_array()) {
      size_t over_allocated = object.map().UnusedPropertyFields() * kTaggedSize;
      RecordVirtualObjectStats(object, properties,
                               object.map().is_prototype_map()
                                   ? ObjectStats::PROTOTYPE_PROPERTY_ARRAY_TYPE
                                   : ObjectStats::OBJECT_PROPERTY_ARRAY_TYPE,
                               properties.Size(), over_allocated);
    }
  } else {
    NameDictionary properties = object.property_dictionary();
    RecordHashTableVirtualObjectStats(
        object, properties,
        object.map().is_prototype_map()
            ? ObjectStats::PROTOTYPE_PROPERTY_DICTIONARY_TYPE
            : ObjectStats::OBJECT_PROPERTY_DICTIONARY_TYPE);
  }

  // Elements.
  FixedArrayBase elements = object.elements();
  if (object.HasDictionaryElements()) {
    RecordHashTableVirtualObjectStats(
        object, NumberDictionary::cast(elements),
        object.IsJSArray() ? ObjectStats::ARRAY_DICTIONARY_ELEMENTS_TYPE
                           : ObjectStats::OBJECT_DICTIONARY_ELEMENTS_TYPE);
  } else if (object.IsJSArray()) {
    if (elements != ReadOnlyRoots(heap_).empty_fixed_array()) {
      size_t element_size =
          (elements.Size() - FixedArrayBase::kHeaderSize) / elements.length();
      uint32_t length = JSArray::cast(object).length().Number();
      size_t over_allocated = (elements.length() - length) * element_size;
      RecordVirtualObjectStats(object, elements,
                               ObjectStats::ARRAY_ELEMENTS_TYPE,
                               elements.Size(), over_allocated);
    }
  } else {
    RecordSimpleVirtualObjectStats(object, elements,
                                   ObjectStats::OBJECT_ELEMENTS_TYPE);
  }

  // JSCollections.
  if (object.IsJSCollection()) {
    // TODO(bmeurer): Properly compute over-allocation here.
    RecordSimpleVirtualObjectStats(
        object, FixedArray::cast(JSCollection::cast(object).table()),
        ObjectStats::JS_COLLECTION_TABLE_TYPE);
  }
}

static ObjectStats::VirtualInstanceType GetFeedbackSlotType(
    MaybeObject maybe_obj, FeedbackSlotKind kind, Isolate* isolate) {
  if (maybe_obj->IsCleared())
    return ObjectStats::FEEDBACK_VECTOR_SLOT_OTHER_TYPE;
  Object obj = maybe_obj->GetHeapObjectOrSmi();
  switch (kind) {
    case FeedbackSlotKind::kCall:
      if (obj == *isolate->factory()->uninitialized_symbol()) {
        return ObjectStats::FEEDBACK_VECTOR_SLOT_CALL_UNUSED_TYPE;
      }
      return ObjectStats::FEEDBACK_VECTOR_SLOT_CALL_TYPE;

    case FeedbackSlotKind::kLoadProperty:
    case FeedbackSlotKind::kLoadGlobalInsideTypeof:
    case FeedbackSlotKind::kLoadGlobalNotInsideTypeof:
    case FeedbackSlotKind::kLoadKeyed:
    case FeedbackSlotKind::kHasKeyed:
      if (obj == *isolate->factory()->uninitialized_symbol()) {
        return ObjectStats::FEEDBACK_VECTOR_SLOT_LOAD_UNUSED_TYPE;
      }
      return ObjectStats::FEEDBACK_VECTOR_SLOT_LOAD_TYPE;

    case FeedbackSlotKind::kSetNamedSloppy:
    case FeedbackSlotKind::kSetNamedStrict:
    case FeedbackSlotKind::kDefineNamedOwn:
    case FeedbackSlotKind::kStoreGlobalSloppy:
    case FeedbackSlotKind::kStoreGlobalStrict:
    case FeedbackSlotKind::kSetKeyedSloppy:
    case FeedbackSlotKind::kSetKeyedStrict:
      if (obj == *isolate->factory()->uninitialized_symbol()) {
        return ObjectStats::FEEDBACK_VECTOR_SLOT_STORE_UNUSED_TYPE;
      }
      return ObjectStats::FEEDBACK_VECTOR_SLOT_STORE_TYPE;

    case FeedbackSlotKind::kBinaryOp:
    case FeedbackSlotKind::kCompareOp:
      return ObjectStats::FEEDBACK_VECTOR_SLOT_ENUM_TYPE;

    default:
      return ObjectStats::FEEDBACK_VECTOR_SLOT_OTHER_TYPE;
  }
}

void ObjectStatsCollectorImpl::RecordVirtualFeedbackVectorDetails(
    FeedbackVector vector) {
  if (virtual_objects_.find(vector) != virtual_objects_.end()) return;
  // Manually insert the feedback vector into the virtual object list, since
  // we're logging its component parts separately.
  virtual_objects_.insert(vector);

  size_t calculated_size = 0;

  // Log the feedback vector's header (fixed fields).
  size_t header_size = vector.slots_start().address() - vector.address();
  stats_->RecordVirtualObjectStats(ObjectStats::FEEDBACK_VECTOR_HEADER_TYPE,
                                   header_size, ObjectStats::kNoOverAllocation);
  calculated_size += header_size;

  // Iterate over the feedback slots and log each one.
  if (!vector.shared_function_info().HasFeedbackMetadata()) return;

  FeedbackMetadataIterator it(vector.metadata());
  while (it.HasNext()) {
    FeedbackSlot slot = it.Next();
    // Log the entry (or entries) taken up by this slot.
    size_t slot_size = it.entry_size() * kTaggedSize;
    stats_->RecordVirtualObjectStats(
        GetFeedbackSlotType(vector.Get(slot), it.kind(), heap_->isolate()),
        slot_size, ObjectStats::kNoOverAllocation);
    calculated_size += slot_size;

    // Log the monomorphic/polymorphic helper objects that this slot owns.
    for (int i = 0; i < it.entry_size(); i++) {
      MaybeObject raw_object = vector.Get(slot.WithOffset(i));
      HeapObject object;
      if (raw_object->GetHeapObject(&object)) {
        if (object.IsCell(cage_base()) ||
            object.IsWeakFixedArray(cage_base())) {
          RecordSimpleVirtualObjectStats(
              vector, object, ObjectStats::FEEDBACK_VECTOR_ENTRY_TYPE);
        }
      }
    }
  }

  CHECK_EQ(calculated_size, vector.Size());
}

void ObjectStatsCollectorImpl::RecordVirtualFixedArrayDetails(
    FixedArray array) {
  if (IsCowArray(array)) {
    RecordVirtualObjectStats(HeapObject(), array, ObjectStats::COW_ARRAY_TYPE,
                             array.Size(), ObjectStats::kNoOverAllocation,
                             kIgnoreCow);
  }
}

void ObjectStatsCollectorImpl::CollectStatistics(
    HeapObject obj, Phase phase, CollectFieldStats collect_field_stats) {
  DisallowGarbageCollection no_gc;
  Map map = obj.map(cage_base());
  InstanceType instance_type = map.instance_type();
  switch (phase) {
    case kPhase1:
      if (InstanceTypeChecker::IsFeedbackVector(instance_type)) {
        RecordVirtualFeedbackVectorDetails(FeedbackVector::cast(obj));
      } else if (InstanceTypeChecker::IsMap(instance_type)) {
        RecordVirtualMapDetails(Map::cast(obj));
      } else if (InstanceTypeChecker::IsBytecodeArray(instance_type)) {
        RecordVirtualBytecodeArrayDetails(BytecodeArray::cast(obj));
      } else if (InstanceTypeChecker::IsCode(instance_type)) {
        RecordVirtualCodeDetails(Code::cast(obj));
      } else if (InstanceTypeChecker::IsFunctionTemplateInfo(instance_type)) {
        RecordVirtualFunctionTemplateInfoDetails(
            FunctionTemplateInfo::cast(obj));
      } else if (InstanceTypeChecker::IsJSGlobalObject(instance_type)) {
        RecordVirtualJSGlobalObjectDetails(JSGlobalObject::cast(obj));
      } else if (InstanceTypeChecker::IsJSObject(instance_type)) {
        // This phase needs to come after RecordVirtualAllocationSiteDetails
        // to properly split among boilerplates.
        RecordVirtualJSObjectDetails(JSObject::cast(obj));
      } else if (InstanceTypeChecker::IsSharedFunctionInfo(instance_type)) {
        RecordVirtualSharedFunctionInfoDetails(SharedFunctionInfo::cast(obj));
      } else if (InstanceTypeChecker::IsContext(instance_type)) {
        RecordVirtualContext(Context::cast(obj));
      } else if (InstanceTypeChecker::IsScript(instance_type)) {
        RecordVirtualScriptDetails(Script::cast(obj));
      } else if (InstanceTypeChecker::IsArrayBoilerplateDescription(
                     instance_type)) {
        RecordVirtualArrayBoilerplateDescription(
            ArrayBoilerplateDescription::cast(obj));
      } else if (InstanceTypeChecker::IsFixedArrayExact(instance_type)) {
        // Has to go last as it triggers too eagerly.
        RecordVirtualFixedArrayDetails(FixedArray::cast(obj));
      }
      break;
    case kPhase2:
      if (InstanceTypeChecker::IsExternalString(instance_type)) {
        // This has to be in Phase2 to avoid conflicting with recording Script
        // sources. We still want to run RecordObjectStats afterwards, though.
        RecordVirtualExternalStringDetails(ExternalString::cast(obj));
      }
      size_t over_allocated = ObjectStats::kNoOverAllocation;
      if (InstanceTypeChecker::IsJSObject(instance_type)) {
        over_allocated = map.instance_size() - map.UsedInstanceSize();
      }
      RecordObjectStats(obj, instance_type, obj.Size(cage_base()),
                        over_allocated);
      if (collect_field_stats == CollectFieldStats::kYes) {
        field_stats_collector_.RecordStats(obj);
      }
      break;
  }
}

void ObjectStatsCollectorImpl::CollectGlobalStatistics() {
  // Iterate boilerplates first to disambiguate them from regular JS objects.
  Object list = heap_->allocation_sites_list();
  while (list.IsAllocationSite(cage_base())) {
    AllocationSite site = AllocationSite::cast(list);
    RecordVirtualAllocationSiteDetails(site);
    list = site.weak_next();
  }

  // FixedArray.
  RecordSimpleVirtualObjectStats(HeapObject(), heap_->serialized_objects(),
                                 ObjectStats::SERIALIZED_OBJECTS_TYPE);
  RecordSimpleVirtualObjectStats(HeapObject(), heap_->number_string_cache(),
                                 ObjectStats::NUMBER_STRING_CACHE_TYPE);
  RecordSimpleVirtualObjectStats(
      HeapObject(), heap_->single_character_string_cache(),
      ObjectStats::SINGLE_CHARACTER_STRING_CACHE_TYPE);
  RecordSimpleVirtualObjectStats(HeapObject(), heap_->string_split_cache(),
                                 ObjectStats::STRING_SPLIT_CACHE_TYPE);
  RecordSimpleVirtualObjectStats(HeapObject(), heap_->regexp_multiple_cache(),
                                 ObjectStats::REGEXP_MULTIPLE_CACHE_TYPE);

  // WeakArrayList.
  RecordSimpleVirtualObjectStats(HeapObject(),
                                 WeakArrayList::cast(heap_->script_list()),
                                 ObjectStats::SCRIPT_LIST_TYPE);
}

void ObjectStatsCollectorImpl::RecordObjectStats(HeapObject obj,
                                                 InstanceType type, size_t size,
                                                 size_t over_allocated) {
  if (virtual_objects_.find(obj) == virtual_objects_.end()) {
    stats_->RecordObjectStats(type, size, over_allocated);
  }
}

bool ObjectStatsCollectorImpl::CanRecordFixedArray(FixedArrayBase array) {
  ReadOnlyRoots roots(heap_);
  return array != roots.empty_fixed_array() &&
         array != roots.empty_slow_element_dictionary() &&
         array != roots.empty_property_dictionary();
}

bool ObjectStatsCollectorImpl::IsCowArray(FixedArrayBase array) {
  return array.map(cage_base()) == ReadOnlyRoots(heap_).fixed_cow_array_map();
}

bool ObjectStatsCollectorImpl::SameLiveness(HeapObject obj1, HeapObject obj2) {
  return obj1.is_null() || obj2.is_null() ||
         marking_state_->Color(obj1) == marking_state_->Color(obj2);
}

void ObjectStatsCollectorImpl::RecordVirtualMapDetails(Map map) {
  // TODO(mlippautz): map->dependent_code(): DEPENDENT_CODE_TYPE.

  // For Map we want to distinguish between various different states
  // to get a better picture of what's going on in MapSpace. This
  // method computes the virtual instance type to use for a given map,
  // using MAP_TYPE for regular maps that aren't special in any way.
  if (map.is_prototype_map()) {
    if (map.is_dictionary_map()) {
      RecordSimpleVirtualObjectStats(
          HeapObject(), map, ObjectStats::MAP_PROTOTYPE_DICTIONARY_TYPE);
    } else if (map.is_abandoned_prototype_map()) {
      RecordSimpleVirtualObjectStats(HeapObject(), map,
                                     ObjectStats::MAP_ABANDONED_PROTOTYPE_TYPE);
    } else {
      RecordSimpleVirtualObjectStats(HeapObject(), map,
                                     ObjectStats::MAP_PROTOTYPE_TYPE);
    }
  } else if (map.is_deprecated()) {
    RecordSimpleVirtualObjectStats(HeapObject(), map,
                                   ObjectStats::MAP_DEPRECATED_TYPE);
  } else if (map.is_dictionary_map()) {
    RecordSimpleVirtualObjectStats(HeapObject(), map,
                                   ObjectStats::MAP_DICTIONARY_TYPE);
  } else if (map.is_stable()) {
    RecordSimpleVirtualObjectStats(HeapObject(), map,
                                   ObjectStats::MAP_STABLE_TYPE);
  } else {
    // This will be logged as MAP_TYPE in Phase2.
  }

  DescriptorArray array = map.instance_descriptors(cage_base());
  if (map.owns_descriptors() &&
      array != ReadOnlyRoots(heap_).empty_descriptor_array()) {
    // Generally DescriptorArrays have their own instance type already
    // (DESCRIPTOR_ARRAY_TYPE), but we'd like to be able to tell which
    // of those are for (abandoned) prototypes, and which of those are
    // owned by deprecated maps.
    if (map.is_prototype_map()) {
      RecordSimpleVirtualObjectStats(
          map, array, ObjectStats::PROTOTYPE_DESCRIPTOR_ARRAY_TYPE);
    } else if (map.is_deprecated()) {
      RecordSimpleVirtualObjectStats(
          map, array, ObjectStats::DEPRECATED_DESCRIPTOR_ARRAY_TYPE);
    }

    EnumCache enum_cache = array.enum_cache();
    RecordSimpleVirtualObjectStats(array, enum_cache.keys(),
                                   ObjectStats::ENUM_KEYS_CACHE_TYPE);
    RecordSimpleVirtualObjectStats(array, enum_cache.indices(),
                                   ObjectStats::ENUM_INDICES_CACHE_TYPE);
  }

  if (map.is_prototype_map()) {
    if (map.prototype_info().IsPrototypeInfo(cage_base())) {
      PrototypeInfo info = PrototypeInfo::cast(map.prototype_info());
      Object users = info.prototype_users();
      if (users.IsWeakFixedArray(cage_base())) {
        RecordSimpleVirtualObjectStats(map, WeakArrayList::cast(users),
                                       ObjectStats::PROTOTYPE_USERS_TYPE);
      }
    }
  }
}

void ObjectStatsCollectorImpl::RecordVirtualScriptDetails(Script script) {
  RecordSimpleVirtualObjectStats(
      script, script.shared_function_infos(),
      ObjectStats::SCRIPT_SHARED_FUNCTION_INFOS_TYPE);

  // Log the size of external source code.
  Object raw_source = script.source();
  if (raw_source.IsExternalString(cage_base())) {
    // The contents of external strings aren't on the heap, so we have to
    // record them manually. The on-heap String object is recorded
    // independently in the normal pass.
    ExternalString string = ExternalString::cast(raw_source);
    Address resource = string.resource_as_address();
    size_t off_heap_size = string.ExternalPayloadSize();
    RecordExternalResourceStats(
        resource,
        string.IsOneByteRepresentation()
            ? ObjectStats::SCRIPT_SOURCE_EXTERNAL_ONE_BYTE_TYPE
            : ObjectStats::SCRIPT_SOURCE_EXTERNAL_TWO_BYTE_TYPE,
        off_heap_size);
  } else if (raw_source.IsString(cage_base())) {
    String source = String::cast(raw_source);
    RecordSimpleVirtualObjectStats(
        script, source,
        source.IsOneByteRepresentation()
            ? ObjectStats::SCRIPT_SOURCE_NON_EXTERNAL_ONE_BYTE_TYPE
            : ObjectStats::SCRIPT_SOURCE_NON_EXTERNAL_TWO_BYTE_TYPE);
  }
}

void ObjectStatsCollectorImpl::RecordVirtualExternalStringDetails(
    ExternalString string) {
  // Track the external string resource size in a separate category.

  Address resource = string.resource_as_address();
  size_t off_heap_size = string.ExternalPayloadSize();
  RecordExternalResourceStats(
      resource,
      string.IsOneByteRepresentation(cage_base())
          ? ObjectStats::STRING_EXTERNAL_RESOURCE_ONE_BYTE_TYPE
          : ObjectStats::STRING_EXTERNAL_RESOURCE_TWO_BYTE_TYPE,
      off_heap_size);
}

void ObjectStatsCollectorImpl::RecordVirtualSharedFunctionInfoDetails(
    SharedFunctionInfo info) {
  // Uncompiled SharedFunctionInfo gets its own category.
  if (!info.is_compiled()) {
    RecordSimpleVirtualObjectStats(
        HeapObject(), info, ObjectStats::UNCOMPILED_SHARED_FUNCTION_INFO_TYPE);
  }
}

void ObjectStatsCollectorImpl::RecordVirtualArrayBoilerplateDescription(
    ArrayBoilerplateDescription description) {
  RecordVirtualObjectsForConstantPoolOrEmbeddedObjects(
      description, description.constant_elements(),
      ObjectStats::ARRAY_BOILERPLATE_DESCRIPTION_ELEMENTS_TYPE);
}

void ObjectStatsCollectorImpl::
    RecordVirtualObjectsForConstantPoolOrEmbeddedObjects(
        HeapObject parent, HeapObject object,
        ObjectStats::VirtualInstanceType type) {
  if (!RecordSimpleVirtualObjectStats(parent, object, type)) return;
  if (object.IsFixedArrayExact(cage_base())) {
    FixedArray array = FixedArray::cast(object);
    for (int i = 0; i < array.length(); i++) {
      Object entry = array.get(i);
      if (!entry.IsHeapObject()) continue;
      RecordVirtualObjectsForConstantPoolOrEmbeddedObjects(
          array, HeapObject::cast(entry), type);
    }
  }
}

void ObjectStatsCollectorImpl::RecordVirtualBytecodeArrayDetails(
    BytecodeArray bytecode) {
  RecordSimpleVirtualObjectStats(
      bytecode, bytecode.constant_pool(),
      ObjectStats::BYTECODE_ARRAY_CONSTANT_POOL_TYPE);
  // FixedArrays in the constant pool are used for holding descriptor
  // information. They are shared with optimized code.
  FixedArray constant_pool = FixedArray::cast(bytecode.constant_pool());
  for (int i = 0; i < constant_pool.length(); i++) {
    Object entry = constant_pool.get(i);
    if (entry.IsFixedArrayExact(cage_base())) {
      RecordVirtualObjectsForConstantPoolOrEmbeddedObjects(
          constant_pool, HeapObject::cast(entry),
          ObjectStats::EMBEDDED_OBJECT_TYPE);
    }
  }
  RecordSimpleVirtualObjectStats(
      bytecode, bytecode.handler_table(),
      ObjectStats::BYTECODE_ARRAY_HANDLER_TABLE_TYPE);
  if (bytecode.HasSourcePositionTable()) {
    RecordSimpleVirtualObjectStats(bytecode, bytecode.SourcePositionTable(),
                                   ObjectStats::SOURCE_POSITION_TABLE_TYPE);
  }
}

namespace {

ObjectStats::VirtualInstanceType CodeKindToVirtualInstanceType(CodeKind kind) {
  switch (kind) {
#define CODE_KIND_CASE(type) \
  case CodeKind::type:       \
    return ObjectStats::type;
    CODE_KIND_LIST(CODE_KIND_CASE)
#undef CODE_KIND_CASE
  }
  UNREACHABLE();
}

}  // namespace

void ObjectStatsCollectorImpl::RecordVirtualCodeDetails(Code code) {
  RecordSimpleVirtualObjectStats(HeapObject(), code,
                                 CodeKindToVirtualInstanceType(code.kind()));
  RecordSimpleVirtualObjectStats(code, code.relocation_info(),
                                 ObjectStats::RELOC_INFO_TYPE);
  if (CodeKindIsOptimizedJSFunction(code.kind())) {
    Object source_position_table = code.source_position_table();
    if (source_position_table.IsHeapObject()) {
      RecordSimpleVirtualObjectStats(code,
                                     HeapObject::cast(source_position_table),
                                     ObjectStats::SOURCE_POSITION_TABLE_TYPE);
    }
    RecordSimpleVirtualObjectStats(code, code.deoptimization_data(),
                                   ObjectStats::DEOPTIMIZATION_DATA_TYPE);
    DeoptimizationData input_data =
        DeoptimizationData::cast(code.deoptimization_data());
    if (input_data.length() > 0) {
      RecordSimpleVirtualObjectStats(code.deoptimization_data(),
                                     input_data.LiteralArray(),
                                     ObjectStats::OPTIMIZED_CODE_LITERALS_TYPE);
    }
  }
  int const mode_mask = RelocInfo::EmbeddedObjectModeMask();
  for (RelocIterator it(code, mode_mask); !it.done(); it.next()) {
    DCHECK(RelocInfo::IsEmbeddedObjectMode(it.rinfo()->rmode()));
    Object target = it.rinfo()->target_object(cage_base());
    if (target.IsFixedArrayExact(cage_base())) {
      RecordVirtualObjectsForConstantPoolOrEmbeddedObjects(
          code, HeapObject::cast(target), ObjectStats::EMBEDDED_OBJECT_TYPE);
    }
  }
}

void ObjectStatsCollectorImpl::RecordVirtualContext(Context context) {
  if (context.IsNativeContext()) {
    RecordObjectStats(context, NATIVE_CONTEXT_TYPE, context.Size());
    if (context.retained_maps().IsWeakArrayList(cage_base())) {
      RecordSimpleVirtualObjectStats(
          context, WeakArrayList::cast(context.retained_maps()),
          ObjectStats::RETAINED_MAPS_TYPE);
    }
  } else if (context.IsFunctionContext()) {
    RecordObjectStats(context, FUNCTION_CONTEXT_TYPE, context.Size());
  } else {
    RecordSimpleVirtualObjectStats(HeapObject(), context,
                                   ObjectStats::OTHER_CONTEXT_TYPE);
  }
}

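// Dispatches each heap object to the live or the dead collector based on its
// mark bit (black means live), so live and dead objects are accounted for
// separately.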
class ObjectStatsVisitor {
 public:
  ObjectStatsVisitor(Heap* heap, ObjectStatsCollectorImpl* live_collector,
                     ObjectStatsCollectorImpl* dead_collector,
                     ObjectStatsCollectorImpl::Phase phase)
      : live_collector_(live_collector),
        dead_collector_(dead_collector),
        marking_state_(
            heap->mark_compact_collector()->non_atomic_marking_state()),
        phase_(phase) {}

  void Visit(HeapObject obj) {
    if (marking_state_->IsBlack(obj)) {
      live_collector_->CollectStatistics(
          obj, phase_, ObjectStatsCollectorImpl::CollectFieldStats::kYes);
    } else {
      DCHECK(!marking_state_->IsGrey(obj));
      dead_collector_->CollectStatistics(
          obj, phase_, ObjectStatsCollectorImpl::CollectFieldStats::kNo);
    }
  }

 private:
  ObjectStatsCollectorImpl* live_collector_;
  ObjectStatsCollectorImpl* dead_collector_;
  MarkCompactCollector::NonAtomicMarkingState* marking_state_;
  ObjectStatsCollectorImpl::Phase phase_;
};

namespace {

void IterateHeap(Heap* heap, ObjectStatsVisitor* visitor) {
  // We don't perform a GC while collecting object stats but need this scope
  // for the nested SafepointScope inside CombinedHeapObjectIterator.
  AllowGarbageCollection allow_gc;
  CombinedHeapObjectIterator iterator(heap);
  for (HeapObject obj = iterator.Next(); !obj.is_null();
       obj = iterator.Next()) {
    visitor->Visit(obj);
  }
}

}  // namespace

void ObjectStatsCollector::Collect() {
  ObjectStatsCollectorImpl live_collector(heap_, live_);
  ObjectStatsCollectorImpl dead_collector(heap_, dead_);
  live_collector.CollectGlobalStatistics();
  for (int i = 0; i < ObjectStatsCollectorImpl::kNumberOfPhases; i++) {
    ObjectStatsVisitor visitor(heap_, &live_collector, &dead_collector,
                               static_cast<ObjectStatsCollectorImpl::Phase>(i));
    IterateHeap(heap_, &visitor);
  }
}

}  // namespace internal
}  // namespace v8