• Home
  • Line#
  • Scopes#
  • Navigate#
  • Raw
  • Download
1 /*
2  * Copyright (C) 2019 The Android Open Source Project
3  *
4  * Licensed under the Apache License, Version 2.0 (the "License");
5  * you may not use this file except in compliance with the License.
6  * You may obtain a copy of the License at
7  *
8  *      http://www.apache.org/licenses/LICENSE-2.0
9  *
10  * Unless required by applicable law or agreed to in writing, software
11  * distributed under the License is distributed on an "AS IS" BASIS,
12  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13  * See the License for the specific language governing permissions and
14  * limitations under the License.
15  */
16 
17 #include "src/trace_processor/importers/proto/heap_graph_tracker.h"
18 
19 #include <algorithm>
20 #include <array>
21 #include <cinttypes>
22 #include <cstdint>
23 #include <cstring>
24 #include <deque>
25 #include <map>
26 #include <memory>
27 #include <optional>
28 #include <set>
29 #include <string>
30 #include <tuple>
31 #include <utility>
32 #include <vector>
33 
34 #include "perfetto/base/logging.h"
35 #include "perfetto/ext/base/string_view.h"
36 #include "protos/perfetto/trace/profiling/heap_graph.pbzero.h"
37 #include "src/trace_processor/storage/stats.h"
38 #include "src/trace_processor/storage/trace_storage.h"
39 #include "src/trace_processor/tables/profiler_tables_py.h"
40 #include "src/trace_processor/util/profiler_util.h"
41 
42 namespace perfetto::trace_processor {
43 
44 namespace {
45 
46 using ClassTable = tables::HeapGraphClassTable;
47 using ObjectTable = tables::HeapGraphObjectTable;
48 using ReferenceTable = tables::HeapGraphReferenceTable;
49 
50 // Iterates all the references owned by the object `id`.
51 //
52 // Calls bool(*fn)(ObjectTable::RowReference) with the each row
53 // from the `storage.heap_graph_reference()` table associated to the |object|.
54 // When `fn` returns false (or when there are no more rows owned by |object|),
55 // stops the iteration.
56 template <typename F>
ForReferenceSet(TraceStorage * storage,ObjectTable::ConstRowReference object,F fn)57 void ForReferenceSet(TraceStorage* storage,
58                      ObjectTable::ConstRowReference object,
59                      F fn) {
60   std::optional<uint32_t> reference_set_id = object.reference_set_id();
61   if (!reference_set_id)
62     return;
63 
64   auto* ref = storage->mutable_heap_graph_reference_table();
65   Query q;
66   q.constraints = {ref->reference_set_id().eq(*reference_set_id)};
67   auto it = ref->FilterToIterator(q);
68 
69   for (; it; ++it) {
70     if (!fn(it.row_reference()))
71       break;
72   }
73 }
74 
// Key identifying a class in the heap graph: the interned class name plus
// (optionally) the interned name of the location it was loaded from.
// Strictly ordered so it can serve as a std::map key.
struct ClassDescriptor {
  StringId name;                     // Interned class name.
  std::optional<StringId> location;  // Interned location name, if known.

  // Lexicographic order on (name, location).
  bool operator<(const ClassDescriptor& other) const {
    return std::tie(name, location) < std::tie(other.name, other.location);
  }
};
83 
GetClassDescriptor(const TraceStorage & storage,ObjectTable::Id obj_id)84 ClassDescriptor GetClassDescriptor(const TraceStorage& storage,
85                                    ObjectTable::Id obj_id) {
86   auto obj_row_ref = *storage.heap_graph_object_table().FindById(obj_id);
87   auto type_row_ref =
88       *storage.heap_graph_class_table().FindById(obj_row_ref.type_id());
89   return {type_row_ref.name(), type_row_ref.location()};
90 }
91 
GetReferredObj(const TraceStorage & storage,uint32_t ref_set_id,const std::string & field_name)92 std::optional<ObjectTable::Id> GetReferredObj(const TraceStorage& storage,
93                                               uint32_t ref_set_id,
94                                               const std::string& field_name) {
95   const auto& refs_tbl = storage.heap_graph_reference_table();
96   Query q;
97   q.constraints = {refs_tbl.reference_set_id().eq(ref_set_id),
98                    refs_tbl.field_name().eq(NullTermStringView(field_name))};
99   auto refs_it = refs_tbl.FilterToIterator(q);
100   if (!refs_it) {
101     return std::nullopt;
102   }
103   return refs_it.owned_id();
104 }
105 
// Maps from normalized class name and location, to superclass.
//
// Scans every heap graph object of the graph identified by (upid, ts); for
// each static class object (java.lang.Class<Foo>) it follows the
// "java.lang.Class.superClass" reference to discover Foo's superclass.
std::map<ClassDescriptor, ClassDescriptor>
BuildSuperclassMap(UniquePid upid, int64_t ts, TraceStorage* storage) {
  std::map<ClassDescriptor, ClassDescriptor> superclass_map;

  // Resolve superclasses by iterating heap graph objects and identifying the
  // superClass field.
  const auto& objects_tbl = storage->heap_graph_object_table();
  Query q;
  // Restrict to the single heap graph identified by (upid, ts).
  q.constraints = {objects_tbl.upid().eq(upid),
                   objects_tbl.graph_sample_ts().eq(ts)};
  auto obj_it = objects_tbl.FilterToIterator(q);
  for (; obj_it; ++obj_it) {
    auto obj_id = obj_it.id();
    auto class_descriptor = GetClassDescriptor(*storage, obj_id);
    auto normalized =
        GetNormalizedType(storage->GetString(class_descriptor.name));
    // superClass ptrs are stored on the static class objects
    // ignore arrays (as they are generated objects)
    if (!normalized.is_static_class || normalized.number_of_arrays > 0)
      continue;

    auto opt_ref_set_id = obj_it.reference_set_id();
    if (!opt_ref_set_id)
      continue;
    auto super_obj_id =
        GetReferredObj(*storage, *opt_ref_set_id, "java.lang.Class.superClass");
    if (!super_obj_id) {
      // This is expected to be missing for Object and primitive types
      continue;
    }

    // Lookup the super obj type id
    auto super_class_descriptor = GetClassDescriptor(*storage, *super_obj_id);
    auto super_class_name =
        NormalizeTypeName(storage->GetString(super_class_descriptor.name));
    StringId super_class_id = storage->InternString(super_class_name);
    StringId class_id = storage->InternString(normalized.name);
    // Key: normalized class name + the class object's location.
    // Value: normalized superclass name + the superclass's location.
    superclass_map[{class_id, class_descriptor.location}] = {
        super_class_id, super_class_descriptor.location};
  }
  return superclass_map;
}
149 
// Extract the size from `nar_size`, which is the value of a
// libcore.util.NativeAllocationRegistry.size field: it encodes the size, but
// uses the least significant bit to represent the source of the allocation.
int64_t GetSizeFromNativeAllocationRegistry(int64_t nar_size) {
  // Clear the "is malloced" flag bit; the remaining bits are the size.
  constexpr uint64_t kIsMallocedBit = 1;
  const auto raw_bits = static_cast<uint64_t>(nar_size);
  return static_cast<int64_t>(raw_bits & ~kIsMallocedBit);
}
157 
// A given object can be a heap root in different ways. Ensure analysis is
// consistent.
//
// NOTE(review): presumably consulted in precedence order (first entry wins)
// when the same object is reported under multiple root types; the consuming
// code is outside this chunk — confirm at the use site.
constexpr std::array<protos::pbzero::HeapGraphRoot::Type, 3>
    kRootTypePrecedence = {
        protos::pbzero::HeapGraphRoot::ROOT_STICKY_CLASS,
        protos::pbzero::HeapGraphRoot::ROOT_JNI_GLOBAL,
        protos::pbzero::HeapGraphRoot::ROOT_JNI_LOCAL,
};
}  // namespace
167 
GetStaticClassTypeName(base::StringView type)168 std::optional<base::StringView> GetStaticClassTypeName(base::StringView type) {
169   static const base::StringView kJavaClassTemplate("java.lang.Class<");
170   if (!type.empty() && type.at(type.size() - 1) == '>' &&
171       type.substr(0, kJavaClassTemplate.size()) == kJavaClassTemplate) {
172     return type.substr(kJavaClassTemplate.size(),
173                        type.size() - kJavaClassTemplate.size() - 1);
174   }
175   return {};
176 }
177 
NumberOfArrays(base::StringView type)178 size_t NumberOfArrays(base::StringView type) {
179   if (type.size() < 2)
180     return 0;
181 
182   size_t arrays = 0;
183   while (type.size() >= 2 * (arrays + 1) &&
184          memcmp(type.end() - 2 * (arrays + 1), "[]", 2) == 0) {
185     arrays++;
186   }
187   return arrays;
188 }
189 
GetNormalizedType(base::StringView type)190 NormalizedType GetNormalizedType(base::StringView type) {
191   auto static_class_type_name = GetStaticClassTypeName(type);
192   if (static_class_type_name.has_value()) {
193     type = static_class_type_name.value();
194   }
195   size_t number_of_arrays = NumberOfArrays(type);
196   return {base::StringView(type.data(), type.size() - number_of_arrays * 2),
197           static_class_type_name.has_value(), number_of_arrays};
198 }
199 
NormalizeTypeName(base::StringView type)200 base::StringView NormalizeTypeName(base::StringView type) {
201   return GetNormalizedType(type).name;
202 }
203 
DenormalizeTypeName(NormalizedType normalized,base::StringView deobfuscated_type_name)204 std::string DenormalizeTypeName(NormalizedType normalized,
205                                 base::StringView deobfuscated_type_name) {
206   std::string result = deobfuscated_type_name.ToStdString();
207   for (size_t i = 0; i < normalized.number_of_arrays; ++i) {
208     result += "[]";
209   }
210   if (normalized.is_static_class) {
211     result = "java.lang.Class<" + result + ">";
212   }
213   return result;
214 }
215 
HeapGraphTracker(TraceStorage * storage)216 HeapGraphTracker::HeapGraphTracker(TraceStorage* storage)
217     : storage_(storage),
218       cleaner_thunk_str_id_(storage_->InternString("sun.misc.Cleaner.thunk")),
219       referent_str_id_(
220           storage_->InternString("java.lang.ref.Reference.referent")),
221       cleaner_thunk_this0_str_id_(storage_->InternString(
222           "libcore.util.NativeAllocationRegistry$CleanerThunk.this$0")),
223       native_size_str_id_(
224           storage_->InternString("libcore.util.NativeAllocationRegistry.size")),
225       cleaner_next_str_id_(storage_->InternString("sun.misc.Cleaner.next")) {
226   for (size_t i = 0; i < root_type_string_ids_.size(); i++) {
227     auto val = static_cast<protos::pbzero::HeapGraphRoot::Type>(i);
228     auto str_view =
229         base::StringView(protos::pbzero::HeapGraphRoot_Type_Name(val));
230     root_type_string_ids_[i] = storage_->InternString(str_view);
231   }
232 
233   for (size_t i = 0; i < type_kind_string_ids_.size(); i++) {
234     auto val = static_cast<protos::pbzero::HeapGraphType::Kind>(i);
235     auto str_view =
236         base::StringView(protos::pbzero::HeapGraphType_Kind_Name(val));
237     type_kind_string_ids_[i] = storage_->InternString(str_view);
238   }
239 }
240 
GetOrCreateSequence(uint32_t seq_id)241 HeapGraphTracker::SequenceState& HeapGraphTracker::GetOrCreateSequence(
242     uint32_t seq_id) {
243   return sequence_state_[seq_id];
244 }
245 
SetPidAndTimestamp(SequenceState * sequence_state,UniquePid upid,int64_t ts)246 bool HeapGraphTracker::SetPidAndTimestamp(SequenceState* sequence_state,
247                                           UniquePid upid,
248                                           int64_t ts) {
249   if (sequence_state->current_upid != 0 &&
250       sequence_state->current_upid != upid) {
251     storage_->IncrementStats(stats::heap_graph_non_finalized_graph);
252     return false;
253   }
254   if (sequence_state->current_ts != 0 && sequence_state->current_ts != ts) {
255     storage_->IncrementStats(stats::heap_graph_non_finalized_graph);
256     return false;
257   }
258   sequence_state->current_upid = upid;
259   sequence_state->current_ts = ts;
260   return true;
261 }
262 
GetOrInsertObject(SequenceState * sequence_state,uint64_t object_id)263 ObjectTable::RowReference HeapGraphTracker::GetOrInsertObject(
264     SequenceState* sequence_state,
265     uint64_t object_id) {
266   auto* object_table = storage_->mutable_heap_graph_object_table();
267   auto* ptr = sequence_state->object_id_to_db_row.Find(object_id);
268   if (!ptr) {
269     auto id_and_row = object_table->Insert({sequence_state->current_upid,
270                                             sequence_state->current_ts,
271                                             -1,
272                                             0,
273                                             /*reference_set_id=*/std::nullopt,
274                                             /*reachable=*/0,
275                                             /*heap_type=*/std::nullopt,
276                                             {},
277                                             /*root_type=*/std::nullopt,
278                                             /*root_distance*/ -1});
279     bool inserted;
280     std::tie(ptr, inserted) = sequence_state->object_id_to_db_row.Insert(
281         object_id, id_and_row.row_number);
282   }
283   return ptr->ToRowReference(object_table);
284 }
285 
GetOrInsertType(SequenceState * sequence_state,uint64_t type_id)286 ClassTable::RowReference HeapGraphTracker::GetOrInsertType(
287     SequenceState* sequence_state,
288     uint64_t type_id) {
289   auto* class_table = storage_->mutable_heap_graph_class_table();
290   auto* ptr = sequence_state->type_id_to_db_row.Find(type_id);
291   if (!ptr) {
292     auto id_and_row =
293         class_table->Insert({StringId(), std::nullopt, std::nullopt});
294     bool inserted;
295     std::tie(ptr, inserted) = sequence_state->type_id_to_db_row.Insert(
296         type_id, id_and_row.row_number);
297   }
298   return ptr->ToRowReference(class_table);
299 }
300 
// Records one heap graph object: its row in the object table, its type, its
// outgoing references, and (if present) its NativeAllocationRegistry size.
void HeapGraphTracker::AddObject(uint32_t seq_id,
                                 UniquePid upid,
                                 int64_t ts,
                                 SourceObject obj) {
  SequenceState& sequence_state = GetOrCreateSequence(seq_id);

  // Drop objects from a sequence whose (upid, ts) conflicts with an earlier,
  // non-finalized graph.
  if (!SetPidAndTimestamp(&sequence_state, upid, ts))
    return;

  sequence_state.last_object_id = obj.object_id;
  sequence_state.last_heap_type = obj.heap_type;

  ObjectTable::RowReference owner_row_ref =
      GetOrInsertObject(&sequence_state, obj.object_id);
  ClassTable::RowReference type_row_ref =
      GetOrInsertType(&sequence_state, obj.type_id);

  ClassTable::Id type_id = type_row_ref.id();

  owner_row_ref.set_self_size(static_cast<int64_t>(obj.self_size));
  owner_row_ref.set_type_id(type_id);
  if (obj.heap_type != protos::pbzero::HeapGraphObject::HEAP_TYPE_UNKNOWN) {
    owner_row_ref.set_heap_type(storage_->InternString(base::StringView(
        protos::pbzero::HeapGraphObject_HeapType_Name(obj.heap_type))));
  }

  // self_size == 0 means the size is taken from the type's object_size,
  // which is patched in later by FinalizeProfile.
  if (obj.self_size == 0) {
    sequence_state.deferred_size_objects_for_type_[type_id].push_back(
        owner_row_ref.ToRowNumber());
  }

  // The new reference set starts at the current end of the reference table;
  // every row inserted in the loop below shares this id.
  uint32_t reference_set_id =
      storage_->heap_graph_reference_table().row_count();
  bool any_references = false;

  ObjectTable::Id owner_id = owner_row_ref.id();
  for (size_t i = 0; i < obj.referred_objects.size(); ++i) {
    uint64_t owned_object_id = obj.referred_objects[i];
    // This is true for unset reference fields.
    std::optional<ObjectTable::RowReference> owned_row_ref;
    if (owned_object_id != 0)
      owned_row_ref = GetOrInsertObject(&sequence_state, owned_object_id);

    auto ref_id_and_row =
        storage_->mutable_heap_graph_reference_table()->Insert(
            {reference_set_id,
             owner_id,
             owned_row_ref ? std::make_optional(owned_row_ref->id())
                           : std::nullopt,
             {},
             {},
             /*deobfuscated_field_name=*/std::nullopt});
    // When field-name interned ids accompany the references, remember these
    // rows so AddInternedFieldName can fill in the names once they arrive.
    if (!obj.field_name_ids.empty()) {
      sequence_state.references_for_field_name_id[obj.field_name_ids[i]]
          .push_back(ref_id_and_row.row_number);
    }
    any_references = true;
  }
  if (any_references) {
    owner_row_ref.set_reference_set_id(reference_set_id);
    // No per-reference field names: FinalizeProfile derives them
    // positionally from the type's (and superclasses') field lists.
    if (obj.field_name_ids.empty()) {
      sequence_state.deferred_reference_objects_for_type_[type_id].push_back(
          owner_row_ref.ToRowNumber());
    }
  }

  // Remembered for PopulateNativeSize's native-size attribution.
  if (obj.native_allocation_registry_size.has_value()) {
    sequence_state.nar_size_by_obj_id[owner_id] =
        *obj.native_allocation_registry_size;
  }
}
372 
AddRoot(uint32_t seq_id,UniquePid upid,int64_t ts,SourceRoot root)373 void HeapGraphTracker::AddRoot(uint32_t seq_id,
374                                UniquePid upid,
375                                int64_t ts,
376                                SourceRoot root) {
377   SequenceState& sequence_state = GetOrCreateSequence(seq_id);
378   if (!SetPidAndTimestamp(&sequence_state, upid, ts))
379     return;
380 
381   sequence_state.current_roots.emplace_back(std::move(root));
382 }
383 
AddInternedLocationName(uint32_t seq_id,uint64_t intern_id,StringId strid)384 void HeapGraphTracker::AddInternedLocationName(uint32_t seq_id,
385                                                uint64_t intern_id,
386                                                StringId strid) {
387   SequenceState& sequence_state = GetOrCreateSequence(seq_id);
388   sequence_state.interned_location_names.emplace(intern_id, strid);
389 }
390 
AddInternedType(uint32_t seq_id,uint64_t intern_id,StringId strid,std::optional<uint64_t> location_id,uint64_t object_size,std::vector<uint64_t> field_name_ids,uint64_t superclass_id,uint64_t classloader_id,bool no_fields,protos::pbzero::HeapGraphType::Kind kind)391 void HeapGraphTracker::AddInternedType(
392     uint32_t seq_id,
393     uint64_t intern_id,
394     StringId strid,
395     std::optional<uint64_t> location_id,
396     uint64_t object_size,
397     std::vector<uint64_t> field_name_ids,
398     uint64_t superclass_id,
399     uint64_t classloader_id,
400     bool no_fields,
401     protos::pbzero::HeapGraphType::Kind kind) {
402   SequenceState& sequence_state = GetOrCreateSequence(seq_id);
403   InternedType& type = sequence_state.interned_types[intern_id];
404   type.name = strid;
405   type.location_id = location_id;
406   type.object_size = object_size;
407   type.field_name_ids = std::move(field_name_ids);
408   type.superclass_id = superclass_id;
409   type.classloader_id = classloader_id;
410   type.no_fields = no_fields;
411   type.kind = kind;
412 }
413 
AddInternedFieldName(uint32_t seq_id,uint64_t intern_id,base::StringView str)414 void HeapGraphTracker::AddInternedFieldName(uint32_t seq_id,
415                                             uint64_t intern_id,
416                                             base::StringView str) {
417   SequenceState& sequence_state = GetOrCreateSequence(seq_id);
418   size_t space = str.find(' ');
419   base::StringView type;
420   if (space != base::StringView::npos) {
421     type = str.substr(0, space);
422     str = str.substr(space + 1);
423   }
424   StringId field_name = storage_->InternString(str);
425   StringId type_name = storage_->InternString(type);
426 
427   sequence_state.interned_fields.Insert(intern_id,
428                                         InternedField{field_name, type_name});
429 
430   auto it = sequence_state.references_for_field_name_id.find(intern_id);
431   if (it != sequence_state.references_for_field_name_id.end()) {
432     auto* hgr = storage_->mutable_heap_graph_reference_table();
433     for (ReferenceTable::RowNumber reference_row_num : it->second) {
434       auto row_ref = reference_row_num.ToRowReference(hgr);
435       row_ref.set_field_name(field_name);
436       row_ref.set_field_type_name(type_name);
437       field_to_rows_[field_name].emplace_back(reference_row_num);
438     }
439   }
440 }
441 
SetPacketIndex(uint32_t seq_id,uint64_t index)442 void HeapGraphTracker::SetPacketIndex(uint32_t seq_id, uint64_t index) {
443   SequenceState& sequence_state = GetOrCreateSequence(seq_id);
444   bool dropped_packet = false;
445   // perfetto_hprof starts counting at index = 0.
446   if (!sequence_state.prev_index && index != 0) {
447     dropped_packet = true;
448   }
449 
450   if (sequence_state.prev_index && *sequence_state.prev_index + 1 != index) {
451     dropped_packet = true;
452   }
453 
454   if (dropped_packet) {
455     sequence_state.truncated = true;
456     if (sequence_state.prev_index) {
457       PERFETTO_ELOG("Missing packets between %" PRIu64 " and %" PRIu64,
458                     *sequence_state.prev_index, index);
459     } else {
460       PERFETTO_ELOG("Invalid first packet index %" PRIu64 " (!= 0)", index);
461     }
462 
463     storage_->IncrementIndexedStats(
464         stats::heap_graph_missing_packet,
465         static_cast<int>(sequence_state.current_upid));
466   }
467   sequence_state.prev_index = index;
468 }
469 
470 // This only works on Android S+ traces. We need to have ingested the whole
471 // profile before calling this function (e.g. in FinalizeProfile).
GetSuperClass(SequenceState * sequence_state,const InternedType * current_type)472 HeapGraphTracker::InternedType* HeapGraphTracker::GetSuperClass(
473     SequenceState* sequence_state,
474     const InternedType* current_type) {
475   if (current_type->superclass_id) {
476     auto it = sequence_state->interned_types.find(current_type->superclass_id);
477     if (it != sequence_state->interned_types.end())
478       return &it->second;
479   }
480   storage_->IncrementIndexedStats(
481       stats::heap_graph_malformed_packet,
482       static_cast<int>(sequence_state->current_upid));
483   return nullptr;
484 }
485 
// Completes the heap graph for `seq_id`: flushes interned type data into the
// class table, patches deferred object sizes and reference field names,
// applies roots, and runs the superclass / native-size passes. The
// sequence's in-progress state is discarded at the end.
void HeapGraphTracker::FinalizeProfile(uint32_t seq_id) {
  SequenceState& sequence_state = GetOrCreateSequence(seq_id);
  // Remember graphs that lost packets (see SetPacketIndex) so consumers can
  // tell the data is incomplete.
  if (sequence_state.truncated) {
    truncated_graphs_.emplace(
        std::make_pair(sequence_state.current_upid, sequence_state.current_ts));
  }

  // We do this in FinalizeProfile because the interned_location_names get
  // written at the end of the dump.
  for (const auto& p : sequence_state.interned_types) {
    uint64_t id = p.first;
    const InternedType& interned_type = p.second;
    std::optional<StringId> location_name;
    if (interned_type.location_id) {
      auto it = sequence_state.interned_location_names.find(
          *interned_type.location_id);
      if (it == sequence_state.interned_location_names.end()) {
        storage_->IncrementIndexedStats(
            stats::heap_graph_invalid_string_id,
            static_cast<int>(sequence_state.current_upid));
      } else {
        location_name = it->second;
      }
    }
    ClassTable::RowReference type_row_ref =
        GetOrInsertType(&sequence_state, id);
    ClassTable::Id type_id = type_row_ref.id();

    // Patch objects that reported self_size == 0 (see AddObject) with the
    // object_size carried by their type.
    auto sz_obj_it =
        sequence_state.deferred_size_objects_for_type_.find(type_id);
    if (sz_obj_it != sequence_state.deferred_size_objects_for_type_.end()) {
      auto* hgo = storage_->mutable_heap_graph_object_table();
      for (ObjectTable::RowNumber obj_row_num : sz_obj_it->second) {
        auto obj_row_ref = obj_row_num.ToRowReference(hgo);
        obj_row_ref.set_self_size(
            static_cast<int64_t>(interned_type.object_size));
      }
      sequence_state.deferred_size_objects_for_type_.erase(sz_obj_it);
    }

    // Patch references recorded without per-reference field names: names are
    // assigned positionally by walking the type's field_name_ids, climbing
    // the superclass chain as each class's list is exhausted.
    auto ref_obj_it =
        sequence_state.deferred_reference_objects_for_type_.find(type_id);
    if (ref_obj_it !=
        sequence_state.deferred_reference_objects_for_type_.end()) {
      for (ObjectTable::RowNumber obj_row_number : ref_obj_it->second) {
        auto obj_row_ref = obj_row_number.ToRowReference(
            storage_->mutable_heap_graph_object_table());
        const InternedType* current_type = &interned_type;
        if (interned_type.no_fields) {
          continue;
        }
        size_t field_offset_in_cls = 0;
        ForReferenceSet(
            storage_, obj_row_ref,
            [this, &current_type, &sequence_state,
             &field_offset_in_cls](ReferenceTable::RowReference ref) {
              // Advance to the class that actually declares the field at
              // this offset.
              while (current_type && field_offset_in_cls >=
                                         current_type->field_name_ids.size()) {
                size_t prev_type_size = current_type->field_name_ids.size();
                current_type = GetSuperClass(&sequence_state, current_type);
                field_offset_in_cls -= prev_type_size;
              }

              // Superclass chain exhausted/unresolvable: stop this object.
              if (!current_type) {
                return false;
              }

              uint64_t field_id =
                  current_type->field_name_ids[field_offset_in_cls++];
              auto* ptr = sequence_state.interned_fields.Find(field_id);
              if (!ptr) {
                PERFETTO_DLOG("Invalid field id.");
                storage_->IncrementIndexedStats(
                    stats::heap_graph_malformed_packet,
                    static_cast<int>(sequence_state.current_upid));
                return true;
              }
              const InternedField& field = *ptr;
              ref.set_field_name(field.name);
              ref.set_field_type_name(field.type_name);
              field_to_rows_[field.name].emplace_back(ref.ToRowNumber());
              return true;
            });
      }
      sequence_state.deferred_reference_objects_for_type_.erase(ref_obj_it);
    }

    type_row_ref.set_name(interned_type.name);
    if (interned_type.classloader_id) {
      auto classloader_object_ref =
          GetOrInsertObject(&sequence_state, interned_type.classloader_id);
      type_row_ref.set_classloader_id(classloader_object_ref.id().value);
    }
    if (location_name)
      type_row_ref.set_location(*location_name);
    type_row_ref.set_kind(InternTypeKindString(interned_type.kind));

    base::StringView normalized_type =
        NormalizeTypeName(storage_->GetString(interned_type.name));

    // Attribute the class to a package: prefer the location's package name,
    // falling back to the process's android_appid via the package list.
    std::optional<StringId> class_package;
    if (location_name) {
      std::optional<std::string> package_name =
          PackageFromLocation(storage_, storage_->GetString(*location_name));
      if (package_name) {
        class_package = storage_->InternString(base::StringView(*package_name));
      }
    }
    if (!class_package) {
      auto app_id = storage_->process_table()[sequence_state.current_upid]
                        .android_appid();
      if (app_id) {
        for (auto it = storage_->package_list_table().IterateRows(); it; ++it) {
          if (it.uid() == *app_id) {
            class_package = it.package_name();
            break;
          }
        }
      }
    }

    class_to_rows_[std::make_pair(class_package,
                                  storage_->InternString(normalized_type))]
        .emplace_back(type_row_ref.ToRowNumber());
  }

  // Leftover deferred entries mean some type ids never received interned
  // data: flag the graph as malformed.
  if (!sequence_state.deferred_size_objects_for_type_.empty() ||
      !sequence_state.deferred_reference_objects_for_type_.empty()) {
    storage_->IncrementIndexedStats(
        stats::heap_graph_malformed_packet,
        static_cast<int>(sequence_state.current_upid));
  }

  // Apply the roots buffered by AddRoot.
  for (const SourceRoot& root : sequence_state.current_roots) {
    for (uint64_t obj_id : root.object_ids) {
      auto ptr = sequence_state.object_id_to_db_row.Find(obj_id);
      // This can only happen for an invalid type string id, which is already
      // reported as an error. Silently continue here.
      if (!ptr)
        continue;

      ObjectTable::RowReference row_ref =
          ptr->ToRowReference(storage_->mutable_heap_graph_object_table());
      roots_[std::make_pair(sequence_state.current_upid,
                            sequence_state.current_ts)]
          .emplace(*ptr);
      MarkRoot(row_ref, InternRootTypeString(root.root_type));
    }
  }

  PopulateSuperClasses(sequence_state);
  PopulateNativeSize(sequence_state);
  // The sequence is complete; drop its in-progress state.
  sequence_state_.erase(seq_id);
}
640 
GetReferenceByFieldName(ObjectTable::Id obj,StringId field)641 std::optional<ObjectTable::Id> HeapGraphTracker::GetReferenceByFieldName(
642     ObjectTable::Id obj,
643     StringId field) {
644   std::optional<ObjectTable::Id> referred;
645   auto obj_row_ref = *storage_->heap_graph_object_table().FindById(obj);
646   ForReferenceSet(storage_, obj_row_ref,
647                   [&](ReferenceTable::RowReference ref) -> bool {
648                     if (ref.field_name() == field) {
649                       referred = ref.owned_id();
650                       return false;
651                     }
652                     return true;
653                   });
654   return referred;
655 }
656 
// Attributes NativeAllocationRegistry sizes to the Java objects they back,
// by walking live sun.misc.Cleaner chains (see diagram below).
void HeapGraphTracker::PopulateNativeSize(const SequenceState& seq) {
  //             +-------------------------------+  .referent   +--------+
  //             |       sun.misc.Cleaner        | -----------> | Object |
  //             +-------------------------------+              +--------+
  //                |
  //                | .thunk
  //                v
  // +----------------------------------------------------+
  // | libcore.util.NativeAllocationRegistry$CleanerThunk |
  // +----------------------------------------------------+
  //   |
  //   | .this$0
  //   v
  // +----------------------------------------------------+
  // |       libcore.util.NativeAllocationRegistry        |
  // |                       .size                        |
  // +----------------------------------------------------+
  //
  // `.size` should be attributed as the native size of Object

  const auto& class_tbl = storage_->heap_graph_class_table();
  auto& objects_tbl = *storage_->mutable_heap_graph_object_table();

  // A live Cleaner: the referent (the Java object backed by native memory)
  // and the thunk leading to the NativeAllocationRegistry.
  struct Cleaner {
    ObjectTable::Id referent;
    ObjectTable::Id thunk;
  };
  std::vector<Cleaner> cleaners;

  // Phase 1: collect live cleaners of this sequence's heap graph.
  Query q;
  q.constraints = {class_tbl.name().eq("sun.misc.Cleaner")};
  auto class_it = class_tbl.FilterToIterator(q);
  for (; class_it; ++class_it) {
    auto class_id = class_it.id();
    Query query;
    query.constraints = {objects_tbl.type_id().eq(class_id.value),
                         objects_tbl.upid().eq(seq.current_upid),
                         objects_tbl.graph_sample_ts().eq(seq.current_ts)};
    auto obj_it = objects_tbl.FilterToIterator(query);
    for (; obj_it; ++obj_it) {
      ObjectTable::Id cleaner_obj_id = obj_it.id();
      std::optional<ObjectTable::Id> referent_id =
          GetReferenceByFieldName(cleaner_obj_id, referent_str_id_);
      std::optional<ObjectTable::Id> thunk_id =
          GetReferenceByFieldName(cleaner_obj_id, cleaner_thunk_str_id_);

      if (!referent_id || !thunk_id) {
        continue;
      }

      std::optional<ObjectTable::Id> next_id =
          GetReferenceByFieldName(cleaner_obj_id, cleaner_next_str_id_);
      if (next_id.has_value() && *next_id == cleaner_obj_id) {
        // sun.misc.Cleaner.next points to the sun.misc.Cleaner: this means
        // that the sun.misc.Cleaner.clean() has already been called. Skip this.
        continue;
      }
      cleaners.push_back(Cleaner{*referent_id, *thunk_id});
    }
  }

  // Phase 2: follow thunk.this$0 to the registry, decode its size, and add
  // it to the referent's native_size (accumulating across cleaners).
  for (const auto& cleaner : cleaners) {
    std::optional<ObjectTable::Id> this0 =
        GetReferenceByFieldName(cleaner.thunk, cleaner_thunk_this0_str_id_);
    if (!this0) {
      continue;
    }

    auto nar_size_it = seq.nar_size_by_obj_id.find(*this0);
    if (nar_size_it == seq.nar_size_by_obj_id.end()) {
      continue;
    }

    int64_t native_size =
        GetSizeFromNativeAllocationRegistry(nar_size_it->second);
    auto referent_row_ref = *objects_tbl.FindById(cleaner.referent);
    int64_t total_native_size = referent_row_ref.native_size() + native_size;
    referent_row_ref.set_native_size(total_native_size);
  }
}
737 
738 // TODO(fmayer): For Android S+ traces, use the superclass_id from the trace.
PopulateSuperClasses(const SequenceState & seq)739 void HeapGraphTracker::PopulateSuperClasses(const SequenceState& seq) {
740   // Maps from normalized class name and location, to superclass.
741   std::map<ClassDescriptor, ClassDescriptor> superclass_map =
742       BuildSuperclassMap(seq.current_upid, seq.current_ts, storage_);
743 
744   auto* classes_tbl = storage_->mutable_heap_graph_class_table();
745   std::map<ClassDescriptor, ClassTable::Id> class_to_id;
746   for (auto it = classes_tbl->IterateRows(); it; ++it) {
747     class_to_id[{it.name(), it.location()}] = it.id();
748   }
749 
750   // Iterate through the classes table and annotate with superclasses.
751   // We iterate all rows on the classes table (even though the superclass
752   // mapping was generated on the current sequence) - if we cannot identify
753   // a superclass we will just skip.
754   for (uint32_t i = 0; i < classes_tbl->row_count(); ++i) {
755     auto rr = (*classes_tbl)[i];
756     auto name = storage_->GetString(rr.name());
757     auto location = rr.location();
758     auto normalized = GetNormalizedType(name);
759     if (normalized.is_static_class || normalized.number_of_arrays > 0)
760       continue;
761 
762     StringId class_name_id = storage_->InternString(normalized.name);
763     auto map_it = superclass_map.find({class_name_id, location});
764     if (map_it == superclass_map.end()) {
765       continue;
766     }
767 
768     // Find the row for the superclass id
769     auto superclass_it = class_to_id.find(map_it->second);
770     if (superclass_it == class_to_id.end()) {
771       // This can happen for traces was captured before the patch to
772       // explicitly emit interned types (meaning classes without live
773       // instances would not appear here).
774       continue;
775     }
776     rr.set_superclass_id(superclass_it->second);
777   }
778 }
779 
GetChildren(ObjectTable::RowReference object,std::vector<ObjectTable::Id> & children)780 void HeapGraphTracker::GetChildren(ObjectTable::RowReference object,
781                                    std::vector<ObjectTable::Id>& children) {
782   children.clear();
783 
784   auto cls_row_ref =
785       *storage_->heap_graph_class_table().FindById(object.type_id());
786 
787   StringId kind = cls_row_ref.kind();
788 
789   bool is_ignored_reference =
790       kind == InternTypeKindString(
791                   protos::pbzero::HeapGraphType::KIND_WEAK_REFERENCE) ||
792       kind == InternTypeKindString(
793                   protos::pbzero::HeapGraphType::KIND_SOFT_REFERENCE) ||
794       kind == InternTypeKindString(
795                   protos::pbzero::HeapGraphType::KIND_FINALIZER_REFERENCE) ||
796       kind == InternTypeKindString(
797                   protos::pbzero::HeapGraphType::KIND_PHANTOM_REFERENCE);
798 
799   ForReferenceSet(
800       storage_, object,
801       [object, &children, is_ignored_reference,
802        this](ReferenceTable::RowReference ref) {
803         PERFETTO_CHECK(ref.owner_id() == object.id());
804         auto opt_owned = ref.owned_id();
805         if (!opt_owned) {
806           return true;
807         }
808         if (is_ignored_reference && ref.field_name() == referent_str_id_) {
809           // If `object` is a special reference kind, its
810           // "java.lang.ref.Reference.referent" field should be ignored.
811           return true;
812         }
813         children.push_back(*opt_owned);
814         return true;
815       });
816   std::sort(children.begin(), children.end(),
817             [](const ObjectTable::Id& a, const ObjectTable::Id& b) {
818               return a.value < b.value;
819             });
820   children.erase(std::unique(children.begin(), children.end()), children.end());
821 }
822 
RankRoot(StringId type)823 size_t HeapGraphTracker::RankRoot(StringId type) {
824   size_t idx = 0;
825   for (; idx < kRootTypePrecedence.size(); ++idx) {
826     if (type == InternRootTypeString(kRootTypePrecedence[idx])) {
827       break;
828     }
829   }
830   return idx;
831 }
832 
MarkRoot(ObjectTable::RowReference row_ref,StringId type)833 void HeapGraphTracker::MarkRoot(ObjectTable::RowReference row_ref,
834                                 StringId type) {
835   // Already marked as a root
836   if (row_ref.root_type()) {
837     if (RankRoot(type) < RankRoot(*row_ref.root_type())) {
838       row_ref.set_root_type(type);
839     }
840     return;
841   }
842   row_ref.set_root_type(type);
843 
844   std::vector<ObjectTable::Id> children;
845 
846   // DFS to mark reachability for all children
847   std::vector<ObjectTable::RowReference> stack({row_ref});
848   while (!stack.empty()) {
849     ObjectTable::RowReference cur_node = stack.back();
850     stack.pop_back();
851 
852     if (cur_node.reachable())
853       continue;
854     cur_node.set_reachable(true);
855 
856     GetChildren(cur_node, children);
857     for (ObjectTable::Id child_node : children) {
858       auto child_ref =
859           *storage_->mutable_heap_graph_object_table()->FindById(child_node);
860       stack.push_back(child_ref);
861     }
862   }
863 }
864 
UpdateShortestPaths(ObjectTable::RowReference row_ref)865 void HeapGraphTracker::UpdateShortestPaths(ObjectTable::RowReference row_ref) {
866   // Calculate shortest distance to a GC root.
867   std::deque<std::pair<int32_t, ObjectTable::RowReference>> reachable_nodes{
868       {0, row_ref}};
869 
870   std::vector<ObjectTable::Id> children;
871   while (!reachable_nodes.empty()) {
872     auto pair = reachable_nodes.front();
873 
874     int32_t distance = pair.first;
875     ObjectTable::RowReference cur_row_ref = pair.second;
876 
877     reachable_nodes.pop_front();
878     int32_t cur_distance = cur_row_ref.root_distance();
879     if (cur_distance == -1 || cur_distance > distance) {
880       cur_row_ref.set_root_distance(distance);
881 
882       GetChildren(cur_row_ref, children);
883       for (ObjectTable::Id child_node : children) {
884         auto child_row_ref =
885             *storage_->mutable_heap_graph_object_table()->FindById(child_node);
886         int32_t child_distance = child_row_ref.root_distance();
887         if (child_distance == -1 || child_distance > distance + 1)
888           reachable_nodes.emplace_back(distance + 1, child_row_ref);
889       }
890     }
891   }
892 }
893 
// Accumulates the graph reachable from `row_ref` into the aggregated tree
// `path`, grouping objects by class name at each depth. Only edges that lie
// on a shortest path from a root (child_distance == n_distance + 1) are
// followed, and each object is visited at most once (path->visited), so each
// object's size is attributed to exactly one tree node.
void HeapGraphTracker::FindPathFromRoot(ObjectTable::RowReference row_ref,
                                        PathFromRoot* path) {
  // We have long retention chains (e.g. from LinkedList). If we use the stack
  // here, we risk running out of stack space. This is why we use a vector to
  // simulate the stack.
  struct StackElem {
    ObjectTable::RowReference node;  // Node in the original graph.
    size_t parent_id;                // id of parent node in the result tree.
    size_t i;        // Index of the next child of this node to handle.
    uint32_t depth;  // Depth in the resulting tree
                     // (including artificial root).
    std::vector<ObjectTable::Id> children;
  };

  std::vector<StackElem> stack{{row_ref, PathFromRoot::kRoot, 0, 0, {}}};
  while (!stack.empty()) {
    ObjectTable::RowReference object_row_ref = stack.back().node;

    size_t parent_id = stack.back().parent_id;
    uint32_t depth = stack.back().depth;
    // NOTE: `i` and `children` are references into stack.back(); they are not
    // used after the emplace_back at the bottom of the loop, which may
    // invalidate them by growing the vector.
    size_t& i = stack.back().i;
    std::vector<ObjectTable::Id>& children = stack.back().children;

    ClassTable::Id type_id = object_row_ref.type_id();

    // Prefer the deobfuscated class name when one is available.
    auto type_row_ref = *storage_->heap_graph_class_table().FindById(type_id);
    std::optional<StringId> opt_class_name_id =
        type_row_ref.deobfuscated_name();
    if (!opt_class_name_id) {
      opt_class_name_id = type_row_ref.name();
    }
    PERFETTO_CHECK(opt_class_name_id);
    StringId class_name_id = *opt_class_name_id;
    std::optional<StringId> root_type = object_row_ref.root_type();
    if (root_type) {
      // Root objects get their root type appended, e.g. "Foo [ROOT_JAVA]".
      class_name_id = storage_->InternString(base::StringView(
          storage_->GetString(class_name_id).ToStdString() + " [" +
          storage_->GetString(*root_type).ToStdString() + "]"));
    }
    // Get or create the output tree node for (parent, class name).
    auto it = path->nodes[parent_id].children.find(class_name_id);
    if (it == path->nodes[parent_id].children.end()) {
      size_t path_id = path->nodes.size();
      path->nodes.emplace_back(PathFromRoot::Node{});
      std::tie(it, std::ignore) =
          path->nodes[parent_id].children.emplace(class_name_id, path_id);
      path->nodes.back().class_name_id = class_name_id;
      path->nodes.back().depth = depth;
      path->nodes.back().parent_id = parent_id;
    }
    size_t path_id = it->second;
    PathFromRoot::Node* output_tree_node = &path->nodes[path_id];

    if (i == 0) {
      // This is the first time we are looking at this node, so add its
      // size to the relevant node in the resulting tree.
      output_tree_node->size += object_row_ref.self_size();
      output_tree_node->count++;
      GetChildren(object_row_ref, children);

      if (object_row_ref.native_size()) {
        // Attribute native memory to a synthetic "[native] <class>" child of
        // this tree node, created on first use.
        StringId native_class_name_id = storage_->InternString(
            base::StringView(std::string("[native] ") +
                             storage_->GetString(class_name_id).ToStdString()));
        std::map<StringId, size_t>::iterator native_it;
        bool inserted_new_node;
        std::tie(native_it, inserted_new_node) =
            path->nodes[path_id].children.insert({native_class_name_id, 0});
        if (inserted_new_node) {
          native_it->second = path->nodes.size();
          path->nodes.emplace_back(PathFromRoot::Node{});

          path->nodes.back().class_name_id = native_class_name_id;
          path->nodes.back().depth = depth + 1;
          path->nodes.back().parent_id = path_id;
        }
        PathFromRoot::Node* new_output_tree_node =
            &path->nodes[native_it->second];

        new_output_tree_node->size += object_row_ref.native_size();
        new_output_tree_node->count++;
      }
    }

    // We have already handled this node and just need to get its i-th child.
    if (!children.empty()) {
      PERFETTO_CHECK(i < children.size());
      ObjectTable::Id child = children[i];
      auto child_row_ref =
          *storage_->mutable_heap_graph_object_table()->FindById(child);
      // Pop once the last child has been dispatched; `child` is already
      // copied out, so this is safe.
      if (++i == children.size())
        stack.pop_back();

      int32_t child_distance = child_row_ref.root_distance();
      int32_t n_distance = object_row_ref.root_distance();
      // Distances must have been populated by UpdateShortestPaths already.
      PERFETTO_CHECK(n_distance >= 0);
      PERFETTO_CHECK(child_distance >= 0);

      bool visited = path->visited.count(child);

      // Only descend along shortest-path edges into not-yet-visited objects.
      if (child_distance == n_distance + 1 && !visited) {
        path->visited.emplace(child);
        stack.emplace_back(StackElem{child_row_ref, path_id, 0, depth + 1, {}});
      }
    } else {
      stack.pop_back();
    }
  }
}
1002 
// Builds a flamegraph table for the heap graph identified by
// (current_upid, current_ts). Returns nullptr if no such graph was seen;
// returns a single error row if the graph is known to be truncated.
std::unique_ptr<tables::ExperimentalFlamegraphTable>
HeapGraphTracker::BuildFlamegraph(const int64_t current_ts,
                                  const UniquePid current_upid) {
  auto profile_type = storage_->InternString("graph");
  auto java_mapping = storage_->InternString("JAVA");

  std::unique_ptr<tables::ExperimentalFlamegraphTable> tbl(
      new tables::ExperimentalFlamegraphTable(storage_->mutable_string_pool()));

  auto it = roots_.find(std::make_pair(current_upid, current_ts));
  if (it == roots_.end()) {
    // TODO(fmayer): This should not be within the flame graph but some marker
    // in the UI.
    if (IsTruncated(current_upid, current_ts)) {
      // Emit a single placeholder row so the UI shows why data is missing.
      tables::ExperimentalFlamegraphTable::Row alloc_row{};
      alloc_row.ts = current_ts;
      alloc_row.upid = current_upid;
      alloc_row.profile_type = profile_type;
      alloc_row.depth = 0;
      alloc_row.name = storage_->InternString(
          "ERROR: INCOMPLETE GRAPH (try increasing buffer size)");
      alloc_row.map_name = java_mapping;
      alloc_row.count = 1;
      alloc_row.cumulative_count = 1;
      alloc_row.size = 1;
      alloc_row.cumulative_size = 1;
      alloc_row.parent_id = std::nullopt;
      tbl->Insert(alloc_row);
      return tbl;
    }
    // We haven't seen this graph, so we should raise an error.
    return nullptr;
  }

  const std::set<ObjectTable::RowNumber>& roots = it->second;
  auto* object_table = storage_->mutable_heap_graph_object_table();

  // First pass to calculate shortest paths
  for (ObjectTable::RowNumber root : roots) {
    UpdateShortestPaths(root.ToRowReference(object_table));
  }
  // Second pass: aggregate objects into the path tree.
  PathFromRoot init_path;
  for (ObjectTable::RowNumber root : roots) {
    FindPathFromRoot(root.ToRowReference(object_table), &init_path);
  }

  // Propagate sizes/counts bottom-up. Children are appended after their
  // parents (parent_id < i), so a reverse pass accumulates correctly.
  std::vector<int64_t> node_to_cumulative_size(init_path.nodes.size());
  std::vector<int64_t> node_to_cumulative_count(init_path.nodes.size());
  // i > 0 is to skip the artificial root node.
  for (size_t i = init_path.nodes.size() - 1; i > 0; --i) {
    const PathFromRoot::Node& node = init_path.nodes[i];

    node_to_cumulative_size[i] += node.size;
    node_to_cumulative_count[i] += node.count;
    node_to_cumulative_size[node.parent_id] += node_to_cumulative_size[i];
    node_to_cumulative_count[node.parent_id] += node_to_cumulative_count[i];
  }

  // Insert one flamegraph row per tree node, remembering the assigned row id
  // so children (which come later) can reference their parent.
  std::vector<FlamegraphId> node_to_id(init_path.nodes.size());
  // i = 1 is to skip the artificial root node.
  for (size_t i = 1; i < init_path.nodes.size(); ++i) {
    const PathFromRoot::Node& node = init_path.nodes[i];
    PERFETTO_CHECK(node.parent_id < i);
    std::optional<FlamegraphId> parent_id;
    if (node.parent_id != 0)
      parent_id = node_to_id[node.parent_id];
    const uint32_t depth = node.depth;

    tables::ExperimentalFlamegraphTable::Row alloc_row{};
    alloc_row.ts = current_ts;
    alloc_row.upid = current_upid;
    alloc_row.profile_type = profile_type;
    alloc_row.depth = depth;
    alloc_row.name = node.class_name_id;
    alloc_row.map_name = java_mapping;
    alloc_row.count = static_cast<int64_t>(node.count);
    alloc_row.cumulative_count =
        static_cast<int64_t>(node_to_cumulative_count[i]);
    alloc_row.size = static_cast<int64_t>(node.size);
    alloc_row.cumulative_size =
        static_cast<int64_t>(node_to_cumulative_size[i]);
    alloc_row.parent_id = parent_id;
    node_to_id[i] = tbl->Insert(alloc_row).id;
  }
  return tbl;
}
1089 
FinalizeAllProfiles()1090 void HeapGraphTracker::FinalizeAllProfiles() {
1091   if (!sequence_state_.empty()) {
1092     storage_->IncrementStats(stats::heap_graph_non_finalized_graph);
1093     // There might still be valuable data even though the trace is truncated.
1094     while (!sequence_state_.empty()) {
1095       FinalizeProfile(sequence_state_.begin()->first);
1096     }
1097   }
1098 }
1099 
IsTruncated(UniquePid upid,int64_t ts)1100 bool HeapGraphTracker::IsTruncated(UniquePid upid, int64_t ts) {
1101   // The graph was finalized but was missing packets.
1102   if (truncated_graphs_.find(std::make_pair(upid, ts)) !=
1103       truncated_graphs_.end()) {
1104     return true;
1105   }
1106 
1107   // Or the graph was never finalized, so is missing packets at the end.
1108   for (const auto& p : sequence_state_) {
1109     const SequenceState& sequence_state = p.second;
1110     if (sequence_state.current_upid == upid &&
1111         sequence_state.current_ts == ts) {
1112       return true;
1113     }
1114   }
1115   return false;
1116 }
1117 
InternRootTypeString(protos::pbzero::HeapGraphRoot::Type root_type)1118 StringId HeapGraphTracker::InternRootTypeString(
1119     protos::pbzero::HeapGraphRoot::Type root_type) {
1120   size_t idx = static_cast<size_t>(root_type);
1121   if (idx >= root_type_string_ids_.size()) {
1122     idx = static_cast<size_t>(protos::pbzero::HeapGraphRoot::ROOT_UNKNOWN);
1123   }
1124 
1125   return root_type_string_ids_[idx];
1126 }
1127 
InternTypeKindString(protos::pbzero::HeapGraphType::Kind kind)1128 StringId HeapGraphTracker::InternTypeKindString(
1129     protos::pbzero::HeapGraphType::Kind kind) {
1130   size_t idx = static_cast<size_t>(kind);
1131   if (idx >= type_kind_string_ids_.size()) {
1132     idx = static_cast<size_t>(protos::pbzero::HeapGraphType::KIND_UNKNOWN);
1133   }
1134 
1135   return type_kind_string_ids_[idx];
1136 }
1137 
1138 HeapGraphTracker::~HeapGraphTracker() = default;
1139 
1140 }  // namespace perfetto::trace_processor
1141