/*
 * Copyright (C) 2019 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "src/trace_processor/importers/proto/heap_graph_tracker.h"

#include "perfetto/ext/base/optional.h"
#include "perfetto/ext/base/string_splitter.h"
#include "perfetto/ext/base/string_utils.h"
#include "src/trace_processor/importers/proto/profiler_util.h"
#include "src/trace_processor/tables/profiler_tables.h"

#include <set>
#include <utility>

namespace perfetto {
namespace trace_processor {

namespace {

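// Invokes |fn| once per row of the heap_graph_reference table that belongs to
// the reference set of object |id|, relying on rows sharing a
// reference_set_id being stored contiguously. Stops early if |fn| returns
// false, or immediately if the object has no reference set.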
template <typename F>
void ForReferenceSet(const TraceStorage& storage,
                     tables::HeapGraphObjectTable::Id id,
                     F fn) {
  uint32_t row = *storage.heap_graph_object_table().id().IndexOf(id);
  base::Optional<uint32_t> reference_set_id =
      storage.heap_graph_object_table().reference_set_id()[row];
  if (!reference_set_id)
    return;
  uint32_t cur_reference_set_id;
  for (uint32_t reference_row = *reference_set_id;
       reference_row < storage.heap_graph_reference_table().row_count();
       ++reference_row) {
    cur_reference_set_id =
        storage.heap_graph_reference_table().reference_set_id()[reference_row];
    if (cur_reference_set_id != *reference_set_id)
      break;
    if (!fn(reference_row))
      break;
  }
}

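// Returns the ids of all objects directly referenced by |id|. Objects whose
// class is a weak, soft, finalizer or phantom reference yield no children, so
// reachability and retention are computed over strong references only.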
std::set<tables::HeapGraphObjectTable::Id> GetChildren(
    const TraceStorage& storage,
    tables::HeapGraphObjectTable::Id id) {
  uint32_t obj_row = *storage.heap_graph_object_table().id().IndexOf(id);
  uint32_t cls_row = *storage.heap_graph_class_table().id().IndexOf(
      storage.heap_graph_object_table().type_id()[obj_row]);

  StringPool::Id kind = storage.heap_graph_class_table().kind()[cls_row];
  base::Optional<StringPool::Id> weakref_kind =
      storage.string_pool().GetId("KIND_WEAK_REFERENCE");
  base::Optional<StringPool::Id> softref_kind =
      storage.string_pool().GetId("KIND_SOFT_REFERENCE");
  base::Optional<StringPool::Id> finalizerref_kind =
      storage.string_pool().GetId("KIND_FINALIZER_REFERENCE");
  base::Optional<StringPool::Id> phantomref_kind =
      storage.string_pool().GetId("KIND_PHANTOM_REFERENCE");

  if ((weakref_kind && kind == *weakref_kind) ||
      (softref_kind && kind == *softref_kind) ||
      (finalizerref_kind && kind == *finalizerref_kind) ||
      (phantomref_kind && kind == *phantomref_kind)) {
    // Do not follow weak / soft / finalizer / phantom references.
    return {};
  }

  std::set<tables::HeapGraphObjectTable::Id> children;
  ForReferenceSet(
      storage, id, [&storage, &children, id](uint32_t reference_row) {
        PERFETTO_CHECK(
            storage.heap_graph_reference_table().owner_id()[reference_row] ==
            id);
        auto opt_owned =
            storage.heap_graph_reference_table().owned_id()[reference_row];
        if (opt_owned) {
          children.emplace(*opt_owned);
        }
        return true;
      });
  return children;
}

struct ClassDescriptor {
  StringId name;
  base::Optional<StringId> location;

  bool operator<(const ClassDescriptor& other) const {
    return std::tie(name, location) < std::tie(other.name, other.location);
  }
};

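// Returns the (name, location) pair describing the class of |obj_id|.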
ClassDescriptor GetClassDescriptor(const TraceStorage& storage,
                                   tables::HeapGraphObjectTable::Id obj_id) {
  auto obj_idx = storage.heap_graph_object_table().id().IndexOf(obj_id).value();
  auto type_id = storage.heap_graph_object_table().type_id()[obj_idx];
  auto type_idx =
      storage.heap_graph_class_table().id().IndexOf(type_id).value();
  return {storage.heap_graph_class_table().name()[type_idx],
          storage.heap_graph_class_table().location()[type_idx]};
}

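// Returns the object referenced from reference set |ref_set_id| through the
// field named |field_name|, or nullopt if no such reference exists.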
base::Optional<tables::HeapGraphObjectTable::Id> GetReferredObj(
    const TraceStorage& storage,
    uint32_t ref_set_id,
    const std::string& field_name) {
  const auto& refs_tbl = storage.heap_graph_reference_table();

  auto filtered = refs_tbl.Filter(
      {refs_tbl.reference_set_id().eq(ref_set_id),
       refs_tbl.field_name().eq(NullTermStringView(field_name))});
  auto refs_it = filtered.IterateRows();
  if (!refs_it) {
    return {};
  }

  SqlValue sql_owned = refs_it.Get(static_cast<uint32_t>(
      tables::HeapGraphReferenceTable::ColumnIndex::owned_id));
  if (sql_owned.is_null()) {
    return base::nullopt;
  }
  return tables::HeapGraphObjectTable::Id(
      static_cast<uint32_t>(sql_owned.AsLong()));
}

// Maps from normalized class name and location, to superclass.
std::map<ClassDescriptor, ClassDescriptor>
BuildSuperclassMap(UniquePid upid, int64_t ts, TraceStorage* storage) {
  std::map<ClassDescriptor, ClassDescriptor> superclass_map;

  // Resolve superclasses by iterating heap graph objects and identifying the
  // superClass field.
  const auto& objects_tbl = storage->heap_graph_object_table();
  auto filtered = objects_tbl.Filter(
      {objects_tbl.upid().eq(upid), objects_tbl.graph_sample_ts().eq(ts)});
  for (auto obj_it = filtered.IterateRows(); obj_it; obj_it.Next()) {
    auto obj_id = tables::HeapGraphObjectTable::Id(static_cast<uint32_t>(
        obj_it
            .Get(static_cast<uint32_t>(
                tables::HeapGraphObjectTable::ColumnIndex::id))
            .AsLong()));
    auto class_descriptor = GetClassDescriptor(*storage, obj_id);
    auto normalized =
        GetNormalizedType(storage->GetString(class_descriptor.name));
    // superClass pointers are stored on the static class objects; ignore
    // arrays (as they are generated objects).
    if (!normalized.is_static_class || normalized.number_of_arrays > 0)
      continue;

    auto opt_ref_set_id = obj_it.Get(static_cast<uint32_t>(
        tables::HeapGraphObjectTable::ColumnIndex::reference_set_id));
    if (opt_ref_set_id.is_null())
      continue;
    auto ref_set_id = static_cast<uint32_t>(opt_ref_set_id.AsLong());
    auto super_obj_id =
        GetReferredObj(*storage, ref_set_id, "java.lang.Class.superClass");
    if (!super_obj_id) {
      // This is expected to be missing for java.lang.Object and primitive
      // types.
      continue;
    }

    // Look up the type of the superclass object.
    auto super_class_descriptor = GetClassDescriptor(*storage, *super_obj_id);
    auto super_class_name =
        NormalizeTypeName(storage->GetString(super_class_descriptor.name));
    StringId super_class_id = storage->InternString(super_class_name);
    StringId class_id = storage->InternString(normalized.name);
    superclass_map[{class_id, class_descriptor.location}] = {
        super_class_id, super_class_descriptor.location};
  }
  return superclass_map;
}

}  // namespace

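// Marks |id| as a GC root of the given |type| and runs a breadth-first walk
// from it, marking every strongly reachable object and updating its shortest
// distance to any root (root_distance).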
void MarkRoot(TraceStorage* storage,
              tables::HeapGraphObjectTable::Id id,
              StringPool::Id type) {
  uint32_t row = *storage->heap_graph_object_table().id().IndexOf(id);
  storage->mutable_heap_graph_object_table()->mutable_root_type()->Set(row,
                                                                       type);

  // Calculate shortest distance to a GC root.
  std::deque<std::pair<int32_t, tables::HeapGraphObjectTable::Id>>
      reachable_nodes{{0, id}};
  while (!reachable_nodes.empty()) {
    tables::HeapGraphObjectTable::Id cur_node;
    int32_t distance;
    std::tie(distance, cur_node) = reachable_nodes.front();
    reachable_nodes.pop_front();
    uint32_t cur_row =
        *storage->heap_graph_object_table().id().IndexOf(cur_node);
    int32_t cur_distance =
        storage->heap_graph_object_table().root_distance()[cur_row];
    if (cur_distance == -1 || cur_distance > distance) {
      if (cur_distance == -1) {
        storage->mutable_heap_graph_object_table()->mutable_reachable()->Set(
            cur_row, 1);
      }
      storage->mutable_heap_graph_object_table()->mutable_root_distance()->Set(
          cur_row, distance);

      for (tables::HeapGraphObjectTable::Id child_node :
           GetChildren(*storage, cur_node)) {
        uint32_t child_row =
            *storage->heap_graph_object_table().id().IndexOf(child_node);
        int32_t child_distance =
            storage->heap_graph_object_table().root_distance()[child_row];
        if (child_distance == -1 || child_distance > distance + 1)
          reachable_nodes.emplace_back(distance + 1, child_node);
      }
    }
  }
}

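// For a string of the form "java.lang.Class<Foo>" returns "Foo"; returns
// nullopt for any other type name.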
base::Optional<base::StringView> GetStaticClassTypeName(base::StringView type) {
  static const base::StringView kJavaClassTemplate("java.lang.Class<");
  if (!type.empty() && type.at(type.size() - 1) == '>' &&
      type.substr(0, kJavaClassTemplate.size()) == kJavaClassTemplate) {
    return type.substr(kJavaClassTemplate.size(),
                       type.size() - kJavaClassTemplate.size() - 1);
  }
  return {};
}

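// Counts the trailing "[]" pairs on a type name, e.g. 2 for "int[][]".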
size_t NumberOfArrays(base::StringView type) {
  if (type.size() < 2)
    return 0;

  size_t arrays = 0;
  while (type.size() >= 2 * (arrays + 1) &&
         memcmp(type.end() - 2 * (arrays + 1), "[]", 2) == 0) {
    arrays++;
  }

  return arrays;
}

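// Strips the "java.lang.Class<...>" wrapper and any trailing "[]" pairs from
// |type|, remembering what was removed so the name can be denormalized again.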
NormalizedType GetNormalizedType(base::StringView type) {
  auto static_class_type_name = GetStaticClassTypeName(type);
  if (static_class_type_name.has_value()) {
    type = static_class_type_name.value();
  }
  size_t number_of_arrays = NumberOfArrays(type);
  return {base::StringView(type.data(), type.size() - number_of_arrays * 2),
          static_class_type_name.has_value(), number_of_arrays};
}

base::StringView NormalizeTypeName(base::StringView type) {
  return GetNormalizedType(type).name;
}

std::string DenormalizeTypeName(NormalizedType normalized,
                                base::StringView deobfuscated_type_name) {
  std::string result = deobfuscated_type_name.ToStdString();
  for (size_t i = 0; i < normalized.number_of_arrays; ++i) {
    result += "[]";
  }
  if (normalized.is_static_class) {
    result = "java.lang.Class<" + result + ">";
  }
  return result;
}

HeapGraphTracker::HeapGraphTracker(TraceProcessorContext* context)
    : context_(context) {}

HeapGraphTracker::SequenceState& HeapGraphTracker::GetOrCreateSequence(
    uint32_t seq_id) {
  return sequence_state_[seq_id];
}

bool HeapGraphTracker::SetPidAndTimestamp(SequenceState* sequence_state,
                                          UniquePid upid,
                                          int64_t ts) {
  if (sequence_state->current_upid != 0 &&
      sequence_state->current_upid != upid) {
    context_->storage->IncrementStats(stats::heap_graph_non_finalized_graph);
    return false;
  }
  if (sequence_state->current_ts != 0 && sequence_state->current_ts != ts) {
    context_->storage->IncrementStats(stats::heap_graph_non_finalized_graph);
    return false;
  }
  sequence_state->current_upid = upid;
  sequence_state->current_ts = ts;
  return true;
}

tables::HeapGraphObjectTable::Id HeapGraphTracker::GetOrInsertObject(
    SequenceState* sequence_state,
    uint64_t object_id) {
  auto it = sequence_state->object_id_to_db_id.find(object_id);
  if (it == sequence_state->object_id_to_db_id.end()) {
    auto id_and_row =
        context_->storage->mutable_heap_graph_object_table()->Insert(
            {sequence_state->current_upid,
             sequence_state->current_ts,
             -1,
             /*reference_set_id=*/base::nullopt,
             /*reachable=*/0,
             {},
             /*root_type=*/base::nullopt,
             /*root_distance*/ -1});
    bool inserted;
    std::tie(it, inserted) =
        sequence_state->object_id_to_db_id.emplace(object_id, id_and_row.id);
  }
  return it->second;
}

tables::HeapGraphClassTable::Id HeapGraphTracker::GetOrInsertType(
    SequenceState* sequence_state,
    uint64_t type_id) {
  auto it = sequence_state->type_id_to_db_id.find(type_id);
  if (it == sequence_state->type_id_to_db_id.end()) {
    auto id_and_row =
        context_->storage->mutable_heap_graph_class_table()->Insert(
            {StringPool::Id(), base::nullopt, base::nullopt});
    bool inserted;
    std::tie(it, inserted) =
        sequence_state->type_id_to_db_id.emplace(type_id, id_and_row.id);
  }
  return it->second;
}

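// Ingests a single object node of the heap graph dump: records its type and
// self_size and inserts one heap_graph_reference row per referred object, all
// sharing a newly allocated reference_set_id. Objects with a zero self_size
// or without field name ids are remembered so that the missing information
// can be filled in from the interned type data in FinalizeProfile.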
void HeapGraphTracker::AddObject(uint32_t seq_id,
                                 UniquePid upid,
                                 int64_t ts,
                                 SourceObject obj) {
  SequenceState& sequence_state = GetOrCreateSequence(seq_id);

  if (!SetPidAndTimestamp(&sequence_state, upid, ts))
    return;

  sequence_state.last_object_id = obj.object_id;

  tables::HeapGraphObjectTable::Id owner_id =
      GetOrInsertObject(&sequence_state, obj.object_id);
  tables::HeapGraphClassTable::Id type_id =
      GetOrInsertType(&sequence_state, obj.type_id);

  auto* hgo = context_->storage->mutable_heap_graph_object_table();
  uint32_t row = *hgo->id().IndexOf(owner_id);

  hgo->mutable_self_size()->Set(row, static_cast<int64_t>(obj.self_size));
  hgo->mutable_type_id()->Set(row, type_id);

  if (obj.self_size == 0)
    sequence_state.deferred_size_objects_for_type_[type_id].push_back(owner_id);

  uint32_t reference_set_id =
      context_->storage->heap_graph_reference_table().row_count();
  bool any_references = false;

  for (size_t i = 0; i < obj.referred_objects.size(); ++i) {
    uint64_t owned_object_id = obj.referred_objects[i];
    // An owned_object_id of 0 means the reference field was unset.
    base::Optional<tables::HeapGraphObjectTable::Id> owned_id;
    if (owned_object_id != 0)
      owned_id = GetOrInsertObject(&sequence_state, owned_object_id);

    auto ref_id_and_row =
        context_->storage->mutable_heap_graph_reference_table()->Insert(
            {reference_set_id,
             owner_id,
             owned_id,
             {},
             {},
             /*deobfuscated_field_name=*/base::nullopt});
    if (!obj.field_name_ids.empty()) {
      sequence_state.references_for_field_name_id[obj.field_name_ids[i]]
          .push_back(ref_id_and_row.id);
    }
    any_references = true;
  }
  if (any_references) {
    uint32_t owner_row =
        *context_->storage->heap_graph_object_table().id().IndexOf(owner_id);
    context_->storage->mutable_heap_graph_object_table()
        ->mutable_reference_set_id()
        ->Set(owner_row, reference_set_id);
    if (obj.field_name_ids.empty()) {
      sequence_state.deferred_reference_objects_for_type_[type_id].push_back(
          owner_id);
    }
  }
}

void HeapGraphTracker::AddRoot(uint32_t seq_id,
                               UniquePid upid,
                               int64_t ts,
                               SourceRoot root) {
  SequenceState& sequence_state = GetOrCreateSequence(seq_id);
  if (!SetPidAndTimestamp(&sequence_state, upid, ts))
    return;

  sequence_state.current_roots.emplace_back(std::move(root));
}

void HeapGraphTracker::AddInternedLocationName(uint32_t seq_id,
                                               uint64_t intern_id,
                                               StringPool::Id strid) {
  SequenceState& sequence_state = GetOrCreateSequence(seq_id);
  sequence_state.interned_location_names.emplace(intern_id, strid);
}

void HeapGraphTracker::AddInternedType(uint32_t seq_id,
                                       uint64_t intern_id,
                                       StringPool::Id strid,
                                       base::Optional<uint64_t> location_id,
                                       uint64_t object_size,
                                       std::vector<uint64_t> field_name_ids,
                                       uint64_t superclass_id,
                                       uint64_t classloader_id,
                                       bool no_fields,
                                       StringPool::Id kind) {
  SequenceState& sequence_state = GetOrCreateSequence(seq_id);
  sequence_state.interned_types[intern_id].name = strid;
  sequence_state.interned_types[intern_id].location_id = location_id;
  sequence_state.interned_types[intern_id].object_size = object_size;
  sequence_state.interned_types[intern_id].field_name_ids =
      std::move(field_name_ids);
  sequence_state.interned_types[intern_id].superclass_id = superclass_id;
  sequence_state.interned_types[intern_id].classloader_id = classloader_id;
  sequence_state.interned_types[intern_id].no_fields = no_fields;
  sequence_state.interned_types[intern_id].kind = kind;
}

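// Interned field strings arrive as "<type> <name>"; split them, intern both
// parts and backfill any references that were already recorded against this
// field id.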
void HeapGraphTracker::AddInternedFieldName(uint32_t seq_id,
                                            uint64_t intern_id,
                                            base::StringView str) {
  SequenceState& sequence_state = GetOrCreateSequence(seq_id);
  size_t space = str.find(' ');
  base::StringView type;
  if (space != base::StringView::npos) {
    type = str.substr(0, space);
    str = str.substr(space + 1);
  }
  StringPool::Id field_name = context_->storage->InternString(str);
  StringPool::Id type_name = context_->storage->InternString(type);

  sequence_state.interned_fields.emplace(intern_id,
                                         InternedField{field_name, type_name});

  auto it = sequence_state.references_for_field_name_id.find(intern_id);
  if (it != sequence_state.references_for_field_name_id.end()) {
    auto hgr = context_->storage->mutable_heap_graph_reference_table();
    for (const tables::HeapGraphReferenceTable::Id reference_id : it->second) {
      uint32_t row = *hgr->id().IndexOf(reference_id);
      hgr->mutable_field_name()->Set(row, field_name);
      hgr->mutable_field_type_name()->Set(row, type_name);

      field_to_rows_[field_name].emplace_back(row);
    }
  }
}

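// Packet indices on a sequence are expected to be consecutive and to start at
// 0; any gap means packets were dropped and the resulting graph is truncated.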
void HeapGraphTracker::SetPacketIndex(uint32_t seq_id, uint64_t index) {
  SequenceState& sequence_state = GetOrCreateSequence(seq_id);
  bool dropped_packet = false;
  // perfetto_hprof starts counting at index = 0.
  if (!sequence_state.prev_index && index != 0) {
    dropped_packet = true;
  }

  if (sequence_state.prev_index && *sequence_state.prev_index + 1 != index) {
    dropped_packet = true;
  }

  if (dropped_packet) {
    sequence_state.truncated = true;
    if (sequence_state.prev_index) {
      PERFETTO_ELOG("Missing packets between %" PRIu64 " and %" PRIu64,
                    *sequence_state.prev_index, index);
    } else {
      PERFETTO_ELOG("Invalid first packet index %" PRIu64 " (!= 0)", index);
    }

    context_->storage->IncrementIndexedStats(
        stats::heap_graph_missing_packet,
        static_cast<int>(sequence_state.current_upid));
  }
  sequence_state.prev_index = index;
}

// This only works on Android S+ traces. We need to have ingested the whole
// profile before calling this function (e.g. in FinalizeProfile).
HeapGraphTracker::InternedType* HeapGraphTracker::GetSuperClass(
    SequenceState* sequence_state,
    const InternedType* current_type) {
  if (current_type->superclass_id) {
    auto it = sequence_state->interned_types.find(current_type->superclass_id);
    if (it != sequence_state->interned_types.end())
      return &it->second;
  }
  context_->storage->IncrementIndexedStats(
      stats::heap_graph_malformed_packet,
      static_cast<int>(sequence_state->current_upid));
  return nullptr;
}

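// Called once the whole dump for a sequence has been ingested: resolves the
// interned types (names, locations, class loaders), patches the deferred
// object sizes and field names, registers the GC roots and computes root
// distances via MarkRoot, and finally populates superclass links.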
void HeapGraphTracker::FinalizeProfile(uint32_t seq_id) {
  SequenceState& sequence_state = GetOrCreateSequence(seq_id);
  if (sequence_state.truncated) {
    truncated_graphs_.emplace(
        std::make_pair(sequence_state.current_upid, sequence_state.current_ts));
  }

  // We do this in FinalizeProfile because the interned_location_names get
  // written at the end of the dump.
  for (const auto& p : sequence_state.interned_types) {
    uint64_t id = p.first;
    const InternedType& interned_type = p.second;
    base::Optional<StringPool::Id> location_name;
    if (interned_type.location_id) {
      auto it = sequence_state.interned_location_names.find(
          *interned_type.location_id);
      if (it == sequence_state.interned_location_names.end()) {
        context_->storage->IncrementIndexedStats(
            stats::heap_graph_invalid_string_id,
            static_cast<int>(sequence_state.current_upid));
      } else {
        location_name = it->second;
      }
    }
    tables::HeapGraphClassTable::Id type_id =
        GetOrInsertType(&sequence_state, id);

    auto sz_obj_it =
        sequence_state.deferred_size_objects_for_type_.find(type_id);
    if (sz_obj_it != sequence_state.deferred_size_objects_for_type_.end()) {
      for (tables::HeapGraphObjectTable::Id obj_id : sz_obj_it->second) {
        auto* hgo = context_->storage->mutable_heap_graph_object_table();
        uint32_t row = *hgo->id().IndexOf(obj_id);
        hgo->mutable_self_size()->Set(
            row, static_cast<int64_t>(interned_type.object_size));
      }
      sequence_state.deferred_size_objects_for_type_.erase(sz_obj_it);
    }

    auto ref_obj_it =
        sequence_state.deferred_reference_objects_for_type_.find(type_id);
    if (ref_obj_it !=
        sequence_state.deferred_reference_objects_for_type_.end()) {
      for (tables::HeapGraphObjectTable::Id obj_id : ref_obj_it->second) {
        const InternedType* current_type = &interned_type;
        if (interned_type.no_fields) {
          continue;
        }
        size_t field_offset_in_cls = 0;
        ForReferenceSet(
            *context_->storage, obj_id,
            [this, &current_type, &sequence_state,
             &field_offset_in_cls](uint32_t reference_row) {
              while (current_type && field_offset_in_cls >=
                                         current_type->field_name_ids.size()) {
                size_t prev_type_size = current_type->field_name_ids.size();
                current_type = GetSuperClass(&sequence_state, current_type);
                field_offset_in_cls -= prev_type_size;
              }

              if (!current_type) {
                return false;
              }

              uint64_t field_id =
                  current_type->field_name_ids[field_offset_in_cls++];
              auto it = sequence_state.interned_fields.find(field_id);
              if (it == sequence_state.interned_fields.end()) {
                PERFETTO_ELOG("Invalid field id.");
                context_->storage->IncrementIndexedStats(
                    stats::heap_graph_malformed_packet,
                    static_cast<int>(sequence_state.current_upid));
                return true;
              }
              const InternedField& field = it->second;
              auto hgr =
                  context_->storage->mutable_heap_graph_reference_table();
              hgr->mutable_field_name()->Set(reference_row, field.name);
              hgr->mutable_field_type_name()->Set(reference_row,
                                                  field.type_name);
              field_to_rows_[field.name].emplace_back(reference_row);
              return true;
            });
      }
      sequence_state.deferred_reference_objects_for_type_.erase(ref_obj_it);
    }

    auto* hgc = context_->storage->mutable_heap_graph_class_table();
    uint32_t row = *hgc->id().IndexOf(type_id);
    hgc->mutable_name()->Set(row, interned_type.name);
    if (interned_type.classloader_id) {
      auto classloader_object_id =
          GetOrInsertObject(&sequence_state, interned_type.classloader_id);
      hgc->mutable_classloader_id()->Set(row, classloader_object_id.value);
    }
    if (location_name)
      hgc->mutable_location()->Set(row, *location_name);
    hgc->mutable_kind()->Set(row, interned_type.kind);

    base::StringView normalized_type =
        NormalizeTypeName(context_->storage->GetString(interned_type.name));

    // Annoyingly, some apps have a relative path to base.apk. We take this to
    // mean the main package, so we treat it as if the location was unknown.
    bool is_base_apk = false;
    if (location_name) {
      base::StringView base_apk("base.apk");
      is_base_apk = context_->storage->GetString(*location_name)
                        .substr(0, base_apk.size()) == base_apk;
    }

    if (location_name && !is_base_apk) {
      base::Optional<std::string> package_name =
          PackageFromLocation(context_->storage.get(),
                              context_->storage->GetString(*location_name));
      if (package_name) {
        class_to_rows_[std::make_pair(
                           context_->storage->InternString(
                               base::StringView(*package_name)),
                           context_->storage->InternString(normalized_type))]
            .emplace_back(type_id);
      }
    } else {
      // TODO(b/153552977): Remove this workaround.
      // For profiles collected with old versions of perfetto_hprof, we do not
      // have any location information. We store them using the nullopt
      // location, and assume they are all part of the main APK.
      //
      // This is to keep ingestion of old profiles working (especially
      // important for the UI).
      class_to_rows_[std::make_pair(
                         base::nullopt,
                         context_->storage->InternString(normalized_type))]
          .emplace_back(type_id);
    }
  }

  if (!sequence_state.deferred_size_objects_for_type_.empty()) {
    context_->storage->IncrementIndexedStats(
        stats::heap_graph_malformed_packet,
        static_cast<int>(sequence_state.current_upid));
  }

  if (!sequence_state.deferred_reference_objects_for_type_.empty()) {
    context_->storage->IncrementIndexedStats(
        stats::heap_graph_malformed_packet,
        static_cast<int>(sequence_state.current_upid));
  }

  for (const SourceRoot& root : sequence_state.current_roots) {
    for (uint64_t obj_id : root.object_ids) {
      auto it = sequence_state.object_id_to_db_id.find(obj_id);
      // This can only happen for an invalid type string id, which is already
      // reported as an error. Silently continue here.
      if (it == sequence_state.object_id_to_db_id.end())
        continue;

      tables::HeapGraphObjectTable::Id db_id = it->second;
      auto it_and_success = roots_[std::make_pair(sequence_state.current_upid,
                                                  sequence_state.current_ts)]
                                .emplace(db_id);
      if (it_and_success.second)
        MarkRoot(context_->storage.get(), db_id, root.root_type);
    }
  }

  PopulateSuperClasses(sequence_state);
  sequence_state_.erase(seq_id);
}

// TODO(fmayer): For Android S+ traces, use the superclass_id from the trace.
void HeapGraphTracker::PopulateSuperClasses(const SequenceState& seq) {
  // Maps from normalized class name and location, to superclass.
  std::map<ClassDescriptor, ClassDescriptor> superclass_map =
      BuildSuperclassMap(seq.current_upid, seq.current_ts,
                         context_->storage.get());

  auto* classes_tbl = context_->storage->mutable_heap_graph_class_table();
  std::map<ClassDescriptor, tables::HeapGraphClassTable::Id> class_to_id;
  for (uint32_t idx = 0; idx < classes_tbl->row_count(); ++idx) {
    class_to_id[{classes_tbl->name()[idx], classes_tbl->location()[idx]}] =
        classes_tbl->id()[idx];
  }

  // Iterate through the classes table and annotate with superclasses.
  // We iterate all rows of the classes table (even though the superclass
  // mapping was generated on the current sequence); if we cannot identify
  // a superclass we just skip the row.
  for (uint32_t idx = 0; idx < classes_tbl->row_count(); ++idx) {
    auto name = context_->storage->GetString(classes_tbl->name()[idx]);
    auto location = classes_tbl->location()[idx];
    auto normalized = GetNormalizedType(name);
    if (normalized.is_static_class || normalized.number_of_arrays > 0)
      continue;

    StringId class_name_id = context_->storage->InternString(normalized.name);
    auto map_it = superclass_map.find({class_name_id, location});
    if (map_it == superclass_map.end()) {
      continue;
    }

    // Find the row for the superclass id.
    auto superclass_it = class_to_id.find(map_it->second);
    if (superclass_it == class_to_id.end()) {
      // This can happen for traces that were captured before the patch to
      // explicitly emit interned types (meaning classes without live
      // instances would not appear here).
      continue;
    }
    auto superclass_id = superclass_it->second;
    // Mutate the superclass column.
    classes_tbl->mutable_superclass_id()->Set(idx, superclass_id);
  }
}

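// Computes the shortest-path tree from a GC root: an iterative DFS from |id|
// that only follows references where the child's root_distance is exactly one
// larger than the parent's, merging objects of the same class under the same
// parent into a single node of |path|.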
void FindPathFromRoot(TraceStorage* storage,
                      tables::HeapGraphObjectTable::Id id,
                      PathFromRoot* path) {
  // We have long retention chains (e.g. from LinkedList). If we use the stack
  // here, we risk running out of stack space. This is why we use a vector to
  // simulate the stack.
  struct StackElem {
    tables::HeapGraphObjectTable::Id node;  // Node in the original graph.
    size_t parent_id;  // id of parent node in the result tree.
    size_t i;          // Index of the next child of this node to handle.
    uint32_t depth;    // Depth in the resulting tree
                       // (including artificial root).
    std::vector<tables::HeapGraphObjectTable::Id> children;
  };

  std::vector<StackElem> stack{{id, PathFromRoot::kRoot, 0, 0, {}}};

  while (!stack.empty()) {
    tables::HeapGraphObjectTable::Id n = stack.back().node;
    uint32_t row = *storage->heap_graph_object_table().id().IndexOf(n);
    size_t parent_id = stack.back().parent_id;
    uint32_t depth = stack.back().depth;
    size_t& i = stack.back().i;
    std::vector<tables::HeapGraphObjectTable::Id>& children =
        stack.back().children;

    tables::HeapGraphClassTable::Id type_id =
        storage->heap_graph_object_table().type_id()[row];

    uint32_t type_row =
        *storage->heap_graph_class_table().id().IndexOf(type_id);
    base::Optional<StringPool::Id> opt_class_name_id =
        storage->heap_graph_class_table().deobfuscated_name()[type_row];
    if (!opt_class_name_id) {
      opt_class_name_id = storage->heap_graph_class_table().name()[type_row];
    }
    PERFETTO_CHECK(opt_class_name_id);
    StringPool::Id class_name_id = *opt_class_name_id;
    base::Optional<StringPool::Id> root_type =
        storage->heap_graph_object_table().root_type()[row];
    if (root_type) {
      class_name_id = storage->InternString(base::StringView(
          storage->GetString(class_name_id).ToStdString() + " [" +
          storage->GetString(*root_type).ToStdString() + "]"));
    }
    auto it = path->nodes[parent_id].children.find(class_name_id);
    if (it == path->nodes[parent_id].children.end()) {
      size_t path_id = path->nodes.size();
      path->nodes.emplace_back(PathFromRoot::Node{});
      std::tie(it, std::ignore) =
          path->nodes[parent_id].children.emplace(class_name_id, path_id);
      path->nodes.back().class_name_id = class_name_id;
      path->nodes.back().depth = depth;
      path->nodes.back().parent_id = parent_id;
    }
    size_t path_id = it->second;
    PathFromRoot::Node* output_tree_node = &path->nodes[path_id];

    if (i == 0) {
      // This is the first time we are looking at this node, so add its
      // size to the relevant node in the resulting tree.
      output_tree_node->size +=
          storage->heap_graph_object_table().self_size()[row];
      output_tree_node->count++;
      std::set<tables::HeapGraphObjectTable::Id> children_set =
          GetChildren(*storage, n);
      children.assign(children_set.cbegin(), children_set.cend());
      PERFETTO_CHECK(children.size() == children_set.size());
    }
    // Otherwise we have already handled this node and just need to get its
    // i-th child.
    if (!children.empty()) {
      PERFETTO_CHECK(i < children.size());
      tables::HeapGraphObjectTable::Id child = children[i];
      uint32_t child_row =
          *storage->heap_graph_object_table().id().IndexOf(child);
      if (++i == children.size())
        stack.pop_back();

      int32_t child_distance =
          storage->heap_graph_object_table().root_distance()[child_row];
      int32_t n_distance =
          storage->heap_graph_object_table().root_distance()[row];
      PERFETTO_CHECK(n_distance >= 0);
      PERFETTO_CHECK(child_distance >= 0);

      bool visited = path->visited.count(child);

      if (child_distance == n_distance + 1 && !visited) {
        path->visited.emplace(child);
        stack.emplace_back(StackElem{child, path_id, 0, depth + 1, {}});
      }
    } else {
      stack.pop_back();
    }
  }
}

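// Builds the flamegraph table for the heap graph captured at
// (current_upid, current_ts): aggregates the objects along shortest paths
// from the GC roots and emits one row per class node with self and cumulative
// sizes and counts. Truncated graphs yield a single error node instead.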
std::unique_ptr<tables::ExperimentalFlamegraphNodesTable>
HeapGraphTracker::BuildFlamegraph(const int64_t current_ts,
                                  const UniquePid current_upid) {
  auto profile_type = context_->storage->InternString("graph");
  auto java_mapping = context_->storage->InternString("JAVA");

  std::unique_ptr<tables::ExperimentalFlamegraphNodesTable> tbl(
      new tables::ExperimentalFlamegraphNodesTable(
          context_->storage->mutable_string_pool(), nullptr));

  auto it = roots_.find(std::make_pair(current_upid, current_ts));
  if (it == roots_.end()) {
    // TODO(fmayer): This should not be within the flame graph but some marker
    // in the UI.
    if (IsTruncated(current_upid, current_ts)) {
      tables::ExperimentalFlamegraphNodesTable::Row alloc_row{};
      alloc_row.ts = current_ts;
      alloc_row.upid = current_upid;
      alloc_row.profile_type = profile_type;
      alloc_row.depth = 0;
      alloc_row.name =
          context_->storage->InternString("ERROR: INCOMPLETE GRAPH");
      alloc_row.map_name = java_mapping;
      alloc_row.count = 1;
      alloc_row.cumulative_count = 1;
      alloc_row.size = 1;
      alloc_row.cumulative_size = 1;
      alloc_row.parent_id = base::nullopt;
      tbl->Insert(alloc_row);
      return tbl;
    }
    // We haven't seen this graph, so we should raise an error.
    return nullptr;
  }

  const std::set<tables::HeapGraphObjectTable::Id>& roots = it->second;

  PathFromRoot init_path;
  for (tables::HeapGraphObjectTable::Id root : roots) {
    FindPathFromRoot(context_->storage.get(), root, &init_path);
  }

  std::vector<int32_t> node_to_cumulative_size(init_path.nodes.size());
  std::vector<int32_t> node_to_cumulative_count(init_path.nodes.size());
  // i > 0 is to skip the artificial root node.
  for (size_t i = init_path.nodes.size() - 1; i > 0; --i) {
    const PathFromRoot::Node& node = init_path.nodes[i];

    node_to_cumulative_size[i] += node.size;
    node_to_cumulative_count[i] += node.count;
    node_to_cumulative_size[node.parent_id] += node_to_cumulative_size[i];
    node_to_cumulative_count[node.parent_id] += node_to_cumulative_count[i];
  }

  std::vector<FlamegraphId> node_to_id(init_path.nodes.size());
  // i = 1 is to skip the artificial root node.
  for (size_t i = 1; i < init_path.nodes.size(); ++i) {
    const PathFromRoot::Node& node = init_path.nodes[i];
    PERFETTO_CHECK(node.parent_id < i);
    base::Optional<FlamegraphId> parent_id;
    if (node.parent_id != 0)
      parent_id = node_to_id[node.parent_id];
    const uint32_t depth = node.depth;

    tables::ExperimentalFlamegraphNodesTable::Row alloc_row{};
    alloc_row.ts = current_ts;
    alloc_row.upid = current_upid;
    alloc_row.profile_type = profile_type;
    alloc_row.depth = depth;
    alloc_row.name = node.class_name_id;
    alloc_row.map_name = java_mapping;
    alloc_row.count = static_cast<int64_t>(node.count);
    alloc_row.cumulative_count =
        static_cast<int64_t>(node_to_cumulative_count[i]);
    alloc_row.size = static_cast<int64_t>(node.size);
    alloc_row.cumulative_size =
        static_cast<int64_t>(node_to_cumulative_size[i]);
    alloc_row.parent_id = parent_id;
    node_to_id[i] = tbl->Insert(alloc_row).id;
  }
  return tbl;
}

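// Finalizes any sequences that are still open at the end of the trace, so
// that partial data from truncated traces is still ingested.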
void HeapGraphTracker::NotifyEndOfFile() {
  if (!sequence_state_.empty()) {
    context_->storage->IncrementStats(stats::heap_graph_non_finalized_graph);
    // There might still be valuable data even though the trace is truncated.
    while (!sequence_state_.empty()) {
      FinalizeProfile(sequence_state_.begin()->first);
    }
  }
}

bool HeapGraphTracker::IsTruncated(UniquePid upid, int64_t ts) {
  // The graph was finalized but was missing packets.
  if (truncated_graphs_.find(std::make_pair(upid, ts)) !=
      truncated_graphs_.end()) {
    return true;
  }

  // Or the graph was never finalized, so is missing packets at the end.
  for (const auto& p : sequence_state_) {
    const SequenceState& sequence_state = p.second;
    if (sequence_state.current_upid == upid &&
        sequence_state.current_ts == ts) {
      return true;
    }
  }
  return false;
}

HeapGraphTracker::~HeapGraphTracker() = default;

}  // namespace trace_processor
}  // namespace perfetto