/*
 * Copyright (C) 2019 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef SRC_TRACE_PROCESSOR_IMPORTERS_PROTO_HEAP_GRAPH_TRACKER_H_
#define SRC_TRACE_PROCESSOR_IMPORTERS_PROTO_HEAP_GRAPH_TRACKER_H_

#include <cstdint>
#include <map>
#include <memory>
#include <optional>
#include <set>
#include <string>
#include <utility>
#include <vector>

#include "perfetto/ext/base/flat_hash_map.h"
#include "perfetto/ext/base/string_view.h"

#include "protos/perfetto/trace/profiling/heap_graph.pbzero.h"
#include "src/trace_processor/storage/trace_storage.h"
#include "src/trace_processor/types/trace_processor_context.h"

namespace perfetto {
namespace trace_processor {

class TraceProcessorContext;

struct NormalizedType {
  base::StringView name;
  bool is_static_class;
  size_t number_of_arrays;
};

struct PathFromRoot {
  static constexpr size_t kRoot = 0;
  struct Node {
    uint32_t depth = 0;
    // Invariant: parent_id < id of this node.
    size_t parent_id = 0;
    int64_t size = 0;
    int64_t count = 0;
    StringId class_name_id = {};
    std::map<StringId, size_t> children;
  };
  std::vector<Node> nodes{Node{}};
  std::set<tables::HeapGraphObjectTable::Id> visited;
};

void MarkRoot(TraceStorage*,
              tables::HeapGraphObjectTable::RowReference,
              StringId type);
void UpdateShortestPaths(TraceStorage* s,
                         tables::HeapGraphObjectTable::RowReference row_ref);
void FindPathFromRoot(TraceStorage* storage,
                      tables::HeapGraphObjectTable::RowReference,
                      PathFromRoot* path);

std::optional<base::StringView> GetStaticClassTypeName(base::StringView type);
size_t NumberOfArrays(base::StringView type);
NormalizedType GetNormalizedType(base::StringView type);
base::StringView NormalizeTypeName(base::StringView type);
std::string DenormalizeTypeName(NormalizedType normalized,
                                base::StringView deobfuscated_type_name);
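
// Example: the helpers above decompose and re-assemble heap graph type names.
// The concrete values below are illustrative, assumed from the NormalizedType
// fields rather than guaranteed here; see heap_graph_tracker.cc for the
// authoritative behaviour.
//
//   base::StringView type("com.example.Foo[][]");
//   NormalizedType normalized = GetNormalizedType(type);
//   // normalized.name             -> "com.example.Foo"
//   // normalized.number_of_arrays -> 2
//   // normalized.is_static_class  -> false
//
//   // Re-apply the decorations to a deobfuscated class name:
//   std::string deobfuscated =
//       DenormalizeTypeName(normalized, base::StringView("com.example.Bar"));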

class HeapGraphTracker : public Destructible {
 public:
  struct SourceObject {
    // All ids in this are in the trace iid space, not in the trace processor
    // id space.
    uint64_t object_id = 0;
    uint64_t self_size = 0;
    uint64_t type_id = 0;

    std::vector<uint64_t> field_name_ids;
    std::vector<uint64_t> referred_objects;

    // If this object is an instance of `libcore.util.NativeAllocationRegistry`,
    // this is the value of its `size` field.
    std::optional<int64_t> native_allocation_registry_size;
  };

  struct SourceRoot {
    StringId root_type;
    std::vector<uint64_t> object_ids;
  };

  explicit HeapGraphTracker(TraceStorage* storage);

  static HeapGraphTracker* GetOrCreate(TraceProcessorContext* context) {
    if (!context->heap_graph_tracker) {
      context->heap_graph_tracker.reset(
          new HeapGraphTracker(context->storage.get()));
    }
    return static_cast<HeapGraphTracker*>(context->heap_graph_tracker.get());
  }

  void AddRoot(uint32_t seq_id, UniquePid upid, int64_t ts, SourceRoot root);
  void AddObject(uint32_t seq_id, UniquePid upid, int64_t ts, SourceObject obj);
  void AddInternedType(uint32_t seq_id,
                       uint64_t intern_id,
                       StringId strid,
                       std::optional<uint64_t> location_id,
                       uint64_t object_size,
                       std::vector<uint64_t> field_name_ids,
                       uint64_t superclass_id,
                       uint64_t classloader_id,
                       bool no_fields,
                       StringId kind);
  void AddInternedFieldName(uint32_t seq_id,
                            uint64_t intern_id,
                            base::StringView str);
  void AddInternedLocationName(uint32_t seq_id,
                               uint64_t intern_id,
                               StringId str);
  void FinalizeProfile(uint32_t seq);
  void FinalizeAllProfiles();
  void SetPacketIndex(uint32_t seq_id, uint64_t index);

  ~HeapGraphTracker() override;

  const std::vector<tables::HeapGraphClassTable::RowNumber>* RowsForType(
      std::optional<StringId> package_name,
      StringId type_name) const {
    auto it = class_to_rows_.find(std::make_pair(package_name, type_name));
    if (it == class_to_rows_.end())
      return nullptr;
    return &it->second;
  }

  const std::vector<tables::HeapGraphReferenceTable::RowNumber>* RowsForField(
      StringId field_name) const {
    return field_to_rows_.Find(field_name);
  }

  std::unique_ptr<tables::ExperimentalFlamegraphNodesTable> BuildFlamegraph(
      const int64_t current_ts,
      const UniquePid current_upid);

  uint64_t GetLastObjectId(uint32_t seq_id) {
    return GetOrCreateSequence(seq_id).last_object_id;
  }
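
  // Typical ingestion sequence (a sketch inferred from the public interface
  // above; the exact call sites live in the heap graph proto parsing code and
  // the argument values here are placeholders):
  //
  //   HeapGraphTracker* tracker = HeapGraphTracker::GetOrCreate(context);
  //   // Interned strings/types arriving on the packet sequence:
  //   tracker->AddInternedFieldName(seq_id, field_iid, "com.example.Foo.bar");
  //   tracker->AddInternedLocationName(seq_id, location_iid, location_strid);
  //   tracker->AddInternedType(seq_id, type_iid, name_strid, location_iid,
  //                            object_size, field_name_ids, superclass_iid,
  //                            classloader_iid, /*no_fields=*/false, kind);
  //   // Objects and GC roots of one heap dump:
  //   tracker->AddObject(seq_id, upid, ts, std::move(obj));
  //   tracker->AddRoot(seq_id, upid, ts, std::move(root));
  //   // After the last packet of the dump:
  //   tracker->FinalizeProfile(seq_id);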

 private:
  struct InternedField {
    StringId name;
    StringId type_name;
  };
  struct InternedType {
    StringId name;
    std::optional<uint64_t> location_id;
    uint64_t object_size;
    std::vector<uint64_t> field_name_ids;
    uint64_t superclass_id;
    bool no_fields;
    uint64_t classloader_id;
    StringId kind;
  };
  struct SequenceState {
    UniquePid current_upid = 0;
    int64_t current_ts = 0;
    uint64_t last_object_id = 0;
    std::vector<SourceRoot> current_roots;

    // Note: the below maps are a mix of std::map and base::FlatHashMap because
    // of the incremental evolution of this code (i.e. when the code was
    // written, FlatHashMap did not exist, and pieces were migrated as they
    // were found to be performance problems).
    //
    // In the future, likely all of these should be base::FlatHashMap. This
    // was not done when the first use of base::FlatHashMap happened because
    // there are some subtle cases where base::FlatHashMap *regresses* perf and
    // there was not time for investigation.

    std::map<uint64_t, InternedType> interned_types;
    std::map<uint64_t, StringId> interned_location_names;
    base::FlatHashMap<uint64_t, tables::HeapGraphObjectTable::RowNumber>
        object_id_to_db_row;
    base::FlatHashMap<uint64_t, tables::HeapGraphClassTable::RowNumber>
        type_id_to_db_row;
    std::map<uint64_t, std::vector<tables::HeapGraphReferenceTable::RowNumber>>
        references_for_field_name_id;
    base::FlatHashMap<uint64_t, InternedField> interned_fields;
    std::map<tables::HeapGraphClassTable::Id,
             std::vector<tables::HeapGraphObjectTable::RowNumber>>
        deferred_reference_objects_for_type_;
    std::optional<uint64_t> prev_index;
    // For most objects, we need not store the size in the object's message
    // itself, because all instances of the type have the same size. In this
    // case, we defer setting self_size in the table until we process the class
    // message in FinalizeProfile.
    std::map<tables::HeapGraphClassTable::Id,
             std::vector<tables::HeapGraphObjectTable::RowNumber>>
        deferred_size_objects_for_type_;
    // Contains the value of the "size" field for each
    // "libcore.util.NativeAllocationRegistry" object.
    std::map<tables::HeapGraphObjectTable::Id, int64_t> nar_size_by_obj_id;
    bool truncated = false;
  };

  SequenceState& GetOrCreateSequence(uint32_t seq_id);
  tables::HeapGraphObjectTable::RowReference GetOrInsertObject(
      SequenceState* sequence_state,
      uint64_t object_id);
  tables::HeapGraphClassTable::RowReference GetOrInsertType(
      SequenceState* sequence_state,
      uint64_t type_id);
  bool SetPidAndTimestamp(SequenceState* seq, UniquePid upid, int64_t ts);
  void PopulateSuperClasses(const SequenceState& seq);
  InternedType* GetSuperClass(SequenceState* sequence_state,
                              const InternedType* current_type);
  bool IsTruncated(UniquePid upid, int64_t ts);

  // Returns the object pointed to by `field` in `obj`.
  std::optional<tables::HeapGraphObjectTable::Id> GetReferenceByFieldName(
      tables::HeapGraphObjectTable::Id obj,
      StringId field);

  // Populates HeapGraphObject::native_size by walking the graph for `seq`.
  //
  // This should be called only once per seq (it is not idempotent), after all
  // the other tables have been fully populated.
  void PopulateNativeSize(const SequenceState& seq);

  TraceStorage* const storage_;
  std::map<uint32_t, SequenceState> sequence_state_;

  std::map<std::pair<std::optional<StringId>, StringId>,
           std::vector<tables::HeapGraphClassTable::RowNumber>>
      class_to_rows_;
  base::FlatHashMap<StringId,
                    std::vector<tables::HeapGraphReferenceTable::RowNumber>>
      field_to_rows_;

  std::map<std::pair<std::optional<StringId>, StringId>, StringId>
      deobfuscation_mapping_;
  std::map<std::pair<UniquePid, int64_t>,
           std::set<tables::HeapGraphObjectTable::RowNumber>>
      roots_;
  std::set<std::pair<UniquePid, int64_t>> truncated_graphs_;

  StringId cleaner_thunk_str_id_;
  StringId referent_str_id_;
  StringId cleaner_thunk_this0_str_id_;
  StringId native_size_str_id_;
  StringId cleaner_next_str_id_;
};

}  // namespace trace_processor
}  // namespace perfetto

#endif  // SRC_TRACE_PROCESSOR_IMPORTERS_PROTO_HEAP_GRAPH_TRACKER_H_