/*
 * Copyright (C) 2008 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "reference_table.h"

#include "android-base/stringprintf.h"

#include "base/mutex.h"
#include "base/utils.h"
#include "gc/allocation_record.h"
#include "gc/heap.h"
#include "indirect_reference_table.h"
#include "mirror/array-inl.h"
#include "mirror/array.h"
#include "mirror/class-inl.h"
#include "mirror/class.h"
#include "mirror/object-inl.h"
#include "mirror/string-inl.h"
#include "runtime-inl.h"
#include "thread.h"

namespace art {

using android::base::StringAppendF;
using android::base::StringPrintf;

ReferenceTable::ReferenceTable(const char* name, size_t initial_size, size_t max_size)
    : name_(name), max_size_(max_size) {
  CHECK_LE(initial_size, max_size);
  entries_.reserve(initial_size);
}

ReferenceTable::~ReferenceTable() {
}

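// Add an entry to the table. Aborts with LOG(FATAL) if the table has already reached max_size_.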
void ReferenceTable::Add(ObjPtr<mirror::Object> obj) {
  DCHECK(obj != nullptr);
  VerifyObject(obj);
  if (entries_.size() >= max_size_) {
    LOG(FATAL) << "ReferenceTable '" << name_ << "' "
               << "overflowed (" << max_size_ << " entries)";
  }
  entries_.push_back(GcRoot<mirror::Object>(obj));
}

void ReferenceTable::Remove(ObjPtr<mirror::Object> obj) {
  // We iterate backwards on the assumption that references are LIFO.
  for (int i = entries_.size() - 1; i >= 0; --i) {
    ObjPtr<mirror::Object> entry = entries_[i].Read();
    if (entry == obj) {
      entries_.erase(entries_.begin() + i);
      return;
    }
  }
}

// If "obj" is an array, return the number of elements in the array.
// Otherwise, return zero.
static size_t GetElementCount(ObjPtr<mirror::Object> obj) REQUIRES_SHARED(Locks::mutator_lock_) {
  // We assume the special cleared value isn't an array in the if statement below.
  DCHECK(!Runtime::Current()->GetClearedJniWeakGlobal()->IsArrayInstance());
  if (obj == nullptr || !obj->IsArrayInstance()) {
    return 0;
  }
  return obj->AsArray()->GetLength();
}

// Log an object with some additional info.
//
// Pass in the number of elements in the array (or 0 if this is not an
// array object), and the number of additional objects that are identical
// or equivalent to the original.
static void DumpSummaryLine(std::ostream& os, ObjPtr<mirror::Object> obj, size_t element_count,
                            int identical, int equiv)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  if (obj == nullptr) {
    os << " null reference (count=" << equiv << ")\n";
    return;
  }
  if (Runtime::Current()->IsClearedJniWeakGlobal(obj)) {
    os << " cleared jweak (count=" << equiv << ")\n";
    return;
  }

  std::string className(obj->PrettyTypeOf());
  if (obj->IsClass()) {
    // We're summarizing multiple instances, so using the exemplar
    // Class' type parameter here would be misleading.
    className = "java.lang.Class";
  }
  if (element_count != 0) {
    StringAppendF(&className, " (%zd elements)", element_count);
  }

  size_t total = identical + equiv + 1;
  std::string msg(StringPrintf("%5zd of %s", total, className.c_str()));
  if (identical + equiv != 0) {
    StringAppendF(&msg, " (%d unique instances)", equiv + 1);
  }
  os << " " << msg << "\n";
}

size_t ReferenceTable::Size() const {
  return entries_.size();
}

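// Dump the contents of this table to the given stream, prefixed with the table's name.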
void ReferenceTable::Dump(std::ostream& os) {
  os << name_ << " reference table dump:\n";
  Dump(os, entries_);
}

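// Dump the given entries: first the most recent ones (with allocation stack traces when
// allocation tracking is enabled), then a summary of all live entries grouped by class and
// element count, sorted by frequency.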
void ReferenceTable::Dump(std::ostream& os, Table& entries) {
  // Compare GC roots, first by class, then size, then address.
  struct GcRootComparator {
    bool operator()(GcRoot<mirror::Object> root1, GcRoot<mirror::Object> root2) const
        // TODO: enable analysis when analysis can work with the STL.
        NO_THREAD_SAFETY_ANALYSIS {
      Locks::mutator_lock_->AssertSharedHeld(Thread::Current());
      // These GC roots are already forwarded in ReferenceTable::Dump. Sorting by class is safe
      // since there are no suspend points during the sorting process, so the addresses of obj1,
      // obj2, obj1->GetClass() and obj2->GetClass() won't change while we sort. The classes are
      // forwarded by ref->GetClass().
      ObjPtr<mirror::Object> obj1 = root1.Read<kWithoutReadBarrier>();
      ObjPtr<mirror::Object> obj2 = root2.Read<kWithoutReadBarrier>();
      DCHECK(obj1 != nullptr);
      DCHECK(obj2 != nullptr);
      Runtime* runtime = Runtime::Current();
      DCHECK(!runtime->IsClearedJniWeakGlobal(obj1));
      DCHECK(!runtime->IsClearedJniWeakGlobal(obj2));
      // Sort by class...
      if (obj1->GetClass() != obj2->GetClass()) {
        return obj1->GetClass() < obj2->GetClass();
      }
      // ...then by size...
      const size_t size1 = obj1->SizeOf();
      const size_t size2 = obj2->SizeOf();
      if (size1 != size2) {
        return size1 < size2;
      }
      // ...and finally by address.
      return obj1.Ptr() < obj2.Ptr();
    }
  };

  if (entries.empty()) {
    os << " (empty)\n";
    return;
  }

  // Dump the most recent N entries.
  const size_t kLast = 10;
  size_t count = entries.size();
  int first = count - kLast;
  if (first < 0) {
    first = 0;
  }
  os << " Last " << (count - first) << " entries (of " << count << "):\n";
  Runtime* runtime = Runtime::Current();
  for (int idx = count - 1; idx >= first; --idx) {
    ObjPtr<mirror::Object> ref = entries[idx].Read();
    if (ref == nullptr) {
      continue;
    }
    if (runtime->IsClearedJniWeakGlobal(ref)) {
      os << StringPrintf(" %5d: cleared jweak\n", idx);
      continue;
    }
    if (ref->GetClass() == nullptr) {
      // should only be possible right after a plain dvmMalloc().
      size_t size = ref->SizeOf();
      os << StringPrintf(" %5d: %p (raw) (%zd bytes)\n", idx, ref.Ptr(), size);
      continue;
    }

    std::string className(ref->PrettyTypeOf());

    std::string extras;
    size_t element_count = GetElementCount(ref);
    if (element_count != 0) {
      StringAppendF(&extras, " (%zd elements)", element_count);
    } else if (ref->GetClass()->IsStringClass()) {
      ObjPtr<mirror::String> s = ref->AsString();
      std::string utf8(s->ToModifiedUtf8());
      if (s->GetLength() <= 16) {
        StringAppendF(&extras, " \"%s\"", utf8.c_str());
      } else {
        StringAppendF(&extras, " \"%.16s... (%d chars)", utf8.c_str(), s->GetLength());
      }
    } else if (ref->IsReferenceInstance()) {
      ObjPtr<mirror::Object> referent = ref->AsReference()->GetReferent();
      if (referent == nullptr) {
        extras = " (referent is null)";
      } else {
        extras = StringPrintf(" (referent is a %s)", referent->PrettyTypeOf().c_str());
      }
    }
    os << StringPrintf(" %5d: ", idx) << ref << " " << className << extras << "\n";
    if (runtime->GetHeap()->IsAllocTrackingEnabled()) {
      MutexLock mu(Thread::Current(), *Locks::alloc_tracker_lock_);

      gc::AllocRecordObjectMap* records = runtime->GetHeap()->GetAllocationRecords();
      DCHECK(records != nullptr);
      // It's annoying that this is a list, but this code path should only rarely be executed.

      auto print_stack = [&](ObjPtr<mirror::Object> to_print, const std::string& msg)
          REQUIRES_SHARED(Locks::mutator_lock_)
          REQUIRES(Locks::alloc_tracker_lock_) {
        for (auto it = records->Begin(), end = records->End(); it != end; ++it) {
          GcRoot<mirror::Object>& stack_for_object = it->first;
          gc::AllocRecord& record = it->second;
          if (stack_for_object.Read() == to_print.Ptr()) {
            os << " " << msg << "\n";
            const gc::AllocRecordStackTrace* trace = record.GetStackTrace();
            size_t depth = trace->GetDepth();
            if (depth == 0) {
              os << " (No managed frames)\n";
            } else {
              for (size_t i = 0; i < depth; ++i) {
                const gc::AllocRecordStackTraceElement& frame = trace->GetStackElement(i);
                os << " ";
                if (frame.GetMethod() == nullptr) {
                  os << "(missing method data)\n";
                  continue;
                }
                os << frame.GetMethod()->PrettyMethod(true)
                   << ":"
                   << frame.ComputeLineNumber()
                   << "\n";
              }
            }
            break;
          }
        }
      };
      // Print the stack trace of the ref.
      print_stack(ref, "Allocated at:");

      // If it's a reference, see if we have data about the referent.
      if (ref->IsReferenceInstance()) {
        ObjPtr<mirror::Object> referent = ref->AsReference()->GetReferent();
        if (referent != nullptr) {
          print_stack(referent, "Referent allocated at:");
        }
      }
    }
  }

  // Make a sorted copy of the table, keeping only entries that are neither null nor cleared.
  Table sorted_entries;
  for (GcRoot<mirror::Object>& root : entries) {
    if (!root.IsNull() && !runtime->IsClearedJniWeakGlobal(root.Read())) {
      sorted_entries.push_back(root);
    }
  }
  if (sorted_entries.empty()) {
    return;
  }
  std::sort(sorted_entries.begin(), sorted_entries.end(), GcRootComparator());

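  // Collapse the sorted entries into summary elements: each holds one representative root plus
  // counts of identical references (the same object added more than once) and equivalent ones
  // (same class and element count, different objects).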
  class SummaryElement {
   public:
    GcRoot<mirror::Object> root;
    size_t equiv;
    size_t identical;

    SummaryElement() : equiv(0), identical(0) {}
    SummaryElement(SummaryElement&& ref) noexcept {
      root = ref.root;
      equiv = ref.equiv;
      identical = ref.identical;
    }
    SummaryElement(const SummaryElement&) = default;
    SummaryElement& operator=(SummaryElement&&) = default;

    void Reset(GcRoot<mirror::Object>& _root) {
      root = _root;
      equiv = 0;
      identical = 0;
    }
  };
  std::vector<SummaryElement> sorted_summaries;
  {
    SummaryElement prev;

    for (GcRoot<mirror::Object>& root : sorted_entries) {
      ObjPtr<mirror::Object> current = root.Read<kWithoutReadBarrier>();

      if (UNLIKELY(prev.root.IsNull())) {
        prev.Reset(root);
        continue;
      }

      ObjPtr<mirror::Object> prevObj = prev.root.Read<kWithoutReadBarrier>();
      if (current == prevObj) {
        // Same reference, added more than once.
        ++prev.identical;
      } else if (current->GetClass() == prevObj->GetClass() &&
                 GetElementCount(current) == GetElementCount(prevObj)) {
        // Same class / element count, different object.
        ++prev.equiv;
      } else {
        sorted_summaries.push_back(prev);
        prev.Reset(root);
      }
      prev.root = root;
    }
    sorted_summaries.push_back(prev);

    // Compare summary elements, first by combined count, then by identical (indicating leaks),
    // then by class (and size and address).
    struct SummaryElementComparator {
      GcRootComparator gc_root_cmp;

      bool operator()(SummaryElement& elem1, SummaryElement& elem2) const
          NO_THREAD_SAFETY_ANALYSIS {
        Locks::mutator_lock_->AssertSharedHeld(Thread::Current());

        size_t count1 = elem1.equiv + elem1.identical;
        size_t count2 = elem2.equiv + elem2.identical;
        if (count1 != count2) {
          return count1 > count2;
        }

        if (elem1.identical != elem2.identical) {
          return elem1.identical > elem2.identical;
        }

        // Otherwise, compare the GC roots as before.
        return gc_root_cmp(elem1.root, elem2.root);
      }
    };
    std::sort(sorted_summaries.begin(), sorted_summaries.end(), SummaryElementComparator());
  }

  // Dump a summary of the whole table.
  os << " Summary:\n";
  for (SummaryElement& elem : sorted_summaries) {
    ObjPtr<mirror::Object> elemObj = elem.root.Read<kWithoutReadBarrier>();
    DumpSummaryLine(os, elemObj, GetElementCount(elemObj), elem.identical, elem.equiv);
  }
}

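// Report every entry in the table to the visitor as a root, buffering the roots to reduce
// per-root visitation overhead.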
void ReferenceTable::VisitRoots(RootVisitor* visitor, const RootInfo& root_info) {
  BufferedRootVisitor<kDefaultBufferedRootCount> buffered_visitor(visitor, root_info);
  for (GcRoot<mirror::Object>& root : entries_) {
    buffered_visitor.VisitRoot(root);
  }
}

}  // namespace art