/*
 * Copyright (C) 2016 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "ti_heap.h"

#include <cstring>
#include <ios>
#include <unordered_map>
#include <unordered_set>
#include <vector>

#include "android-base/logging.h"
#include "android-base/thread_annotations.h"
#include "arch/context.h"
#include "art_field-inl.h"
#include "art_jvmti.h"
#include "base/logging.h"
#include "base/macros.h"
#include "base/mutex.h"
#include "base/utils.h"
#include "class_linker.h"
#include "deopt_manager.h"
#include "dex/primitive.h"
#include "events-inl.h"
#include "gc/collector_type.h"
#include "gc/gc_cause.h"
#include "gc/heap-visit-objects-inl.h"
#include "gc/heap-inl.h"
#include "gc/scoped_gc_critical_section.h"
#include "gc_root-inl.h"
#include "handle.h"
#include "handle_scope.h"
#include "java_frame_root_info.h"
#include "jni/jni_env_ext.h"
#include "jni/jni_id_manager.h"
#include "jni/jni_internal.h"
#include "jvmti_weak_table-inl.h"
#include "mirror/array-inl.h"
#include "mirror/array.h"
#include "mirror/class.h"
#include "mirror/object-inl.h"
#include "mirror/object-refvisitor-inl.h"
#include "mirror/object_array-inl.h"
#include "mirror/object_array-alloc-inl.h"
#include "mirror/object_reference.h"
#include "obj_ptr-inl.h"
#include "object_callbacks.h"
#include "object_tagging.h"
#include "offsets.h"
#include "read_barrier.h"
#include "runtime.h"
#include "scoped_thread_state_change-inl.h"
#include "stack.h"
#include "thread-inl.h"
#include "thread_list.h"
#include "ti_logging.h"
#include "ti_stack.h"
#include "ti_thread.h"
#include "well_known_classes.h"

namespace openjdkjvmti {

EventHandler* HeapExtensions::gEventHandler = nullptr;

namespace {

struct IndexCache {
  // The number of interface fields implemented by the class. This is a prefix to all assigned
  // field indices.
  size_t interface_fields;

  // It would be nice to also cache the following, but it is complicated to wire up into the
  // generic visit:
  // The number of fields in interfaces and superclasses. This is the first index assigned to
  // fields of the class.
  // size_t superclass_fields;
};
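
// Index scheme used by the visitors below (a summary, assuming the behavior of
// FieldVisitor::ReportFieldsRecursive): indices [0, interface_fields) denote the fields of all
// transitively implemented interfaces, followed by the fields of the superclass chain (starting
// at java.lang.Object), followed by the fields declared by the class itself.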
using IndexCachingTable = JvmtiWeakTable<IndexCache>;

static IndexCachingTable gIndexCachingTable;

// Report the contents of a string, if a callback is set.
jint ReportString(art::ObjPtr<art::mirror::Object> obj,
                  jvmtiEnv* env,
                  ObjectTagTable* tag_table,
                  const jvmtiHeapCallbacks* cb,
                  const void* user_data) REQUIRES_SHARED(art::Locks::mutator_lock_) {
  if (UNLIKELY(cb->string_primitive_value_callback != nullptr) && obj->IsString()) {
    art::ObjPtr<art::mirror::String> str = obj->AsString();
    int32_t string_length = str->GetLength();
    JvmtiUniquePtr<uint16_t[]> data;

    if (string_length > 0) {
      jvmtiError alloc_error;
      data = AllocJvmtiUniquePtr<uint16_t[]>(env, string_length, &alloc_error);
      if (data == nullptr) {
        // TODO: Not really sure what to do here. Should we abort the iteration and go all the way
        //       back? For now just warn.
        LOG(WARNING) << "Unable to allocate buffer for string reporting! Silently dropping value."
                     << " >" << str->ToModifiedUtf8() << "<";
        return 0;
      }

      if (str->IsCompressed()) {
        uint8_t* compressed_data = str->GetValueCompressed();
        for (int32_t i = 0; i != string_length; ++i) {
          data[i] = compressed_data[i];
        }
      } else {
        // Can copy directly.
        memcpy(data.get(), str->GetValue(), string_length * sizeof(uint16_t));
      }
    }

    const jlong class_tag = tag_table->GetTagOrZero(obj->GetClass());
    jlong string_tag = tag_table->GetTagOrZero(obj.Ptr());
    const jlong saved_string_tag = string_tag;

    jint result = cb->string_primitive_value_callback(class_tag,
                                                      obj->SizeOf(),
                                                      &string_tag,
                                                      data.get(),
                                                      string_length,
                                                      const_cast<void*>(user_data));
    if (string_tag != saved_string_tag) {
      tag_table->Set(obj.Ptr(), string_tag);
    }

    return result;
  }
  return 0;
}

// Report the contents of a primitive array, if a callback is set.
jint ReportPrimitiveArray(art::ObjPtr<art::mirror::Object> obj,
                          jvmtiEnv* env,
                          ObjectTagTable* tag_table,
                          const jvmtiHeapCallbacks* cb,
                          const void* user_data) REQUIRES_SHARED(art::Locks::mutator_lock_) {
  if (UNLIKELY(cb->array_primitive_value_callback != nullptr) &&
      obj->IsArrayInstance() &&
      !obj->IsObjectArray()) {
    art::ObjPtr<art::mirror::Array> array = obj->AsArray();
    int32_t array_length = array->GetLength();
    size_t component_size = array->GetClass()->GetComponentSize();
    art::Primitive::Type art_prim_type = array->GetClass()->GetComponentType()->GetPrimitiveType();
    jvmtiPrimitiveType prim_type =
        static_cast<jvmtiPrimitiveType>(art::Primitive::Descriptor(art_prim_type)[0]);
    DCHECK(prim_type == JVMTI_PRIMITIVE_TYPE_BOOLEAN ||
           prim_type == JVMTI_PRIMITIVE_TYPE_BYTE ||
           prim_type == JVMTI_PRIMITIVE_TYPE_CHAR ||
           prim_type == JVMTI_PRIMITIVE_TYPE_SHORT ||
           prim_type == JVMTI_PRIMITIVE_TYPE_INT ||
           prim_type == JVMTI_PRIMITIVE_TYPE_LONG ||
           prim_type == JVMTI_PRIMITIVE_TYPE_FLOAT ||
           prim_type == JVMTI_PRIMITIVE_TYPE_DOUBLE);

    const jlong class_tag = tag_table->GetTagOrZero(obj->GetClass());
    jlong array_tag = tag_table->GetTagOrZero(obj.Ptr());
    const jlong saved_array_tag = array_tag;

    jint result;
    if (array_length == 0) {
      result = cb->array_primitive_value_callback(class_tag,
                                                  obj->SizeOf(),
                                                  &array_tag,
                                                  0,
                                                  prim_type,
                                                  nullptr,
                                                  const_cast<void*>(user_data));
    } else {
      jvmtiError alloc_error;
      JvmtiUniquePtr<char[]> data = AllocJvmtiUniquePtr<char[]>(env,
                                                                array_length * component_size,
                                                                &alloc_error);
      if (data == nullptr) {
        // TODO: Not really sure what to do here. Should we abort the iteration and go all the way
        //       back? For now just warn.
        LOG(WARNING) << "Unable to allocate buffer for array reporting! Silently dropping value.";
        return 0;
      }

      memcpy(data.get(), array->GetRawData(component_size, 0), array_length * component_size);

      result = cb->array_primitive_value_callback(class_tag,
                                                  obj->SizeOf(),
                                                  &array_tag,
                                                  array_length,
                                                  prim_type,
                                                  data.get(),
                                                  const_cast<void*>(user_data));
    }

    if (array_tag != saved_array_tag) {
      tag_table->Set(obj.Ptr(), array_tag);
    }

    return result;
  }
  return 0;
}

template <typename UserData>
bool VisitorFalse([[maybe_unused]] art::ObjPtr<art::mirror::Object> obj,
                  [[maybe_unused]] art::ObjPtr<art::mirror::Class> klass,
                  [[maybe_unused]] art::ArtField& field,
                  [[maybe_unused]] size_t field_index,
                  [[maybe_unused]] UserData* user_data) {
  return false;
}

template <typename UserData, bool kCallVisitorOnRecursion>
class FieldVisitor {
 public:
  // Report the contents of the primitive fields of the given object, if a callback is set.
  template <typename StaticPrimitiveVisitor,
            typename StaticReferenceVisitor,
            typename InstancePrimitiveVisitor,
            typename InstanceReferenceVisitor>
  static bool ReportFields(art::ObjPtr<art::mirror::Object> obj,
                           UserData* user_data,
                           StaticPrimitiveVisitor& static_prim_visitor,
                           StaticReferenceVisitor& static_ref_visitor,
                           InstancePrimitiveVisitor& instance_prim_visitor,
                           InstanceReferenceVisitor& instance_ref_visitor)
      REQUIRES_SHARED(art::Locks::mutator_lock_) {
    FieldVisitor fv(user_data);

    if (obj->IsClass()) {
      // When visiting a class, we only visit the static fields of the given class. No field of
      // superclasses is visited.
      art::ObjPtr<art::mirror::Class> klass = obj->AsClass();
      // Only report fields on resolved classes. We need valid field data.
      if (!klass->IsResolved()) {
        return false;
      }
      return fv.ReportFieldsImpl(nullptr,
                                 obj->AsClass(),
                                 obj->AsClass()->IsInterface(),
                                 static_prim_visitor,
                                 static_ref_visitor,
                                 instance_prim_visitor,
                                 instance_ref_visitor);
    } else {
      // See comment above. Just double-checking here, but an instance *should* mean the class was
      // resolved.
      DCHECK(obj->GetClass()->IsResolved() || obj->GetClass()->IsErroneousResolved());
      return fv.ReportFieldsImpl(obj,
                                 obj->GetClass(),
                                 false,
                                 static_prim_visitor,
                                 static_ref_visitor,
                                 instance_prim_visitor,
                                 instance_ref_visitor);
    }
  }

 private:
  explicit FieldVisitor(UserData* user_data) : user_data_(user_data) {}

  // Report the contents of fields of the given object. If obj is null, report the static fields,
  // otherwise the instance fields.
  template <typename StaticPrimitiveVisitor,
            typename StaticReferenceVisitor,
            typename InstancePrimitiveVisitor,
            typename InstanceReferenceVisitor>
  bool ReportFieldsImpl(art::ObjPtr<art::mirror::Object> obj,
                        art::ObjPtr<art::mirror::Class> klass,
                        bool skip_java_lang_object,
                        StaticPrimitiveVisitor& static_prim_visitor,
                        StaticReferenceVisitor& static_ref_visitor,
                        InstancePrimitiveVisitor& instance_prim_visitor,
                        InstanceReferenceVisitor& instance_ref_visitor)
      REQUIRES_SHARED(art::Locks::mutator_lock_) {
    // Compute the offset of field indices.
    size_t interface_field_count = CountInterfaceFields(klass);

    size_t tmp;
    bool aborted = ReportFieldsRecursive(obj,
                                         klass,
                                         interface_field_count,
                                         skip_java_lang_object,
                                         static_prim_visitor,
                                         static_ref_visitor,
                                         instance_prim_visitor,
                                         instance_ref_visitor,
                                         &tmp);
    return aborted;
  }

  // Visit primitive fields in an object (instance). Return true if the visit was aborted.
  template <typename StaticPrimitiveVisitor,
            typename StaticReferenceVisitor,
            typename InstancePrimitiveVisitor,
            typename InstanceReferenceVisitor>
  bool ReportFieldsRecursive(art::ObjPtr<art::mirror::Object> obj,
                             art::ObjPtr<art::mirror::Class> klass,
                             size_t interface_fields,
                             bool skip_java_lang_object,
                             StaticPrimitiveVisitor& static_prim_visitor,
                             StaticReferenceVisitor& static_ref_visitor,
                             InstancePrimitiveVisitor& instance_prim_visitor,
                             InstanceReferenceVisitor& instance_ref_visitor,
                             size_t* field_index_out)
      REQUIRES_SHARED(art::Locks::mutator_lock_) {
    DCHECK(klass != nullptr);
    size_t field_index;
    if (klass->GetSuperClass() == nullptr) {
      // j.l.Object. Start with the fields from interfaces.
      field_index = interface_fields;
      if (skip_java_lang_object) {
        *field_index_out = field_index;
        return false;
      }
    } else {
      // Report superclass fields.
      if (kCallVisitorOnRecursion) {
        if (ReportFieldsRecursive(obj,
                                  klass->GetSuperClass(),
                                  interface_fields,
                                  skip_java_lang_object,
                                  static_prim_visitor,
                                  static_ref_visitor,
                                  instance_prim_visitor,
                                  instance_ref_visitor,
                                  &field_index)) {
          return true;
        }
      } else {
        // Still call, but with empty visitor. This is required for correct counting.
        ReportFieldsRecursive(obj,
                              klass->GetSuperClass(),
                              interface_fields,
                              skip_java_lang_object,
                              VisitorFalse<UserData>,
                              VisitorFalse<UserData>,
                              VisitorFalse<UserData>,
                              VisitorFalse<UserData>,
                              &field_index);
      }
    }

    // Now visit fields for the current klass.

    for (auto& field : klass->GetFields()) {
      if (field.IsStatic()) {
        if (field.IsPrimitiveType()) {
          if (static_prim_visitor(obj, klass, field, field_index, user_data_)) {
            return true;
          }
        } else {
          if (static_ref_visitor(obj, klass, field, field_index, user_data_)) {
            return true;
          }
        }
      } else {
        if (field.IsPrimitiveType()) {
          if (instance_prim_visitor(obj, klass, field, field_index, user_data_)) {
            return true;
          }
        } else {
          if (instance_ref_visitor(obj, klass, field, field_index, user_data_)) {
            return true;
          }
        }
      }
      field_index++;
    }

    *field_index_out = field_index;
    return false;
  }

  // Implements a visit of the implemented interfaces of a given class.
  template <typename T>
  struct RecursiveInterfaceVisit {
    static void VisitStatic(art::Thread* self, art::ObjPtr<art::mirror::Class> klass, T& visitor)
        REQUIRES_SHARED(art::Locks::mutator_lock_) {
      RecursiveInterfaceVisit rv;
      rv.Visit(self, klass, visitor);
    }

    void Visit(art::Thread* self, art::ObjPtr<art::mirror::Class> klass, T& visitor)
        REQUIRES_SHARED(art::Locks::mutator_lock_) {
      // First visit the parent, to get the order right.
      // (We do this in preparation for actual visiting of interface fields.)
      if (klass->GetSuperClass() != nullptr) {
        Visit(self, klass->GetSuperClass(), visitor);
      }
      for (uint32_t i = 0; i != klass->NumDirectInterfaces(); ++i) {
        art::ObjPtr<art::mirror::Class> inf_klass = klass->GetDirectInterface(i);
        DCHECK(inf_klass != nullptr);
        VisitInterface(self, inf_klass, visitor);
      }
    }

    void VisitInterface(art::Thread* self, art::ObjPtr<art::mirror::Class> inf_klass, T& visitor)
        REQUIRES_SHARED(art::Locks::mutator_lock_) {
      auto it = visited_interfaces.find(inf_klass.Ptr());
      if (it != visited_interfaces.end()) {
        return;
      }
      visited_interfaces.insert(inf_klass.Ptr());

      // Let the visitor know about this one. Note that this order is acceptable, as the ordering
      // of these fields never matters for known visitors.
      visitor(inf_klass);

      // Now visit the superinterfaces.
      for (uint32_t i = 0; i != inf_klass->NumDirectInterfaces(); ++i) {
        art::ObjPtr<art::mirror::Class> super_inf_klass = inf_klass->GetDirectInterface(i);
        DCHECK(super_inf_klass != nullptr);
        VisitInterface(self, super_inf_klass, visitor);
      }
    }

    std::unordered_set<art::mirror::Class*> visited_interfaces;
  };

  // Counting interface fields. Note that we cannot use the interface table, as that only contains
  // "non-marker" interfaces (= interfaces with methods).
  static size_t CountInterfaceFields(art::ObjPtr<art::mirror::Class> klass)
      REQUIRES_SHARED(art::Locks::mutator_lock_) {
    // Do we have a cached value?
    IndexCache tmp;
    if (gIndexCachingTable.GetTag(klass.Ptr(), &tmp)) {
      return tmp.interface_fields;
    }

    size_t count = 0;
    auto visitor = [&count](art::ObjPtr<art::mirror::Class> inf_klass)
        REQUIRES_SHARED(art::Locks::mutator_lock_) {
      DCHECK(inf_klass->IsInterface());
      count += inf_klass->NumFields();
    };
    RecursiveInterfaceVisit<decltype(visitor)>::VisitStatic(art::Thread::Current(), klass, visitor);

    // Store this into the cache.
    tmp.interface_fields = count;
    gIndexCachingTable.Set(klass.Ptr(), tmp);

    return count;
  }

  UserData* user_data_;
};

// Debug helper. Prints the structure of an object.
template <bool kStatic, bool kRef>
struct DumpVisitor {
  static bool Callback([[maybe_unused]] art::ObjPtr<art::mirror::Object> obj,
                       [[maybe_unused]] art::ObjPtr<art::mirror::Class> klass,
                       art::ArtField& field,
                       size_t field_index,
                       [[maybe_unused]] void* user_data)
      REQUIRES_SHARED(art::Locks::mutator_lock_) {
    LOG(ERROR) << (kStatic ? "static " : "instance ")
               << (kRef ? "ref " : "primitive ")
               << field.PrettyField()
               << " @ "
               << field_index;
    return false;
  }
};

[[maybe_unused]] void DumpObjectFields(art::ObjPtr<art::mirror::Object> obj)
    REQUIRES_SHARED(art::Locks::mutator_lock_) {
  if (obj->IsClass()) {
    FieldVisitor<void, false>::ReportFields(obj,
                                            nullptr,
                                            DumpVisitor<true, false>::Callback,
                                            DumpVisitor<true, true>::Callback,
                                            DumpVisitor<false, false>::Callback,
                                            DumpVisitor<false, true>::Callback);
  } else {
    FieldVisitor<void, true>::ReportFields(obj,
                                           nullptr,
                                           DumpVisitor<true, false>::Callback,
                                           DumpVisitor<true, true>::Callback,
                                           DumpVisitor<false, false>::Callback,
                                           DumpVisitor<false, true>::Callback);
  }
}

class ReportPrimitiveField {
 public:
  static bool Report(art::ObjPtr<art::mirror::Object> obj,
                     ObjectTagTable* tag_table,
                     const jvmtiHeapCallbacks* cb,
                     const void* user_data)
      REQUIRES_SHARED(art::Locks::mutator_lock_) {
    if (UNLIKELY(cb->primitive_field_callback != nullptr)) {
      jlong class_tag = tag_table->GetTagOrZero(obj->GetClass());
      ReportPrimitiveField rpf(tag_table, class_tag, cb, user_data);
      if (obj->IsClass()) {
        return FieldVisitor<ReportPrimitiveField, false>::ReportFields(
            obj,
            &rpf,
            ReportPrimitiveFieldCallback<true>,
            VisitorFalse<ReportPrimitiveField>,
            VisitorFalse<ReportPrimitiveField>,
            VisitorFalse<ReportPrimitiveField>);
      } else {
        return FieldVisitor<ReportPrimitiveField, true>::ReportFields(
            obj,
            &rpf,
            VisitorFalse<ReportPrimitiveField>,
            VisitorFalse<ReportPrimitiveField>,
            ReportPrimitiveFieldCallback<false>,
            VisitorFalse<ReportPrimitiveField>);
      }
    }
    return false;
  }

 private:
  ReportPrimitiveField(ObjectTagTable* tag_table,
                       jlong class_tag,
                       const jvmtiHeapCallbacks* cb,
                       const void* user_data)
      : tag_table_(tag_table), class_tag_(class_tag), cb_(cb), user_data_(user_data) {}

  template <bool kReportStatic>
  static bool ReportPrimitiveFieldCallback(art::ObjPtr<art::mirror::Object> obj,
                                           art::ObjPtr<art::mirror::Class> klass,
                                           art::ArtField& field,
                                           size_t field_index,
                                           ReportPrimitiveField* user_data)
      REQUIRES_SHARED(art::Locks::mutator_lock_) {
    art::Primitive::Type art_prim_type = field.GetTypeAsPrimitiveType();
    jvmtiPrimitiveType prim_type =
        static_cast<jvmtiPrimitiveType>(art::Primitive::Descriptor(art_prim_type)[0]);
    DCHECK(prim_type == JVMTI_PRIMITIVE_TYPE_BOOLEAN ||
           prim_type == JVMTI_PRIMITIVE_TYPE_BYTE ||
           prim_type == JVMTI_PRIMITIVE_TYPE_CHAR ||
           prim_type == JVMTI_PRIMITIVE_TYPE_SHORT ||
           prim_type == JVMTI_PRIMITIVE_TYPE_INT ||
           prim_type == JVMTI_PRIMITIVE_TYPE_LONG ||
           prim_type == JVMTI_PRIMITIVE_TYPE_FLOAT ||
           prim_type == JVMTI_PRIMITIVE_TYPE_DOUBLE);
    jvmtiHeapReferenceInfo info;
    info.field.index = field_index;

    jvalue value;
    memset(&value, 0, sizeof(jvalue));
    art::ObjPtr<art::mirror::Object> src = kReportStatic ? klass : obj;
    switch (art_prim_type) {
      case art::Primitive::Type::kPrimBoolean:
        value.z = field.GetBoolean(src) == 0 ? JNI_FALSE : JNI_TRUE;
        break;
      case art::Primitive::Type::kPrimByte:
        value.b = field.GetByte(src);
        break;
      case art::Primitive::Type::kPrimChar:
        value.c = field.GetChar(src);
        break;
      case art::Primitive::Type::kPrimShort:
        value.s = field.GetShort(src);
        break;
      case art::Primitive::Type::kPrimInt:
        value.i = field.GetInt(src);
        break;
      case art::Primitive::Type::kPrimLong:
        value.j = field.GetLong(src);
        break;
      case art::Primitive::Type::kPrimFloat:
        value.f = field.GetFloat(src);
        break;
      case art::Primitive::Type::kPrimDouble:
        value.d = field.GetDouble(src);
        break;
      case art::Primitive::Type::kPrimVoid:
      case art::Primitive::Type::kPrimNot: {
        LOG(FATAL) << "Should not reach here";
        UNREACHABLE();
      }
    }

    jlong obj_tag = user_data->tag_table_->GetTagOrZero(src.Ptr());
    const jlong saved_obj_tag = obj_tag;

    jint ret = user_data->cb_->primitive_field_callback(kReportStatic
                                                            ? JVMTI_HEAP_REFERENCE_STATIC_FIELD
                                                            : JVMTI_HEAP_REFERENCE_FIELD,
                                                        &info,
                                                        user_data->class_tag_,
                                                        &obj_tag,
                                                        value,
                                                        prim_type,
                                                        const_cast<void*>(user_data->user_data_));

    if (saved_obj_tag != obj_tag) {
      user_data->tag_table_->Set(src.Ptr(), obj_tag);
    }

    if ((ret & JVMTI_VISIT_ABORT) != 0) {
      return true;
    }

    return false;
  }

  ObjectTagTable* tag_table_;
  jlong class_tag_;
  const jvmtiHeapCallbacks* cb_;
  const void* user_data_;
};

struct HeapFilter {
  explicit HeapFilter(jint heap_filter)
      : filter_out_tagged((heap_filter & JVMTI_HEAP_FILTER_TAGGED) != 0),
        filter_out_untagged((heap_filter & JVMTI_HEAP_FILTER_UNTAGGED) != 0),
        filter_out_class_tagged((heap_filter & JVMTI_HEAP_FILTER_CLASS_TAGGED) != 0),
        filter_out_class_untagged((heap_filter & JVMTI_HEAP_FILTER_CLASS_UNTAGGED) != 0),
        any_filter(filter_out_tagged ||
                   filter_out_untagged ||
                   filter_out_class_tagged ||
                   filter_out_class_untagged) {
  }

  bool ShouldReportByHeapFilter(jlong tag, jlong class_tag) const {
    if (!any_filter) {
      return true;
    }

    if ((tag == 0 && filter_out_untagged) || (tag != 0 && filter_out_tagged)) {
      return false;
    }

    if ((class_tag == 0 && filter_out_class_untagged) ||
        (class_tag != 0 && filter_out_class_tagged)) {
      return false;
    }

    return true;
  }

  const bool filter_out_tagged;
  const bool filter_out_untagged;
  const bool filter_out_class_tagged;
  const bool filter_out_class_untagged;
  const bool any_filter;
};
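
// Illustrative note on the JVMTI semantics implemented above (not part of the original comments):
// each heap_filter bit *excludes* a category. For example, JVMTI_HEAP_FILTER_UNTAGGED reports
// only tagged objects, and JVMTI_HEAP_FILTER_TAGGED | JVMTI_HEAP_FILTER_UNTAGGED filters out
// every object.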

}  // namespace

void HeapUtil::Register() {
  art::Runtime::Current()->AddSystemWeakHolder(&gIndexCachingTable);
}

void HeapUtil::Unregister() {
  art::Runtime::Current()->RemoveSystemWeakHolder(&gIndexCachingTable);
}

jvmtiError HeapUtil::IterateOverInstancesOfClass(jvmtiEnv* env,
                                                 jclass klass,
                                                 jvmtiHeapObjectFilter filter,
                                                 jvmtiHeapObjectCallback cb,
                                                 const void* user_data) {
  if (cb == nullptr || klass == nullptr) {
    return ERR(NULL_POINTER);
  }

  art::Thread* self = art::Thread::Current();
  art::ScopedObjectAccess soa(self);  // Now we know we have the shared lock.
  art::StackHandleScope<1> hs(self);

  art::ObjPtr<art::mirror::Object> klass_ptr(soa.Decode<art::mirror::Class>(klass));
  if (!klass_ptr->IsClass()) {
    return ERR(INVALID_CLASS);
  }
  art::Handle<art::mirror::Class> filter_klass(hs.NewHandle(klass_ptr->AsClass()));
  ObjectTagTable* tag_table = ArtJvmTiEnv::AsArtJvmTiEnv(env)->object_tag_table.get();
  bool stop_reports = false;
  auto visitor = [&](art::mirror::Object* obj) REQUIRES_SHARED(art::Locks::mutator_lock_) {
    // Early return, as we can't really stop visiting.
    if (stop_reports) {
      return;
    }

    art::ScopedAssertNoThreadSuspension no_suspension("IterateOverInstancesOfClass");

    art::ObjPtr<art::mirror::Class> klass = obj->GetClass();

    if (filter_klass != nullptr && !filter_klass->IsAssignableFrom(klass)) {
      return;
    }

    jlong tag = 0;
    tag_table->GetTag(obj, &tag);
    if ((filter != JVMTI_HEAP_OBJECT_EITHER) &&
        ((tag == 0 && filter == JVMTI_HEAP_OBJECT_TAGGED) ||
         (tag != 0 && filter == JVMTI_HEAP_OBJECT_UNTAGGED))) {
      return;
    }

    jlong class_tag = 0;
    tag_table->GetTag(klass.Ptr(), &class_tag);

    jlong saved_tag = tag;
    jint ret = cb(class_tag, obj->SizeOf(), &tag, const_cast<void*>(user_data));

    stop_reports = (ret == JVMTI_ITERATION_ABORT);

    if (tag != saved_tag) {
      tag_table->Set(obj, tag);
    }
  };
  art::Runtime::Current()->GetHeap()->VisitObjects(visitor);

  return OK;
}

template <typename T>
static jvmtiError DoIterateThroughHeap(T fn,
                                       jvmtiEnv* env,
                                       ObjectTagTable* tag_table,
                                       jint heap_filter_int,
                                       jclass klass,
                                       const jvmtiHeapCallbacks* callbacks,
                                       const void* user_data) {
  if (callbacks == nullptr) {
    return ERR(NULL_POINTER);
  }

  art::Thread* self = art::Thread::Current();
  art::ScopedObjectAccess soa(self);  // Now we know we have the shared lock.

  bool stop_reports = false;
  const HeapFilter heap_filter(heap_filter_int);
  art::StackHandleScope<1> hs(self);
  art::Handle<art::mirror::Class> filter_klass(hs.NewHandle(soa.Decode<art::mirror::Class>(klass)));
  auto visitor = [&](art::mirror::Object* obj) REQUIRES_SHARED(art::Locks::mutator_lock_) {
    // Early return, as we can't really stop visiting.
    if (stop_reports) {
      return;
    }

    art::ScopedAssertNoThreadSuspension no_suspension("IterateThroughHeapCallback");

    jlong tag = 0;
    tag_table->GetTag(obj, &tag);

    jlong class_tag = 0;
    art::ObjPtr<art::mirror::Class> klass = obj->GetClass();
    tag_table->GetTag(klass.Ptr(), &class_tag);
    // For simplicity, even if we find a tag = 0, assume 0 = not tagged.

    if (!heap_filter.ShouldReportByHeapFilter(tag, class_tag)) {
      return;
    }

    if (filter_klass != nullptr) {
      if (filter_klass.Get() != klass) {
        return;
      }
    }

    jlong size = obj->SizeOf();

    jint length = -1;
    if (obj->IsArrayInstance()) {
      length = obj->AsArray()->GetLength();
    }

    jlong saved_tag = tag;
    jint ret = fn(obj, callbacks, class_tag, size, &tag, length, const_cast<void*>(user_data));

    if (tag != saved_tag) {
      tag_table->Set(obj, tag);
    }

    stop_reports = (ret & JVMTI_VISIT_ABORT) != 0;

    if (!stop_reports) {
      jint string_ret = ReportString(obj, env, tag_table, callbacks, user_data);
      stop_reports = (string_ret & JVMTI_VISIT_ABORT) != 0;
    }

    if (!stop_reports) {
      jint array_ret = ReportPrimitiveArray(obj, env, tag_table, callbacks, user_data);
      stop_reports = (array_ret & JVMTI_VISIT_ABORT) != 0;
    }

    if (!stop_reports) {
      stop_reports = ReportPrimitiveField::Report(obj, tag_table, callbacks, user_data);
    }
  };
  art::Runtime::Current()->GetHeap()->VisitObjects(visitor);

  return ERR(NONE);
}
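
// Note on DoIterateThroughHeap above: the callback's return value is treated as a JVMTI
// visit-control bitmask, so JVMTI_VISIT_ABORT stops reporting; the bit is re-checked after each
// of the follow-up string, primitive-array, and primitive-field reports.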

jvmtiError HeapUtil::IterateThroughHeap(jvmtiEnv* env,
                                        jint heap_filter,
                                        jclass klass,
                                        const jvmtiHeapCallbacks* callbacks,
                                        const void* user_data) {
  auto JvmtiIterateHeap = []([[maybe_unused]] art::mirror::Object* obj,
                             const jvmtiHeapCallbacks* cb_callbacks,
                             jlong class_tag,
                             jlong size,
                             jlong* tag,
                             jint length,
                             void* cb_user_data) REQUIRES_SHARED(art::Locks::mutator_lock_) {
    return cb_callbacks->heap_iteration_callback(class_tag,
                                                 size,
                                                 tag,
                                                 length,
                                                 cb_user_data);
  };
  return DoIterateThroughHeap(JvmtiIterateHeap,
                              env,
                              ArtJvmTiEnv::AsArtJvmTiEnv(env)->object_tag_table.get(),
                              heap_filter,
                              klass,
                              callbacks,
                              user_data);
}

class FollowReferencesHelper final {
 public:
  FollowReferencesHelper(HeapUtil* h,
                         jvmtiEnv* jvmti_env,
                         art::ObjPtr<art::mirror::Object> initial_object,
                         const jvmtiHeapCallbacks* callbacks,
                         art::ObjPtr<art::mirror::Class> class_filter,
                         jint heap_filter,
                         const void* user_data)
      : env(jvmti_env),
        tag_table_(h->GetTags()),
        initial_object_(initial_object),
        callbacks_(callbacks),
        class_filter_(class_filter),
        heap_filter_(heap_filter),
        user_data_(user_data),
        start_(0),
        stop_reports_(false) {
  }

  void Init()
      REQUIRES_SHARED(art::Locks::mutator_lock_)
      REQUIRES(!*tag_table_->GetAllowDisallowLock()) {
    if (initial_object_.IsNull()) {
      CollectAndReportRootsVisitor carrv(this, tag_table_, &worklist_, &visited_);

      // We need precise info (e.g., vregs).
      constexpr art::VisitRootFlags kRootFlags = static_cast<art::VisitRootFlags>(
          art::VisitRootFlags::kVisitRootFlagAllRoots | art::VisitRootFlags::kVisitRootFlagPrecise);
      art::Runtime::Current()->VisitRoots(&carrv, kRootFlags);

      art::Runtime::Current()->VisitImageRoots(&carrv);
      stop_reports_ = carrv.IsStopReports();

      if (stop_reports_) {
        worklist_.clear();
      }
    } else {
      visited_.insert(initial_object_.Ptr());
      worklist_.push_back(initial_object_.Ptr());
    }
  }

  void Work()
      REQUIRES_SHARED(art::Locks::mutator_lock_)
      REQUIRES(!*tag_table_->GetAllowDisallowLock()) {
    // Currently implemented as a BFS. To lower overhead, we don't erase elements immediately
    // from the head of the work list, instead postponing until there's a gap that's "large."
    //
    // Alternatively, we can implement a DFS and use the work list as a stack.
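    // (Illustrative note: with kMaxStart = 1000000 below, the O(n) compaction of the worklist
    // vector runs at most once per million processed objects, keeping the amortized per-object
    // cost constant.)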
    while (start_ < worklist_.size()) {
      art::mirror::Object* cur_obj = worklist_[start_];
      start_++;

      if (start_ >= kMaxStart) {
        worklist_.erase(worklist_.begin(), worklist_.begin() + start_);
        start_ = 0;
      }

      VisitObject(cur_obj);

      if (stop_reports_) {
        break;
      }
    }
  }

 private:
  class CollectAndReportRootsVisitor final : public art::RootVisitor {
   public:
    CollectAndReportRootsVisitor(FollowReferencesHelper* helper,
                                 ObjectTagTable* tag_table,
                                 std::vector<art::mirror::Object*>* worklist,
                                 std::unordered_set<art::mirror::Object*>* visited)
        : helper_(helper),
          tag_table_(tag_table),
          worklist_(worklist),
          visited_(visited),
          stop_reports_(false) {}

    void VisitRoots(art::mirror::Object*** roots, size_t count, const art::RootInfo& info)
        override
        REQUIRES_SHARED(art::Locks::mutator_lock_)
        REQUIRES(!*helper_->tag_table_->GetAllowDisallowLock()) {
      for (size_t i = 0; i != count; ++i) {
        AddRoot(*roots[i], info);
      }
    }

    void VisitRoots(art::mirror::CompressedReference<art::mirror::Object>** roots,
                    size_t count,
                    const art::RootInfo& info)
        override REQUIRES_SHARED(art::Locks::mutator_lock_)
        REQUIRES(!*helper_->tag_table_->GetAllowDisallowLock()) {
      for (size_t i = 0; i != count; ++i) {
        AddRoot(roots[i]->AsMirrorPtr(), info);
      }
    }

    bool IsStopReports() {
      return stop_reports_;
    }

   private:
    void AddRoot(art::mirror::Object* root_obj, const art::RootInfo& info)
        REQUIRES_SHARED(art::Locks::mutator_lock_)
        REQUIRES(!*tag_table_->GetAllowDisallowLock()) {
      if (stop_reports_) {
        return;
      }
      bool add_to_worklist = ReportRoot(root_obj, info);
      // We use visited_ to mark roots already so we do not need another set.
      if (visited_->find(root_obj) == visited_->end()) {
        if (add_to_worklist) {
          visited_->insert(root_obj);
          worklist_->push_back(root_obj);
        }
      }
    }

    // Remove NO_THREAD_SAFETY_ANALYSIS once ASSERT_CAPABILITY works correctly.
    art::Thread* FindThread(const art::RootInfo& info) NO_THREAD_SAFETY_ANALYSIS {
      art::Locks::thread_list_lock_->AssertExclusiveHeld(art::Thread::Current());
      return art::Runtime::Current()->GetThreadList()->FindThreadByThreadId(info.GetThreadId());
    }

    jvmtiHeapReferenceKind GetReferenceKind(const art::RootInfo& info,
                                            jvmtiHeapReferenceInfo* ref_info)
        REQUIRES_SHARED(art::Locks::mutator_lock_) {
      // We do not necessarily hold thread_list_lock_ here, but we may if we are called from
      // VisitThreadRoots, which can happen from JVMTI FollowReferences. If it was acquired in
      // ThreadList::VisitRoots, it's unsafe to temporarily release it. Thus we act as if we did
      // not hold the thread_list_lock_ here, and relax CHECKs appropriately. If it does happen,
      // we are in a SuspendAll situation with concurrent GC disabled, and should not need to run
      // flip functions. TODO: Find a way to clean this up.

      // TODO: Fill in ref_info.
      memset(ref_info, 0, sizeof(jvmtiHeapReferenceInfo));

      switch (info.GetType()) {
        case art::RootType::kRootJNIGlobal:
          return JVMTI_HEAP_REFERENCE_JNI_GLOBAL;

        case art::RootType::kRootJNILocal:
        {
          uint32_t thread_id = info.GetThreadId();
          ref_info->jni_local.thread_id = thread_id;

          art::Thread* thread = FindThread(info);
          if (thread != nullptr) {
            art::mirror::Object* thread_obj;
            if (thread->IsStillStarting()) {
              thread_obj = nullptr;
            } else {
              thread_obj = thread->GetPeerFromOtherThread();
            }
            if (thread_obj != nullptr) {
              ref_info->jni_local.thread_tag = tag_table_->GetTagOrZero(thread_obj);
            }
          }

          // TODO: We don't have this info.
          if (thread != nullptr) {
            ref_info->jni_local.depth = 0;
            art::ArtMethod* method = thread->GetCurrentMethod(nullptr,
                                                              /* check_suspended= */ true,
                                                              /* abort_on_error= */ false);
            if (method != nullptr) {
              ref_info->jni_local.method = art::jni::EncodeArtMethod(method);
            }
          }

          return JVMTI_HEAP_REFERENCE_JNI_LOCAL;
        }

        case art::RootType::kRootJavaFrame:
        {
          uint32_t thread_id = info.GetThreadId();
          ref_info->stack_local.thread_id = thread_id;

          art::Thread* thread = FindThread(info);
          if (thread != nullptr) {
            art::mirror::Object* thread_obj;
            if (thread->IsStillStarting()) {
              thread_obj = nullptr;
            } else {
              thread_obj = thread->GetPeerFromOtherThread();
            }
            if (thread_obj != nullptr) {
              ref_info->stack_local.thread_tag = tag_table_->GetTagOrZero(thread_obj);
            }
          }

          auto& java_info = static_cast<const art::JavaFrameRootInfo&>(info);
          size_t vreg = java_info.GetVReg();
          ref_info->stack_local.slot = static_cast<jint>(
              vreg <= art::JavaFrameRootInfo::kMaxVReg ? vreg : -1);
          const art::StackVisitor* visitor = java_info.GetVisitor();
          ref_info->stack_local.location =
              static_cast<jlocation>(visitor->GetDexPc(/* abort_on_failure= */ false));
          ref_info->stack_local.depth = static_cast<jint>(visitor->GetFrameDepth());
          art::ArtMethod* method = visitor->GetMethod();
          if (method != nullptr) {
            ref_info->stack_local.method = art::jni::EncodeArtMethod(method);
          }

          return JVMTI_HEAP_REFERENCE_STACK_LOCAL;
        }

        case art::RootType::kRootNativeStack:
        case art::RootType::kRootThreadBlock:
        case art::RootType::kRootThreadObject:
          return JVMTI_HEAP_REFERENCE_THREAD;

        case art::RootType::kRootStickyClass:
        case art::RootType::kRootInternedString:
          // Note: this isn't a root in the RI.
          return JVMTI_HEAP_REFERENCE_SYSTEM_CLASS;

        case art::RootType::kRootMonitorUsed:
        case art::RootType::kRootJNIMonitor:
          return JVMTI_HEAP_REFERENCE_MONITOR;

        case art::RootType::kRootFinalizing:
        case art::RootType::kRootDebugger:
        case art::RootType::kRootReferenceCleanup:
        case art::RootType::kRootVMInternal:
        case art::RootType::kRootUnknown:
          return JVMTI_HEAP_REFERENCE_OTHER;
      }
      LOG(FATAL) << "Unreachable";
      UNREACHABLE();
    }

    bool ReportRoot(art::mirror::Object* root_obj, const art::RootInfo& info)
        REQUIRES_SHARED(art::Locks::mutator_lock_)
        REQUIRES(!*tag_table_->GetAllowDisallowLock()) {
      jvmtiHeapReferenceInfo ref_info;
      jvmtiHeapReferenceKind kind = GetReferenceKind(info, &ref_info);
      jint result = helper_->ReportReference(kind, &ref_info, nullptr, root_obj);
      if ((result & JVMTI_VISIT_ABORT) != 0) {
        stop_reports_ = true;
      }
      return (result & JVMTI_VISIT_OBJECTS) != 0;
    }

   private:
    FollowReferencesHelper* helper_;
    ObjectTagTable* tag_table_;
    std::vector<art::mirror::Object*>* worklist_;
    std::unordered_set<art::mirror::Object*>* visited_;
    bool stop_reports_;
  };

  void VisitObject(art::mirror::Object* obj)
      REQUIRES_SHARED(art::Locks::mutator_lock_)
      REQUIRES(!*tag_table_->GetAllowDisallowLock()) {
    if (obj->IsClass()) {
      VisitClass(obj->AsClass().Ptr());
      return;
    }
    if (obj->IsArrayInstance()) {
      VisitArray(obj);
      return;
    }

    // All instance fields.
    auto report_instance_field =
        [&](art::ObjPtr<art::mirror::Object> src,
            [[maybe_unused]] art::ObjPtr<art::mirror::Class> obj_klass,
            art::ArtField& field,
            size_t field_index,
            [[maybe_unused]] void* user_data) REQUIRES_SHARED(art::Locks::mutator_lock_)
            REQUIRES(!*tag_table_->GetAllowDisallowLock()) {
          art::ObjPtr<art::mirror::Object> field_value = field.GetObject(src);
          if (field_value != nullptr) {
            jvmtiHeapReferenceInfo reference_info;
            memset(&reference_info, 0, sizeof(reference_info));

            reference_info.field.index = field_index;

            jvmtiHeapReferenceKind kind =
                field.GetOffset().Int32Value() == art::mirror::Object::ClassOffset().Int32Value()
                    ? JVMTI_HEAP_REFERENCE_CLASS
                    : JVMTI_HEAP_REFERENCE_FIELD;
            const jvmtiHeapReferenceInfo* reference_info_ptr =
                kind == JVMTI_HEAP_REFERENCE_CLASS ? nullptr : &reference_info;

            return !ReportReferenceMaybeEnqueue(
                kind, reference_info_ptr, src.Ptr(), field_value.Ptr());
          }
          return false;
        };
    stop_reports_ = FieldVisitor<void, true>::ReportFields(obj,
                                                           nullptr,
                                                           VisitorFalse<void>,
                                                           VisitorFalse<void>,
                                                           VisitorFalse<void>,
                                                           report_instance_field);
    if (stop_reports_) {
      return;
    }

    jint string_ret = ReportString(obj, env, tag_table_, callbacks_, user_data_);
    stop_reports_ = (string_ret & JVMTI_VISIT_ABORT) != 0;
    if (stop_reports_) {
      return;
    }

    stop_reports_ = ReportPrimitiveField::Report(obj, tag_table_, callbacks_, user_data_);
  }

  void VisitArray(art::mirror::Object* array)
      REQUIRES_SHARED(art::Locks::mutator_lock_)
      REQUIRES(!*tag_table_->GetAllowDisallowLock()) {
    stop_reports_ = !ReportReferenceMaybeEnqueue(JVMTI_HEAP_REFERENCE_CLASS,
                                                 nullptr,
                                                 array,
                                                 array->GetClass());
    if (stop_reports_) {
      return;
    }

    if (array->IsObjectArray()) {
      art::ObjPtr<art::mirror::ObjectArray<art::mirror::Object>> obj_array =
          array->AsObjectArray<art::mirror::Object>();
      for (auto elem_pair : art::ZipCount(obj_array->Iterate())) {
        if (elem_pair.first != nullptr) {
          jvmtiHeapReferenceInfo reference_info;
          reference_info.array.index = elem_pair.second;
          stop_reports_ = !ReportReferenceMaybeEnqueue(JVMTI_HEAP_REFERENCE_ARRAY_ELEMENT,
                                                       &reference_info,
                                                       array,
                                                       elem_pair.first.Ptr());
          if (stop_reports_) {
            break;
          }
        }
      }
    } else {
      if (!stop_reports_) {
        jint array_ret = ReportPrimitiveArray(array, env, tag_table_, callbacks_, user_data_);
        stop_reports_ = (array_ret & JVMTI_VISIT_ABORT) != 0;
      }
    }
  }

  void VisitClass(art::mirror::Class* klass)
      REQUIRES_SHARED(art::Locks::mutator_lock_)
      REQUIRES(!*tag_table_->GetAllowDisallowLock()) {
    // TODO: Are erroneous classes reported? Are non-prepared ones? For now, just use resolved ones.
    if (!klass->IsResolved()) {
      return;
    }

    // Superclass.
    stop_reports_ = !ReportReferenceMaybeEnqueue(JVMTI_HEAP_REFERENCE_SUPERCLASS,
                                                 nullptr,
                                                 klass,
                                                 klass->GetSuperClass().Ptr());
    if (stop_reports_) {
      return;
    }

    // Directly implemented or extended interfaces.
    art::Thread* self = art::Thread::Current();
    art::StackHandleScope<1> hs(self);
    art::Handle<art::mirror::Class> h_klass(hs.NewHandle<art::mirror::Class>(klass));
    for (size_t i = 0; i < h_klass->NumDirectInterfaces(); ++i) {
      art::ObjPtr<art::mirror::Class> inf_klass =
          art::mirror::Class::ResolveDirectInterface(self, h_klass, i);
      if (inf_klass == nullptr) {
        // TODO: With a resolved class this should not happen...
        self->ClearException();
        break;
      }

      stop_reports_ = !ReportReferenceMaybeEnqueue(JVMTI_HEAP_REFERENCE_INTERFACE,
                                                   nullptr,
                                                   klass,
                                                   inf_klass.Ptr());
      if (stop_reports_) {
        return;
      }
    }

    // Classloader.
    // TODO: What about the boot classpath loader? We'll skip for now, but do we have to find the
    //       fake BootClassLoader?
    if (klass->GetClassLoader() != nullptr) {
      stop_reports_ = !ReportReferenceMaybeEnqueue(JVMTI_HEAP_REFERENCE_CLASS_LOADER,
                                                   nullptr,
                                                   klass,
                                                   klass->GetClassLoader().Ptr());
      if (stop_reports_) {
        return;
      }
    }
    DCHECK_EQ(h_klass.Get(), klass);

    // Declared static fields.
    auto report_static_field =
        [&]([[maybe_unused]] art::ObjPtr<art::mirror::Object> obj,
            art::ObjPtr<art::mirror::Class> obj_klass,
            art::ArtField& field,
            size_t field_index,
            [[maybe_unused]] void* user_data) REQUIRES_SHARED(art::Locks::mutator_lock_)
            REQUIRES(!*tag_table_->GetAllowDisallowLock()) {
          art::ObjPtr<art::mirror::Object> field_value = field.GetObject(obj_klass);
          if (field_value != nullptr) {
            jvmtiHeapReferenceInfo reference_info;
            memset(&reference_info, 0, sizeof(reference_info));

            reference_info.field.index = static_cast<jint>(field_index);

            return !ReportReferenceMaybeEnqueue(JVMTI_HEAP_REFERENCE_STATIC_FIELD,
                                                &reference_info,
                                                obj_klass.Ptr(),
                                                field_value.Ptr());
          }
          return false;
        };
    stop_reports_ = FieldVisitor<void, false>::ReportFields(klass,
                                                            nullptr,
                                                            VisitorFalse<void>,
                                                            report_static_field,
                                                            VisitorFalse<void>,
                                                            VisitorFalse<void>);
    if (stop_reports_) {
      return;
    }

    stop_reports_ = ReportPrimitiveField::Report(klass, tag_table_, callbacks_, user_data_);
  }

  void MaybeEnqueue(art::mirror::Object* obj) REQUIRES_SHARED(art::Locks::mutator_lock_) {
    if (visited_.find(obj) == visited_.end()) {
      worklist_.push_back(obj);
      visited_.insert(obj);
    }
  }

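  // Returns true if the visit should continue, i.e. the callback did not request
  // JVMTI_VISIT_ABORT. The referree is enqueued for its own visit only when the callback
  // requests JVMTI_VISIT_OBJECTS.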
  bool ReportReferenceMaybeEnqueue(jvmtiHeapReferenceKind kind,
                                   const jvmtiHeapReferenceInfo* reference_info,
                                   art::mirror::Object* referrer,
                                   art::mirror::Object* referree)
      REQUIRES_SHARED(art::Locks::mutator_lock_)
      REQUIRES(!*tag_table_->GetAllowDisallowLock()) {
    jint result = ReportReference(kind, reference_info, referrer, referree);
    if ((result & JVMTI_VISIT_ABORT) == 0) {
      if ((result & JVMTI_VISIT_OBJECTS) != 0) {
        MaybeEnqueue(referree);
      }
      return true;
    } else {
      return false;
    }
  }

  jint ReportReference(jvmtiHeapReferenceKind kind,
                       const jvmtiHeapReferenceInfo* reference_info,
                       art::mirror::Object* referrer,
                       art::mirror::Object* referree)
      REQUIRES_SHARED(art::Locks::mutator_lock_)
      REQUIRES(!*tag_table_->GetAllowDisallowLock()) {
    if (referree == nullptr || stop_reports_) {
      return 0;
    }

    if (UNLIKELY(class_filter_ != nullptr) && class_filter_ != referree->GetClass()) {
      return JVMTI_VISIT_OBJECTS;
    }

    const jlong class_tag = tag_table_->GetTagOrZero(referree->GetClass());
    jlong tag = tag_table_->GetTagOrZero(referree);

    if (!heap_filter_.ShouldReportByHeapFilter(tag, class_tag)) {
      return JVMTI_VISIT_OBJECTS;
    }

    const jlong referrer_class_tag =
        referrer == nullptr ? 0 : tag_table_->GetTagOrZero(referrer->GetClass());
    const jlong size = static_cast<jlong>(referree->SizeOf());
    jlong saved_tag = tag;
    jlong referrer_tag = 0;
    jlong saved_referrer_tag = 0;
    jlong* referrer_tag_ptr;
    if (referrer == nullptr) {
      referrer_tag_ptr = nullptr;
    } else {
      if (referrer == referree) {
        referrer_tag_ptr = &tag;
      } else {
        referrer_tag = saved_referrer_tag = tag_table_->GetTagOrZero(referrer);
        referrer_tag_ptr = &referrer_tag;
      }
    }

    jint length = -1;
    if (referree->IsArrayInstance()) {
      length = referree->AsArray()->GetLength();
    }

    jint result = callbacks_->heap_reference_callback(kind,
                                                      reference_info,
                                                      class_tag,
                                                      referrer_class_tag,
                                                      size,
                                                      &tag,
                                                      referrer_tag_ptr,
                                                      length,
                                                      const_cast<void*>(user_data_));

    if (tag != saved_tag) {
      tag_table_->Set(referree, tag);
    }
    if (referrer_tag != saved_referrer_tag) {
      tag_table_->Set(referrer, referrer_tag);
    }

    return result;
  }

  jvmtiEnv* env;
  ObjectTagTable* tag_table_;
  art::ObjPtr<art::mirror::Object> initial_object_;
  const jvmtiHeapCallbacks* callbacks_;
  art::ObjPtr<art::mirror::Class> class_filter_;
  const HeapFilter heap_filter_;
  const void* user_data_;

  std::vector<art::mirror::Object*> worklist_;
  size_t start_;
  static constexpr size_t kMaxStart = 1000000U;

  std::unordered_set<art::mirror::Object*> visited_;

  bool stop_reports_;

  friend class CollectAndReportRootsVisitor;
};

jvmtiError HeapUtil::FollowReferences(jvmtiEnv* env,
                                      jint heap_filter,
                                      jclass klass,
                                      jobject initial_object,
                                      const jvmtiHeapCallbacks* callbacks,
                                      const void* user_data) {
  if (callbacks == nullptr) {
    return ERR(NULL_POINTER);
  }

  art::Thread* self = art::Thread::Current();

  art::gc::Heap* heap = art::Runtime::Current()->GetHeap();
  if (heap->IsGcConcurrentAndMoving()) {
    // Need to take a heap dump while GC isn't running. See the
    // comment in Heap::VisitObjects().
    heap->IncrementDisableMovingGC(self);
  }
  {
    art::ScopedObjectAccess soa(self);  // Now we know we have the shared lock.
    art::jni::ScopedEnableSuspendAllJniIdQueries sjni;  // Make sure we can get JNI ids.
    art::ScopedThreadSuspension sts(self, art::ThreadState::kWaitingForVisitObjects);
    art::ScopedSuspendAll ssa("FollowReferences");

    art::ObjPtr<art::mirror::Class> class_filter = klass == nullptr
        ? nullptr
        : art::ObjPtr<art::mirror::Class>::DownCast(self->DecodeJObject(klass));
    FollowReferencesHelper frh(this,
                               env,
                               self->DecodeJObject(initial_object),
                               callbacks,
                               class_filter,
                               heap_filter,
                               user_data);
    frh.Init();
    frh.Work();
  }
  if (heap->IsGcConcurrentAndMoving()) {
    heap->DecrementDisableMovingGC(self);
  }

  return ERR(NONE);
}

jvmtiError HeapUtil::GetLoadedClasses(jvmtiEnv* env,
                                      jint* class_count_ptr,
                                      jclass** classes_ptr) {
  if (class_count_ptr == nullptr || classes_ptr == nullptr) {
    return ERR(NULL_POINTER);
  }

  class ReportClassVisitor : public art::ClassVisitor {
   public:
    explicit ReportClassVisitor(art::Thread* self) : self_(self) {}

    bool operator()(art::ObjPtr<art::mirror::Class> klass)
        override REQUIRES_SHARED(art::Locks::mutator_lock_) {
      if (klass->IsLoaded() || klass->IsErroneous()) {
        classes_.push_back(self_->GetJniEnv()->AddLocalReference<jclass>(klass));
      }
      return true;
    }

    art::Thread* self_;
    std::vector<jclass> classes_;
  };

  art::Thread* self = art::Thread::Current();
  ReportClassVisitor rcv(self);
  {
    art::ScopedObjectAccess soa(self);
    art::Runtime::Current()->GetClassLinker()->VisitClasses(&rcv);
  }

  size_t size = rcv.classes_.size();
  jclass* classes = nullptr;
  jvmtiError alloc_ret = env->Allocate(static_cast<jlong>(size * sizeof(jclass)),
                                       reinterpret_cast<unsigned char**>(&classes));
  if (alloc_ret != ERR(NONE)) {
    return alloc_ret;
  }

  for (size_t i = 0; i < size; ++i) {
    classes[i] = rcv.classes_[i];
  }
  *classes_ptr = classes;
  *class_count_ptr = static_cast<jint>(size);

  return ERR(NONE);
}

jvmtiError HeapUtil::ForceGarbageCollection([[maybe_unused]] jvmtiEnv* env) {
  art::Runtime::Current()->GetHeap()->CollectGarbage(/* clear_soft_references= */ false);

  return ERR(NONE);
}

static constexpr jint kHeapIdDefault = 0;
static constexpr jint kHeapIdImage = 1;
static constexpr jint kHeapIdZygote = 2;
static constexpr jint kHeapIdApp = 3;
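// These ids mirror the hprof heap assignment (see GetHeapId below) and are exposed to agents
// through the GetObjectHeapId and GetHeapName extensions; GetHeapName maps them to the names
// "default", "image", "zygote", and "app".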

static jint GetHeapId(art::ObjPtr<art::mirror::Object> obj)
    REQUIRES_SHARED(art::Locks::mutator_lock_) {
  if (obj == nullptr) {
    return -1;
  }

  art::gc::Heap* const heap = art::Runtime::Current()->GetHeap();
  const art::gc::space::ContinuousSpace* const space =
      heap->FindContinuousSpaceFromObject(obj, true);
  jint heap_type = kHeapIdApp;
  if (space != nullptr) {
    if (space->IsZygoteSpace()) {
      heap_type = kHeapIdZygote;
    } else if (space->IsImageSpace() && heap->ObjectIsInBootImageSpace(obj)) {
      // Only count objects in the boot image as HPROF_HEAP_IMAGE; this leaves app image objects
      // as HPROF_HEAP_APP. b/35762934
      heap_type = kHeapIdImage;
    }
  } else {
    const auto* los = heap->GetLargeObjectsSpace();
    if (los->Contains(obj.Ptr()) && los->IsZygoteLargeObject(art::Thread::Current(), obj.Ptr())) {
      heap_type = kHeapIdZygote;
    }
  }
  return heap_type;
}

jvmtiError HeapExtensions::GetObjectHeapId(jvmtiEnv* env, jlong tag, jint* heap_id, ...) {
  if (heap_id == nullptr) {
    return ERR(NULL_POINTER);
  }

  art::Thread* self = art::Thread::Current();

  auto work = [&]() REQUIRES_SHARED(art::Locks::mutator_lock_) {
    ObjectTagTable* tag_table = ArtJvmTiEnv::AsArtJvmTiEnv(env)->object_tag_table.get();
    art::ObjPtr<art::mirror::Object> obj = tag_table->Find(tag);
    jint heap_type = GetHeapId(obj);
    if (heap_type == -1) {
      return ERR(NOT_FOUND);
    }
    *heap_id = heap_type;
    return ERR(NONE);
  };

  if (!art::Locks::mutator_lock_->IsSharedHeld(self)) {
    if (!self->IsThreadSuspensionAllowable()) {
      return ERR(INTERNAL);
    }
    art::ScopedObjectAccess soa(self);
    return work();
  } else {
    // We cannot use SOA in this case. We might be holding the lock, but may not be in the
    // runnable state (e.g., during GC).
    art::Locks::mutator_lock_->AssertSharedHeld(self);
    // TODO: Investigate why ASSERT_SHARED_CAPABILITY doesn't work.
    auto annotalysis_workaround = [&]() NO_THREAD_SAFETY_ANALYSIS {
      return work();
    };
    return annotalysis_workaround();
  }
}
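
// Example (hypothetical agent code; the function pointer would be obtained through the standard
// JVMTI extension-function mechanism, i.e. GetExtensionFunctions):
//
//   jint heap_id;
//   jvmtiError err = get_object_heap_id(jvmti_env, tag, &heap_id);
//   if (err == JVMTI_ERROR_NONE) {
//     // heap_id is one of the kHeapId* constants above.
//   }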
1554
CopyStringAndReturn(jvmtiEnv * env,const char * in,char ** out)1555 static jvmtiError CopyStringAndReturn(jvmtiEnv* env, const char* in, char** out) {
1556 jvmtiError error;
1557 JvmtiUniquePtr<char[]> param_name = CopyString(env, in, &error);
1558 if (param_name == nullptr) {
1559 return error;
1560 }
1561 *out = param_name.release();
1562 return ERR(NONE);
1563 }
1564
1565 static constexpr const char* kHeapIdDefaultName = "default";
1566 static constexpr const char* kHeapIdImageName = "image";
1567 static constexpr const char* kHeapIdZygoteName = "zygote";
1568 static constexpr const char* kHeapIdAppName = "app";
1569
GetHeapName(jvmtiEnv * env,jint heap_id,char ** heap_name,...)1570 jvmtiError HeapExtensions::GetHeapName(jvmtiEnv* env, jint heap_id, char** heap_name, ...) {
1571 switch (heap_id) {
1572 case kHeapIdDefault:
1573 return CopyStringAndReturn(env, kHeapIdDefaultName, heap_name);
1574 case kHeapIdImage:
1575 return CopyStringAndReturn(env, kHeapIdImageName, heap_name);
1576 case kHeapIdZygote:
1577 return CopyStringAndReturn(env, kHeapIdZygoteName, heap_name);
1578 case kHeapIdApp:
1579 return CopyStringAndReturn(env, kHeapIdAppName, heap_name);
1580
1581 default:
1582 return ERR(ILLEGAL_ARGUMENT);
1583 }
1584 }

#pragma clang diagnostic push
#pragma clang diagnostic ignored "-Wcast-function-type-mismatch"
jvmtiError HeapExtensions::IterateThroughHeapExt(jvmtiEnv* env,
                                                 jint heap_filter,
                                                 jclass klass,
                                                 const jvmtiHeapCallbacks* callbacks,
                                                 const void* user_data) {
  if (ArtJvmTiEnv::AsArtJvmTiEnv(env)->capabilities.can_tag_objects != 1) {
    return ERR(MUST_POSSESS_CAPABILITY);
  }

  // ART extension API: also pass the heap id as a trailing argument.
  auto ArtIterateHeap = [](art::mirror::Object* obj,
                           const jvmtiHeapCallbacks* cb_callbacks,
                           jlong class_tag,
                           jlong size,
                           jlong* tag,
                           jint length,
                           void* cb_user_data)
      REQUIRES_SHARED(art::Locks::mutator_lock_) {
    jint heap_id = GetHeapId(obj);
    using ArtExtensionAPI = jint (*)(jlong, jlong, jlong*, jint, void*, jint);
    return reinterpret_cast<ArtExtensionAPI>(cb_callbacks->heap_iteration_callback)(
        class_tag, size, tag, length, cb_user_data, heap_id);
  };
  return DoIterateThroughHeap(ArtIterateHeap,
                              env,
                              ArtJvmTiEnv::AsArtJvmTiEnv(env)->object_tag_table.get(),
                              heap_filter,
                              klass,
                              callbacks,
                              user_data);
}
#pragma clang diagnostic pop
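
// Agent-side sketch of the extended callback shape consumed above (illustrative, not compiled
// here): the standard jvmtiHeapIterationCallback parameters plus a trailing jint heap id, which
// is exactly what the reinterpret_cast in IterateThroughHeapExt invokes. The per-heap histogram
// passed through user_data is a hypothetical agent-side arrangement.
//
//   extern "C" jint HeapIterationExtCallback(jlong class_tag,
//                                            jlong size,
//                                            jlong* tag_ptr,
//                                            jint length,
//                                            void* user_data,
//                                            jint heap_id) {
//     // Accumulate per-heap byte counts; heap_id is one of the kHeapId* values above and
//     // user_data is assumed to point at a jlong histogram indexed by heap id.
//     static_cast<jlong*>(user_data)[heap_id] += size;
//     return 0;  // Zero means "do not abort"; returning JVMTI_VISIT_ABORT stops the iteration.
//   }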

namespace {

using ObjectPtr = art::ObjPtr<art::mirror::Object>;
using ObjectMap = std::unordered_map<ObjectPtr, ObjectPtr, art::HashObjPtr>;

static void ReplaceObjectReferences(const ObjectMap& map)
    REQUIRES(art::Locks::mutator_lock_,
             art::Roles::uninterruptible_) {
  art::Runtime::Current()->GetHeap()->VisitObjectsPaused(
      [&](art::mirror::Object* ref) REQUIRES_SHARED(art::Locks::mutator_lock_) {
        // Rewrite all references in the object if needed.
        class ResizeReferenceVisitor {
         public:
          using CompressedObj = art::mirror::CompressedReference<art::mirror::Object>;
          explicit ResizeReferenceVisitor(const ObjectMap& map, ObjectPtr ref)
              : map_(map), ref_(ref) {}

          // Ignore class roots.
          void VisitRootIfNonNull(CompressedObj* root) const
              REQUIRES_SHARED(art::Locks::mutator_lock_) {
            if (root != nullptr) {
              VisitRoot(root);
            }
          }
          void VisitRoot(CompressedObj* root) const REQUIRES_SHARED(art::Locks::mutator_lock_) {
            auto it = map_.find(root->AsMirrorPtr());
            if (it != map_.end()) {
              root->Assign(it->second);
              art::WriteBarrier::ForEveryFieldWrite(ref_);
            }
          }

          void operator()(art::ObjPtr<art::mirror::Object> obj,
                          art::MemberOffset off,
                          bool is_static) const
              REQUIRES_SHARED(art::Locks::mutator_lock_) {
            auto it = map_.find(obj->GetFieldObject<art::mirror::Object>(off));
            if (it != map_.end()) {
              if (UNLIKELY(!is_static && off == art::mirror::Object::ClassOffset())) {
                // We don't want to update the declaring class of any objects. They will be
                // replaced in the heap and we need the declaring class to know its size.
                return;
              } else if (UNLIKELY(!is_static && off == art::mirror::Class::SuperClassOffset() &&
                                  obj->IsClass())) {
                // We don't want to be messing with the class hierarchy either.
                return;
              }
              VLOG(plugin) << "Updating field at offset " << off.Uint32Value() << " of type "
                           << obj->GetClass()->PrettyClass();
              obj->SetFieldObject</*transaction*/ false>(off, it->second);
              art::WriteBarrier::ForEveryFieldWrite(obj);
            }
          }

          // java.lang.ref.Reference visitor.
          void operator()([[maybe_unused]] art::ObjPtr<art::mirror::Class> klass,
                          art::ObjPtr<art::mirror::Reference> ref) const
              REQUIRES_SHARED(art::Locks::mutator_lock_) {
            operator()(ref, art::mirror::Reference::ReferentOffset(), /* is_static */ false);
          }

         private:
          const ObjectMap& map_;
          ObjectPtr ref_;
        };

        ResizeReferenceVisitor rrv(map, ref);
        if (ref->IsClass()) {
          // Class object native roots are the ArtField and ArtMethod 'declaring_class_' fields,
          // which we don't want to be messing with as it would break ref-visitor assumptions
          // about what a class looks like. We want to keep the default behavior in other cases
          // (such as dex-cache) though. Unfortunately there is no way to tell from the visitor
          // where exactly the root came from.
          // TODO It might be nice to have the visitors told where the reference came from.
          ref->VisitReferences</*kVisitNativeRoots*/ false>(rrv, rrv);
        } else {
          ref->VisitReferences</*kVisitNativeRoots*/ true>(rrv, rrv);
        }
      });
}

static void ReplaceStrongRoots(art::Thread* self, const ObjectMap& map)
    REQUIRES(art::Locks::mutator_lock_, art::Roles::uninterruptible_) {
  // Replace root references except java frames.
  struct ResizeRootVisitor : public art::RootVisitor {
   public:
    explicit ResizeRootVisitor(const ObjectMap& map) : map_(map) {}

    // TODO It's somewhat annoying to have to implement this function twice. It might be
    // good/useful to implement operator= for CompressedReference to allow us to use a template
    // to implement both of these.
    void VisitRoots(art::mirror::Object*** roots, size_t count, const art::RootInfo& info) override
        REQUIRES_SHARED(art::Locks::mutator_lock_) {
      art::mirror::Object*** end = roots + count;
      for (art::mirror::Object** obj = *roots; roots != end; obj = *(++roots)) {
        auto it = map_.find(*obj);
        if (it != map_.end()) {
          // Java frames might have the JIT doing optimizations (for example loop-unrolling or
          // eliding bounds checks) so we need to deopt them once we're done here.
          if (info.GetType() == art::RootType::kRootJavaFrame) {
            const art::JavaFrameRootInfo& jfri =
                art::down_cast<const art::JavaFrameRootInfo&>(info);
            if (jfri.GetVReg() == art::JavaFrameRootInfo::kMethodDeclaringClass) {
              info.Describe(VLOG_STREAM(plugin) << "Not changing declaring-class during stack"
                                                << " walk. Found obsolete java frame id ");
              continue;
            } else {
              info.Describe(VLOG_STREAM(plugin) << "Found java frame id ");
              threads_with_roots_.insert(info.GetThreadId());
            }
          }
          *obj = it->second.Ptr();
        }
      }
    }

    void VisitRoots(art::mirror::CompressedReference<art::mirror::Object>** roots,
                    size_t count,
                    const art::RootInfo& info) override
        REQUIRES_SHARED(art::Locks::mutator_lock_) {
      art::mirror::CompressedReference<art::mirror::Object>** end = roots + count;
      for (art::mirror::CompressedReference<art::mirror::Object>* obj = *roots; roots != end;
           obj = *(++roots)) {
        auto it = map_.find(obj->AsMirrorPtr());
        if (it != map_.end()) {
          // Java frames might have the JIT doing optimizations (for example loop-unrolling or
          // eliding bounds checks) so we need to deopt them once we're done here.
          if (info.GetType() == art::RootType::kRootJavaFrame) {
            const art::JavaFrameRootInfo& jfri =
                art::down_cast<const art::JavaFrameRootInfo&>(info);
            if (jfri.GetVReg() == art::JavaFrameRootInfo::kMethodDeclaringClass) {
              info.Describe(VLOG_STREAM(plugin) << "Not changing declaring-class during stack"
                                                << " walk. Found obsolete java frame id ");
              continue;
            } else {
              info.Describe(VLOG_STREAM(plugin) << "Found java frame id ");
              threads_with_roots_.insert(info.GetThreadId());
            }
          }
          obj->Assign(it->second);
        }
      }
    }

    const std::unordered_set<uint32_t>& GetThreadsWithJavaFrameRoots() const {
      return threads_with_roots_;
    }

   private:
    const ObjectMap& map_;
    std::unordered_set<uint32_t> threads_with_roots_;
  };
  ResizeRootVisitor rrv(map);
  art::Runtime::Current()->VisitRoots(&rrv, art::VisitRootFlags::kVisitRootFlagAllRoots);
  // Handle java frames. Annoyingly the JIT can embed information about the length of the array
  // into the compiled code. By changing the length of the array we potentially invalidate these
  // assumptions and so could cause (e.g.) an OOB array access or other issues.
  if (!rrv.GetThreadsWithJavaFrameRoots().empty()) {
    art::MutexLock mu(self, *art::Locks::thread_list_lock_);
    art::ThreadList* thread_list = art::Runtime::Current()->GetThreadList();
    art::instrumentation::Instrumentation* instr = art::Runtime::Current()->GetInstrumentation();
    for (uint32_t id : rrv.GetThreadsWithJavaFrameRoots()) {
      art::Thread* t = thread_list->FindThreadByThreadId(id);
      CHECK(t != nullptr) << "id " << id << " does not refer to a valid thread."
                          << " Where did the roots come from?";
      VLOG(plugin) << "Instrumenting thread stack of thread " << *t;
      // TODO Use the deopt manager. We need a version that doesn't acquire all the locks we
      // already have.
      // TODO We technically only need to do this if the frames are not already being
      // interpreted. The cost of doing an extra stack walk is unlikely to be worth it though.
      instr->InstrumentThreadStack(t, /* force_deopt= */ true);
    }
  }
}

static void ReplaceWeakRoots(art::Thread* self,
                             EventHandler* event_handler,
                             const ObjectMap& map)
    REQUIRES(art::Locks::mutator_lock_, art::Roles::uninterruptible_) {
  // Handle tags. We want to do this separately from the other weak-refs (handled below) because
  // we need to send additional events and handle cases where the agent might have tagged the new
  // replacement object during the VMObjectAlloc. We do this by removing all tags associated with
  // both the obsolete and the new arrays. Then we send the ObsoleteObjectCreated event and cache
  // the new tag values. We next update all the other weak-references (the tags have been removed)
  // and finally update the tag table with the new values. Doing things in this way (1) keeps all
  // code relating to updating weak-references together and (2) ensures we don't end up in strange
  // situations where the order of weak-ref visiting affects the final tagging state. Since we
  // hold the mutator_lock_ and the GC is paused throughout this whole process, no thread should
  // be able to observe the interval where the objects are untagged.
  struct NewTagValue {
   public:
    ObjectPtr obsolete_obj_;
    jlong obsolete_tag_;
    ObjectPtr new_obj_;
    jlong new_tag_;
  };

  // Map from the environment to the list of <obsolete_tag, new_tag> pairs that were changed.
  std::unordered_map<ArtJvmTiEnv*, std::vector<NewTagValue>> changed_tags;
  event_handler->ForEachEnv(self, [&](ArtJvmTiEnv* env) {
    // Cannot have REQUIRES(art::Locks::mutator_lock_) since ForEachEnv doesn't require it.
    art::Locks::mutator_lock_->AssertExclusiveHeld(self);
    env->object_tag_table->Lock();
    // Get the tags and clear them (so we don't need to special-case the normal weak-ref visitor).
    for (auto it : map) {
      jlong new_tag = 0;
      jlong obsolete_tag = 0;
      bool had_obsolete_tag = env->object_tag_table->RemoveLocked(it.first, &obsolete_tag);
      bool had_new_tag = env->object_tag_table->RemoveLocked(it.second, &new_tag);
      // Dispatch event.
      if (had_obsolete_tag || had_new_tag) {
        event_handler->DispatchEventOnEnv<ArtJvmtiEvent::kObsoleteObjectCreated>(
            env, self, &obsolete_tag, &new_tag);
        changed_tags.try_emplace(env).first->second.push_back(
            { it.first, obsolete_tag, it.second, new_tag });
      }
    }
    // After the weak-ref update we need to go back and re-add the obsoletes. We wait to avoid
    // having to deal with the visit-weaks overwriting the initial new_obj_ptr tag and generally
    // making things difficult.
    env->object_tag_table->Unlock();
  });
  // Handle weak-refs.
  struct ReplaceWeaksVisitor : public art::IsMarkedVisitor {
   public:
    explicit ReplaceWeaksVisitor(const ObjectMap& map) : map_(map) {}

    art::mirror::Object* IsMarked(art::mirror::Object* obj) override
        REQUIRES_SHARED(art::Locks::mutator_lock_) {
      auto it = map_.find(obj);
      if (it != map_.end()) {
        return it->second.Ptr();
      } else {
        return obj;
      }
    }

   private:
    const ObjectMap& map_;
  };
  ReplaceWeaksVisitor rwv(map);
  art::Runtime* runtime = art::Runtime::Current();
  runtime->SweepSystemWeaks(&rwv);
  runtime->GetThreadList()->SweepInterpreterCaches(&rwv);
  // Re-add the object tags. At this point all weak-references to the old_obj_ptr are gone.
  event_handler->ForEachEnv(self, [&](ArtJvmTiEnv* env) {
    // Cannot have REQUIRES(art::Locks::mutator_lock_) since ForEachEnv doesn't require it.
    art::Locks::mutator_lock_->AssertExclusiveHeld(self);
    env->object_tag_table->Lock();
    auto it = changed_tags.find(env);
    if (it != changed_tags.end()) {
      for (const NewTagValue& v : it->second) {
        env->object_tag_table->SetLocked(v.obsolete_obj_, v.obsolete_tag_);
        env->object_tag_table->SetLocked(v.new_obj_, v.new_tag_);
      }
    }
    env->object_tag_table->Unlock();
  });
}
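
// Agent-side sketch of the ObsoleteObjectCreated extension event dispatched above (illustrative,
// not compiled here). The tag slots are passed by pointer, so a handler may rewrite either value
// before it is re-installed in the tag table. The exact callback signature and the event's
// registered id are assumptions; see ti_extension.cc for the authoritative definition.
//
//   static void JNICALL ObsoleteObjectCreated(jvmtiEnv* jvmti_env,
//                                             jlong* obsolete_tag,
//                                             jlong* new_tag) {
//     // Hypothetical policy: move the agent's tag to the replacement object and untag the
//     // obsolete one (a tag of 0 means "untagged" in JVMTI).
//     jlong original = *obsolete_tag;
//     *obsolete_tag = 0;
//     *new_tag = original;
//   }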

}  // namespace

void HeapExtensions::ReplaceReference(art::Thread* self,
                                      art::ObjPtr<art::mirror::Object> old_obj_ptr,
                                      art::ObjPtr<art::mirror::Object> new_obj_ptr) {
  ObjectMap map { { old_obj_ptr, new_obj_ptr } };
  ReplaceReferences(self, map);
}

void HeapExtensions::ReplaceReferences(art::Thread* self, const ObjectMap& map) {
  ReplaceObjectReferences(map);
  ReplaceStrongRoots(self, map);
  ReplaceWeakRoots(self, HeapExtensions::gEventHandler, map);
}

jvmtiError HeapExtensions::ChangeArraySize(jvmtiEnv* env, jobject arr, jsize new_size) {
  if (ArtJvmTiEnv::AsArtJvmTiEnv(env)->capabilities.can_tag_objects != 1) {
    return ERR(MUST_POSSESS_CAPABILITY);
  }
  art::Thread* self = art::Thread::Current();
  ScopedNoUserCodeSuspension snucs(self);
  art::ScopedObjectAccess soa(self);
  if (arr == nullptr) {
    JVMTI_LOG(INFO, env) << "Cannot resize a null object";
    return ERR(NULL_POINTER);
  }
  art::ObjPtr<art::mirror::Class> klass(soa.Decode<art::mirror::Object>(arr)->GetClass());
  if (!klass->IsArrayClass()) {
    JVMTI_LOG(INFO, env) << klass->PrettyClass() << " is not an array class!";
    return ERR(ILLEGAL_ARGUMENT);
  }
  if (new_size < 0) {
    JVMTI_LOG(INFO, env) << "Cannot resize an array to a negative size";
    return ERR(ILLEGAL_ARGUMENT);
  }
  // Allocate the new copy.
  art::StackHandleScope<2> hs(self);
  art::Handle<art::mirror::Array> old_arr(hs.NewHandle(soa.Decode<art::mirror::Array>(arr)));
  art::MutableHandle<art::mirror::Array> new_arr(hs.NewHandle<art::mirror::Array>(nullptr));
  if (klass->IsObjectArrayClass()) {
    new_arr.Assign(
        art::mirror::ObjectArray<art::mirror::Object>::Alloc(self, old_arr->GetClass(), new_size));
  } else {
    // NB This also copies the old array's contents, but since we aren't suspended yet we copy
    // again below to catch any concurrent modifications.
    new_arr.Assign(art::mirror::Array::CopyOf(old_arr, self, new_size));
  }
  if (new_arr.IsNull()) {
    self->AssertPendingOOMException();
    JVMTI_LOG(INFO, env) << "Unable to allocate " << old_arr->GetClass()->PrettyClass()
                         << " (length: " << new_size << ") due to OOME. Error was: "
                         << self->GetException()->Dump();
    self->ClearException();
    return ERR(OUT_OF_MEMORY);
  } else {
    self->AssertNoPendingException();
  }
  // Suspend everything.
  art::ScopedThreadSuspension sts(self, art::ThreadState::kSuspended);
  art::gc::ScopedGCCriticalSection sgccs(
      self, art::gc::GcCause::kGcCauseDebugger, art::gc::CollectorType::kCollectorTypeDebugger);
  art::ScopedSuspendAll ssa("Resize array!");
  // Replace internals.
  new_arr->SetLockWord(old_arr->GetLockWord(false), false);
  old_arr->SetLockWord(art::LockWord::Default(), false);
  // Copy the contents now that everything is suspended.
  int32_t size = std::min(old_arr->GetLength(), new_size);
  switch (old_arr->GetClass()->GetComponentType()->GetPrimitiveType()) {
    case art::Primitive::kPrimBoolean:
      new_arr->AsBooleanArray()->Memcpy(0, old_arr->AsBooleanArray(), 0, size);
      break;
    case art::Primitive::kPrimByte:
      new_arr->AsByteArray()->Memcpy(0, old_arr->AsByteArray(), 0, size);
      break;
    case art::Primitive::kPrimChar:
      new_arr->AsCharArray()->Memcpy(0, old_arr->AsCharArray(), 0, size);
      break;
    case art::Primitive::kPrimShort:
      new_arr->AsShortArray()->Memcpy(0, old_arr->AsShortArray(), 0, size);
      break;
    case art::Primitive::kPrimInt:
      new_arr->AsIntArray()->Memcpy(0, old_arr->AsIntArray(), 0, size);
      break;
    case art::Primitive::kPrimLong:
      new_arr->AsLongArray()->Memcpy(0, old_arr->AsLongArray(), 0, size);
      break;
    case art::Primitive::kPrimFloat:
      new_arr->AsFloatArray()->Memcpy(0, old_arr->AsFloatArray(), 0, size);
      break;
    case art::Primitive::kPrimDouble:
      new_arr->AsDoubleArray()->Memcpy(0, old_arr->AsDoubleArray(), 0, size);
      break;
    case art::Primitive::kPrimNot:
      for (int32_t i = 0; i < size; i++) {
        new_arr->AsObjectArray<art::mirror::Object>()->Set(
            i, old_arr->AsObjectArray<art::mirror::Object>()->Get(i));
      }
      break;
    case art::Primitive::kPrimVoid:
      LOG(FATAL) << "void-array is not a legal type!";
      UNREACHABLE();
  }
  // Actually replace all the pointers.
  ReplaceReference(self, old_arr.Get(), new_arr.Get());
  return OK;
}
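
// Usage sketch (agent side; illustrative, not compiled here): invoking the resize through the
// extension-function table. The id "com.android.art.heap.change_array_size" is our assumption of
// the registered name; the lookup mirrors the GetExtensionFunctions loop sketched earlier and,
// as checked above, requires the can_tag_objects capability.
//
//   jvmtiError GrowArray(jvmtiEnv* jvmti, jobject array, jsize new_size) {
//     using ChangeArraySizeFn = jvmtiError (*)(jvmtiEnv*, jobject, jsize);
//     // Hypothetical helper resolving "com.android.art.heap.change_array_size" via
//     // GetExtensionFunctions, as in the earlier sketch.
//     ChangeArraySizeFn change_array_size = LookupChangeArraySize(jvmti);
//     if (change_array_size == nullptr) {
//       return JVMTI_ERROR_NOT_AVAILABLE;
//     }
//     // On success every existing reference (fields, roots, weak refs, tags, lock word) observes
//     // the resized array; elements past the old length are zero/null initialized.
//     return change_array_size(jvmti, array, new_size);
//   }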

void HeapExtensions::Register(EventHandler* eh) {
  gEventHandler = eh;
}

}  // namespace openjdkjvmti