1 /* Copyright (C) 2016 The Android Open Source Project
2 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
3 *
4 * This file implements interfaces from the file jvmti.h. This implementation
5 * is licensed under the same terms as the file jvmti.h. The
6 * copyright and license information for the file jvmti.h follows.
7 *
8 * Copyright (c) 2003, 2011, Oracle and/or its affiliates. All rights reserved.
9 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
10 *
11 * This code is free software; you can redistribute it and/or modify it
12 * under the terms of the GNU General Public License version 2 only, as
13 * published by the Free Software Foundation. Oracle designates this
14 * particular file as subject to the "Classpath" exception as provided
15 * by Oracle in the LICENSE file that accompanied this code.
16 *
17 * This code is distributed in the hope that it will be useful, but WITHOUT
18 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
19 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
20 * version 2 for more details (a copy is included in the LICENSE file that
21 * accompanied this code).
22 *
23 * You should have received a copy of the GNU General Public License version
24 * 2 along with this work; if not, write to the Free Software Foundation,
25 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
26 *
27 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
28 * or visit www.oracle.com if you need additional information or have any
29 * questions.
30 */
31
32 #include "events-inl.h"
33
34 #include "art_field-inl.h"
35 #include "art_jvmti.h"
36 #include "art_method-inl.h"
37 #include "base/logging.h"
38 #include "gc/allocation_listener.h"
39 #include "gc/gc_pause_listener.h"
40 #include "gc/heap.h"
41 #include "gc/scoped_gc_critical_section.h"
42 #include "handle_scope-inl.h"
43 #include "instrumentation.h"
44 #include "jni_env_ext-inl.h"
45 #include "jni_internal.h"
46 #include "mirror/class.h"
47 #include "mirror/object-inl.h"
48 #include "nativehelper/ScopedLocalRef.h"
49 #include "runtime.h"
50 #include "scoped_thread_state_change-inl.h"
51 #include "thread-inl.h"
52 #include "thread_list.h"
53 #include "ti_phase.h"
54
55 namespace openjdkjvmti {
56
IsEnabledAnywhere(ArtJvmtiEvent event)57 bool EventMasks::IsEnabledAnywhere(ArtJvmtiEvent event) {
58 return global_event_mask.Test(event) || unioned_thread_event_mask.Test(event);
59 }
60
GetEventMask(art::Thread * thread)61 EventMask& EventMasks::GetEventMask(art::Thread* thread) {
62 if (thread == nullptr) {
63 return global_event_mask;
64 }
65
66 for (auto& pair : thread_event_masks) {
67 const UniqueThread& unique_thread = pair.first;
68 if (unique_thread.first == thread &&
69 unique_thread.second == static_cast<uint32_t>(thread->GetTid())) {
70 return pair.second;
71 }
72 }
73
74 // TODO: Remove old UniqueThread with the same pointer, if exists.
75
76 thread_event_masks.emplace_back(UniqueThread(thread, thread->GetTid()), EventMask());
77 return thread_event_masks.back().second;
78 }
79
GetEventMaskOrNull(art::Thread * thread)80 EventMask* EventMasks::GetEventMaskOrNull(art::Thread* thread) {
81 if (thread == nullptr) {
82 return &global_event_mask;
83 }
84
85 for (auto& pair : thread_event_masks) {
86 const UniqueThread& unique_thread = pair.first;
87 if (unique_thread.first == thread &&
88 unique_thread.second == static_cast<uint32_t>(thread->GetTid())) {
89 return &pair.second;
90 }
91 }
92
93 return nullptr;
94 }
95
96
EnableEvent(art::Thread * thread,ArtJvmtiEvent event)97 void EventMasks::EnableEvent(art::Thread* thread, ArtJvmtiEvent event) {
98 DCHECK(EventMask::EventIsInRange(event));
99 GetEventMask(thread).Set(event);
100 if (thread != nullptr) {
101 unioned_thread_event_mask.Set(event, true);
102 }
103 }
104
DisableEvent(art::Thread * thread,ArtJvmtiEvent event)105 void EventMasks::DisableEvent(art::Thread* thread, ArtJvmtiEvent event) {
106 DCHECK(EventMask::EventIsInRange(event));
107 GetEventMask(thread).Set(event, false);
108 if (thread != nullptr) {
109 // Regenerate union for the event.
110 bool union_value = false;
111 for (auto& pair : thread_event_masks) {
112 union_value |= pair.second.Test(event);
113 if (union_value) {
114 break;
115 }
116 }
117 unioned_thread_event_mask.Set(event, union_value);
118 }
119 }
120
HandleChangedCapabilities(const jvmtiCapabilities & caps,bool caps_added)121 void EventMasks::HandleChangedCapabilities(const jvmtiCapabilities& caps, bool caps_added) {
122 if (UNLIKELY(caps.can_retransform_classes == 1)) {
123 // If we are giving this env the retransform classes cap we need to switch all events of
124 // NonTransformable to Transformable and vice versa.
125 ArtJvmtiEvent to_remove = caps_added ? ArtJvmtiEvent::kClassFileLoadHookNonRetransformable
126 : ArtJvmtiEvent::kClassFileLoadHookRetransformable;
127 ArtJvmtiEvent to_add = caps_added ? ArtJvmtiEvent::kClassFileLoadHookRetransformable
128 : ArtJvmtiEvent::kClassFileLoadHookNonRetransformable;
129 if (global_event_mask.Test(to_remove)) {
130 CHECK(!global_event_mask.Test(to_add));
131 global_event_mask.Set(to_remove, false);
132 global_event_mask.Set(to_add, true);
133 }
134
135 if (unioned_thread_event_mask.Test(to_remove)) {
136 CHECK(!unioned_thread_event_mask.Test(to_add));
137 unioned_thread_event_mask.Set(to_remove, false);
138 unioned_thread_event_mask.Set(to_add, true);
139 }
140 for (auto thread_mask : thread_event_masks) {
141 if (thread_mask.second.Test(to_remove)) {
142 CHECK(!thread_mask.second.Test(to_add));
143 thread_mask.second.Set(to_remove, false);
144 thread_mask.second.Set(to_add, true);
145 }
146 }
147 }
148 }
149
RegisterArtJvmTiEnv(ArtJvmTiEnv * env)150 void EventHandler::RegisterArtJvmTiEnv(ArtJvmTiEnv* env) {
151 // Since we never shrink this array we might as well try to fill gaps.
152 auto it = std::find(envs.begin(), envs.end(), nullptr);
153 if (it != envs.end()) {
154 *it = env;
155 } else {
156 envs.push_back(env);
157 }
158 }
159
RemoveArtJvmTiEnv(ArtJvmTiEnv * env)160 void EventHandler::RemoveArtJvmTiEnv(ArtJvmTiEnv* env) {
161 // Since we might be currently iterating over the envs list we cannot actually erase elements.
162 // Instead we will simply replace them with 'nullptr' and skip them manually.
163 auto it = std::find(envs.begin(), envs.end(), env);
164 if (it != envs.end()) {
165 *it = nullptr;
166 for (size_t i = static_cast<size_t>(ArtJvmtiEvent::kMinEventTypeVal);
167 i <= static_cast<size_t>(ArtJvmtiEvent::kMaxEventTypeVal);
168 ++i) {
169 RecalculateGlobalEventMask(static_cast<ArtJvmtiEvent>(i));
170 }
171 }
172 }
173
// Returns whether the given event may be restricted to a single thread.
// The JVMTI spec marks some events as not thread-filterable; requesting one
// of those for a specific thread is an ILLEGAL_ARGUMENT error (see SetEvent).
static bool IsThreadControllable(ArtJvmtiEvent event) {
  switch (event) {
    case ArtJvmtiEvent::kVmInit:
    case ArtJvmtiEvent::kVmStart:
    case ArtJvmtiEvent::kVmDeath:
    case ArtJvmtiEvent::kThreadStart:
    case ArtJvmtiEvent::kCompiledMethodLoad:
    case ArtJvmtiEvent::kCompiledMethodUnload:
    case ArtJvmtiEvent::kDynamicCodeGenerated:
    case ArtJvmtiEvent::kDataDumpRequest:
      return false;

    default:
      return true;
  }
}
190
// Heap allocation listener that forwards object allocations to agents as
// JVMTI VMObjectAlloc events.
class JvmtiAllocationListener : public art::gc::AllocationListener {
 public:
  explicit JvmtiAllocationListener(EventHandler* handler) : handler_(handler) {}

  void ObjectAllocated(art::Thread* self, art::ObjPtr<art::mirror::Object>* obj, size_t byte_count)
      OVERRIDE REQUIRES_SHARED(art::Locks::mutator_lock_) {
    DCHECK_EQ(self, art::Thread::Current());

    if (handler_->IsEventEnabledAnywhere(ArtJvmtiEvent::kVmObjectAlloc)) {
      // Wrap *obj in a handle so the new object stays valid across the
      // JNI-reference creation below even if a GC moves it.
      art::StackHandleScope<1> hs(self);
      auto h = hs.NewHandleWrapper(obj);
      // jvmtiEventVMObjectAlloc parameters:
      //   jvmtiEnv *jvmti_env,
      //   JNIEnv* jni_env,
      //   jthread thread,
      //   jobject object,
      //   jclass object_klass,
      //   jlong size
      art::JNIEnvExt* jni_env = self->GetJniEnv();

      jthread thread_peer;
      if (self->IsStillStarting()) {
        // The thread's Java peer does not exist yet; report a null thread.
        thread_peer = nullptr;
      } else {
        thread_peer = jni_env->AddLocalReference<jthread>(self->GetPeer());
      }

      // ScopedLocalRef releases each local reference when this scope ends.
      ScopedLocalRef<jthread> thread(jni_env, thread_peer);
      ScopedLocalRef<jobject> object(
          jni_env, jni_env->AddLocalReference<jobject>(*obj));
      ScopedLocalRef<jclass> klass(
          jni_env, jni_env->AddLocalReference<jclass>(obj->Ptr()->GetClass()));

      handler_->DispatchEvent<ArtJvmtiEvent::kVmObjectAlloc>(self,
                                                             reinterpret_cast<JNIEnv*>(jni_env),
                                                             thread.get(),
                                                             object.get(),
                                                             klass.get(),
                                                             static_cast<jlong>(byte_count));
    }
  }

 private:
  EventHandler* handler_;
};
236
SetupObjectAllocationTracking(art::gc::AllocationListener * listener,bool enable)237 static void SetupObjectAllocationTracking(art::gc::AllocationListener* listener, bool enable) {
238 // We must not hold the mutator lock here, but if we're in FastJNI, for example, we might. For
239 // now, do a workaround: (possibly) acquire and release.
240 art::ScopedObjectAccess soa(art::Thread::Current());
241 art::ScopedThreadSuspension sts(soa.Self(), art::ThreadState::kSuspended);
242 if (enable) {
243 art::Runtime::Current()->GetHeap()->SetAllocationListener(listener);
244 } else {
245 art::Runtime::Current()->GetHeap()->RemoveAllocationListener();
246 }
247 }
248
249 // Report GC pauses (see spec) as GARBAGE_COLLECTION_START and GARBAGE_COLLECTION_END.
250 class JvmtiGcPauseListener : public art::gc::GcPauseListener {
251 public:
JvmtiGcPauseListener(EventHandler * handler)252 explicit JvmtiGcPauseListener(EventHandler* handler)
253 : handler_(handler),
254 start_enabled_(false),
255 finish_enabled_(false) {}
256
StartPause()257 void StartPause() OVERRIDE {
258 handler_->DispatchEvent<ArtJvmtiEvent::kGarbageCollectionStart>(nullptr);
259 }
260
EndPause()261 void EndPause() OVERRIDE {
262 handler_->DispatchEvent<ArtJvmtiEvent::kGarbageCollectionFinish>(nullptr);
263 }
264
IsEnabled()265 bool IsEnabled() {
266 return start_enabled_ || finish_enabled_;
267 }
268
SetStartEnabled(bool e)269 void SetStartEnabled(bool e) {
270 start_enabled_ = e;
271 }
272
SetFinishEnabled(bool e)273 void SetFinishEnabled(bool e) {
274 finish_enabled_ = e;
275 }
276
277 private:
278 EventHandler* handler_;
279 bool start_enabled_;
280 bool finish_enabled_;
281 };
282
SetupGcPauseTracking(JvmtiGcPauseListener * listener,ArtJvmtiEvent event,bool enable)283 static void SetupGcPauseTracking(JvmtiGcPauseListener* listener, ArtJvmtiEvent event, bool enable) {
284 bool old_state = listener->IsEnabled();
285
286 if (event == ArtJvmtiEvent::kGarbageCollectionStart) {
287 listener->SetStartEnabled(enable);
288 } else {
289 listener->SetFinishEnabled(enable);
290 }
291
292 bool new_state = listener->IsEnabled();
293
294 if (old_state != new_state) {
295 if (new_state) {
296 art::Runtime::Current()->GetHeap()->SetGcPauseListener(listener);
297 } else {
298 art::Runtime::Current()->GetHeap()->RemoveGcPauseListener();
299 }
300 }
301 }
302
303 template<typename Type>
AddLocalRef(art::JNIEnvExt * e,art::mirror::Object * obj)304 static Type AddLocalRef(art::JNIEnvExt* e, art::mirror::Object* obj)
305 REQUIRES_SHARED(art::Locks::mutator_lock_) {
306 return (obj == nullptr) ? nullptr : e->AddLocalReference<Type>(obj);
307 }
308
// Instrumentation listener that translates ART instrumentation callbacks
// (method entry/exit/unwind, dex-pc moves, field reads/writes) into the
// corresponding JVMTI events.
class JvmtiMethodTraceListener FINAL : public art::instrumentation::InstrumentationListener {
 public:
  explicit JvmtiMethodTraceListener(EventHandler* handler) : event_handler_(handler) {}

  // Shared dispatch helper: attaches the current thread's Java peer as the
  // jthread argument and invokes the agent callbacks while in native state.
  template<ArtJvmtiEvent kEvent, typename ...Args>
  void RunEventCallback(art::Thread* self, art::JNIEnvExt* jnienv, Args... args)
      REQUIRES_SHARED(art::Locks::mutator_lock_) {
    ScopedLocalRef<jthread> thread_jni(jnienv, AddLocalRef<jthread>(jnienv, self->GetPeer()));
    // Just give the event a good sized JNI frame. 100 should be fine.
    jnienv->PushFrame(100);
    {
      // Need to do trampoline! :(
      art::ScopedThreadSuspension sts(self, art::ThreadState::kNative);
      event_handler_->DispatchEvent<kEvent>(self,
                                            static_cast<JNIEnv*>(jnienv),
                                            thread_jni.get(),
                                            args...);
    }
    jnienv->PopFrame();
  }

  // Call-back for when a method is entered.
  void MethodEntered(art::Thread* self,
                     art::Handle<art::mirror::Object> this_object ATTRIBUTE_UNUSED,
                     art::ArtMethod* method,
                     uint32_t dex_pc ATTRIBUTE_UNUSED)
      REQUIRES_SHARED(art::Locks::mutator_lock_) OVERRIDE {
    // Runtime methods (trampolines etc.) are not visible to agents.
    if (!method->IsRuntimeMethod() &&
        event_handler_->IsEventEnabledAnywhere(ArtJvmtiEvent::kMethodEntry)) {
      art::JNIEnvExt* jnienv = self->GetJniEnv();
      RunEventCallback<ArtJvmtiEvent::kMethodEntry>(self,
                                                    jnienv,
                                                    art::jni::EncodeArtMethod(method));
    }
  }

  // Callback for when a method is exited with a reference return value.
  void MethodExited(art::Thread* self,
                    art::Handle<art::mirror::Object> this_object ATTRIBUTE_UNUSED,
                    art::ArtMethod* method,
                    uint32_t dex_pc ATTRIBUTE_UNUSED,
                    art::Handle<art::mirror::Object> return_value)
      REQUIRES_SHARED(art::Locks::mutator_lock_) OVERRIDE {
    if (!method->IsRuntimeMethod() &&
        event_handler_->IsEventEnabledAnywhere(ArtJvmtiEvent::kMethodExit)) {
      // This overload is only called for methods returning a reference.
      DCHECK_EQ(method->GetReturnTypePrimitive(), art::Primitive::kPrimNot)
          << method->PrettyMethod();
      DCHECK(!self->IsExceptionPending());
      jvalue val;
      art::JNIEnvExt* jnienv = self->GetJniEnv();
      ScopedLocalRef<jobject> return_jobj(jnienv, AddLocalRef<jobject>(jnienv, return_value.Get()));
      val.l = return_jobj.get();
      RunEventCallback<ArtJvmtiEvent::kMethodExit>(
          self,
          jnienv,
          art::jni::EncodeArtMethod(method),
          /*was_popped_by_exception*/ static_cast<jboolean>(JNI_FALSE),
          val);
    }
  }

  // Call-back for when a method is exited.
  void MethodExited(art::Thread* self,
                    art::Handle<art::mirror::Object> this_object ATTRIBUTE_UNUSED,
                    art::ArtMethod* method,
                    uint32_t dex_pc ATTRIBUTE_UNUSED,
                    const art::JValue& return_value)
      REQUIRES_SHARED(art::Locks::mutator_lock_) OVERRIDE {
    if (!method->IsRuntimeMethod() &&
        event_handler_->IsEventEnabledAnywhere(ArtJvmtiEvent::kMethodExit)) {
      // This overload handles primitive (non-reference) return values.
      DCHECK_NE(method->GetReturnTypePrimitive(), art::Primitive::kPrimNot)
          << method->PrettyMethod();
      DCHECK(!self->IsExceptionPending());
      jvalue val;
      art::JNIEnvExt* jnienv = self->GetJniEnv();
      // 64bit integer is the largest value in the union so we should be fine simply copying it into
      // the union.
      val.j = return_value.GetJ();
      RunEventCallback<ArtJvmtiEvent::kMethodExit>(
          self,
          jnienv,
          art::jni::EncodeArtMethod(method),
          /*was_popped_by_exception*/ static_cast<jboolean>(JNI_FALSE),
          val);
    }
  }

  // Call-back for when a method is popped due to an exception throw. A method will either cause a
  // MethodExited call-back or a MethodUnwind call-back when its activation is removed.
  void MethodUnwind(art::Thread* self,
                    art::Handle<art::mirror::Object> this_object ATTRIBUTE_UNUSED,
                    art::ArtMethod* method,
                    uint32_t dex_pc ATTRIBUTE_UNUSED)
      REQUIRES_SHARED(art::Locks::mutator_lock_) OVERRIDE {
    if (!method->IsRuntimeMethod() &&
        event_handler_->IsEventEnabledAnywhere(ArtJvmtiEvent::kMethodExit)) {
      jvalue val;
      // Just set this to 0xffffffffffffffff so it's not uninitialized.
      val.j = static_cast<jlong>(-1);
      art::JNIEnvExt* jnienv = self->GetJniEnv();
      art::StackHandleScope<1> hs(self);
      // Save and clear the pending exception: the agent callback runs Java
      // code and must not observe it as already thrown.
      art::Handle<art::mirror::Throwable> old_exception(hs.NewHandle(self->GetException()));
      CHECK(!old_exception.IsNull());
      self->ClearException();
      RunEventCallback<ArtJvmtiEvent::kMethodExit>(
          self,
          jnienv,
          art::jni::EncodeArtMethod(method),
          /*was_popped_by_exception*/ static_cast<jboolean>(JNI_TRUE),
          val);
      // Match RI behavior of just throwing away original exception if a new one is thrown.
      if (LIKELY(!self->IsExceptionPending())) {
        self->SetException(old_exception.Get());
      }
    }
  }

  // Call-back for when the dex pc moves in a method.
  void DexPcMoved(art::Thread* self,
                  art::Handle<art::mirror::Object> this_object ATTRIBUTE_UNUSED,
                  art::ArtMethod* method,
                  uint32_t new_dex_pc)
      REQUIRES_SHARED(art::Locks::mutator_lock_) OVERRIDE {
    DCHECK(!method->IsRuntimeMethod());
    // Default methods might be copied to multiple classes. We need to get the canonical version of
    // this method so that we can check for breakpoints correctly.
    // TODO We should maybe do this on other events to ensure that we are consistent WRT default
    // methods. This could interact with obsolete methods if we ever let interface redefinition
    // happen though.
    method = method->GetCanonicalMethod();
    art::JNIEnvExt* jnienv = self->GetJniEnv();
    jmethodID jmethod = art::jni::EncodeArtMethod(method);
    jlocation location = static_cast<jlocation>(new_dex_pc);
    // Step event is reported first according to the spec.
    if (event_handler_->IsEventEnabledAnywhere(ArtJvmtiEvent::kSingleStep)) {
      RunEventCallback<ArtJvmtiEvent::kSingleStep>(self, jnienv, jmethod, location);
    }
    // Next we do the Breakpoint events. The Dispatch code will filter the individual
    if (event_handler_->IsEventEnabledAnywhere(ArtJvmtiEvent::kBreakpoint)) {
      RunEventCallback<ArtJvmtiEvent::kBreakpoint>(self, jnienv, jmethod, location);
    }
  }

  // Call-back for when we read from a field.
  void FieldRead(art::Thread* self,
                 art::Handle<art::mirror::Object> this_object,
                 art::ArtMethod* method,
                 uint32_t dex_pc,
                 art::ArtField* field)
      REQUIRES_SHARED(art::Locks::mutator_lock_) OVERRIDE {
    if (event_handler_->IsEventEnabledAnywhere(ArtJvmtiEvent::kFieldAccess)) {
      art::JNIEnvExt* jnienv = self->GetJniEnv();
      // DCHECK(!self->IsExceptionPending());
      ScopedLocalRef<jobject> this_ref(jnienv, AddLocalRef<jobject>(jnienv, this_object.Get()));
      ScopedLocalRef<jobject> fklass(jnienv,
                                     AddLocalRef<jobject>(jnienv,
                                                          field->GetDeclaringClass().Ptr()));
      RunEventCallback<ArtJvmtiEvent::kFieldAccess>(self,
                                                    jnienv,
                                                    art::jni::EncodeArtMethod(method),
                                                    static_cast<jlocation>(dex_pc),
                                                    static_cast<jclass>(fklass.get()),
                                                    this_ref.get(),
                                                    art::jni::EncodeArtField(field));
    }
  }

  // Call-back for when we write a reference value into a field.
  void FieldWritten(art::Thread* self,
                    art::Handle<art::mirror::Object> this_object,
                    art::ArtMethod* method,
                    uint32_t dex_pc,
                    art::ArtField* field,
                    art::Handle<art::mirror::Object> new_val)
      REQUIRES_SHARED(art::Locks::mutator_lock_) OVERRIDE {
    if (event_handler_->IsEventEnabledAnywhere(ArtJvmtiEvent::kFieldModification)) {
      art::JNIEnvExt* jnienv = self->GetJniEnv();
      // DCHECK(!self->IsExceptionPending());
      ScopedLocalRef<jobject> this_ref(jnienv, AddLocalRef<jobject>(jnienv, this_object.Get()));
      ScopedLocalRef<jobject> fklass(jnienv,
                                     AddLocalRef<jobject>(jnienv,
                                                          field->GetDeclaringClass().Ptr()));
      ScopedLocalRef<jobject> fval(jnienv, AddLocalRef<jobject>(jnienv, new_val.Get()));
      jvalue val;
      val.l = fval.get();
      RunEventCallback<ArtJvmtiEvent::kFieldModification>(
          self,
          jnienv,
          art::jni::EncodeArtMethod(method),
          static_cast<jlocation>(dex_pc),
          static_cast<jclass>(fklass.get()),
          field->IsStatic() ? nullptr : this_ref.get(),
          art::jni::EncodeArtField(field),
          'L',  // type_char
          val);
    }
  }

  // Call-back for when we write into a field.
  void FieldWritten(art::Thread* self,
                    art::Handle<art::mirror::Object> this_object,
                    art::ArtMethod* method,
                    uint32_t dex_pc,
                    art::ArtField* field,
                    const art::JValue& field_value)
      REQUIRES_SHARED(art::Locks::mutator_lock_) OVERRIDE {
    if (event_handler_->IsEventEnabledAnywhere(ArtJvmtiEvent::kFieldModification)) {
      art::JNIEnvExt* jnienv = self->GetJniEnv();
      DCHECK(!self->IsExceptionPending());
      ScopedLocalRef<jobject> this_ref(jnienv, AddLocalRef<jobject>(jnienv, this_object.Get()));
      ScopedLocalRef<jobject> fklass(jnienv,
                                     AddLocalRef<jobject>(jnienv,
                                                          field->GetDeclaringClass().Ptr()));
      // JVMTI encodes the new value's type as the first character of the
      // field's type descriptor (e.g. 'I' for int).
      char type_char = art::Primitive::Descriptor(field->GetTypeAsPrimitiveType())[0];
      jvalue val;
      // 64bit integer is the largest value in the union so we should be fine simply copying it into
      // the union.
      val.j = field_value.GetJ();
      RunEventCallback<ArtJvmtiEvent::kFieldModification>(
          self,
          jnienv,
          art::jni::EncodeArtMethod(method),
          static_cast<jlocation>(dex_pc),
          static_cast<jclass>(fklass.get()),
          field->IsStatic() ? nullptr : this_ref.get(),  // nb static field modification get given
                                                         // the class as this_object for some
                                                         // reason.
          art::jni::EncodeArtField(field),
          type_char,
          val);
    }
  }

  // Call-back when an exception is caught. Not translated to a JVMTI event.
  void ExceptionCaught(art::Thread* self ATTRIBUTE_UNUSED,
                       art::Handle<art::mirror::Throwable> exception_object ATTRIBUTE_UNUSED)
      REQUIRES_SHARED(art::Locks::mutator_lock_) OVERRIDE {
    return;
  }

  // Call-back for when we execute a branch. No corresponding JVMTI event.
  void Branch(art::Thread* self ATTRIBUTE_UNUSED,
              art::ArtMethod* method ATTRIBUTE_UNUSED,
              uint32_t dex_pc ATTRIBUTE_UNUSED,
              int32_t dex_pc_offset ATTRIBUTE_UNUSED)
      REQUIRES_SHARED(art::Locks::mutator_lock_) OVERRIDE {
    return;
  }

  // Call-back for when we get an invokevirtual or an invokeinterface.
  // No corresponding JVMTI event.
  void InvokeVirtualOrInterface(art::Thread* self ATTRIBUTE_UNUSED,
                                art::Handle<art::mirror::Object> this_object ATTRIBUTE_UNUSED,
                                art::ArtMethod* caller ATTRIBUTE_UNUSED,
                                uint32_t dex_pc ATTRIBUTE_UNUSED,
                                art::ArtMethod* callee ATTRIBUTE_UNUSED)
      REQUIRES_SHARED(art::Locks::mutator_lock_) OVERRIDE {
    return;
  }

 private:
  EventHandler* const event_handler_;
};
570
GetInstrumentationEventsFor(ArtJvmtiEvent event)571 static uint32_t GetInstrumentationEventsFor(ArtJvmtiEvent event) {
572 switch (event) {
573 case ArtJvmtiEvent::kMethodEntry:
574 return art::instrumentation::Instrumentation::kMethodEntered;
575 case ArtJvmtiEvent::kMethodExit:
576 return art::instrumentation::Instrumentation::kMethodExited |
577 art::instrumentation::Instrumentation::kMethodUnwind;
578 case ArtJvmtiEvent::kFieldModification:
579 return art::instrumentation::Instrumentation::kFieldWritten;
580 case ArtJvmtiEvent::kFieldAccess:
581 return art::instrumentation::Instrumentation::kFieldRead;
582 case ArtJvmtiEvent::kBreakpoint:
583 case ArtJvmtiEvent::kSingleStep:
584 return art::instrumentation::Instrumentation::kDexPcMoved;
585 default:
586 LOG(FATAL) << "Unknown event ";
587 return 0;
588 }
589 }
590
// Installs or removes the instrumentation listener that backs trace-style
// JVMTI events (method entry/exit, field access/modification, single
// step/breakpoint).
static void SetupTraceListener(JvmtiMethodTraceListener* listener,
                               ArtJvmtiEvent event,
                               bool enable) {
  // Changing instrumentation is a runtime-wide operation: drop to native
  // state, enter a GC critical section, then suspend all other threads
  // before touching the Instrumentation object. Keep this ordering.
  art::ScopedThreadStateChange stsc(art::Thread::Current(), art::ThreadState::kNative);
  uint32_t new_events = GetInstrumentationEventsFor(event);
  art::instrumentation::Instrumentation* instr = art::Runtime::Current()->GetInstrumentation();
  art::gc::ScopedGCCriticalSection gcs(art::Thread::Current(),
                                       art::gc::kGcCauseInstrumentation,
                                       art::gc::kCollectorTypeInstrumentation);
  art::ScopedSuspendAll ssa("jvmti method tracing installation");
  if (enable) {
    // TODO Depending on the features being used we should be able to avoid deoptimizing everything
    // like we do here.
    if (!instr->AreAllMethodsDeoptimized()) {
      // Instrumentation callbacks require interpreter execution.
      instr->EnableMethodTracing("jvmti-tracing", /*needs_interpreter*/true);
    }
    instr->AddListener(listener, new_events);
  } else {
    instr->RemoveListener(listener, new_events);
  }
}
612
613 // Handle special work for the given event type, if necessary.
HandleEventType(ArtJvmtiEvent event,bool enable)614 void EventHandler::HandleEventType(ArtJvmtiEvent event, bool enable) {
615 switch (event) {
616 case ArtJvmtiEvent::kVmObjectAlloc:
617 SetupObjectAllocationTracking(alloc_listener_.get(), enable);
618 return;
619
620 case ArtJvmtiEvent::kGarbageCollectionStart:
621 case ArtJvmtiEvent::kGarbageCollectionFinish:
622 SetupGcPauseTracking(gc_pause_listener_.get(), event, enable);
623 return;
624
625 case ArtJvmtiEvent::kBreakpoint:
626 case ArtJvmtiEvent::kSingleStep: {
627 ArtJvmtiEvent other = (event == ArtJvmtiEvent::kBreakpoint) ? ArtJvmtiEvent::kSingleStep
628 : ArtJvmtiEvent::kBreakpoint;
629 // We only need to do anything if there isn't already a listener installed/held-on by the
630 // other jvmti event that uses DexPcMoved.
631 if (!IsEventEnabledAnywhere(other)) {
632 SetupTraceListener(method_trace_listener_.get(), event, enable);
633 }
634 return;
635 }
636 case ArtJvmtiEvent::kMethodEntry:
637 case ArtJvmtiEvent::kMethodExit:
638 case ArtJvmtiEvent::kFieldAccess:
639 case ArtJvmtiEvent::kFieldModification:
640 SetupTraceListener(method_trace_listener_.get(), event, enable);
641 return;
642
643 default:
644 break;
645 }
646 }
647
648 // Checks to see if the env has the capabilities associated with the given event.
HasAssociatedCapability(ArtJvmTiEnv * env,ArtJvmtiEvent event)649 static bool HasAssociatedCapability(ArtJvmTiEnv* env,
650 ArtJvmtiEvent event) {
651 jvmtiCapabilities caps = env->capabilities;
652 switch (event) {
653 case ArtJvmtiEvent::kBreakpoint:
654 return caps.can_generate_breakpoint_events == 1;
655
656 case ArtJvmtiEvent::kCompiledMethodLoad:
657 case ArtJvmtiEvent::kCompiledMethodUnload:
658 return caps.can_generate_compiled_method_load_events == 1;
659
660 case ArtJvmtiEvent::kException:
661 case ArtJvmtiEvent::kExceptionCatch:
662 return caps.can_generate_exception_events == 1;
663
664 case ArtJvmtiEvent::kFieldAccess:
665 return caps.can_generate_field_access_events == 1;
666
667 case ArtJvmtiEvent::kFieldModification:
668 return caps.can_generate_field_modification_events == 1;
669
670 case ArtJvmtiEvent::kFramePop:
671 return caps.can_generate_frame_pop_events == 1;
672
673 case ArtJvmtiEvent::kGarbageCollectionStart:
674 case ArtJvmtiEvent::kGarbageCollectionFinish:
675 return caps.can_generate_garbage_collection_events == 1;
676
677 case ArtJvmtiEvent::kMethodEntry:
678 return caps.can_generate_method_entry_events == 1;
679
680 case ArtJvmtiEvent::kMethodExit:
681 return caps.can_generate_method_exit_events == 1;
682
683 case ArtJvmtiEvent::kMonitorContendedEnter:
684 case ArtJvmtiEvent::kMonitorContendedEntered:
685 case ArtJvmtiEvent::kMonitorWait:
686 case ArtJvmtiEvent::kMonitorWaited:
687 return caps.can_generate_monitor_events == 1;
688
689 case ArtJvmtiEvent::kNativeMethodBind:
690 return caps.can_generate_native_method_bind_events == 1;
691
692 case ArtJvmtiEvent::kObjectFree:
693 return caps.can_generate_object_free_events == 1;
694
695 case ArtJvmtiEvent::kSingleStep:
696 return caps.can_generate_single_step_events == 1;
697
698 case ArtJvmtiEvent::kVmObjectAlloc:
699 return caps.can_generate_vm_object_alloc_events == 1;
700
701 default:
702 return true;
703 }
704 }
705
SetEvent(ArtJvmTiEnv * env,art::Thread * thread,ArtJvmtiEvent event,jvmtiEventMode mode)706 jvmtiError EventHandler::SetEvent(ArtJvmTiEnv* env,
707 art::Thread* thread,
708 ArtJvmtiEvent event,
709 jvmtiEventMode mode) {
710 if (thread != nullptr) {
711 art::ThreadState state = thread->GetState();
712 if (state == art::ThreadState::kStarting ||
713 state == art::ThreadState::kTerminated ||
714 thread->IsStillStarting()) {
715 return ERR(THREAD_NOT_ALIVE);
716 }
717 if (!IsThreadControllable(event)) {
718 return ERR(ILLEGAL_ARGUMENT);
719 }
720 }
721
722 if (mode != JVMTI_ENABLE && mode != JVMTI_DISABLE) {
723 return ERR(ILLEGAL_ARGUMENT);
724 }
725
726 if (!EventMask::EventIsInRange(event)) {
727 return ERR(INVALID_EVENT_TYPE);
728 }
729
730 if (!HasAssociatedCapability(env, event)) {
731 return ERR(MUST_POSSESS_CAPABILITY);
732 }
733
734 bool old_state = global_mask.Test(event);
735
736 if (mode == JVMTI_ENABLE) {
737 env->event_masks.EnableEvent(thread, event);
738 global_mask.Set(event);
739 } else {
740 DCHECK_EQ(mode, JVMTI_DISABLE);
741
742 env->event_masks.DisableEvent(thread, event);
743 RecalculateGlobalEventMask(event);
744 }
745
746 bool new_state = global_mask.Test(event);
747
748 // Handle any special work required for the event type.
749 if (new_state != old_state) {
750 HandleEventType(event, mode == JVMTI_ENABLE);
751 }
752
753 return ERR(NONE);
754 }
755
// Tears down event infrastructure before the EventHandler goes away, so the
// instrumentation no longer holds a pointer to method_trace_listener_.
void EventHandler::Shutdown() {
  // Need to remove the method_trace_listener_ if it's there.
  art::Thread* self = art::Thread::Current();
  // Same ordering as SetupTraceListener: GC critical section, then suspend
  // all threads, before mutating the instrumentation.
  art::gc::ScopedGCCriticalSection gcs(self,
                                       art::gc::kGcCauseInstrumentation,
                                       art::gc::kCollectorTypeInstrumentation);
  art::ScopedSuspendAll ssa("jvmti method tracing uninstallation");
  // Just remove every possible event.
  art::Runtime::Current()->GetInstrumentation()->RemoveListener(method_trace_listener_.get(), ~0);
}
766
// Creates the listener objects eagerly; each one is only registered with the
// runtime later, when its corresponding event is first enabled.
EventHandler::EventHandler() {
  alloc_listener_.reset(new JvmtiAllocationListener(this));
  gc_pause_listener_.reset(new JvmtiGcPauseListener(this));
  method_trace_listener_.reset(new JvmtiMethodTraceListener(this));
}
772
// Listeners are expected to have been unregistered (see Shutdown()) before
// destruction; the unique_ptr members free them here.
EventHandler::~EventHandler() {
}
775
776 } // namespace openjdkjvmti
777