/*
 * Copyright (C) 2011 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "instrumentation-inl.h"

#include <functional>
#include <optional>
#include <sstream>

#include <android-base/logging.h>

#include "arch/context.h"
#include "art_field-inl.h"
#include "art_method-inl.h"
#include "base/atomic.h"
#include "base/callee_save_type.h"
#include "class_linker.h"
#include "debugger.h"
#include "dex/dex_file-inl.h"
#include "dex/dex_file_types.h"
#include "dex/dex_instruction-inl.h"
#include "entrypoints/quick/quick_alloc_entrypoints.h"
#include "entrypoints/quick/quick_entrypoints.h"
#include "entrypoints/quick/runtime_entrypoints_list.h"
#include "entrypoints/runtime_asm_entrypoints.h"
#include "gc_root-inl.h"
#include "interpreter/interpreter.h"
#include "interpreter/interpreter_common.h"
#include "jit/jit.h"
#include "jit/jit_code_cache.h"
#include "jvalue-inl.h"
#include "jvalue.h"
#include "mirror/class-inl.h"
#include "mirror/dex_cache.h"
#include "mirror/object-inl.h"
#include "mirror/object_array-inl.h"
#include "nterp_helpers.h"
#include "nth_caller_visitor.h"
#include "oat/oat_file_manager.h"
#include "oat/oat_quick_method_header.h"
#include "runtime-inl.h"
#include "thread.h"
#include "thread_list.h"

namespace art HIDDEN {

namespace instrumentation {

constexpr bool kVerboseInstrumentation = false;

void InstrumentationListener::MethodExited(
    Thread* thread,
    ArtMethod* method,
    OptionalFrame frame,
    MutableHandle<mirror::Object>& return_value) {
  DCHECK_EQ(method->GetInterfaceMethodIfProxy(kRuntimePointerSize)->GetReturnTypePrimitive(),
            Primitive::kPrimNot);
  const void* original_ret = return_value.Get();
  JValue v;
  v.SetL(return_value.Get());
  MethodExited(thread, method, frame, v);
  DCHECK(original_ret == v.GetL()) << "Return value changed";
}

void InstrumentationListener::FieldWritten(Thread* thread,
                                           Handle<mirror::Object> this_object,
                                           ArtMethod* method,
                                           uint32_t dex_pc,
                                           ArtField* field,
                                           Handle<mirror::Object> field_value) {
  DCHECK(!field->IsPrimitiveType());
  JValue v;
  v.SetL(field_value.Get());
  FieldWritten(thread, this_object, method, dex_pc, field, v);
}

// Instrumentation works on non-inlined frames by updating returned PCs
// of compiled frames.
static constexpr StackVisitor::StackWalkKind kInstrumentationStackWalk =
    StackVisitor::StackWalkKind::kSkipInlinedFrames;

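// Class visitor that re-installs the appropriate entrypoints (stubs) for every
// method of each visited class.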
class InstallStubsClassVisitor : public ClassVisitor {
 public:
  explicit InstallStubsClassVisitor(Instrumentation* instrumentation)
      : instrumentation_(instrumentation) {}

  bool operator()(ObjPtr<mirror::Class> klass) override REQUIRES(Locks::mutator_lock_) {
    instrumentation_->InstallStubsForClass(klass.Ptr());
    return true;  // we visit all classes.
  }

 private:
  Instrumentation* const instrumentation_;
};

Instrumentation::Instrumentation()
    : run_exit_hooks_(false),
      instrumentation_level_(InstrumentationLevel::kInstrumentNothing),
      forced_interpret_only_(false),
      have_method_entry_listeners_(0),
      have_method_exit_listeners_(0),
      have_method_unwind_listeners_(false),
      have_dex_pc_listeners_(false),
      have_field_read_listeners_(false),
      have_field_write_listeners_(false),
      have_exception_thrown_listeners_(false),
      have_watched_frame_pop_listeners_(false),
      have_branch_listeners_(false),
      have_exception_handled_listeners_(false),
      quick_alloc_entry_points_instrumentation_counter_(0),
      alloc_entrypoints_instrumented_(false) {}

bool Instrumentation::ProcessMethodUnwindCallbacks(Thread* self,
                                                   std::queue<ArtMethod*>& methods,
                                                   MutableHandle<mirror::Throwable>& exception) {
  DCHECK(!self->IsExceptionPending());
  if (!HasMethodUnwindListeners()) {
    return true;
  }
  if (kVerboseInstrumentation) {
    LOG(INFO) << "Popping frames for exception " << exception->Dump();
  }
  // The instrumentation events expect the exception to be set.
  self->SetException(exception.Get());
  bool new_exception_thrown = false;

  // Process callbacks for all methods that would be unwound until a new exception is thrown.
  while (!methods.empty()) {
    ArtMethod* method = methods.front();
    methods.pop();
    if (kVerboseInstrumentation) {
      LOG(INFO) << "Popping for unwind " << method->PrettyMethod();
    }

    if (method->IsRuntimeMethod()) {
      continue;
    }

    // Notify listeners of method unwind.
    // TODO: improve the dex_pc information here.
    uint32_t dex_pc = dex::kDexNoIndex;
    MethodUnwindEvent(self, method, dex_pc);
    new_exception_thrown = self->GetException() != exception.Get();
    if (new_exception_thrown) {
      break;
    }
  }

  exception.Assign(self->GetException());
  self->ClearException();
  if (kVerboseInstrumentation && new_exception_thrown) {
    LOG(INFO) << "Did partial pop of frames due to new exception";
  }
  return !new_exception_thrown;
}

void Instrumentation::InstallStubsForClass(ObjPtr<mirror::Class> klass) {
  if (!klass->IsResolved()) {
    // We need the class to be resolved to install/uninstall stubs. Otherwise its methods
    // could not be initialized or linked with regard to class inheritance.
  } else if (klass->IsErroneousResolved()) {
    // We can't execute code in an erroneous class: do nothing.
  } else {
    for (ArtMethod& method : klass->GetMethods(kRuntimePointerSize)) {
      InstallStubsForMethod(&method);
    }
  }
}

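// Returns true if `code` is one of the entrypoints that is allowed for a method
// whose declaring class still needs an initialization check (resolution stub,
// interpreter bridge, GenericJni stub, or the nterp-with-clinit entrypoint).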
static bool CanHandleInitializationCheck(const void* code) {
  ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
  return class_linker->IsQuickResolutionStub(code) ||
         class_linker->IsQuickToInterpreterBridge(code) ||
         class_linker->IsQuickGenericJniStub(code) ||
         (code == interpreter::GetNterpWithClinitEntryPoint());
}

static bool IsProxyInit(ArtMethod* method) REQUIRES_SHARED(Locks::mutator_lock_) {
  // Annoyingly this can be called before we have actually initialized WellKnownClasses, so we
  // also need to check this based on the declaring-class descriptor. The check is valid because
  // Proxy only has a single constructor.
  ArtMethod* well_known_proxy_init = WellKnownClasses::java_lang_reflect_Proxy_init;
  if (well_known_proxy_init == method) {
    return true;
  }

  if (well_known_proxy_init != nullptr) {
    return false;
  }

  return method->IsConstructor() && !method->IsStatic() &&
      method->GetDeclaringClass()->DescriptorEquals("Ljava/lang/reflect/Proxy;");
}

// Returns true if the code at `entry_point` can call the method entry / exit hooks when
// required, so no interpreter bridge / GenericJni stub needs to be installed. JITed code
// compiled with instrumentation support calls the hooks directly and doesn't need a stub.
static bool CodeSupportsEntryExitHooks(const void* entry_point, ArtMethod* method)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  // Proxy.init should always run with the switch interpreter where entry / exit hooks are
  // supported.
  if (IsProxyInit(method)) {
    return true;
  }

  // In some tests the runtime isn't set up fully and hence the entry points could be nullptr.
  // Just be conservative and return false here.
  if (entry_point == nullptr) {
    return false;
  }

  ClassLinker* linker = Runtime::Current()->GetClassLinker();
  // The interpreter supports entry / exit hooks. Resolution stubs fetch code that supports
  // entry / exit hooks when required. So return true for both cases.
  if (linker->IsQuickToInterpreterBridge(entry_point) ||
      linker->IsQuickResolutionStub(entry_point)) {
    return true;
  }

  // When JITing code for debuggable runtimes or when instrumentation is active, we generate the
  // code to call method entry / exit hooks when required.
  jit::Jit* jit = Runtime::Current()->GetJit();
  if (jit != nullptr && jit->GetCodeCache()->ContainsPc(entry_point)) {
    // If JITed code was compiled with instrumentation support we support entry / exit hooks.
    OatQuickMethodHeader* header = OatQuickMethodHeader::FromEntryPoint(entry_point);
    return CodeInfo::IsDebuggable(header->GetOptimizedCodeInfoPtr());
  }

  // The GenericJni trampoline can handle entry / exit hooks.
  if (linker->IsQuickGenericJniStub(entry_point)) {
    return true;
  }

  // The remaining cases are nterp / oat code / JIT code that isn't compiled with instrumentation
  // support.
  return false;
}

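// Atomically updates a word of the given width at `ptr` from `old_value` to `new_value`,
// returning whether the swap succeeded. Used below to swap a method's entrypoint.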
template <typename T>
bool CompareExchange(uintptr_t ptr, uintptr_t old_value, uintptr_t new_value) {
  std::atomic<T>* atomic_addr = reinterpret_cast<std::atomic<T>*>(ptr);
  T cast_old_value = dchecked_integral_cast<T>(old_value);
  return atomic_addr->compare_exchange_strong(cast_old_value,
                                              dchecked_integral_cast<T>(new_value),
                                              std::memory_order_relaxed);
}

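// Installs `new_code` as the quick entrypoint of `method`. In debug builds this checks that the
// new entrypoint is consistent with the current instrumentation level, and if the old entrypoint
// was JIT-compiled code it is reported to the JIT code cache as zombie code.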
static void UpdateEntryPoints(ArtMethod* method, const void* new_code)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  if (kIsDebugBuild) {
    if (method->StillNeedsClinitCheckMayBeDead()) {
      CHECK(CanHandleInitializationCheck(new_code));
    }
    jit::Jit* jit = Runtime::Current()->GetJit();
    if (jit != nullptr && jit->GetCodeCache()->ContainsPc(new_code)) {
      // Ensure we always have the thumb entrypoint for JIT on arm32.
      if (kRuntimeQuickCodeISA == InstructionSet::kArm) {
        CHECK_EQ(reinterpret_cast<uintptr_t>(new_code) & 1, 1u);
      }
    }
    const Instrumentation* instr = Runtime::Current()->GetInstrumentation();
    if (instr->EntryExitStubsInstalled()) {
      CHECK(CodeSupportsEntryExitHooks(new_code, method));
    }
    if (instr->InterpreterStubsInstalled() && !method->IsNative()) {
      CHECK_EQ(new_code, GetQuickToInterpreterBridge());
    }
  }
  const void* current_entry_point = method->GetEntryPointFromQuickCompiledCode();
  if (current_entry_point == new_code) {
    // If the method is from a boot image, don't dirty it if the entrypoint
    // doesn't change.
    return;
  }

  // Do an atomic exchange to avoid potentially unregistering JIT code twice.
  MemberOffset offset = ArtMethod::EntryPointFromQuickCompiledCodeOffset(kRuntimePointerSize);
  uintptr_t old_value = reinterpret_cast<uintptr_t>(current_entry_point);
  uintptr_t new_value = reinterpret_cast<uintptr_t>(new_code);
  uintptr_t ptr = reinterpret_cast<uintptr_t>(method) + offset.Uint32Value();
  bool success = (kRuntimePointerSize == PointerSize::k32)
      ? CompareExchange<uint32_t>(ptr, old_value, new_value)
      : CompareExchange<uint64_t>(ptr, old_value, new_value);

  // If we successfully updated the entrypoint and the old entrypoint is JITted
  // code, register the old entrypoint as zombie.
  jit::Jit* jit = Runtime::Current()->GetJit();
  if (success &&
      jit != nullptr &&
      jit->GetCodeCache()->ContainsPc(current_entry_point)) {
    jit->GetCodeCache()->AddZombieCode(method, current_entry_point);
  }
}

bool Instrumentation::NeedsDexPcEvents(ArtMethod* method, Thread* thread) {
  return (InterpretOnly(method) || thread->IsForceInterpreter()) && HasDexPcListeners();
}

bool Instrumentation::InterpretOnly(ArtMethod* method) REQUIRES_SHARED(Locks::mutator_lock_) {
  if (method->IsNative()) {
    return false;
  }
  return InterpretOnly() || IsDeoptimized(method);
}

static bool CanUseNterp(ArtMethod* method) REQUIRES_SHARED(Locks::mutator_lock_) {
  return interpreter::CanRuntimeUseNterp() &&
      CanMethodUseNterp(method) &&
      method->IsDeclaringClassVerifiedMayBeDead();
}

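// Returns the best available code for `method` when instrumentation does not force it into the
// interpreter: AOT code, precompiled JIT code, nterp, or the GenericJni stub / interpreter
// bridge as a fallback.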
const void* Instrumentation::GetOptimizedCodeFor(ArtMethod* method) {
  DCHECK(!Runtime::Current()->GetInstrumentation()->InterpretOnly(method));
  CHECK(method->IsInvokable()) << method->PrettyMethod();
  if (method->IsProxyMethod()) {
    return GetQuickProxyInvokeHandler();
  }

  // In debuggable mode, we can only use AOT code for native methods.
  ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
  const void* aot_code = method->GetOatMethodQuickCode(class_linker->GetImagePointerSize());
  if (CanUseAotCode(aot_code)) {
    return aot_code;
  }

  // If the method has been precompiled, there can be a JIT version.
  jit::Jit* jit = Runtime::Current()->GetJit();
  if (jit != nullptr) {
    const void* code = jit->GetCodeCache()->GetSavedEntryPointOfPreCompiledMethod(method);
    if (code != nullptr) {
      return code;
    }
  }

  // We need to check if the class has been verified for setting up nterp, as
  // the verifier could punt the method to the switch interpreter in case we
  // need to do lock counting.
  if (CanUseNterp(method)) {
    return interpreter::GetNterpEntryPoint();
  }

  return method->IsNative() ? GetQuickGenericJniStub() : GetQuickToInterpreterBridge();
}

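// Recomputes and installs the entrypoint of `method` from scratch, taking the current
// instrumentation level, deoptimization state, and class initialization / verification status
// into account.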
void Instrumentation::ReinitializeMethodsCode(ArtMethod* method) {
  if (!method->IsInvokable()) {
    DCHECK(method->GetEntryPointFromQuickCompiledCode() == nullptr ||
           Runtime::Current()->GetClassLinker()->IsQuickToInterpreterBridge(
               method->GetEntryPointFromQuickCompiledCode()));
    UpdateEntryPoints(method, GetQuickToInterpreterBridge());
    return;
  }

  // Use instrumentation entrypoints if instrumentation is installed.
  if (UNLIKELY(EntryExitStubsInstalled() || IsForcedInterpretOnly() || IsDeoptimized(method))) {
    UpdateEntryPoints(
        method, method->IsNative() ? GetQuickGenericJniStub() : GetQuickToInterpreterBridge());
    return;
  }

  // Special case if we need an initialization check.
  // The method and its declaring class may be dead when starting JIT GC during managed heap GC.
  if (method->StillNeedsClinitCheckMayBeDead()) {
    // If we have code but the method needs a class initialization check before calling
    // that code, install the resolution stub that will perform the check.
    // It will be replaced by the proper entry point by ClassLinker::FixupStaticTrampolines
    // after initializing the class (see the ClassLinker::InitializeClass method).
    // Note: this mimics the logic in image_writer.cc that installs the resolution
    // stub only if we have compiled code or we can execute nterp, and the method needs a class
    // initialization check.
    if (method->IsNative() || CanUseNterp(method)) {
      if (kIsDebugBuild && CanUseNterp(method)) {
        // Adds some test coverage for the nterp clinit entrypoint.
        UpdateEntryPoints(method, interpreter::GetNterpWithClinitEntryPoint());
      } else {
        UpdateEntryPoints(method, GetQuickResolutionStub());
      }
    } else {
      UpdateEntryPoints(method, GetQuickToInterpreterBridge());
    }
    return;
  }

  // We check if the class is verified as we need the slow interpreter for lock verification.
  // If the class is not verified, this will be updated in
  // ClassLinker::UpdateClassAfterVerification.
  if (CanUseNterp(method)) {
    UpdateEntryPoints(method, interpreter::GetNterpEntryPoint());
    return;
  }

  // Use default entrypoints.
  UpdateEntryPoints(
      method, method->IsNative() ? GetQuickGenericJniStub() : GetQuickToInterpreterBridge());
}

void Instrumentation::InstallStubsForMethod(ArtMethod* method) {
  if (!method->IsInvokable() || method->IsProxyMethod()) {
    // Do not change stubs for these methods.
    return;
  }
  // Don't stub Proxy.<init>. Note that the Proxy class itself is not a proxy class.
  // TODO: We should remove the need for this since it means we cannot always correctly detect
  // calls to Proxy.<init>.
  if (IsProxyInit(method)) {
    return;
  }

  // If the instrumentation needs to go through the interpreter, just update the
  // entrypoint to interpreter.
  if (InterpretOnly(method)) {
    UpdateEntryPoints(method, GetQuickToInterpreterBridge());
    return;
  }

  if (EntryExitStubsInstalled()) {
    // Install the interpreter bridge / GenericJni stub if the existing code doesn't support
    // entry / exit hooks.
    if (!CodeSupportsEntryExitHooks(method->GetEntryPointFromQuickCompiledCode(), method)) {
      UpdateEntryPoints(
          method, method->IsNative() ? GetQuickGenericJniStub() : GetQuickToInterpreterBridge());
    }
    return;
  }

  // We're being asked to restore the entrypoints after instrumentation.
  CHECK_EQ(instrumentation_level_, InstrumentationLevel::kInstrumentNothing);
  // We need to keep the resolution stub if the class is not initialized.
  if (method->StillNeedsClinitCheck()) {
    UpdateEntryPoints(method, GetQuickResolutionStub());
    return;
  }
  UpdateEntryPoints(method, GetOptimizedCodeFor(method));
}

void Instrumentation::UpdateEntrypointsForDebuggable() {
  Runtime* runtime = Runtime::Current();
  // If we are transitioning from non-debuggable to debuggable, we patch
  // entry points of methods to remove any aot / JITed entry points.
  InstallStubsClassVisitor visitor(this);
  runtime->GetClassLinker()->VisitClasses(&visitor);
}

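// Returns whether a frame running code described by `header` is able to report a method exit
// event when it returns or gets unwound.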
bool Instrumentation::MethodSupportsExitEvents(ArtMethod* method,
                                               const OatQuickMethodHeader* header) {
  if (header == nullptr) {
    // The header can be nullptr for runtime / proxy methods that don't support method exit hooks
    // or for native methods that use generic JNI stubs. Generic JNI stubs support method exit
    // hooks.
    return method->IsNative();
  }

  if (header->IsNterpMethodHeader()) {
    // Nterp doesn't support method exit events.
    return false;
  }

  DCHECK(header->IsOptimized());
  if (CodeInfo::IsDebuggable(header->GetOptimizedCodeInfoPtr())) {
    // For optimized code, we only support method entry / exit hooks if they are compiled as
    // debuggable.
    return true;
  }

  return false;
}

// Updates on-stack frames to support any changes related to instrumentation.
// For JITed frames, the should-deoptimize flag is updated to enable deoptimization
// of methods when necessary. Shadow frames are updated if dex pc event
// notification has changed. When deopt_all_frames is true, the flag is also set
// to force a deoptimization.
void InstrumentationInstallStack(Thread* thread, bool deopt_all_frames)
    REQUIRES(Locks::mutator_lock_) {
  Locks::mutator_lock_->AssertExclusiveHeld(Thread::Current());
  struct InstallStackVisitor final : public StackVisitor {
    InstallStackVisitor(Thread* thread_in,
                        Context* context,
                        bool deopt_all_frames)
        : StackVisitor(thread_in, context, kInstrumentationStackWalk),
          deopt_all_frames_(deopt_all_frames),
          runtime_methods_need_deopt_check_(false) {}

    bool VisitFrame() override REQUIRES_SHARED(Locks::mutator_lock_) {
      ArtMethod* m = GetMethod();
      if (m == nullptr || m->IsRuntimeMethod()) {
        if (kVerboseInstrumentation) {
          LOG(INFO) << "  Skipping upcall / runtime method. Frame " << GetFrameId();
        }
        return true;  // Ignore upcalls and runtime methods.
      }

      bool is_shadow_frame = GetCurrentQuickFrame() == nullptr;
      if (kVerboseInstrumentation) {
        LOG(INFO) << "Processing frame: method: " << m->PrettyMethod()
                  << " is_shadow_frame: " << is_shadow_frame;
      }

      // Handle interpreter frame.
      if (is_shadow_frame) {
        // Since we are updating the instrumentation-related information we have to recalculate
        // NeedsDexPcEvents. For example, when a new method or thread is deoptimized / interpreter
        // stubs are installed the NeedsDexPcEvents could change for the shadow frames on the stack.
        // If we don't update it here we would miss reporting dex pc events which is incorrect.
        ShadowFrame* shadow_frame = GetCurrentShadowFrame();
        DCHECK(shadow_frame != nullptr);
        shadow_frame->SetNotifyDexPcMoveEvents(
            Runtime::Current()->GetInstrumentation()->NeedsDexPcEvents(GetMethod(), GetThread()));
        return true;  // Continue.
      }

      DCHECK(!m->IsRuntimeMethod());
      const OatQuickMethodHeader* method_header = GetCurrentOatQuickMethodHeader();
      // If it is a JITed frame then just set the deopt bit if required otherwise continue.
      // We need kForceDeoptForRedefinition to ensure we don't use any JITed code after a
      // redefinition. We support redefinition only if the runtime has started off as a
      // debuggable runtime which makes sure we don't use any AOT or Nterp code.
      // The CheckCallerForDeopt is an optimization which we only do for non-native JITed code for
      // now. We can extend it to native methods but that needs reserving an additional stack slot.
      // We don't do it currently since that wasn't important for debugger performance.
      if (method_header != nullptr && method_header->HasShouldDeoptimizeFlag()) {
        if (deopt_all_frames_) {
          runtime_methods_need_deopt_check_ = true;
          SetShouldDeoptimizeFlag(DeoptimizeFlagValue::kForceDeoptForRedefinition);
        }
        SetShouldDeoptimizeFlag(DeoptimizeFlagValue::kCheckCallerForDeopt);
      }

      return true;  // Continue.
    }
    bool deopt_all_frames_;
    bool runtime_methods_need_deopt_check_;
  };
  if (kVerboseInstrumentation) {
    std::string thread_name;
    thread->GetThreadName(thread_name);
    LOG(INFO) << "Installing exit stubs in " << thread_name;
  }

  std::unique_ptr<Context> context(Context::Create());
  InstallStackVisitor visitor(thread,
                              context.get(),
                              deopt_all_frames);
  visitor.WalkStack(true);

  if (visitor.runtime_methods_need_deopt_check_) {
    thread->SetDeoptCheckRequired(true);
  }

  thread->VerifyStack();
}

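// Recomputes the NotifyDexPcMoveEvents flag for every shadow frame on `thread`'s stack after a
// change in the dex pc listeners.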
void UpdateNeedsDexPcEventsOnStack(Thread* thread) REQUIRES(Locks::mutator_lock_) {
  Locks::mutator_lock_->AssertExclusiveHeld(Thread::Current());

  struct InstallStackVisitor final : public StackVisitor {
    InstallStackVisitor(Thread* thread_in, Context* context)
        : StackVisitor(thread_in, context, kInstrumentationStackWalk) {}

    bool VisitFrame() override REQUIRES_SHARED(Locks::mutator_lock_) {
      ShadowFrame* shadow_frame = GetCurrentShadowFrame();
      if (shadow_frame != nullptr) {
        shadow_frame->SetNotifyDexPcMoveEvents(
            Runtime::Current()->GetInstrumentation()->NeedsDexPcEvents(GetMethod(), GetThread()));
      }
      return true;
    }
  };

  std::unique_ptr<Context> context(Context::Create());
  InstallStackVisitor visitor(thread, context.get());
  visitor.WalkStack(true);
}

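// Walks `thread`'s stack and reports a method entry event to `listener` for every method that is
// currently on the stack and whose frame can later report a matching method exit event.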
void ReportMethodEntryForOnStackMethods(InstrumentationListener* listener, Thread* thread)
    REQUIRES(Locks::mutator_lock_) {
  Locks::mutator_lock_->AssertExclusiveHeld(Thread::Current());

  struct InstallStackVisitor final : public StackVisitor {
    InstallStackVisitor(Thread* thread_in, Context* context)
        : StackVisitor(thread_in, context, kInstrumentationStackWalk) {}

    bool VisitFrame() override REQUIRES_SHARED(Locks::mutator_lock_) {
      ArtMethod* m = GetMethod();
      if (m == nullptr || m->IsRuntimeMethod()) {
        // Skip upcall / runtime methods.
        return true;
      }

      if (GetCurrentShadowFrame() != nullptr) {
        stack_methods_.push_back(m);
      } else {
        const OatQuickMethodHeader* method_header = GetCurrentOatQuickMethodHeader();
        if (Runtime::Current()->GetInstrumentation()->MethodSupportsExitEvents(m, method_header)) {
          // It is unexpected to see a method enter event but not a method exit event, so record
          // stack methods only for frames that support method exit events. Even if we deoptimize
          // we make sure that we only call the method exit event if the frame supported it in the
          // first place. For example, deoptimizing from JITed code with debug support calls a
          // method exit hook but deoptimizing from nterp doesn't.
          stack_methods_.push_back(m);
        }
      }
      return true;
    }

    std::vector<ArtMethod*> stack_methods_;
  };

  if (kVerboseInstrumentation) {
    std::string thread_name;
    thread->GetThreadName(thread_name);
    LOG(INFO) << "Updating DexPcMoveEvents on shadow frames on stack " << thread_name;
  }

  std::unique_ptr<Context> context(Context::Create());
  InstallStackVisitor visitor(thread, context.get());
  visitor.WalkStack(true);

  // Create method enter events for all methods currently on the thread's stack.
  for (auto smi = visitor.stack_methods_.rbegin(); smi != visitor.stack_methods_.rend(); smi++) {
    listener->MethodEntered(thread, *smi);
  }
}

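// Instruments the stack of a single thread (or, below, of all threads) so that exit hooks run
// and the required deoptimization checks happen when the frames currently on the stack return.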
void Instrumentation::InstrumentThreadStack(Thread* thread, bool force_deopt) {
  run_exit_hooks_ = true;
  InstrumentationInstallStack(thread, force_deopt);
}

void Instrumentation::InstrumentAllThreadStacks(bool force_deopt) {
  run_exit_hooks_ = true;
  MutexLock mu(Thread::Current(), *Locks::thread_list_lock_);
  for (Thread* thread : Runtime::Current()->GetThreadList()->GetList()) {
    InstrumentThreadStack(thread, force_deopt);
  }
}

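// Clears the kCheckCallerForDeopt flag from all quick frames on `thread`'s stack once
// instrumentation support is no longer needed.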
static void InstrumentationRestoreStack(Thread* thread) REQUIRES(Locks::mutator_lock_) {
  Locks::mutator_lock_->AssertExclusiveHeld(Thread::Current());

  struct RestoreStackVisitor final : public StackVisitor {
    RestoreStackVisitor(Thread* thread)
        : StackVisitor(thread, nullptr, kInstrumentationStackWalk), thread_(thread) {}

    bool VisitFrame() override REQUIRES_SHARED(Locks::mutator_lock_) {
      if (GetCurrentQuickFrame() == nullptr) {
        return true;
      }

      const OatQuickMethodHeader* method_header = GetCurrentOatQuickMethodHeader();
      if (method_header != nullptr && method_header->HasShouldDeoptimizeFlag()) {
        // We shouldn't restore the stack if any of the frames need a force deopt.
        DCHECK(!ShouldForceDeoptForRedefinition());
        UnsetShouldDeoptimizeFlag(DeoptimizeFlagValue::kCheckCallerForDeopt);
      }
      return true;  // Continue.
    }
    Thread* const thread_;
  };

  if (kVerboseInstrumentation) {
    std::string thread_name;
    thread->GetThreadName(thread_name);
    LOG(INFO) << "Restoring stack for " << thread_name;
  }
  DCHECK(!thread->IsDeoptCheckRequired());
  RestoreStackVisitor visitor(thread);
  visitor.WalkStack(true);
}

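// Returns true if any quick frame on `thread`'s stack still has the force-deopt-for-redefinition
// flag set, i.e. the thread still needs a deoptimization check.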
static bool HasFramesNeedingForceDeopt(Thread* thread) REQUIRES(Locks::mutator_lock_) {
  Locks::mutator_lock_->AssertExclusiveHeld(Thread::Current());

  struct CheckForForceDeoptStackVisitor final : public StackVisitor {
    CheckForForceDeoptStackVisitor(Thread* thread)
        : StackVisitor(thread, nullptr, kInstrumentationStackWalk),
          thread_(thread),
          force_deopt_check_needed_(false) {}

    bool VisitFrame() override REQUIRES_SHARED(Locks::mutator_lock_) {
      if (GetCurrentQuickFrame() == nullptr) {
        return true;
      }

      const OatQuickMethodHeader* method_header = GetCurrentOatQuickMethodHeader();
      if (method_header != nullptr && method_header->HasShouldDeoptimizeFlag()) {
        if (ShouldForceDeoptForRedefinition()) {
          force_deopt_check_needed_ = true;
          return false;
        }
      }
      return true;  // Continue.
    }
    Thread* const thread_;
    bool force_deopt_check_needed_;
  };

  CheckForForceDeoptStackVisitor visitor(thread);
  visitor.WalkStack(true);
  // If there is a frame that requires a force deopt we should have set the IsDeoptCheckRequired
  // bit. We don't check if the bit needs to be reset on every method exit / deoptimization. We
  // only check when we no longer need instrumentation support. So it is possible that the bit is
  // set but we don't find any frames that need a force deopt on the stack, so the reverse
  // implication doesn't hold.
  DCHECK_IMPLIES(visitor.force_deopt_check_needed_, thread->IsDeoptCheckRequired());
  return visitor.force_deopt_check_needed_;
}

void Instrumentation::DeoptimizeAllThreadFrames() {
  InstrumentAllThreadStacks(/* force_deopt= */ true);
}

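// Returns whether the `expected` event bit is set in the `events` bit mask.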
static bool HasEvent(Instrumentation::InstrumentationEvent expected, uint32_t events) {
  return (events & expected) != 0;
}

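// If `event` is requested in `events`, adds `listener` to `list` (reusing a free slot when one
// exists) and returns true. The overloads below additionally record that a listener of this kind
// now exists, either as a boolean or as a flag bit.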
static bool PotentiallyAddListenerTo(Instrumentation::InstrumentationEvent event,
                                     uint32_t events,
                                     std::list<InstrumentationListener*>& list,
                                     InstrumentationListener* listener)
    REQUIRES(Locks::mutator_lock_, !Locks::thread_list_lock_, !Locks::classlinker_classes_lock_) {
  Locks::mutator_lock_->AssertExclusiveHeld(Thread::Current());
  if (!HasEvent(event, events)) {
    return false;
  }
  // If there is a free slot in the list, we insert the listener in that slot.
  // Otherwise we add it to the end of the list.
  auto it = std::find(list.begin(), list.end(), nullptr);
  if (it != list.end()) {
    *it = listener;
  } else {
    list.push_back(listener);
  }
  return true;
}

static void PotentiallyAddListenerTo(Instrumentation::InstrumentationEvent event,
                                     uint32_t events,
                                     std::list<InstrumentationListener*>& list,
                                     InstrumentationListener* listener,
                                     bool* has_listener)
    REQUIRES(Locks::mutator_lock_, !Locks::thread_list_lock_, !Locks::classlinker_classes_lock_) {
  if (PotentiallyAddListenerTo(event, events, list, listener)) {
    *has_listener = true;
  }
}

static void PotentiallyAddListenerTo(Instrumentation::InstrumentationEvent event,
                                     uint32_t events,
                                     std::list<InstrumentationListener*>& list,
                                     InstrumentationListener* listener,
                                     uint8_t* has_listener,
                                     uint8_t flag)
    REQUIRES(Locks::mutator_lock_, !Locks::thread_list_lock_, !Locks::classlinker_classes_lock_) {
  if (PotentiallyAddListenerTo(event, events, list, listener)) {
    *has_listener = *has_listener | flag;
  }
}

void Instrumentation::AddListener(InstrumentationListener* listener,
                                  uint32_t events,
                                  bool is_trace_listener) {
  Locks::mutator_lock_->AssertExclusiveHeld(Thread::Current());
  if (is_trace_listener) {
    PotentiallyAddListenerTo(kMethodEntered,
                             events,
                             method_entry_fast_trace_listeners_,
                             listener,
                             &have_method_entry_listeners_,
                             kFastTraceListeners);
  } else {
    PotentiallyAddListenerTo(kMethodEntered,
                             events,
                             method_entry_slow_listeners_,
                             listener,
                             &have_method_entry_listeners_,
                             kSlowMethodEntryExitListeners);
  }
  if (is_trace_listener) {
    PotentiallyAddListenerTo(kMethodExited,
                             events,
                             method_exit_fast_trace_listeners_,
                             listener,
                             &have_method_exit_listeners_,
                             kFastTraceListeners);
  } else {
    PotentiallyAddListenerTo(kMethodExited,
                             events,
                             method_exit_slow_listeners_,
                             listener,
                             &have_method_exit_listeners_,
                             kSlowMethodEntryExitListeners);
  }
  PotentiallyAddListenerTo(kMethodUnwind,
                           events,
                           method_unwind_listeners_,
                           listener,
                           &have_method_unwind_listeners_);
  PotentiallyAddListenerTo(kBranch,
                           events,
                           branch_listeners_,
                           listener,
                           &have_branch_listeners_);
  PotentiallyAddListenerTo(kDexPcMoved,
                           events,
                           dex_pc_listeners_,
                           listener,
                           &have_dex_pc_listeners_);
  PotentiallyAddListenerTo(kFieldRead,
                           events,
                           field_read_listeners_,
                           listener,
                           &have_field_read_listeners_);
  PotentiallyAddListenerTo(kFieldWritten,
                           events,
                           field_write_listeners_,
                           listener,
                           &have_field_write_listeners_);
  PotentiallyAddListenerTo(kExceptionThrown,
                           events,
                           exception_thrown_listeners_,
                           listener,
                           &have_exception_thrown_listeners_);
  PotentiallyAddListenerTo(kWatchedFramePop,
                           events,
                           watched_frame_pop_listeners_,
                           listener,
                           &have_watched_frame_pop_listeners_);
  PotentiallyAddListenerTo(kExceptionHandled,
                           events,
                           exception_handled_listeners_,
                           listener,
                           &have_exception_handled_listeners_);
  if (HasEvent(kDexPcMoved, events)) {
    MutexLock mu(Thread::Current(), *Locks::thread_list_lock_);
    for (Thread* thread : Runtime::Current()->GetThreadList()->GetList()) {
      UpdateNeedsDexPcEventsOnStack(thread);
    }
  }
}

static bool PotentiallyRemoveListenerFrom(Instrumentation::InstrumentationEvent event,
                                          uint32_t events,
                                          std::list<InstrumentationListener*>& list,
                                          InstrumentationListener* listener)
    REQUIRES(Locks::mutator_lock_, !Locks::thread_list_lock_, !Locks::classlinker_classes_lock_) {
  Locks::mutator_lock_->AssertExclusiveHeld(Thread::Current());
  if (!HasEvent(event, events)) {
    return false;
  }
  auto it = std::find(list.begin(), list.end(), listener);
  if (it != list.end()) {
    // Just update the entry, do not remove from the list. Removing entries in the list
    // is unsafe when mutators are iterating over it.
    *it = nullptr;
  }

  // Check if the list contains any non-null listener.
  for (InstrumentationListener* l : list) {
    if (l != nullptr) {
      return false;
    }
  }

  return true;
}

static void PotentiallyRemoveListenerFrom(Instrumentation::InstrumentationEvent event,
                                          uint32_t events,
                                          std::list<InstrumentationListener*>& list,
                                          InstrumentationListener* listener,
                                          bool* has_listener)
    REQUIRES(Locks::mutator_lock_, !Locks::thread_list_lock_, !Locks::classlinker_classes_lock_) {
  if (PotentiallyRemoveListenerFrom(event, events, list, listener)) {
    *has_listener = false;
  }
}

static void PotentiallyRemoveListenerFrom(Instrumentation::InstrumentationEvent event,
                                          uint32_t events,
                                          std::list<InstrumentationListener*>& list,
                                          InstrumentationListener* listener,
                                          uint8_t* has_listener,
                                          uint8_t flag)
    REQUIRES(Locks::mutator_lock_, !Locks::thread_list_lock_, !Locks::classlinker_classes_lock_) {
  if (PotentiallyRemoveListenerFrom(event, events, list, listener)) {
    *has_listener = *has_listener & ~flag;
  }
}

void Instrumentation::RemoveListener(InstrumentationListener* listener,
                                     uint32_t events,
                                     bool is_trace_listener) {
  Locks::mutator_lock_->AssertExclusiveHeld(Thread::Current());
  if (is_trace_listener) {
    PotentiallyRemoveListenerFrom(kMethodEntered,
                                  events,
                                  method_entry_fast_trace_listeners_,
                                  listener,
                                  &have_method_entry_listeners_,
                                  kFastTraceListeners);
  } else {
    PotentiallyRemoveListenerFrom(kMethodEntered,
                                  events,
                                  method_entry_slow_listeners_,
                                  listener,
                                  &have_method_entry_listeners_,
                                  kSlowMethodEntryExitListeners);
  }
  if (is_trace_listener) {
    PotentiallyRemoveListenerFrom(kMethodExited,
                                  events,
                                  method_exit_fast_trace_listeners_,
                                  listener,
                                  &have_method_exit_listeners_,
                                  kFastTraceListeners);
  } else {
    PotentiallyRemoveListenerFrom(kMethodExited,
                                  events,
                                  method_exit_slow_listeners_,
                                  listener,
                                  &have_method_exit_listeners_,
                                  kSlowMethodEntryExitListeners);
  }
  PotentiallyRemoveListenerFrom(kMethodUnwind,
                                events,
                                method_unwind_listeners_,
                                listener,
                                &have_method_unwind_listeners_);
  PotentiallyRemoveListenerFrom(kBranch,
                                events,
                                branch_listeners_,
                                listener,
                                &have_branch_listeners_);
  PotentiallyRemoveListenerFrom(kDexPcMoved,
                                events,
                                dex_pc_listeners_,
                                listener,
                                &have_dex_pc_listeners_);
  PotentiallyRemoveListenerFrom(kFieldRead,
                                events,
                                field_read_listeners_,
                                listener,
                                &have_field_read_listeners_);
  PotentiallyRemoveListenerFrom(kFieldWritten,
                                events,
                                field_write_listeners_,
                                listener,
                                &have_field_write_listeners_);
  PotentiallyRemoveListenerFrom(kExceptionThrown,
                                events,
                                exception_thrown_listeners_,
                                listener,
                                &have_exception_thrown_listeners_);
  PotentiallyRemoveListenerFrom(kWatchedFramePop,
                                events,
                                watched_frame_pop_listeners_,
                                listener,
                                &have_watched_frame_pop_listeners_);
  PotentiallyRemoveListenerFrom(kExceptionHandled,
                                events,
                                exception_handled_listeners_,
                                listener,
                                &have_exception_handled_listeners_);
  if (HasEvent(kDexPcMoved, events)) {
    MutexLock mu(Thread::Current(), *Locks::thread_list_lock_);
    for (Thread* thread : Runtime::Current()->GetThreadList()->GetList()) {
      UpdateNeedsDexPcEventsOnStack(thread);
    }
  }
}

Instrumentation::InstrumentationLevel Instrumentation::GetCurrentInstrumentationLevel() const {
  return instrumentation_level_;
}

void Instrumentation::ConfigureStubs(const char* key,
                                     InstrumentationLevel desired_level,
                                     bool try_switch_to_non_debuggable) {
  // Store the instrumentation level for this key or remove it.
  if (desired_level == InstrumentationLevel::kInstrumentNothing) {
    // The client no longer needs instrumentation.
    requested_instrumentation_levels_.erase(key);
  } else {
    // The client needs instrumentation.
    requested_instrumentation_levels_.Overwrite(key, desired_level);
  }

  UpdateStubs(try_switch_to_non_debuggable);
}

void Instrumentation::UpdateInstrumentationLevel(InstrumentationLevel requested_level) {
  instrumentation_level_ = requested_level;
}

void Instrumentation::EnableEntryExitHooks(const char* key) {
  DCHECK(Runtime::Current()->IsJavaDebuggable());
  ConfigureStubs(key,
                 InstrumentationLevel::kInstrumentWithEntryExitHooks,
                 /*try_switch_to_non_debuggable=*/false);
}

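// If no method is deoptimized and no thread still needs interpreter / deopt support, clears the
// per-frame deopt flags on all threads and stops running exit hooks.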
void Instrumentation::MaybeRestoreInstrumentationStack() {
  // Restore stack only if there is no method currently deoptimized.
  if (!IsDeoptimizedMethodsEmpty()) {
    return;
  }

  Thread* self = Thread::Current();
  MutexLock mu(self, *Locks::thread_list_lock_);
  bool no_remaining_deopts = true;
  // Check that there are no other forced deoptimizations. Do it here so we only need to lock
  // thread_list_lock once.
  // The compiler gets confused on the thread annotations, so use
  // NO_THREAD_SAFETY_ANALYSIS. Note that we hold the mutator lock
  // exclusively at this point.
  Locks::mutator_lock_->AssertExclusiveHeld(self);
  Runtime::Current()->GetThreadList()->ForEach([&](Thread* t) NO_THREAD_SAFETY_ANALYSIS {
    bool has_force_deopt_frames = HasFramesNeedingForceDeopt(t);
    if (!has_force_deopt_frames) {
      // We no longer have any frames that require a force deopt check. If the bit was true then we
      // had some frames earlier but they already got deoptimized and are no longer on stack.
      t->SetDeoptCheckRequired(false);
    }
    no_remaining_deopts =
        no_remaining_deopts &&
        !t->IsForceInterpreter() &&
        !t->HasDebuggerShadowFrames() &&
        !has_force_deopt_frames;
  });
  if (no_remaining_deopts) {
    Runtime::Current()->GetThreadList()->ForEach(InstrumentationRestoreStack);
    run_exit_hooks_ = false;
  }
}

void Instrumentation::UpdateStubs(bool try_switch_to_non_debuggable) {
  // Look for the highest required instrumentation level.
  InstrumentationLevel requested_level = InstrumentationLevel::kInstrumentNothing;
  for (const auto& v : requested_instrumentation_levels_) {
    requested_level = std::max(requested_level, v.second);
  }

  if (GetCurrentInstrumentationLevel() == requested_level) {
    // We're already set.
    return;
  }

  Thread* const self = Thread::Current();
  Runtime* runtime = Runtime::Current();
  Locks::mutator_lock_->AssertExclusiveHeld(self);
  Locks::thread_list_lock_->AssertNotHeld(self);
  // The following needs to happen in this order.
  // 1. Update the instrumentation level.
  // 2. Switch the runtime to non-debuggable if requested. We switch to non-debuggable only when
  // the instrumentation level is set to kInstrumentNothing. So this needs to happen only after
  // updating the instrumentation level.
  // 3. Update the entry points. We use AOT code only if we aren't a debuggable runtime. So update
  // entrypoints after switching the instrumentation level.
  UpdateInstrumentationLevel(requested_level);
  if (try_switch_to_non_debuggable) {
    MaybeSwitchRuntimeDebugState(self);
  }
  InstallStubsClassVisitor visitor(this);
  runtime->GetClassLinker()->VisitClasses(&visitor);
  if (requested_level > InstrumentationLevel::kInstrumentNothing) {
    InstrumentAllThreadStacks(/* force_deopt= */ false);
  } else {
    MaybeRestoreInstrumentationStack();
  }
}

static void ResetQuickAllocEntryPointsForThread(Thread* thread, [[maybe_unused]] void* arg) {
  thread->ResetQuickAllocEntryPointsForThread();
}

void Instrumentation::SetEntrypointsInstrumented(bool instrumented) {
  Thread* self = Thread::Current();
  Runtime* runtime = Runtime::Current();
  Locks::mutator_lock_->AssertNotHeld(self);
  Locks::instrument_entrypoints_lock_->AssertHeld(self);
  if (runtime->IsStarted()) {
    ScopedSuspendAll ssa(__FUNCTION__);
    MutexLock mu(self, *Locks::runtime_shutdown_lock_);
    SetQuickAllocEntryPointsInstrumented(instrumented);
    ResetQuickAllocEntryPoints();
    alloc_entrypoints_instrumented_ = instrumented;
  } else {
    MutexLock mu(self, *Locks::runtime_shutdown_lock_);
    SetQuickAllocEntryPointsInstrumented(instrumented);

    // Note: ResetQuickAllocEntryPoints only works when the runtime is started. Manually run the
    //       update for just this thread.
    // Note: self may be null. One of those paths is setting instrumentation in the Heap
    //       constructor for gcstress mode.
    if (self != nullptr) {
      ResetQuickAllocEntryPointsForThread(self, nullptr);
    }

    alloc_entrypoints_instrumented_ = instrumented;
  }
}

void Instrumentation::InstrumentQuickAllocEntryPoints() {
  MutexLock mu(Thread::Current(), *Locks::instrument_entrypoints_lock_);
  InstrumentQuickAllocEntryPointsLocked();
}

void Instrumentation::UninstrumentQuickAllocEntryPoints() {
  MutexLock mu(Thread::Current(), *Locks::instrument_entrypoints_lock_);
  UninstrumentQuickAllocEntryPointsLocked();
}

void Instrumentation::InstrumentQuickAllocEntryPointsLocked() {
  Locks::instrument_entrypoints_lock_->AssertHeld(Thread::Current());
  if (quick_alloc_entry_points_instrumentation_counter_ == 0) {
    SetEntrypointsInstrumented(true);
  }
  ++quick_alloc_entry_points_instrumentation_counter_;
}

void Instrumentation::UninstrumentQuickAllocEntryPointsLocked() {
  Locks::instrument_entrypoints_lock_->AssertHeld(Thread::Current());
  CHECK_GT(quick_alloc_entry_points_instrumentation_counter_, 0U);
  --quick_alloc_entry_points_instrumentation_counter_;
  if (quick_alloc_entry_points_instrumentation_counter_ == 0) {
    SetEntrypointsInstrumented(false);
  }
}

void Instrumentation::ResetQuickAllocEntryPoints() {
  Runtime* runtime = Runtime::Current();
  if (runtime->IsStarted()) {
    MutexLock mu(Thread::Current(), *Locks::thread_list_lock_);
    runtime->GetThreadList()->ForEach(ResetQuickAllocEntryPointsForThread, nullptr);
  }
}

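// Returns a human-readable description of the given entrypoint, used in log and check-failure
// messages.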
EntryPointString(const void * code)1151 std::string Instrumentation::EntryPointString(const void* code) {
1152   ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
1153   jit::Jit* jit = Runtime::Current()->GetJit();
1154   if (class_linker->IsQuickToInterpreterBridge(code)) {
1155     return "interpreter";
1156   } else if (class_linker->IsQuickResolutionStub(code)) {
1157     return "resolution";
1158   } else if (jit != nullptr && jit->GetCodeCache()->ContainsPc(code)) {
1159     return "jit";
1160   } else if (code == GetInvokeObsoleteMethodStub()) {
1161     return "obsolete";
1162   } else if (code == interpreter::GetNterpEntryPoint()) {
1163     return "nterp";
1164   } else if (code == interpreter::GetNterpWithClinitEntryPoint()) {
1165     return "nterp with clinit";
1166   } else if (class_linker->IsQuickGenericJniStub(code)) {
1167     return "generic jni";
1168   } else if (Runtime::Current()->GetOatFileManager().ContainsPc(code)) {
1169     return "oat";
1170   } else if (OatQuickMethodHeader::IsStub(reinterpret_cast<const uint8_t*>(code)).value_or(false)) {
1171     return "stub";
1172   }
1173   return "unknown";
1174 }
1175 
void Instrumentation::UpdateMethodsCodeImpl(ArtMethod* method, const void* new_code) {
  if (!EntryExitStubsInstalled()) {
    // Fast path: no instrumentation.
    DCHECK(!IsDeoptimized(method));
    UpdateEntryPoints(method, new_code);
    return;
  }

  ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
  if (class_linker->IsQuickToInterpreterBridge(new_code)) {
    // It's always OK to update to the interpreter.
    UpdateEntryPoints(method, new_code);
    return;
  }

  if (InterpretOnly(method)) {
    DCHECK(class_linker->IsQuickToInterpreterBridge(method->GetEntryPointFromQuickCompiledCode()))
        << EntryPointString(method->GetEntryPointFromQuickCompiledCode());
    // Don't update, stay deoptimized.
    return;
  }

  if (EntryExitStubsInstalled() && !CodeSupportsEntryExitHooks(new_code, method)) {
    DCHECK(CodeSupportsEntryExitHooks(method->GetEntryPointFromQuickCompiledCode(), method))
        << EntryPointString(method->GetEntryPointFromQuickCompiledCode()) << " "
        << method->PrettyMethod();
    // If we need entry / exit stubs but `new_code` doesn't support entry / exit hooks, just skip.
    return;
  }

  // At this point, we can update as asked.
  UpdateEntryPoints(method, new_code);
}

void Instrumentation::UpdateNativeMethodsCodeToJitCode(ArtMethod* method, const void* new_code) {
  // We don't do any read barrier on `method`'s declaring class in this code, as the JIT might
  // enter here on a soon-to-be deleted ArtMethod. Updating the entrypoint is OK though, as
  // the ArtMethod is still in memory.
  if (EntryExitStubsInstalled() && !CodeSupportsEntryExitHooks(new_code, method)) {
    // If the new code doesn't support entry / exit hooks but we need them, don't update with the
    // new code.
    return;
  }
  UpdateEntryPoints(method, new_code);
}

void Instrumentation::UpdateMethodsCode(ArtMethod* method, const void* new_code) {
  DCHECK(method->GetDeclaringClass()->IsResolved());
  UpdateMethodsCodeImpl(method, new_code);
}

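// The following helpers maintain `deoptimized_methods_`, the set of methods
// that have been individually deoptimized. Add and Remove return whether the
// set actually changed.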
bool Instrumentation::AddDeoptimizedMethod(ArtMethod* method) {
  if (IsDeoptimizedMethod(method)) {
    // Already in the set. Return.
    return false;
  }
  // Not found. Add it.
  deoptimized_methods_.insert(method);
  return true;
}

bool Instrumentation::IsDeoptimizedMethod(ArtMethod* method) {
  return deoptimized_methods_.find(method) != deoptimized_methods_.end();
}

bool Instrumentation::RemoveDeoptimizedMethod(ArtMethod* method) {
  auto it = deoptimized_methods_.find(method);
  if (it == deoptimized_methods_.end()) {
    return false;
  }
  deoptimized_methods_.erase(it);
  return true;
}

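// Forces `method` to be executed by the interpreter. The caller must hold the
// mutator lock exclusively (asserted below); the method must not be native or
// a proxy and must be invokable. Unless interpreter stubs are already
// installed, the entrypoint is switched to the quick-to-interpreter bridge and
// thread stacks are instrumented so callers can be checked for deoptimization
// on return.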
void Instrumentation::Deoptimize(ArtMethod* method) {
  CHECK(!method->IsNative());
  CHECK(!method->IsProxyMethod());
  CHECK(method->IsInvokable());

  {
    Locks::mutator_lock_->AssertExclusiveHeld(Thread::Current());
    bool has_not_been_deoptimized = AddDeoptimizedMethod(method);
    CHECK(has_not_been_deoptimized) << "Method " << ArtMethod::PrettyMethod(method)
        << " is already deoptimized";
  }

  if (method->IsObsolete()) {
    // If the method was marked as obsolete it should have `GetInvokeObsoleteMethodStub`
    // as its quick entry point.
    CHECK_EQ(method->GetEntryPointFromQuickCompiledCode(), GetInvokeObsoleteMethodStub());
    return;
  }

  if (!InterpreterStubsInstalled()) {
    UpdateEntryPoints(method, GetQuickToInterpreterBridge());

    // Instrument thread stacks to request a check if the caller needs a deoptimization.
    // This isn't a strong deopt. We deopt this method if it is still in the deopt methods list.
    // If by the time we hit this frame we no longer need a deopt it is safe to continue.
    InstrumentAllThreadStacks(/* force_deopt= */ false);
  }
  CHECK_EQ(method->GetEntryPointFromQuickCompiledCode(), GetQuickToInterpreterBridge());
}

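// Reverses a previous Deoptimize() call for `method`. If no other reason
// forces interpretation, the method's entrypoint is restored to the most
// suitable code (interpreter bridge, resolution stub, or optimized code that
// supports the currently required hooks).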
void Instrumentation::Undeoptimize(ArtMethod* method) {
  CHECK(!method->IsNative());
  CHECK(!method->IsProxyMethod());
  CHECK(method->IsInvokable());

  {
    Locks::mutator_lock_->AssertExclusiveHeld(Thread::Current());
    bool found_and_erased = RemoveDeoptimizedMethod(method);
    CHECK(found_and_erased) << "Method " << ArtMethod::PrettyMethod(method)
        << " is not deoptimized";
  }

  // If interpreter stubs are still needed there is nothing to do.
  if (InterpreterStubsInstalled()) {
    return;
  }

  if (method->IsObsolete()) {
    // Don't update entry points for obsolete methods. The entrypoint should
    // have been set to InvokeObsoleteMethodStub.
    DCHECK_EQ(method->GetEntryPointFromQuickCompiledCodePtrSize(kRuntimePointerSize),
              GetInvokeObsoleteMethodStub());
    return;
  }

  // We are not using interpreter stubs for deoptimization. Restore the code of the method.
  // We still retain the interpreter bridge if we need it for other reasons.
  if (InterpretOnly(method)) {
    UpdateEntryPoints(method, GetQuickToInterpreterBridge());
  } else if (method->StillNeedsClinitCheck()) {
    UpdateEntryPoints(method, GetQuickResolutionStub());
  } else {
    UpdateEntryPoints(method, GetMaybeInstrumentedCodeForInvoke(method));
  }

  // If there is no deoptimized method left, we can restore the stack of each thread.
  if (!EntryExitStubsInstalled()) {
    MaybeRestoreInstrumentationStack();
  }
}

bool Instrumentation::IsDeoptimizedMethodsEmpty() const {
  return deoptimized_methods_.empty();
}

bool Instrumentation::IsDeoptimized(ArtMethod* method) {
  DCHECK(method != nullptr);
  return IsDeoptimizedMethod(method);
}

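// Removes all instrumentation that was installed for deoptimization under
// `key` and undeoptimizes every individually deoptimized method.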
void Instrumentation::DisableDeoptimization(const char* key, bool try_switch_to_non_debuggable) {
  // Remove any instrumentation support added for deoptimization.
  ConfigureStubs(key, InstrumentationLevel::kInstrumentNothing, try_switch_to_non_debuggable);
  Locks::mutator_lock_->AssertExclusiveHeld(Thread::Current());
  // Undeoptimize selected methods.
  while (true) {
    ArtMethod* method;
    {
      if (deoptimized_methods_.empty()) {
        break;
      }
      method = *deoptimized_methods_.begin();
      CHECK(method != nullptr);
    }
    Undeoptimize(method);
  }
}

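// Switches the runtime back to the non-Java-debuggable state when no tool
// needs entry / exit hooks anymore and the runtime didn't start off as
// debuggable. All JIT-compiled code is invalidated so it can be recompiled
// without the debuggable compiler option.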
void Instrumentation::MaybeSwitchRuntimeDebugState(Thread* self) {
  Runtime* runtime = Runtime::Current();
  // Return early if the runtime is shutting down.
  if (runtime->IsShuttingDown(self)) {
    return;
  }

  // Don't switch the state if we started off as JavaDebuggable or if we still need entry / exit
  // hooks for other reasons.
  if (EntryExitStubsInstalled() || runtime->IsJavaDebuggableAtInit()) {
    return;
  }

  art::jit::Jit* jit = runtime->GetJit();
  if (jit != nullptr) {
    jit->GetCodeCache()->InvalidateAllCompiledCode();
    jit->GetJitCompiler()->SetDebuggableCompilerOption(false);
  }
  runtime->SetRuntimeDebugState(art::Runtime::RuntimeDebugState::kNonJavaDebuggable);
}

void Instrumentation::DeoptimizeEverything(const char* key) {
  // We want to switch to non-debuggable only when the debugger / profile tools are detaching.
  // This call is used for supporting debug-related features (e.g. single stepping across all
  // threads) while the debugger is still connected.
  ConfigureStubs(key,
                 InstrumentationLevel::kInstrumentWithInterpreter,
                 /*try_switch_to_non_debuggable=*/false);
}

void Instrumentation::UndeoptimizeEverything(const char* key) {
  CHECK(InterpreterStubsInstalled());
  // We want to switch to non-debuggable only when the debugger / profile tools are detaching.
  // This is used when we no longer need to run in the interpreter. The debugger is still
  // connected, so don't switch the runtime. We use "DisableDeoptimization" when detaching the
  // debugger.
  ConfigureStubs(key,
                 InstrumentationLevel::kInstrumentNothing,
                 /*try_switch_to_non_debuggable=*/false);
}

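// Enables method tracing for `key`. Depending on `needs_interpreter` this
// installs either full interpreter stubs or just entry / exit hooks, and then
// reports method-entry events to `listener` for the methods already on each
// thread's stack.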
void Instrumentation::EnableMethodTracing(const char* key,
                                          InstrumentationListener* listener,
                                          bool needs_interpreter) {
  InstrumentationLevel level;
  if (needs_interpreter) {
    level = InstrumentationLevel::kInstrumentWithInterpreter;
  } else {
    level = InstrumentationLevel::kInstrumentWithEntryExitHooks;
  }
  // We are enabling method tracing here and need to stay in debuggable.
  ConfigureStubs(key, level, /*try_switch_to_non_debuggable=*/false);

  MutexLock mu(Thread::Current(), *Locks::thread_list_lock_);
  for (Thread* thread : Runtime::Current()->GetThreadList()->GetList()) {
    ReportMethodEntryForOnStackMethods(listener, thread);
  }
}

void Instrumentation::DisableMethodTracing(const char* key) {
  // We no longer need a debuggable runtime since we are stopping method tracing. If no other
  // debugger / profiling tools are active, switch back to non-debuggable.
  ConfigureStubs(key,
                 InstrumentationLevel::kInstrumentNothing,
                 /*try_switch_to_non_debuggable=*/true);
}

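// Returns the code that should be used to invoke `method`, resolving the
// resolution stub / interpreter bridge to concrete code where possible. Used
// by the instrumentation and resolution trampolines.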
const void* Instrumentation::GetCodeForInvoke(ArtMethod* method) {
  // This is called by instrumentation and resolution trampolines, and those should never
  // be getting proxy methods.
  DCHECK(!method->IsProxyMethod()) << method->PrettyMethod();
  ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
  const void* code = method->GetEntryPointFromQuickCompiledCodePtrSize(kRuntimePointerSize);
  // If the current entrypoint is neither the resolution stub nor the interpreter bridge,
  // just return it, assuming it's the most optimized.
  if (!class_linker->IsQuickResolutionStub(code) &&
      !class_linker->IsQuickToInterpreterBridge(code)) {
    return code;
  }

  if (InterpretOnly(method)) {
    // If we're forced into the interpreter, just use it.
    return GetQuickToInterpreterBridge();
  }

  return GetOptimizedCodeFor(method);
}

const void* Instrumentation::GetMaybeInstrumentedCodeForInvoke(ArtMethod* method) {
  // This is called by resolution trampolines, and those should never be getting proxy methods.
  DCHECK(!method->IsProxyMethod()) << method->PrettyMethod();
  const void* code = GetCodeForInvoke(method);
  if (EntryExitStubsInstalled() && !CodeSupportsEntryExitHooks(code, method)) {
    return method->IsNative() ? GetQuickGenericJniStub() : GetQuickToInterpreterBridge();
  }
  return code;
}

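// The *EventImpl helpers below dispatch an instrumentation event to every
// registered listener of the matching kind, skipping empty (nullptr) slots in
// the listener lists.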
void Instrumentation::MethodEnterEventImpl(Thread* thread, ArtMethod* method) const {
  DCHECK(!method->IsRuntimeMethod());
  if (HasMethodEntryListeners()) {
    for (InstrumentationListener* listener : method_entry_slow_listeners_) {
      if (listener != nullptr) {
        listener->MethodEntered(thread, method);
      }
    }
    for (InstrumentationListener* listener : method_entry_fast_trace_listeners_) {
      if (listener != nullptr) {
        listener->MethodEntered(thread, method);
      }
    }
  }
}

template <>
void Instrumentation::MethodExitEventImpl(Thread* thread,
                                          ArtMethod* method,
                                          OptionalFrame frame,
                                          MutableHandle<mirror::Object>& return_value) const {
  if (HasMethodExitListeners()) {
    for (InstrumentationListener* listener : method_exit_slow_listeners_) {
      if (listener != nullptr) {
        listener->MethodExited(thread, method, frame, return_value);
      }
    }
    for (InstrumentationListener* listener : method_exit_fast_trace_listeners_) {
      if (listener != nullptr) {
        listener->MethodExited(thread, method, frame, return_value);
      }
    }
  }
}

template<> void Instrumentation::MethodExitEventImpl(Thread* thread,
                                                     ArtMethod* method,
                                                     OptionalFrame frame,
                                                     JValue& return_value) const {
  if (HasMethodExitListeners()) {
    Thread* self = Thread::Current();
    StackHandleScope<1> hs(self);
    if (method->GetInterfaceMethodIfProxy(kRuntimePointerSize)->GetReturnTypePrimitive() !=
        Primitive::kPrimNot) {
      for (InstrumentationListener* listener : method_exit_slow_listeners_) {
        if (listener != nullptr) {
          listener->MethodExited(thread, method, frame, return_value);
        }
      }
      for (InstrumentationListener* listener : method_exit_fast_trace_listeners_) {
        if (listener != nullptr) {
          listener->MethodExited(thread, method, frame, return_value);
        }
      }
    } else {
      MutableHandle<mirror::Object> ret(hs.NewHandle(return_value.GetL()));
      MethodExitEventImpl(thread, method, frame, ret);
      return_value.SetL(ret.Get());
    }
  }
}

void Instrumentation::MethodUnwindEvent(Thread* thread,
                                        ArtMethod* method,
                                        uint32_t dex_pc) const {
  if (HasMethodUnwindListeners()) {
    for (InstrumentationListener* listener : method_unwind_listeners_) {
      if (listener != nullptr) {
        listener->MethodUnwind(thread, method, dex_pc);
      }
    }
  }
}

void Instrumentation::DexPcMovedEventImpl(Thread* thread,
                                          ObjPtr<mirror::Object> this_object,
                                          ArtMethod* method,
                                          uint32_t dex_pc) const {
  Thread* self = Thread::Current();
  StackHandleScope<1> hs(self);
  Handle<mirror::Object> thiz(hs.NewHandle(this_object));
  for (InstrumentationListener* listener : dex_pc_listeners_) {
    if (listener != nullptr) {
      listener->DexPcMoved(thread, thiz, method, dex_pc);
    }
  }
}

void Instrumentation::BranchImpl(Thread* thread,
                                 ArtMethod* method,
                                 uint32_t dex_pc,
                                 int32_t offset) const {
  for (InstrumentationListener* listener : branch_listeners_) {
    if (listener != nullptr) {
      listener->Branch(thread, method, dex_pc, offset);
    }
  }
}

void Instrumentation::WatchedFramePopImpl(Thread* thread, const ShadowFrame& frame) const {
  for (InstrumentationListener* listener : watched_frame_pop_listeners_) {
    if (listener != nullptr) {
      listener->WatchedFramePop(thread, frame);
    }
  }
}

void Instrumentation::FieldReadEventImpl(Thread* thread,
                                         ObjPtr<mirror::Object> this_object,
                                         ArtMethod* method,
                                         uint32_t dex_pc,
                                         ArtField* field) const {
  Thread* self = Thread::Current();
  StackHandleScope<1> hs(self);
  Handle<mirror::Object> thiz(hs.NewHandle(this_object));
  for (InstrumentationListener* listener : field_read_listeners_) {
    if (listener != nullptr) {
      listener->FieldRead(thread, thiz, method, dex_pc, field);
    }
  }
}

void Instrumentation::FieldWriteEventImpl(Thread* thread,
                                          ObjPtr<mirror::Object> this_object,
                                          ArtMethod* method,
                                          uint32_t dex_pc,
                                          ArtField* field,
                                          const JValue& field_value) const {
  Thread* self = Thread::Current();
  StackHandleScope<2> hs(self);
  Handle<mirror::Object> thiz(hs.NewHandle(this_object));
  if (field->IsPrimitiveType()) {
    for (InstrumentationListener* listener : field_write_listeners_) {
      if (listener != nullptr) {
        listener->FieldWritten(thread, thiz, method, dex_pc, field, field_value);
      }
    }
  } else {
    Handle<mirror::Object> val(hs.NewHandle(field_value.GetL()));
    for (InstrumentationListener* listener : field_write_listeners_) {
      if (listener != nullptr) {
        listener->FieldWritten(thread, thiz, method, dex_pc, field, val);
      }
    }
  }
}

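// Reports a thrown exception to the exception-thrown listeners. The pending
// exception is cleared while the listeners run and restored afterwards, so
// listeners observe a thread without a pending exception (see b/65049545).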
void Instrumentation::ExceptionThrownEvent(Thread* thread,
                                           ObjPtr<mirror::Throwable> exception_object) const {
  Thread* self = Thread::Current();
  StackHandleScope<1> hs(self);
  Handle<mirror::Throwable> h_exception(hs.NewHandle(exception_object));
  if (HasExceptionThrownListeners()) {
    DCHECK_EQ(thread->GetException(), h_exception.Get());
    thread->ClearException();
    for (InstrumentationListener* listener : exception_thrown_listeners_) {
      if (listener != nullptr) {
        listener->ExceptionThrown(thread, h_exception);
      }
    }
    // See b/65049545 for discussion about this behavior.
    thread->AssertNoPendingException();
    thread->SetException(h_exception.Get());
  }
}

void Instrumentation::ExceptionHandledEvent(Thread* thread,
                                            ObjPtr<mirror::Throwable> exception_object) const {
  Thread* self = Thread::Current();
  StackHandleScope<1> hs(self);
  Handle<mirror::Throwable> h_exception(hs.NewHandle(exception_object));
  if (HasExceptionHandledListeners()) {
    // We should have cleared the exception so that callers can detect a new one.
    DCHECK(thread->GetException() == nullptr);
    for (InstrumentationListener* listener : exception_handled_listeners_) {
      if (listener != nullptr) {
        listener->ExceptionHandled(thread, h_exception);
      }
    }
  }
}

DeoptimizationMethodType Instrumentation::GetDeoptimizationMethodType(ArtMethod* method) {
  if (method->IsRuntimeMethod()) {
    // Certain methods have a strict requirement on whether the dex instruction
    // should be re-executed upon deoptimization.
    if (method == Runtime::Current()->GetCalleeSaveMethod(
        CalleeSaveType::kSaveEverythingForClinit)) {
      return DeoptimizationMethodType::kKeepDexPc;
    }
    if (method == Runtime::Current()->GetCalleeSaveMethod(
        CalleeSaveType::kSaveEverythingForSuspendCheck)) {
      return DeoptimizationMethodType::kKeepDexPc;
    }
  }
  return DeoptimizationMethodType::kDefault;
}

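// Reconstructs the return value of `method` from the raw GPR / FPR results of
// a quick call, based on the method's return type shorty. Sets `*is_ref` when
// the return type is a reference ('L' or '[').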
JValue Instrumentation::GetReturnValue(ArtMethod* method,
                                       bool* is_ref,
                                       uint64_t* gpr_result,
                                       uint64_t* fpr_result) {
  uint32_t length;
  const PointerSize pointer_size = Runtime::Current()->GetClassLinker()->GetImagePointerSize();

  // Runtime methods do not call into MethodExitEvent(), so there should be no
  // suspension point below.
  ScopedAssertNoThreadSuspension ants(__FUNCTION__, method->IsRuntimeMethod());
  DCHECK(!method->IsRuntimeMethod());
  char return_shorty = method->GetInterfaceMethodIfProxy(pointer_size)->GetShorty(&length)[0];

  *is_ref = return_shorty == '[' || return_shorty == 'L';
  JValue return_value;
  if (return_shorty == 'V') {
    return_value.SetJ(0);
  } else if (return_shorty == 'F' || return_shorty == 'D') {
    return_value.SetJ(*fpr_result);
  } else {
    return_value.SetJ(*gpr_result);
  }
  return return_value;
}

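// If the caller of the current runtime frame needs to be deoptimized, pushes a
// deoptimization context with the given return value and sets the special
// deoptimization exception on the thread. Returns true if a deopt was
// requested, false if there is a pending exception or no deopt is needed.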
bool Instrumentation::PushDeoptContextIfNeeded(Thread* self,
                                               DeoptimizationMethodType deopt_type,
                                               bool is_ref,
                                               const JValue& return_value)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  if (self->IsExceptionPending()) {
    return false;
  }

  ArtMethod** sp = self->GetManagedStack()->GetTopQuickFrame();
  DCHECK(sp != nullptr && (*sp)->IsRuntimeMethod());
  if (!ShouldDeoptimizeCaller(self, sp)) {
    return false;
  }

  // TODO(mythria): The current deopt behaviour is that we just re-execute the
  // alloc instruction, so we don't need the return value. For instrumentation
  // related deopts we don't actually need to re-execute and could use the
  // result we got here. Since this is a debug-only feature it is not very
  // important, but consider reusing the result in the future.
  self->PushDeoptimizationContext(
      return_value, is_ref, nullptr, /* from_code= */ false, deopt_type);
  self->SetException(Thread::GetDeoptimizationException());
  return true;
}

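// Deoptimizes the current full frame if the caller needs it or an async
// exception is pending, returning the context produced by Thread::Deoptimize();
// returns nullptr when no deoptimization is required.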
std::unique_ptr<Context> Instrumentation::DeoptimizeIfNeeded(Thread* self,
                                                             ArtMethod** sp,
                                                             DeoptimizationMethodType type,
                                                             JValue return_value,
                                                             bool is_reference) {
  if (self->IsAsyncExceptionPending() || ShouldDeoptimizeCaller(self, sp)) {
    self->PushDeoptimizationContext(return_value,
                                    is_reference,
                                    nullptr,
                                    /* from_code= */ false,
                                    type);
    // This is requested from suspend points or when returning from runtime methods, so exit
    // callbacks wouldn't have run yet. Don't skip the method exit callbacks.
    return self->Deoptimize(DeoptimizationKind::kFullFrame,
                            /* single_frame= */ false,
                            /* skip_method_exit_callbacks= */ false);
  }
  // No exception or deoptimization.
  return nullptr;
}

bool Instrumentation::NeedsSlowInterpreterForMethod(Thread* self, ArtMethod* method) {
  return (method != nullptr) &&
         (InterpreterStubsInstalled() ||
          IsDeoptimized(method) ||
          self->IsForceInterpreter() ||
          // NB Since structurally obsolete compiled methods might have the offsets of
          // methods/fields compiled in, we need to go back to the interpreter whenever we
          // hit them.
          method->GetDeclaringClass()->IsObsoleteObject() ||
          Dbg::IsForcedInterpreterNeededForUpcall(self, method));
}

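// The two ShouldDeoptimizeCaller overloads decide whether the method that
// called the current runtime frame must be deoptimized, either because it
// requires the slow interpreter or because a redefinition set the
// should-deoptimize flag in its frame. The first overload computes the frame
// size of the runtime method and delegates to the second.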
bool Instrumentation::ShouldDeoptimizeCaller(Thread* self, ArtMethod** sp) {
  // When exit stubs aren't called we don't need to check for any instrumentation related
  // deoptimizations.
  if (!RunExitHooks()) {
    return false;
  }

  ArtMethod* runtime_method = *sp;
  DCHECK(runtime_method->IsRuntimeMethod());
  QuickMethodFrameInfo frame_info = Runtime::Current()->GetRuntimeMethodFrameInfo(runtime_method);
  return ShouldDeoptimizeCaller(self, sp, frame_info.FrameSizeInBytes());
}

bool Instrumentation::ShouldDeoptimizeCaller(Thread* self, ArtMethod** sp, size_t frame_size) {
  uintptr_t caller_sp = reinterpret_cast<uintptr_t>(sp) + frame_size;
  ArtMethod* caller = *(reinterpret_cast<ArtMethod**>(caller_sp));
  uintptr_t caller_pc_addr = reinterpret_cast<uintptr_t>(sp) + (frame_size - sizeof(void*));
  uintptr_t caller_pc = *reinterpret_cast<uintptr_t*>(caller_pc_addr);

  if (caller == nullptr ||
      caller->IsNative() ||
      caller->IsRuntimeMethod()) {
    // We need to check for a deoptimization here because when a redefinition happens it is
    // not safe to use any compiled code because the field offsets might change. For native
    // methods, we don't embed any field offsets so there is no need to check for a
    // deoptimization. If the caller is null we don't need to do anything. This can happen
    // when the caller is being interpreted by the switch interpreter (when called from
    // artQuickToInterpreterBridge) / during shutdown / early startup.
    return false;
  }

  bool needs_deopt = NeedsSlowInterpreterForMethod(self, caller);

  // Non-Java-debuggable apps don't support redefinition, so there is no need to check whether
  // the frame needs to be deoptimized. Even in debuggable apps, we only need this check when a
  // redefinition has actually happened, which is indicated by the IsDeoptCheckRequired flag. We
  // also want to avoid reading the method header when we need a deopt anyway.
  if (Runtime::Current()->IsJavaDebuggable() && !needs_deopt && self->IsDeoptCheckRequired()) {
    const OatQuickMethodHeader* header = caller->GetOatQuickMethodHeader(caller_pc);
    if (header != nullptr && header->HasShouldDeoptimizeFlag()) {
      DCHECK(header->IsOptimized());
      uint8_t* should_deopt_flag_addr =
          reinterpret_cast<uint8_t*>(caller_sp) + header->GetShouldDeoptimizeFlagOffset();
      if ((*should_deopt_flag_addr &
           static_cast<uint8_t>(DeoptimizeFlagValue::kForceDeoptForRedefinition)) != 0) {
        needs_deopt = true;
      }
    }
  }

  if (needs_deopt) {
    if (!Runtime::Current()->IsAsyncDeoptimizeable(caller, caller_pc)) {
      LOG(WARNING) << "Got a deoptimization request on un-deoptimizable method "
                   << caller->PrettyMethod();
      return false;
    }
    return true;
  }

  return false;
}

}  // namespace instrumentation
}  // namespace art