1 /*
2  * Copyright (C) 2011 The Android Open Source Project
3  *
4  * Licensed under the Apache License, Version 2.0 (the "License");
5  * you may not use this file except in compliance with the License.
6  * You may obtain a copy of the License at
7  *
8  *      http://www.apache.org/licenses/LICENSE-2.0
9  *
10  * Unless required by applicable law or agreed to in writing, software
11  * distributed under the License is distributed on an "AS IS" BASIS,
12  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13  * See the License for the specific language governing permissions and
14  * limitations under the License.
15  */
16 
17 #include "instrumentation.h"
18 
19 #include <sstream>
20 
21 #include <android-base/logging.h>
22 
23 #include "arch/context.h"
24 #include "art_field-inl.h"
25 #include "art_method-inl.h"
26 #include "base/atomic.h"
27 #include "base/callee_save_type.h"
28 #include "class_linker.h"
29 #include "debugger.h"
30 #include "dex/dex_file-inl.h"
31 #include "dex/dex_file_types.h"
32 #include "dex/dex_instruction-inl.h"
33 #include "entrypoints/quick/quick_alloc_entrypoints.h"
34 #include "entrypoints/quick/quick_entrypoints.h"
35 #include "entrypoints/runtime_asm_entrypoints.h"
36 #include "gc_root-inl.h"
37 #include "interpreter/interpreter.h"
38 #include "interpreter/interpreter_common.h"
39 #include "jit/jit.h"
40 #include "jit/jit_code_cache.h"
41 #include "jvalue-inl.h"
42 #include "mirror/class-inl.h"
43 #include "mirror/dex_cache.h"
44 #include "mirror/object-inl.h"
45 #include "mirror/object_array-inl.h"
46 #include "nth_caller_visitor.h"
47 #include "oat_quick_method_header.h"
48 #include "runtime-inl.h"
49 #include "thread.h"
50 #include "thread_list.h"
51 
52 namespace art {
53 namespace instrumentation {
54 
55 constexpr bool kVerboseInstrumentation = false;
56 
57 void InstrumentationListener::MethodExited(Thread* thread,
58                                            Handle<mirror::Object> this_object,
59                                            ArtMethod* method,
60                                            uint32_t dex_pc,
61                                            Handle<mirror::Object> return_value) {
62   DCHECK_EQ(method->GetInterfaceMethodIfProxy(kRuntimePointerSize)->GetReturnTypePrimitive(),
63             Primitive::kPrimNot);
64   JValue v;
65   v.SetL(return_value.Get());
66   MethodExited(thread, this_object, method, dex_pc, v);
67 }
68 
69 void InstrumentationListener::FieldWritten(Thread* thread,
70                                            Handle<mirror::Object> this_object,
71                                            ArtMethod* method,
72                                            uint32_t dex_pc,
73                                            ArtField* field,
74                                            Handle<mirror::Object> field_value) {
75   DCHECK(!field->IsPrimitiveType());
76   JValue v;
77   v.SetL(field_value.Get());
78   FieldWritten(thread, this_object, method, dex_pc, field, v);
79 }
80 
81 // Instrumentation works on non-inlined frames by updating returned PCs
82 // of compiled frames.
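// (Note: inlined frames share the physical frame and return PC of their caller, so only the
// outermost compiled frame can have its return PC redirected; hence inlined frames are skipped.)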
83 static constexpr StackVisitor::StackWalkKind kInstrumentationStackWalk =
84     StackVisitor::StackWalkKind::kSkipInlinedFrames;
85 
86 class InstallStubsClassVisitor : public ClassVisitor {
87  public:
88   explicit InstallStubsClassVisitor(Instrumentation* instrumentation)
89       : instrumentation_(instrumentation) {}
90 
91   bool operator()(ObjPtr<mirror::Class> klass) override REQUIRES(Locks::mutator_lock_) {
92     instrumentation_->InstallStubsForClass(klass.Ptr());
93     return true;  // we visit all classes.
94   }
95 
96  private:
97   Instrumentation* const instrumentation_;
98 };
99 
100 InstrumentationStackPopper::InstrumentationStackPopper(Thread* self)
101       : self_(self),
102         instrumentation_(Runtime::Current()->GetInstrumentation()),
103         frames_to_remove_(0) {}
104 
105 InstrumentationStackPopper::~InstrumentationStackPopper() {
106   std::deque<instrumentation::InstrumentationStackFrame>* stack = self_->GetInstrumentationStack();
107   for (size_t i = 0; i < frames_to_remove_; i++) {
108     stack->pop_front();
109   }
110 }
111 
112 bool InstrumentationStackPopper::PopFramesTo(uint32_t desired_pops,
113                                              MutableHandle<mirror::Throwable>& exception) {
114   std::deque<instrumentation::InstrumentationStackFrame>* stack = self_->GetInstrumentationStack();
115   DCHECK_LE(frames_to_remove_, desired_pops);
116   DCHECK_GE(stack->size(), desired_pops);
117   DCHECK(!self_->IsExceptionPending());
118   if (!instrumentation_->HasMethodUnwindListeners()) {
119     frames_to_remove_ = desired_pops;
120     return true;
121   }
122   if (kVerboseInstrumentation) {
123     LOG(INFO) << "Popping frames for exception " << exception->Dump();
124   }
125   // The instrumentation events expect the exception to be set.
126   self_->SetException(exception.Get());
127   bool new_exception_thrown = false;
128   for (; frames_to_remove_ < desired_pops && !new_exception_thrown; frames_to_remove_++) {
129     InstrumentationStackFrame frame = stack->at(frames_to_remove_);
130     ArtMethod* method = frame.method_;
131     // Notify listeners of method unwind.
132     // TODO: improve the dex_pc information here.
133     uint32_t dex_pc = dex::kDexNoIndex;
134     if (kVerboseInstrumentation) {
135       LOG(INFO) << "Popping for unwind " << method->PrettyMethod();
136     }
137     if (!method->IsRuntimeMethod() && !frame.interpreter_entry_) {
138       instrumentation_->MethodUnwindEvent(self_, frame.this_object_, method, dex_pc);
139       new_exception_thrown = self_->GetException() != exception.Get();
140     }
141   }
142   exception.Assign(self_->GetException());
143   self_->ClearException();
144   if (kVerboseInstrumentation && new_exception_thrown) {
145     LOG(INFO) << "Failed to pop " << (desired_pops - frames_to_remove_)
146               << " frames due to new exception";
147   }
148   return !new_exception_thrown;
149 }
150 
151 Instrumentation::Instrumentation()
152     : instrumentation_stubs_installed_(false),
153       entry_exit_stubs_installed_(false),
154       interpreter_stubs_installed_(false),
155       interpret_only_(false),
156       forced_interpret_only_(false),
157       have_method_entry_listeners_(false),
158       have_method_exit_listeners_(false),
159       have_method_unwind_listeners_(false),
160       have_dex_pc_listeners_(false),
161       have_field_read_listeners_(false),
162       have_field_write_listeners_(false),
163       have_exception_thrown_listeners_(false),
164       have_watched_frame_pop_listeners_(false),
165       have_branch_listeners_(false),
166       have_exception_handled_listeners_(false),
167       deoptimized_methods_lock_(new ReaderWriterMutex("deoptimized methods lock",
168                                                       kGenericBottomLock)),
169       deoptimization_enabled_(false),
170       interpreter_handler_table_(kMainHandlerTable),
171       quick_alloc_entry_points_instrumentation_counter_(0),
172       alloc_entrypoints_instrumented_(false),
173       can_use_instrumentation_trampolines_(true) {
174 }
175 
176 void Instrumentation::InstallStubsForClass(mirror::Class* klass) {
177   if (!klass->IsResolved()) {
178     // We need the class to be resolved to install/uninstall stubs. Otherwise its methods
179     // could not be initialized or linked with regards to class inheritance.
180   } else if (klass->IsErroneousResolved()) {
181     // We can't execute code in a erroneous class: do nothing.
182   } else {
183     for (ArtMethod& method : klass->GetMethods(kRuntimePointerSize)) {
184       InstallStubsForMethod(&method);
185     }
186   }
187 }
188 
189 static void UpdateEntrypoints(ArtMethod* method, const void* quick_code)
190     REQUIRES_SHARED(Locks::mutator_lock_) {
191   method->SetEntryPointFromQuickCompiledCode(quick_code);
192 }
193 
194 bool Instrumentation::NeedDebugVersionFor(ArtMethod* method) const
195     REQUIRES_SHARED(Locks::mutator_lock_) {
196   art::Runtime* runtime = Runtime::Current();
197   // If anything says we need the debug version or we are debuggable we will need the debug version
198   // of the method.
199   return (runtime->GetRuntimeCallbacks()->MethodNeedsDebugVersion(method) ||
200           runtime->IsJavaDebuggable()) &&
201          !method->IsNative() &&
202          !method->IsProxyMethod();
203 }
204 
205 void Instrumentation::InstallStubsForMethod(ArtMethod* method) {
206   if (!method->IsInvokable() || method->IsProxyMethod()) {
207     // Do not change stubs for these methods.
208     return;
209   }
210   // Don't stub Proxy.<init>. Note that the Proxy class itself is not a proxy class.
211   // TODO We should remove the need for this since it means we cannot always correctly detect calls
212   // to Proxy.<init>
213   // Annoyingly this can be called before we have actually initialized WellKnownClasses, so
214   // we also need to check this based on the declaring-class descriptor. The check is valid because
215   // Proxy only has a single constructor.
216   ArtMethod* well_known_proxy_init = jni::DecodeArtMethod(
217       WellKnownClasses::java_lang_reflect_Proxy_init);
218   if ((LIKELY(well_known_proxy_init != nullptr) && UNLIKELY(method == well_known_proxy_init)) ||
219       UNLIKELY(method->IsConstructor() &&
220                method->GetDeclaringClass()->DescriptorEquals("Ljava/lang/reflect/Proxy;"))) {
221     return;
222   }
223   const void* new_quick_code;
224   bool uninstall = !entry_exit_stubs_installed_ && !interpreter_stubs_installed_;
225   Runtime* const runtime = Runtime::Current();
226   ClassLinker* const class_linker = runtime->GetClassLinker();
227   bool is_class_initialized = method->GetDeclaringClass()->IsInitialized();
228   if (uninstall) {
229     if ((forced_interpret_only_ || IsDeoptimized(method)) && !method->IsNative()) {
230       new_quick_code = GetQuickToInterpreterBridge();
231     } else if (is_class_initialized || !method->IsStatic() || method->IsConstructor()) {
232       new_quick_code = GetCodeForInvoke(method);
233     } else {
234       new_quick_code = GetQuickResolutionStub();
235     }
236   } else {  // !uninstall
237     if ((interpreter_stubs_installed_ || forced_interpret_only_ || IsDeoptimized(method)) &&
238         !method->IsNative()) {
239       new_quick_code = GetQuickToInterpreterBridge();
240     } else {
241       // Do not overwrite resolution trampoline. When the trampoline initializes the method's
242       // class, all its static methods code will be set to the instrumentation entry point.
243       // For more details, see ClassLinker::FixupStaticTrampolines.
244       if (is_class_initialized || !method->IsStatic() || method->IsConstructor()) {
245         if (entry_exit_stubs_installed_) {
246           // This needs to be checked first since the instrumentation entrypoint will be able to
247           // find the actual JIT compiled code that corresponds to this method.
248           new_quick_code = GetQuickInstrumentationEntryPoint();
249         } else if (NeedDebugVersionFor(method)) {
250           // It would be great to search the JIT for its implementation here but we cannot due to
251           // the locks we hold. Instead just set to the interpreter bridge and that code will search
252           // the JIT when it gets called and replace the entrypoint then.
253           new_quick_code = GetQuickToInterpreterBridge();
254         } else {
255           new_quick_code = class_linker->GetQuickOatCodeFor(method);
256         }
257       } else {
258         new_quick_code = GetQuickResolutionStub();
259       }
260     }
261   }
262   UpdateEntrypoints(method, new_quick_code);
263 }
264 
265 // Places the instrumentation exit pc as the return PC for every quick frame. This also allows
266 // deoptimization of quick frames to interpreter frames.
267 // Since we may already have done this previously, we need to insert any new instrumentation
268 // frames before the existing instrumentation frames.
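// (When a patched frame returns, control lands in the instrumentation exit stub, which reports
// the method exit event, possibly triggers deoptimization, and then resumes at the original
// return PC saved in the matching InstrumentationStackFrame.)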
269 static void InstrumentationInstallStack(Thread* thread, void* arg)
270     REQUIRES_SHARED(Locks::mutator_lock_) {
271   struct InstallStackVisitor final : public StackVisitor {
272     InstallStackVisitor(Thread* thread_in, Context* context, uintptr_t instrumentation_exit_pc)
273         : StackVisitor(thread_in, context, kInstrumentationStackWalk),
274           instrumentation_stack_(thread_in->GetInstrumentationStack()),
275           instrumentation_exit_pc_(instrumentation_exit_pc),
276           reached_existing_instrumentation_frames_(false), instrumentation_stack_depth_(0),
277           last_return_pc_(0) {
278     }
279 
280     bool VisitFrame() override REQUIRES_SHARED(Locks::mutator_lock_) {
281       ArtMethod* m = GetMethod();
282       if (m == nullptr) {
283         if (kVerboseInstrumentation) {
284           LOG(INFO) << "  Skipping upcall. Frame " << GetFrameId();
285         }
286         last_return_pc_ = 0;
287         return true;  // Ignore upcalls.
288       }
289       if (GetCurrentQuickFrame() == nullptr) {
290         bool interpreter_frame = true;
291         InstrumentationStackFrame instrumentation_frame(GetThisObject(), m, 0, GetFrameId(),
292                                                         interpreter_frame);
293         if (kVerboseInstrumentation) {
294           LOG(INFO) << "Pushing shadow frame " << instrumentation_frame.Dump();
295         }
296         shadow_stack_.push_back(instrumentation_frame);
297         return true;  // Continue.
298       }
299       uintptr_t return_pc = GetReturnPc();
300       if (kVerboseInstrumentation) {
301         LOG(INFO) << "  Installing exit stub in " << DescribeLocation();
302       }
303       if (return_pc == instrumentation_exit_pc_) {
304         CHECK_LT(instrumentation_stack_depth_, instrumentation_stack_->size());
305 
306         if (m->IsRuntimeMethod()) {
307           const InstrumentationStackFrame& frame =
308               (*instrumentation_stack_)[instrumentation_stack_depth_];
309           if (frame.interpreter_entry_) {
310             // This instrumentation frame is for an interpreter bridge and is
311             // pushed when executing the instrumented interpreter bridge, so the method
312             // enter event must already have been reported. However, we still need to push a
313             // DEX pc into the dex_pcs_ list to match the size of the instrumentation stack.
314             uint32_t dex_pc = dex::kDexNoIndex;
315             dex_pcs_.push_back(dex_pc);
316             last_return_pc_ = frame.return_pc_;
317             ++instrumentation_stack_depth_;
318             return true;
319           }
320         }
321 
322         // We've reached a frame which already has the instrumentation exit stub installed.
323         // Previous frames should either already be instrumented or be interpreter frames.
324         reached_existing_instrumentation_frames_ = true;
325 
326         const InstrumentationStackFrame& frame =
327             (*instrumentation_stack_)[instrumentation_stack_depth_];
328         CHECK_EQ(m->GetNonObsoleteMethod(), frame.method_->GetNonObsoleteMethod())
329             << "Expected " << ArtMethod::PrettyMethod(m)
330             << ", Found " << ArtMethod::PrettyMethod(frame.method_);
331         return_pc = frame.return_pc_;
332         if (kVerboseInstrumentation) {
333           LOG(INFO) << "Ignoring already instrumented " << frame.Dump();
334         }
335       } else {
336         CHECK_NE(return_pc, 0U);
337         if (UNLIKELY(reached_existing_instrumentation_frames_ && !m->IsRuntimeMethod())) {
338           // We already saw an existing instrumentation frame so this should be a runtime-method
339           // inserted by the interpreter or runtime.
340           std::string thread_name;
341           GetThread()->GetThreadName(thread_name);
342           uint32_t dex_pc = dex::kDexNoIndex;
343           if (last_return_pc_ != 0 &&
344               GetCurrentOatQuickMethodHeader() != nullptr) {
345             dex_pc = GetCurrentOatQuickMethodHeader()->ToDexPc(m, last_return_pc_);
346           }
347           LOG(FATAL) << "While walking " << thread_name << " found unexpected non-runtime method"
348                      << " without instrumentation exit return or interpreter frame."
349                      << " method is " << GetMethod()->PrettyMethod()
350                      << " return_pc is " << std::hex << return_pc
351                      << " dex pc: " << dex_pc;
352           UNREACHABLE();
353         }
354         InstrumentationStackFrame instrumentation_frame(
355             m->IsRuntimeMethod() ? nullptr : GetThisObject(),
356             m,
357             return_pc,
358             GetFrameId(),    // A runtime method still gets a frame id.
359             false);
360         if (kVerboseInstrumentation) {
361           LOG(INFO) << "Pushing frame " << instrumentation_frame.Dump();
362         }
363 
364         // Insert frame at the right position so we do not corrupt the instrumentation stack.
365         // Instrumentation stack frames are in descending frame id order.
366         auto it = instrumentation_stack_->begin();
367         for (auto end = instrumentation_stack_->end(); it != end; ++it) {
368           const InstrumentationStackFrame& current = *it;
369           if (instrumentation_frame.frame_id_ >= current.frame_id_) {
370             break;
371           }
372         }
373         instrumentation_stack_->insert(it, instrumentation_frame);
374         SetReturnPc(instrumentation_exit_pc_);
375       }
376       uint32_t dex_pc = dex::kDexNoIndex;
377       if (last_return_pc_ != 0 &&
378           GetCurrentOatQuickMethodHeader() != nullptr) {
379         dex_pc = GetCurrentOatQuickMethodHeader()->ToDexPc(m, last_return_pc_);
380       }
381       dex_pcs_.push_back(dex_pc);
382       last_return_pc_ = return_pc;
383       ++instrumentation_stack_depth_;
384       return true;  // Continue.
385     }
386     std::deque<InstrumentationStackFrame>* const instrumentation_stack_;
387     std::vector<InstrumentationStackFrame> shadow_stack_;
388     std::vector<uint32_t> dex_pcs_;
389     const uintptr_t instrumentation_exit_pc_;
390     bool reached_existing_instrumentation_frames_;
391     size_t instrumentation_stack_depth_;
392     uintptr_t last_return_pc_;
393   };
394   if (kVerboseInstrumentation) {
395     std::string thread_name;
396     thread->GetThreadName(thread_name);
397     LOG(INFO) << "Installing exit stubs in " << thread_name;
398   }
399 
400   Instrumentation* instrumentation = reinterpret_cast<Instrumentation*>(arg);
401   std::unique_ptr<Context> context(Context::Create());
402   uintptr_t instrumentation_exit_pc = reinterpret_cast<uintptr_t>(GetQuickInstrumentationExitPc());
403   InstallStackVisitor visitor(thread, context.get(), instrumentation_exit_pc);
404   visitor.WalkStack(true);
405   CHECK_EQ(visitor.dex_pcs_.size(), thread->GetInstrumentationStack()->size());
406 
407   if (instrumentation->ShouldNotifyMethodEnterExitEvents()) {
408     // Create method enter events for all methods currently on the thread's stack. We only do this
409   // if no debugger is attached, to avoid posting events twice.
410     auto ssi = visitor.shadow_stack_.rbegin();
411     for (auto isi = thread->GetInstrumentationStack()->rbegin(),
412         end = thread->GetInstrumentationStack()->rend(); isi != end; ++isi) {
413       while (ssi != visitor.shadow_stack_.rend() && (*ssi).frame_id_ < (*isi).frame_id_) {
414         instrumentation->MethodEnterEvent(thread, (*ssi).this_object_, (*ssi).method_, 0);
415         ++ssi;
416       }
417       uint32_t dex_pc = visitor.dex_pcs_.back();
418       visitor.dex_pcs_.pop_back();
419       if (!isi->interpreter_entry_ && !isi->method_->IsRuntimeMethod()) {
420         instrumentation->MethodEnterEvent(thread, (*isi).this_object_, (*isi).method_, dex_pc);
421       }
422     }
423   }
424   thread->VerifyStack();
425 }
426 
427 void Instrumentation::InstrumentThreadStack(Thread* thread) {
428   instrumentation_stubs_installed_ = true;
429   InstrumentationInstallStack(thread, this);
430 }
431 
432 // Removes the instrumentation exit pc as the return PC for every quick frame.
433 static void InstrumentationRestoreStack(Thread* thread, void* arg)
434     REQUIRES(Locks::mutator_lock_) {
435   Locks::mutator_lock_->AssertExclusiveHeld(Thread::Current());
436 
437   struct RestoreStackVisitor final : public StackVisitor {
438     RestoreStackVisitor(Thread* thread_in, uintptr_t instrumentation_exit_pc,
439                         Instrumentation* instrumentation)
440         : StackVisitor(thread_in, nullptr, kInstrumentationStackWalk),
441           thread_(thread_in),
442           instrumentation_exit_pc_(instrumentation_exit_pc),
443           instrumentation_(instrumentation),
444           instrumentation_stack_(thread_in->GetInstrumentationStack()),
445           frames_removed_(0) {}
446 
447     bool VisitFrame() override REQUIRES_SHARED(Locks::mutator_lock_) {
448       if (instrumentation_stack_->size() == 0) {
449         return false;  // Stop.
450       }
451       ArtMethod* m = GetMethod();
452       if (GetCurrentQuickFrame() == nullptr) {
453         if (kVerboseInstrumentation) {
454           LOG(INFO) << "  Ignoring a shadow frame. Frame " << GetFrameId()
455               << " Method=" << ArtMethod::PrettyMethod(m);
456         }
457         return true;  // Ignore shadow frames.
458       }
459       if (m == nullptr) {
460         if (kVerboseInstrumentation) {
461           LOG(INFO) << "  Skipping upcall. Frame " << GetFrameId();
462         }
463         return true;  // Ignore upcalls.
464       }
465       bool removed_stub = false;
466       // TODO: make this search more efficient?
467       const size_t frameId = GetFrameId();
468       for (const InstrumentationStackFrame& instrumentation_frame : *instrumentation_stack_) {
469         if (instrumentation_frame.frame_id_ == frameId) {
470           if (kVerboseInstrumentation) {
471             LOG(INFO) << "  Removing exit stub in " << DescribeLocation();
472           }
473           if (instrumentation_frame.interpreter_entry_) {
474             CHECK(m == Runtime::Current()->GetCalleeSaveMethod(CalleeSaveType::kSaveRefsAndArgs));
475           } else {
476             CHECK_EQ(m->GetNonObsoleteMethod(),
477                      instrumentation_frame.method_->GetNonObsoleteMethod())
478                 << ArtMethod::PrettyMethod(m);
479           }
480           SetReturnPc(instrumentation_frame.return_pc_);
481           if (instrumentation_->ShouldNotifyMethodEnterExitEvents() &&
482               !m->IsRuntimeMethod()) {
483             // Create the method exit events. As the methods didn't really exit, the result is 0.
484             // We only do this if no debugger is attached, to avoid posting events twice.
485             instrumentation_->MethodExitEvent(thread_, instrumentation_frame.this_object_, m,
486                                               GetDexPc(), JValue());
487           }
488           frames_removed_++;
489           removed_stub = true;
490           break;
491         }
492       }
493       if (!removed_stub) {
494         if (kVerboseInstrumentation) {
495           LOG(INFO) << "  No exit stub in " << DescribeLocation();
496         }
497       }
498       return true;  // Continue.
499     }
500     Thread* const thread_;
501     const uintptr_t instrumentation_exit_pc_;
502     Instrumentation* const instrumentation_;
503     std::deque<instrumentation::InstrumentationStackFrame>* const instrumentation_stack_;
504     size_t frames_removed_;
505   };
506   if (kVerboseInstrumentation) {
507     std::string thread_name;
508     thread->GetThreadName(thread_name);
509     LOG(INFO) << "Removing exit stubs in " << thread_name;
510   }
511   std::deque<instrumentation::InstrumentationStackFrame>* stack = thread->GetInstrumentationStack();
512   if (stack->size() > 0) {
513     Instrumentation* instrumentation = reinterpret_cast<Instrumentation*>(arg);
514     uintptr_t instrumentation_exit_pc =
515         reinterpret_cast<uintptr_t>(GetQuickInstrumentationExitPc());
516     RestoreStackVisitor visitor(thread, instrumentation_exit_pc, instrumentation);
517     visitor.WalkStack(true);
518     CHECK_EQ(visitor.frames_removed_, stack->size());
519     while (stack->size() > 0) {
520       stack->pop_front();
521     }
522   }
523 }
524 
525 static bool HasEvent(Instrumentation::InstrumentationEvent expected, uint32_t events) {
526   return (events & expected) != 0;
527 }
528 
529 static void PotentiallyAddListenerTo(Instrumentation::InstrumentationEvent event,
530                                      uint32_t events,
531                                      std::list<InstrumentationListener*>& list,
532                                      InstrumentationListener* listener,
533                                      bool* has_listener)
534     REQUIRES(Locks::mutator_lock_, !Locks::thread_list_lock_, !Locks::classlinker_classes_lock_) {
535   Locks::mutator_lock_->AssertExclusiveHeld(Thread::Current());
536   if (!HasEvent(event, events)) {
537     return;
538   }
539   // If there is a free slot in the list, we insert the listener in that slot.
540   // Otherwise we add it to the end of the list.
541   auto it = std::find(list.begin(), list.end(), nullptr);
542   if (it != list.end()) {
543     *it = listener;
544   } else {
545     list.push_back(listener);
546   }
547   Runtime::DoAndMaybeSwitchInterpreter([=](){ *has_listener = true; });
548 }
549 
550 void Instrumentation::AddListener(InstrumentationListener* listener, uint32_t events) {
551   Locks::mutator_lock_->AssertExclusiveHeld(Thread::Current());
552   PotentiallyAddListenerTo(kMethodEntered,
553                            events,
554                            method_entry_listeners_,
555                            listener,
556                            &have_method_entry_listeners_);
557   PotentiallyAddListenerTo(kMethodExited,
558                            events,
559                            method_exit_listeners_,
560                            listener,
561                            &have_method_exit_listeners_);
562   PotentiallyAddListenerTo(kMethodUnwind,
563                            events,
564                            method_unwind_listeners_,
565                            listener,
566                            &have_method_unwind_listeners_);
567   PotentiallyAddListenerTo(kBranch,
568                            events,
569                            branch_listeners_,
570                            listener,
571                            &have_branch_listeners_);
572   PotentiallyAddListenerTo(kDexPcMoved,
573                            events,
574                            dex_pc_listeners_,
575                            listener,
576                            &have_dex_pc_listeners_);
577   PotentiallyAddListenerTo(kFieldRead,
578                            events,
579                            field_read_listeners_,
580                            listener,
581                            &have_field_read_listeners_);
582   PotentiallyAddListenerTo(kFieldWritten,
583                            events,
584                            field_write_listeners_,
585                            listener,
586                            &have_field_write_listeners_);
587   PotentiallyAddListenerTo(kExceptionThrown,
588                            events,
589                            exception_thrown_listeners_,
590                            listener,
591                            &have_exception_thrown_listeners_);
592   PotentiallyAddListenerTo(kWatchedFramePop,
593                            events,
594                            watched_frame_pop_listeners_,
595                            listener,
596                            &have_watched_frame_pop_listeners_);
597   PotentiallyAddListenerTo(kExceptionHandled,
598                            events,
599                            exception_handled_listeners_,
600                            listener,
601                            &have_exception_handled_listeners_);
602   UpdateInterpreterHandlerTable();
603 }
604 
605 static void PotentiallyRemoveListenerFrom(Instrumentation::InstrumentationEvent event,
606                                           uint32_t events,
607                                           std::list<InstrumentationListener*>& list,
608                                           InstrumentationListener* listener,
609                                           bool* has_listener)
610     REQUIRES(Locks::mutator_lock_, !Locks::thread_list_lock_, !Locks::classlinker_classes_lock_) {
611   Locks::mutator_lock_->AssertExclusiveHeld(Thread::Current());
612   if (!HasEvent(event, events)) {
613     return;
614   }
615   auto it = std::find(list.begin(), list.end(), listener);
616   if (it != list.end()) {
617     // Just update the entry, do not remove from the list. Removing entries in the list
618     // is unsafe when mutators are iterating over it.
619     *it = nullptr;
620   }
621 
622   // Check if the list contains any non-null listener, and update 'has_listener'.
623   for (InstrumentationListener* l : list) {
624     if (l != nullptr) {
625       Runtime::DoAndMaybeSwitchInterpreter([=](){ *has_listener = true; });
626       return;
627     }
628   }
629   Runtime::DoAndMaybeSwitchInterpreter([=](){ *has_listener = false; });
630 }
631 
632 void Instrumentation::RemoveListener(InstrumentationListener* listener, uint32_t events) {
633   Locks::mutator_lock_->AssertExclusiveHeld(Thread::Current());
634   PotentiallyRemoveListenerFrom(kMethodEntered,
635                                 events,
636                                 method_entry_listeners_,
637                                 listener,
638                                 &have_method_entry_listeners_);
639   PotentiallyRemoveListenerFrom(kMethodExited,
640                                 events,
641                                 method_exit_listeners_,
642                                 listener,
643                                 &have_method_exit_listeners_);
644   PotentiallyRemoveListenerFrom(kMethodUnwind,
645                                 events,
646                                 method_unwind_listeners_,
647                                 listener,
648                                 &have_method_unwind_listeners_);
649   PotentiallyRemoveListenerFrom(kBranch,
650                                 events,
651                                 branch_listeners_,
652                                 listener,
653                                 &have_branch_listeners_);
654   PotentiallyRemoveListenerFrom(kDexPcMoved,
655                                 events,
656                                 dex_pc_listeners_,
657                                 listener,
658                                 &have_dex_pc_listeners_);
659   PotentiallyRemoveListenerFrom(kFieldRead,
660                                 events,
661                                 field_read_listeners_,
662                                 listener,
663                                 &have_field_read_listeners_);
664   PotentiallyRemoveListenerFrom(kFieldWritten,
665                                 events,
666                                 field_write_listeners_,
667                                 listener,
668                                 &have_field_write_listeners_);
669   PotentiallyRemoveListenerFrom(kExceptionThrown,
670                                 events,
671                                 exception_thrown_listeners_,
672                                 listener,
673                                 &have_exception_thrown_listeners_);
674   PotentiallyRemoveListenerFrom(kWatchedFramePop,
675                                 events,
676                                 watched_frame_pop_listeners_,
677                                 listener,
678                                 &have_watched_frame_pop_listeners_);
679   PotentiallyRemoveListenerFrom(kExceptionHandled,
680                                 events,
681                                 exception_handled_listeners_,
682                                 listener,
683                                 &have_exception_handled_listeners_);
684   UpdateInterpreterHandlerTable();
685 }
686 
687 Instrumentation::InstrumentationLevel Instrumentation::GetCurrentInstrumentationLevel() const {
688   if (interpreter_stubs_installed_) {
689     return InstrumentationLevel::kInstrumentWithInterpreter;
690   } else if (entry_exit_stubs_installed_) {
691     return InstrumentationLevel::kInstrumentWithInstrumentationStubs;
692   } else {
693     return InstrumentationLevel::kInstrumentNothing;
694   }
695 }
696 
697 bool Instrumentation::RequiresInstrumentationInstallation(InstrumentationLevel new_level) const {
698   // We need to reinstall instrumentation if we go to a different level.
699   return GetCurrentInstrumentationLevel() != new_level;
700 }
701 
702 void Instrumentation::UpdateInstrumentationLevels(InstrumentationLevel level) {
703   if (level == InstrumentationLevel::kInstrumentWithInterpreter) {
704     can_use_instrumentation_trampolines_ = false;
705   }
706   if (UNLIKELY(!can_use_instrumentation_trampolines_)) {
707     for (auto& p : requested_instrumentation_levels_) {
708       if (p.second == InstrumentationLevel::kInstrumentWithInstrumentationStubs) {
709         p.second = InstrumentationLevel::kInstrumentWithInterpreter;
710       }
711     }
712   }
713 }
714 
715 void Instrumentation::ConfigureStubs(const char* key, InstrumentationLevel desired_level) {
716   // Store the instrumentation level for this key or remove it.
717   if (desired_level == InstrumentationLevel::kInstrumentNothing) {
718     // The client no longer needs instrumentation.
719     requested_instrumentation_levels_.erase(key);
720   } else {
721     // The client needs instrumentation.
722     requested_instrumentation_levels_.Overwrite(key, desired_level);
723   }
724 
725   UpdateInstrumentationLevels(desired_level);
726   UpdateStubs();
727 }
728 
729 void Instrumentation::EnableSingleThreadDeopt() {
730   // Single-thread deopt only uses interpreter.
731   can_use_instrumentation_trampolines_ = false;
732   UpdateInstrumentationLevels(InstrumentationLevel::kInstrumentWithInterpreter);
733   UpdateStubs();
734 }
735 
736 void Instrumentation::UpdateStubs() {
737   // Look for the highest required instrumentation level.
738   InstrumentationLevel requested_level = InstrumentationLevel::kInstrumentNothing;
739   for (const auto& v : requested_instrumentation_levels_) {
740     requested_level = std::max(requested_level, v.second);
741   }
742 
743   DCHECK(can_use_instrumentation_trampolines_ ||
744          requested_level != InstrumentationLevel::kInstrumentWithInstrumentationStubs)
745       << "Use trampolines: " << can_use_instrumentation_trampolines_ << " level "
746       << requested_level;
747 
748   interpret_only_ = (requested_level == InstrumentationLevel::kInstrumentWithInterpreter) ||
749                     forced_interpret_only_;
750 
751   if (!RequiresInstrumentationInstallation(requested_level)) {
752     // We're already set.
753     return;
754   }
755   Thread* const self = Thread::Current();
756   Runtime* runtime = Runtime::Current();
757   Locks::mutator_lock_->AssertExclusiveHeld(self);
758   Locks::thread_list_lock_->AssertNotHeld(self);
759   if (requested_level > InstrumentationLevel::kInstrumentNothing) {
760     if (requested_level == InstrumentationLevel::kInstrumentWithInterpreter) {
761       interpreter_stubs_installed_ = true;
762       entry_exit_stubs_installed_ = true;
763     } else {
764       CHECK_EQ(requested_level, InstrumentationLevel::kInstrumentWithInstrumentationStubs);
765       entry_exit_stubs_installed_ = true;
766       interpreter_stubs_installed_ = false;
767     }
768     InstallStubsClassVisitor visitor(this);
769     runtime->GetClassLinker()->VisitClasses(&visitor);
770     instrumentation_stubs_installed_ = true;
771     MutexLock mu(self, *Locks::thread_list_lock_);
772     runtime->GetThreadList()->ForEach(InstrumentationInstallStack, this);
773   } else {
774     interpreter_stubs_installed_ = false;
775     entry_exit_stubs_installed_ = false;
776     InstallStubsClassVisitor visitor(this);
777     runtime->GetClassLinker()->VisitClasses(&visitor);
778     // Restore stack only if there is no method currently deoptimized.
779     bool empty;
780     {
781       ReaderMutexLock mu(self, *GetDeoptimizedMethodsLock());
782       empty = IsDeoptimizedMethodsEmpty();  // Avoid lock violation.
783     }
784     if (empty) {
785       MutexLock mu(self, *Locks::thread_list_lock_);
786       Runtime::Current()->GetThreadList()->ForEach(InstrumentationRestoreStack, this);
787       // Only do this after restoring, as walking the stack when restoring will see
788       // the instrumentation exit pc.
789       instrumentation_stubs_installed_ = false;
790     }
791   }
792 }
793 
794 static void ResetQuickAllocEntryPointsForThread(Thread* thread, void* arg ATTRIBUTE_UNUSED) {
795   thread->ResetQuickAllocEntryPointsForThread(kUseReadBarrier && thread->GetIsGcMarking());
796 }
797 
798 void Instrumentation::SetEntrypointsInstrumented(bool instrumented) {
799   Thread* self = Thread::Current();
800   Runtime* runtime = Runtime::Current();
801   Locks::mutator_lock_->AssertNotHeld(self);
802   Locks::instrument_entrypoints_lock_->AssertHeld(self);
803   if (runtime->IsStarted()) {
804     ScopedSuspendAll ssa(__FUNCTION__);
805     MutexLock mu(self, *Locks::runtime_shutdown_lock_);
806     SetQuickAllocEntryPointsInstrumented(instrumented);
807     ResetQuickAllocEntryPoints();
808     alloc_entrypoints_instrumented_ = instrumented;
809   } else {
810     MutexLock mu(self, *Locks::runtime_shutdown_lock_);
811     SetQuickAllocEntryPointsInstrumented(instrumented);
812 
813     // Note: ResetQuickAllocEntryPoints only works when the runtime is started. Manually run the
814     //       update for just this thread.
815     // Note: self may be null. One of those paths is setting instrumentation in the Heap
816     //       constructor for gcstress mode.
817     if (self != nullptr) {
818       ResetQuickAllocEntryPointsForThread(self, nullptr);
819     }
820 
821     alloc_entrypoints_instrumented_ = instrumented;
822   }
823 }
824 
825 void Instrumentation::InstrumentQuickAllocEntryPoints() {
826   MutexLock mu(Thread::Current(), *Locks::instrument_entrypoints_lock_);
827   InstrumentQuickAllocEntryPointsLocked();
828 }
829 
830 void Instrumentation::UninstrumentQuickAllocEntryPoints() {
831   MutexLock mu(Thread::Current(), *Locks::instrument_entrypoints_lock_);
832   UninstrumentQuickAllocEntryPointsLocked();
833 }
834 
835 void Instrumentation::InstrumentQuickAllocEntryPointsLocked() {
836   Locks::instrument_entrypoints_lock_->AssertHeld(Thread::Current());
837   if (quick_alloc_entry_points_instrumentation_counter_ == 0) {
838     SetEntrypointsInstrumented(true);
839   }
840   ++quick_alloc_entry_points_instrumentation_counter_;
841 }
842 
843 void Instrumentation::UninstrumentQuickAllocEntryPointsLocked() {
844   Locks::instrument_entrypoints_lock_->AssertHeld(Thread::Current());
845   CHECK_GT(quick_alloc_entry_points_instrumentation_counter_, 0U);
846   --quick_alloc_entry_points_instrumentation_counter_;
847   if (quick_alloc_entry_points_instrumentation_counter_ == 0) {
848     SetEntrypointsInstrumented(false);
849   }
850 }
851 
852 void Instrumentation::ResetQuickAllocEntryPoints() {
853   Runtime* runtime = Runtime::Current();
854   if (runtime->IsStarted()) {
855     MutexLock mu(Thread::Current(), *Locks::thread_list_lock_);
856     runtime->GetThreadList()->ForEach(ResetQuickAllocEntryPointsForThread, nullptr);
857   }
858 }
859 
860 void Instrumentation::UpdateMethodsCodeImpl(ArtMethod* method, const void* quick_code) {
861   const void* new_quick_code;
862   if (LIKELY(!instrumentation_stubs_installed_)) {
863     new_quick_code = quick_code;
864   } else {
865     if ((interpreter_stubs_installed_ || IsDeoptimized(method)) && !method->IsNative()) {
866       new_quick_code = GetQuickToInterpreterBridge();
867     } else {
868       ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
869       if (class_linker->IsQuickResolutionStub(quick_code) ||
870           class_linker->IsQuickToInterpreterBridge(quick_code)) {
871         new_quick_code = quick_code;
872       } else if (entry_exit_stubs_installed_ &&
873                  // We need to make sure not to replace anything that InstallStubsForMethod
874                  // wouldn't. Specifically we cannot stub out Proxy.<init> since subtypes copy the
875                  // implementation directly and this will confuse the instrumentation trampolines.
876                  // TODO We should remove the need for this since it makes it impossible to profile
877                  // Proxy.<init> correctly in all cases.
878                  method != jni::DecodeArtMethod(WellKnownClasses::java_lang_reflect_Proxy_init)) {
879         new_quick_code = GetQuickInstrumentationEntryPoint();
880         if (!method->IsNative() && Runtime::Current()->GetJit() != nullptr) {
881           // Native methods use trampoline entrypoints during interpreter tracing.
882           DCHECK(!Runtime::Current()->GetJit()->GetCodeCache()->GetGarbageCollectCodeUnsafe());
883           ProfilingInfo* profiling_info = method->GetProfilingInfo(kRuntimePointerSize);
884           // Tracing will look at the saved entry point in the profiling info to know the actual
885           // entrypoint, so we store it here.
886           if (profiling_info != nullptr) {
887             profiling_info->SetSavedEntryPoint(quick_code);
888           }
889         }
890       } else {
891         new_quick_code = quick_code;
892       }
893     }
894   }
895   UpdateEntrypoints(method, new_quick_code);
896 }
897 
898 void Instrumentation::UpdateNativeMethodsCodeToJitCode(ArtMethod* method, const void* quick_code) {
899   // We don't do any read barrier on `method`'s declaring class in this code, as the JIT might
900   // enter here on a soon-to-be deleted ArtMethod. Updating the entrypoint is OK though, as
901   // the ArtMethod is still in memory.
902   const void* new_quick_code = quick_code;
903   if (UNLIKELY(instrumentation_stubs_installed_) && entry_exit_stubs_installed_) {
904     new_quick_code = GetQuickInstrumentationEntryPoint();
905   }
906   UpdateEntrypoints(method, new_quick_code);
907 }
908 
909 void Instrumentation::UpdateMethodsCode(ArtMethod* method, const void* quick_code) {
910   DCHECK(method->GetDeclaringClass()->IsResolved());
911   UpdateMethodsCodeImpl(method, quick_code);
912 }
913 
914 void Instrumentation::UpdateMethodsCodeToInterpreterEntryPoint(ArtMethod* method) {
915   UpdateMethodsCodeImpl(method, GetQuickToInterpreterBridge());
916 }
917 
918 void Instrumentation::UpdateMethodsCodeForJavaDebuggable(ArtMethod* method,
919                                                          const void* quick_code) {
920   // When the runtime is set to Java debuggable, we may update the entry points of
921   // all methods of a class to the interpreter bridge. A method's declaring class
922   // might not be in resolved state yet in that case, so we bypass the DCHECK in
923   // UpdateMethodsCode.
924   UpdateMethodsCodeImpl(method, quick_code);
925 }
926 
927 bool Instrumentation::AddDeoptimizedMethod(ArtMethod* method) {
928   if (IsDeoptimizedMethod(method)) {
929     // Already in the map. Return.
930     return false;
931   }
932   // Not found. Add it.
933   deoptimized_methods_.insert(method);
934   return true;
935 }
936 
937 bool Instrumentation::IsDeoptimizedMethod(ArtMethod* method) {
938   return deoptimized_methods_.find(method) != deoptimized_methods_.end();
939 }
940 
941 ArtMethod* Instrumentation::BeginDeoptimizedMethod() {
942   if (deoptimized_methods_.empty()) {
943     // Empty.
944     return nullptr;
945   }
946   return *deoptimized_methods_.begin();
947 }
948 
949 bool Instrumentation::RemoveDeoptimizedMethod(ArtMethod* method) {
950   auto it = deoptimized_methods_.find(method);
951   if (it == deoptimized_methods_.end()) {
952     return false;
953   }
954   deoptimized_methods_.erase(it);
955   return true;
956 }
957 
958 bool Instrumentation::IsDeoptimizedMethodsEmpty() const {
959   return deoptimized_methods_.empty();
960 }
961 
962 void Instrumentation::Deoptimize(ArtMethod* method) {
963   CHECK(!method->IsNative());
964   CHECK(!method->IsProxyMethod());
965   CHECK(method->IsInvokable());
966 
967   Thread* self = Thread::Current();
968   {
969     WriterMutexLock mu(self, *GetDeoptimizedMethodsLock());
970     bool has_not_been_deoptimized = AddDeoptimizedMethod(method);
971     CHECK(has_not_been_deoptimized) << "Method " << ArtMethod::PrettyMethod(method)
972         << " is already deoptimized";
973   }
974   if (!interpreter_stubs_installed_) {
975     UpdateEntrypoints(method, GetQuickInstrumentationEntryPoint());
976 
977     // Install instrumentation exit stub and instrumentation frames. We may already have installed
978     // these previously so it will only cover the newly created frames.
979     instrumentation_stubs_installed_ = true;
980     MutexLock mu(self, *Locks::thread_list_lock_);
981     Runtime::Current()->GetThreadList()->ForEach(InstrumentationInstallStack, this);
982   }
983 }
984 
985 void Instrumentation::Undeoptimize(ArtMethod* method) {
986   CHECK(!method->IsNative());
987   CHECK(!method->IsProxyMethod());
988   CHECK(method->IsInvokable());
989 
990   Thread* self = Thread::Current();
991   bool empty;
992   {
993     WriterMutexLock mu(self, *GetDeoptimizedMethodsLock());
994     bool found_and_erased = RemoveDeoptimizedMethod(method);
995     CHECK(found_and_erased) << "Method " << ArtMethod::PrettyMethod(method)
996         << " is not deoptimized";
997     empty = IsDeoptimizedMethodsEmpty();
998   }
999 
1000   // Restore code and possibly stack only if we did not deoptimize everything.
1001   if (!interpreter_stubs_installed_) {
1002     // Restore its code or resolution trampoline.
1003     ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
1004     if (method->IsStatic() && !method->IsConstructor() &&
1005         !method->GetDeclaringClass()->IsInitialized()) {
1006       UpdateEntrypoints(method, GetQuickResolutionStub());
1007     } else {
1008       const void* quick_code = NeedDebugVersionFor(method)
1009           ? GetQuickToInterpreterBridge()
1010           : class_linker->GetQuickOatCodeFor(method);
1011       UpdateEntrypoints(method, quick_code);
1012     }
1013 
1014     // If there is no deoptimized method left, we can restore the stack of each thread.
1015     if (empty && !entry_exit_stubs_installed_) {
1016       MutexLock mu(self, *Locks::thread_list_lock_);
1017       Runtime::Current()->GetThreadList()->ForEach(InstrumentationRestoreStack, this);
1018       instrumentation_stubs_installed_ = false;
1019     }
1020   }
1021 }
1022 
1023 bool Instrumentation::IsDeoptimized(ArtMethod* method) {
1024   DCHECK(method != nullptr);
1025   ReaderMutexLock mu(Thread::Current(), *GetDeoptimizedMethodsLock());
1026   return IsDeoptimizedMethod(method);
1027 }
1028 
1029 void Instrumentation::EnableDeoptimization() {
1030   ReaderMutexLock mu(Thread::Current(), *GetDeoptimizedMethodsLock());
1031   CHECK(IsDeoptimizedMethodsEmpty());
1032   CHECK_EQ(deoptimization_enabled_, false);
1033   deoptimization_enabled_ = true;
1034 }
1035 
1036 void Instrumentation::DisableDeoptimization(const char* key) {
1037   CHECK_EQ(deoptimization_enabled_, true);
1038   // If we deoptimized everything, undo it.
1039   InstrumentationLevel level = GetCurrentInstrumentationLevel();
1040   if (level == InstrumentationLevel::kInstrumentWithInterpreter) {
1041     UndeoptimizeEverything(key);
1042   }
1043   // Undeoptimize selected methods.
1044   while (true) {
1045     ArtMethod* method;
1046     {
1047       ReaderMutexLock mu(Thread::Current(), *GetDeoptimizedMethodsLock());
1048       if (IsDeoptimizedMethodsEmpty()) {
1049         break;
1050       }
1051       method = BeginDeoptimizedMethod();
1052       CHECK(method != nullptr);
1053     }
1054     Undeoptimize(method);
1055   }
1056   deoptimization_enabled_ = false;
1057 }
1058 
1059 // Indicates if instrumentation should notify method enter/exit events to the listeners.
1060 bool Instrumentation::ShouldNotifyMethodEnterExitEvents() const {
1061   if (!HasMethodEntryListeners() && !HasMethodExitListeners()) {
1062     return false;
1063   }
1064   return !deoptimization_enabled_ && !interpreter_stubs_installed_;
1065 }
1066 
1067 void Instrumentation::DeoptimizeEverything(const char* key) {
1068   CHECK(deoptimization_enabled_);
1069   ConfigureStubs(key, InstrumentationLevel::kInstrumentWithInterpreter);
1070 }
1071 
1072 void Instrumentation::UndeoptimizeEverything(const char* key) {
1073   CHECK(interpreter_stubs_installed_);
1074   CHECK(deoptimization_enabled_);
1075   ConfigureStubs(key, InstrumentationLevel::kInstrumentNothing);
1076 }
1077 
1078 void Instrumentation::EnableMethodTracing(const char* key, bool needs_interpreter) {
1079   InstrumentationLevel level;
1080   if (needs_interpreter) {
1081     level = InstrumentationLevel::kInstrumentWithInterpreter;
1082   } else {
1083     level = InstrumentationLevel::kInstrumentWithInstrumentationStubs;
1084   }
1085   ConfigureStubs(key, level);
1086 }
1087 
1088 void Instrumentation::DisableMethodTracing(const char* key) {
1089   ConfigureStubs(key, InstrumentationLevel::kInstrumentNothing);
1090 }
1091 
1092 const void* Instrumentation::GetCodeForInvoke(ArtMethod* method) const {
1093   // This is only called from the instrumentation entry point, which should never receive proxy methods.
1094   DCHECK(!method->IsProxyMethod()) << method->PrettyMethod();
1095   ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
1096   if (LIKELY(!instrumentation_stubs_installed_ && !interpreter_stubs_installed_)) {
1097     // In general we just return whatever the method thinks its entrypoint is here. The only
1098     // exception is if it still has the instrumentation entrypoint. That means we are racing another
1099     // thread getting rid of instrumentation which is unexpected but possible. In that case we want
1100     // to wait and try to get it from the oat file or jit.
1101     const void* code = method->GetEntryPointFromQuickCompiledCodePtrSize(kRuntimePointerSize);
1102     DCHECK(code != nullptr);
1103     if (code != GetQuickInstrumentationEntryPoint()) {
1104       return code;
1105     } else if (method->IsNative()) {
1106       return class_linker->GetQuickOatCodeFor(method);
1107     }
1108     // We don't know what it is. Fall through to try to find the code from the JIT or Oat file.
1109   } else if (method->IsNative()) {
1110     // TODO We could have JIT compiled native entrypoints. It might be worth it to find these.
1111     return class_linker->GetQuickOatCodeFor(method);
1112   } else if (UNLIKELY(interpreter_stubs_installed_)) {
1113     return GetQuickToInterpreterBridge();
1114   }
1115   // Since the method cannot be native (due to the checks above), we can always fall back to
1116   // the interpreter bridge.
1117   const void* result = GetQuickToInterpreterBridge();
1118   if (!NeedDebugVersionFor(method)) {
1119     // If we don't need a debug version we should see what the oat file/class linker has to say.
1120     result = class_linker->GetQuickOatCodeFor(method);
1121   }
1122   // If both those fail try the jit.
1123   if (result == GetQuickToInterpreterBridge()) {
1124     jit::Jit* jit = Runtime::Current()->GetJit();
1125     if (jit != nullptr) {
1126       const void* res = jit->GetCodeCache()->FindCompiledCodeForInstrumentation(method);
1127       if (res != nullptr) {
1128         result = res;
1129       }
1130     }
1131   }
1132   return result;
1133 }
1134 
1135 const void* Instrumentation::GetQuickCodeFor(ArtMethod* method, PointerSize pointer_size) const {
1136   ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
1137   if (LIKELY(!instrumentation_stubs_installed_)) {
1138     const void* code = method->GetEntryPointFromQuickCompiledCodePtrSize(pointer_size);
1139     DCHECK(code != nullptr);
1140     if (LIKELY(!class_linker->IsQuickResolutionStub(code) &&
1141                !class_linker->IsQuickToInterpreterBridge(code)) &&
1142                !class_linker->IsQuickResolutionStub(code) &&
1143                !class_linker->IsQuickToInterpreterBridge(code)) {
1144       return code;
1145     }
1146   }
1147   return class_linker->GetQuickOatCodeFor(method);
1148 }
1149 
1150 void Instrumentation::MethodEnterEventImpl(Thread* thread,
1151                                            ObjPtr<mirror::Object> this_object,
1152                                            ArtMethod* method,
1153                                            uint32_t dex_pc) const {
1154   DCHECK(!method->IsRuntimeMethod());
1155   if (HasMethodEntryListeners()) {
1156     Thread* self = Thread::Current();
1157     StackHandleScope<1> hs(self);
1158     Handle<mirror::Object> thiz(hs.NewHandle(this_object));
1159     for (InstrumentationListener* listener : method_entry_listeners_) {
1160       if (listener != nullptr) {
1161         listener->MethodEntered(thread, thiz, method, dex_pc);
1162       }
1163     }
1164   }
1165 }
1166 
1167 void Instrumentation::MethodExitEventImpl(Thread* thread,
1168                                           ObjPtr<mirror::Object> this_object,
1169                                           ArtMethod* method,
1170                                           uint32_t dex_pc,
1171                                           const JValue& return_value) const {
1172   if (HasMethodExitListeners()) {
1173     Thread* self = Thread::Current();
1174     StackHandleScope<2> hs(self);
1175     Handle<mirror::Object> thiz(hs.NewHandle(this_object));
1176     if (method->GetInterfaceMethodIfProxy(kRuntimePointerSize)
1177               ->GetReturnTypePrimitive() != Primitive::kPrimNot) {
1178       for (InstrumentationListener* listener : method_exit_listeners_) {
1179         if (listener != nullptr) {
1180           listener->MethodExited(thread, thiz, method, dex_pc, return_value);
1181         }
1182       }
1183     } else {
1184       Handle<mirror::Object> ret(hs.NewHandle(return_value.GetL()));
1185       for (InstrumentationListener* listener : method_exit_listeners_) {
1186         if (listener != nullptr) {
1187           listener->MethodExited(thread, thiz, method, dex_pc, ret);
1188         }
1189       }
1190     }
1191   }
1192 }
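// Note on the reference case above: the return value is wrapped in a Handle before the listener
// loop because a listener may suspend the thread (for example by allocating or calling back into
// Java), and a moving garbage collector could then relocate the returned object; the handle keeps
// the reference valid across such suspension points.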
1193 
1194 void Instrumentation::MethodUnwindEvent(Thread* thread,
1195                                         mirror::Object* this_object,
1196                                         ArtMethod* method,
1197                                         uint32_t dex_pc) const {
1198   if (HasMethodUnwindListeners()) {
1199     Thread* self = Thread::Current();
1200     StackHandleScope<1> hs(self);
1201     Handle<mirror::Object> thiz(hs.NewHandle(this_object));
1202     for (InstrumentationListener* listener : method_unwind_listeners_) {
1203       if (listener != nullptr) {
1204         listener->MethodUnwind(thread, thiz, method, dex_pc);
1205       }
1206     }
1207   }
1208 }
1209 
1210 void Instrumentation::DexPcMovedEventImpl(Thread* thread,
1211                                           ObjPtr<mirror::Object> this_object,
1212                                           ArtMethod* method,
1213                                           uint32_t dex_pc) const {
1214   Thread* self = Thread::Current();
1215   StackHandleScope<1> hs(self);
1216   Handle<mirror::Object> thiz(hs.NewHandle(this_object));
1217   for (InstrumentationListener* listener : dex_pc_listeners_) {
1218     if (listener != nullptr) {
1219       listener->DexPcMoved(thread, thiz, method, dex_pc);
1220     }
1221   }
1222 }
1223 
1224 void Instrumentation::BranchImpl(Thread* thread,
1225                                  ArtMethod* method,
1226                                  uint32_t dex_pc,
1227                                  int32_t offset) const {
1228   for (InstrumentationListener* listener : branch_listeners_) {
1229     if (listener != nullptr) {
1230       listener->Branch(thread, method, dex_pc, offset);
1231     }
1232   }
1233 }
1234 
1235 void Instrumentation::WatchedFramePopImpl(Thread* thread, const ShadowFrame& frame) const {
1236   for (InstrumentationListener* listener : watched_frame_pop_listeners_) {
1237     if (listener != nullptr) {
1238       listener->WatchedFramePop(thread, frame);
1239     }
1240   }
1241 }
1242 
1243 void Instrumentation::FieldReadEventImpl(Thread* thread,
1244                                          ObjPtr<mirror::Object> this_object,
1245                                          ArtMethod* method,
1246                                          uint32_t dex_pc,
1247                                          ArtField* field) const {
1248   Thread* self = Thread::Current();
1249   StackHandleScope<1> hs(self);
1250   Handle<mirror::Object> thiz(hs.NewHandle(this_object));
1251   for (InstrumentationListener* listener : field_read_listeners_) {
1252     if (listener != nullptr) {
1253       listener->FieldRead(thread, thiz, method, dex_pc, field);
1254     }
1255   }
1256 }
1257 
1258 void Instrumentation::FieldWriteEventImpl(Thread* thread,
1259                                           ObjPtr<mirror::Object> this_object,
1260                                           ArtMethod* method,
1261                                           uint32_t dex_pc,
1262                                           ArtField* field,
1263                                           const JValue& field_value) const {
1264   Thread* self = Thread::Current();
1265   StackHandleScope<2> hs(self);
1266   Handle<mirror::Object> thiz(hs.NewHandle(this_object));
1267   if (field->IsPrimitiveType()) {
1268     for (InstrumentationListener* listener : field_write_listeners_) {
1269       if (listener != nullptr) {
1270         listener->FieldWritten(thread, thiz, method, dex_pc, field, field_value);
1271       }
1272     }
1273   } else {
1274     Handle<mirror::Object> val(hs.NewHandle(field_value.GetL()));
1275     for (InstrumentationListener* listener : field_write_listeners_) {
1276       if (listener != nullptr) {
1277         listener->FieldWritten(thread, thiz, method, dex_pc, field, val);
1278       }
1279     }
1280   }
1281 }
1282 
1283 void Instrumentation::ExceptionThrownEvent(Thread* thread,
1284                                            mirror::Throwable* exception_object) const {
1285   Thread* self = Thread::Current();
1286   StackHandleScope<1> hs(self);
1287   Handle<mirror::Throwable> h_exception(hs.NewHandle(exception_object));
1288   if (HasExceptionThrownListeners()) {
1289     DCHECK_EQ(thread->GetException(), h_exception.Get());
1290     thread->ClearException();
1291     for (InstrumentationListener* listener : exception_thrown_listeners_) {
1292       if (listener != nullptr) {
1293         listener->ExceptionThrown(thread, h_exception);
1294       }
1295     }
1296     // See b/65049545 for discussion about this behavior.
1297     thread->AssertNoPendingException();
1298     thread->SetException(h_exception.Get());
1299   }
1300 }
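// Note on the ClearException()/SetException() pair above: listeners may themselves run code that
// requires no pending exception on the thread (for example, allocating objects or calling back
// into Java), so the exception is temporarily cleared around the callbacks and then restored.
// See b/65049545 referenced above for the full discussion.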
1301 
1302 void Instrumentation::ExceptionHandledEvent(Thread* thread,
1303                                             mirror::Throwable* exception_object) const {
1304   Thread* self = Thread::Current();
1305   StackHandleScope<1> hs(self);
1306   Handle<mirror::Throwable> h_exception(hs.NewHandle(exception_object));
1307   if (HasExceptionHandledListeners()) {
1308     // We should have cleared the exception so that callers can detect a new one.
1309     DCHECK(thread->GetException() == nullptr);
1310     for (InstrumentationListener* listener : exception_handled_listeners_) {
1311       if (listener != nullptr) {
1312         listener->ExceptionHandled(thread, h_exception);
1313       }
1314     }
1315   }
1316 }
1317 
1318 // Computes a frame ID by ignoring inlined frames.
1319 size_t Instrumentation::ComputeFrameId(Thread* self,
1320                                        size_t frame_depth,
1321                                        size_t inlined_frames_before_frame) {
1322   CHECK_GE(frame_depth, inlined_frames_before_frame);
1323   size_t no_inline_depth = frame_depth - inlined_frames_before_frame;
1324   return StackVisitor::ComputeNumFrames(self, kInstrumentationStackWalk) - no_inline_depth;
1325 }
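// For illustration (hypothetical numbers): if the thread currently has 10 non-inlined frames, and
// the caller reports frame_depth == 4 with 1 inlined frame before it, then no_inline_depth is
// 4 - 1 = 3 and the computed frame ID is 10 - 3 = 7.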
1326 
1327 static void CheckStackDepth(Thread* self, const InstrumentationStackFrame& instrumentation_frame,
1328                             int delta)
1329     REQUIRES_SHARED(Locks::mutator_lock_) {
1330   size_t frame_id = StackVisitor::ComputeNumFrames(self, kInstrumentationStackWalk) + delta;
1331   if (frame_id != instrumentation_frame.frame_id_) {
1332     LOG(ERROR) << "Expected frame_id=" << frame_id << " but found "
1333         << instrumentation_frame.frame_id_;
1334     StackVisitor::DescribeStack(self);
1335     CHECK_EQ(frame_id, instrumentation_frame.frame_id_);
1336   }
1337 }
1338 
1339 void Instrumentation::PushInstrumentationStackFrame(Thread* self, mirror::Object* this_object,
1340                                                     ArtMethod* method,
1341                                                     uintptr_t lr, bool interpreter_entry) {
1342   DCHECK(!self->IsExceptionPending());
1343   std::deque<instrumentation::InstrumentationStackFrame>* stack = self->GetInstrumentationStack();
1344   if (kVerboseInstrumentation) {
1345     LOG(INFO) << "Entering " << ArtMethod::PrettyMethod(method) << " from PC "
1346               << reinterpret_cast<void*>(lr);
1347   }
1348 
1349   // We send the enter event before pushing the instrumentation frame to make cleanup easier. If the
1350   // event causes an exception we can simply send the unwind event and return.
1351   StackHandleScope<1> hs(self);
1352   Handle<mirror::Object> h_this(hs.NewHandle(this_object));
1353   if (!interpreter_entry) {
1354     MethodEnterEvent(self, h_this.Get(), method, 0);
1355     if (self->IsExceptionPending()) {
1356       MethodUnwindEvent(self, h_this.Get(), method, 0);
1357       return;
1358     }
1359   }
1360 
1361   // We have a callee-save frame, so the computed frame count (and thus frame_id) is never 0.
1362   DCHECK(!self->IsExceptionPending());
1363   size_t frame_id = StackVisitor::ComputeNumFrames(self, kInstrumentationStackWalk);
1364 
1365   instrumentation::InstrumentationStackFrame instrumentation_frame(h_this.Get(), method, lr,
1366                                                                    frame_id, interpreter_entry);
1367   stack->push_front(instrumentation_frame);
1368 }
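// Note: push_front() means the most recently entered instrumented frame sits at the front of the
// thread's instrumentation stack, which is what PopInstrumentationStackFrame() and
// PopFramesForDeoptimization() below rely on when they pop_front().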
1369 
1370 DeoptimizationMethodType Instrumentation::GetDeoptimizationMethodType(ArtMethod* method) {
1371   if (method->IsRuntimeMethod()) {
1372     // Certain methods have strict requirements on whether the dex instruction
1373     // should be re-executed upon deoptimization.
1374     if (method == Runtime::Current()->GetCalleeSaveMethod(
1375         CalleeSaveType::kSaveEverythingForClinit)) {
1376       return DeoptimizationMethodType::kKeepDexPc;
1377     }
1378     if (method == Runtime::Current()->GetCalleeSaveMethod(
1379         CalleeSaveType::kSaveEverythingForSuspendCheck)) {
1380       return DeoptimizationMethodType::kKeepDexPc;
1381     }
1382   }
1383   return DeoptimizationMethodType::kDefault;
1384 }
1385 
1386 // Try to get the shorty of a runtime method if it's an invocation stub.
1387 static char GetRuntimeMethodShorty(Thread* thread) REQUIRES_SHARED(Locks::mutator_lock_) {
1388   char shorty = 'V';
1389   StackVisitor::WalkStack(
1390       [&shorty](const art::StackVisitor* stack_visitor) REQUIRES_SHARED(Locks::mutator_lock_) {
1391         ArtMethod* m = stack_visitor->GetMethod();
1392         if (m == nullptr || m->IsRuntimeMethod()) {
1393           return true;
1394         }
1395         // The first Java method.
1396         if (m->IsNative()) {
1397           // Use JNI method's shorty for the jni stub.
1398           shorty = m->GetShorty()[0];
1399         } else if (m->IsProxyMethod()) {
1400           // Proxy method just invokes its proxied method via
1401           // art_quick_proxy_invoke_handler.
1402           shorty = m->GetInterfaceMethodIfProxy(kRuntimePointerSize)->GetShorty()[0];
1403         } else {
1404           const Instruction& instr = m->DexInstructions().InstructionAt(stack_visitor->GetDexPc());
1405           if (instr.IsInvoke()) {
1406             auto get_method_index_fn = [](ArtMethod* caller,
1407                                           const Instruction& inst,
1408                                           uint32_t dex_pc)
1409                 REQUIRES_SHARED(Locks::mutator_lock_) {
1410               switch (inst.Opcode()) {
1411                 case Instruction::INVOKE_VIRTUAL_RANGE_QUICK:
1412                 case Instruction::INVOKE_VIRTUAL_QUICK: {
1413                   uint16_t method_idx = caller->GetIndexFromQuickening(dex_pc);
1414                   CHECK_NE(method_idx, DexFile::kDexNoIndex16);
1415                   return method_idx;
1416                 }
1417                 default: {
1418                   return static_cast<uint16_t>(inst.VRegB());
1419                 }
1420               }
1421             };
1422 
1423             uint16_t method_index = get_method_index_fn(m, instr, stack_visitor->GetDexPc());
1424             const DexFile* dex_file = m->GetDexFile();
1425             if (interpreter::IsStringInit(dex_file, method_index)) {
1426             // Invoking a string init constructor is turned into invoking
1427             // StringFactory.newStringFromChars(), which returns a string.
1428               shorty = 'L';
1429             } else {
1430               shorty = dex_file->GetMethodShorty(method_index)[0];
1431             }
1432 
1433           } else {
1434             // It could be that a non-invoke opcode invokes a stub, which in turn
1435             // invokes Java code. In such cases, we should never expect a return
1436             // value from the stub.
1437           }
1438         }
1439         // Stop stack walking since we've seen a Java frame.
1440         return false;
1441       },
1442       thread,
1443       /* context= */ nullptr,
1444       art::StackVisitor::StackWalkKind::kIncludeInlinedFrames);
1445   return shorty;
1446 }
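// A method shorty encodes the return type as its first character, followed by one character per
// parameter ('V' void, 'Z' boolean, 'B' byte, 'C' char, 'S' short, 'I' int, 'J' long, 'F' float,
// 'D' double, 'L' any reference). For example, a method declared as int indexOf(String) would have
// the shorty "IL"; only shorty[0], the return type, matters to the code above.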
1447 
1448 TwoWordReturn Instrumentation::PopInstrumentationStackFrame(Thread* self,
1449                                                             uintptr_t* return_pc,
1450                                                             uint64_t* gpr_result,
1451                                                             uint64_t* fpr_result) {
1452   DCHECK(gpr_result != nullptr);
1453   DCHECK(fpr_result != nullptr);
1454   // Do the pop.
1455   std::deque<instrumentation::InstrumentationStackFrame>* stack = self->GetInstrumentationStack();
1456   CHECK_GT(stack->size(), 0U);
1457   InstrumentationStackFrame instrumentation_frame = stack->front();
1458   stack->pop_front();
1459 
1460   // Set return PC and check the sanity of the stack.
1461   *return_pc = instrumentation_frame.return_pc_;
1462   CheckStackDepth(self, instrumentation_frame, 0);
1463   self->VerifyStack();
1464 
1465   ArtMethod* method = instrumentation_frame.method_;
1466   uint32_t length;
1467   const PointerSize pointer_size = Runtime::Current()->GetClassLinker()->GetImagePointerSize();
1468   char return_shorty;
1469 
1470   // A runtime method does not call into MethodExitEvent(), so there should be no
1471   // suspension point below.
1472   ScopedAssertNoThreadSuspension ants(__FUNCTION__, method->IsRuntimeMethod());
1473   if (method->IsRuntimeMethod()) {
1474     if (method != Runtime::Current()->GetCalleeSaveMethod(
1475         CalleeSaveType::kSaveEverythingForClinit)) {
1476       // If the caller is at an invocation point and the runtime method is not
1477       // for clinit, we need to pass return results to the caller.
1478       // We need the correct shorty to decide whether we need to pass the return
1479       // result for deoptimization below.
1480       return_shorty = GetRuntimeMethodShorty(self);
1481     } else {
1482       // Some runtime methods such as allocations, unresolved field getters, etc.
1483       // have return values. We don't need to set return_value since MethodExitEvent()
1484       // below isn't called for runtime methods. Deoptimization doesn't need the
1485       // value either since the dex instruction will be re-executed by the
1486       // interpreter, except these two cases:
1487       // (1) For an invoke, which is handled above to get the correct shorty.
1488       // (2) For MONITOR_ENTER/EXIT, which cannot be re-executed since it's not
1489       //     idempotent. However there is no return value for it anyway.
1490       return_shorty = 'V';
1491     }
1492   } else {
1493     return_shorty = method->GetInterfaceMethodIfProxy(pointer_size)->GetShorty(&length)[0];
1494   }
1495 
1496   bool is_ref = return_shorty == '[' || return_shorty == 'L';
1497   StackHandleScope<1> hs(self);
1498   MutableHandle<mirror::Object> res(hs.NewHandle<mirror::Object>(nullptr));
1499   JValue return_value;
1500   if (return_shorty == 'V') {
1501     return_value.SetJ(0);
1502   } else if (return_shorty == 'F' || return_shorty == 'D') {
1503     return_value.SetJ(*fpr_result);
1504   } else {
1505     return_value.SetJ(*gpr_result);
1506   }
1507   if (is_ref) {
1508     // Take a handle to the return value so we won't lose it if we suspend.
1509     res.Assign(return_value.GetL());
1510   }
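  // For illustration (hypothetical case): a method whose shorty starts with 'D' returns a double,
  // so its raw 64-bit result arrives in *fpr_result and is stored in the JValue above; a method
  // returning an Object has shorty[0] == 'L', so the result comes from *gpr_result and is
  // additionally pinned in the handle `res` so a GC during the exit event cannot invalidate it.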
1511   // TODO: improve the dex pc information here, requires knowledge of current PC as opposed to
1512   //       return_pc.
1513   uint32_t dex_pc = dex::kDexNoIndex;
1514   mirror::Object* this_object = instrumentation_frame.this_object_;
1515   if (!method->IsRuntimeMethod() && !instrumentation_frame.interpreter_entry_) {
1516     MethodExitEvent(self, this_object, instrumentation_frame.method_, dex_pc, return_value);
1517   }
1518 
1519   // Deoptimize if the caller needs to continue execution in the interpreter. Do nothing if we get
1520   // back to an upcall.
1521   NthCallerVisitor visitor(self, 1, true);
1522   visitor.WalkStack(true);
1523   bool deoptimize = (visitor.caller != nullptr) &&
1524                     (interpreter_stubs_installed_ || IsDeoptimized(visitor.caller) ||
1525                     self->IsForceInterpreter() ||
1526                     Dbg::IsForcedInterpreterNeededForUpcall(self, visitor.caller));
1527   if (is_ref) {
1528     // Restore the return value if it's a reference since it might have moved.
1529     *reinterpret_cast<mirror::Object**>(gpr_result) = res.Get();
1530   }
1531   if (deoptimize && Runtime::Current()->IsAsyncDeoptimizeable(*return_pc)) {
1532     if (kVerboseInstrumentation) {
1533       LOG(INFO) << "Deoptimizing "
1534                 << visitor.caller->PrettyMethod()
1535                 << " by returning from "
1536                 << method->PrettyMethod()
1537                 << " with result "
1538                 << std::hex << return_value.GetJ() << std::dec
1539                 << " in "
1540                 << *self;
1541     }
1542     DeoptimizationMethodType deopt_method_type = GetDeoptimizationMethodType(method);
1543     self->PushDeoptimizationContext(return_value,
1544                                     return_shorty == 'L' || return_shorty == '[',
1545                                     /* exception= */ nullptr,
1546                                     /* from_code= */ false,
1547                                     deopt_method_type);
1548     return GetTwoWordSuccessValue(*return_pc,
1549                                   reinterpret_cast<uintptr_t>(GetQuickDeoptimizationEntryPoint()));
1550   } else {
1551     if (deoptimize && !Runtime::Current()->IsAsyncDeoptimizeable(*return_pc)) {
1552       VLOG(deopt) << "Got a deoptimization request on un-deoptimizable " << method->PrettyMethod()
1553                   << " at PC " << reinterpret_cast<void*>(*return_pc);
1554     }
1555     if (kVerboseInstrumentation) {
1556       LOG(INFO) << "Returning from " << method->PrettyMethod()
1557                 << " to PC " << reinterpret_cast<void*>(*return_pc);
1558     }
1559     return GetTwoWordSuccessValue(0, *return_pc);
1560   }
1561 }
1562 
1563 uintptr_t Instrumentation::PopFramesForDeoptimization(Thread* self, size_t nframes) const {
1564   std::deque<instrumentation::InstrumentationStackFrame>* stack = self->GetInstrumentationStack();
1565   CHECK_GE(stack->size(), nframes);
1566   if (nframes == 0) {
1567     return 0u;
1568   }
1569   // This pop is for deoptimization, so we do not need to send instrumentation events here; still
1570   // emit the log messages below when verbose-instrumentation is enabled.
1571   if (kVerboseInstrumentation) {
1572     for (size_t i = 0; i < nframes; i++) {
1573       LOG(INFO) << "Popping for deoptimization " << stack->at(i).method_->PrettyMethod();
1574     }
1575   }
1576   // Now that any instrumentation events and log messages have been emitted we can actually
1577   // modify the instrumentation stack. We cannot do this earlier since MethodUnwindEvent can
1578   // re-enter Java and do other things that require the instrumentation stack to be in a
1579   // consistent state with the actual stack.
1580   for (size_t i = 0; i < nframes - 1; i++) {
1581     stack->pop_front();
1582   }
1583   uintptr_t return_pc = stack->front().return_pc_;
1584   stack->pop_front();
1585   return return_pc;
1586 }
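// For illustration (hypothetical numbers): with nframes == 3, the two most recently pushed
// instrumentation frames are simply discarded and the return PC recorded in the third frame is
// returned to the caller.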
1587 
1588 std::string InstrumentationStackFrame::Dump() const {
1589   std::ostringstream os;
1590   os << "Frame " << frame_id_ << " " << ArtMethod::PrettyMethod(method_) << ":"
1591       << reinterpret_cast<void*>(return_pc_) << " this=" << reinterpret_cast<void*>(this_object_);
1592   return os.str();
1593 }
1594 
1595 }  // namespace instrumentation
1596 }  // namespace art
1597