/*
 * Copyright (C) 2011 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ART_RUNTIME_INSTRUMENTATION_H_
#define ART_RUNTIME_INSTRUMENTATION_H_

#include <stdint.h>

#include <functional>
#include <list>
#include <memory>
#include <optional>
#include <unordered_set>

#include "arch/instruction_set.h"
#include "base/enums.h"
#include "base/locks.h"
#include "base/macros.h"
#include "base/safe_map.h"
#include "gc_root.h"
#include "offsets.h"

namespace art {
namespace mirror {
class Class;
class Object;
class Throwable;
}  // namespace mirror
class ArtField;
class ArtMethod;
template <typename T> class Handle;
template <typename T> class MutableHandle;
struct NthCallerVisitor;
union JValue;
class SHARED_LOCKABLE ReaderWriterMutex;
class ShadowFrame;
class Thread;
enum class DeoptimizationMethodType;

namespace instrumentation {

// Do we want to deoptimize for method entry and exit listeners or just try to intercept
// invocations? Deoptimization forces all code to run in the interpreter and considerably hurts the
// application's performance.
static constexpr bool kDeoptimizeForAccurateMethodEntryExitListeners = true;

// An optional frame is either Some(const ShadowFrame& current_frame) or None, depending on
// whether the method being exited has a shadow frame associated with the current stack frame.
using OptionalFrame = std::optional<std::reference_wrapper<const ShadowFrame>>;
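
// Illustrative sketch (not part of this header): a MethodExited listener can check whether a
// shadow frame accompanies the frame being exited like so (the body shown is hypothetical):
//
//   void MethodExited(Thread* thread, ArtMethod* method,
//                     OptionalFrame frame, JValue& return_value) override {
//     if (frame.has_value()) {
//       const ShadowFrame& shadow_frame = frame->get();  // interpreter/shadow frame present
//       // ... inspect shadow_frame ...
//     }
//   }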

// Instrumentation event listener API. Registered listeners will get the appropriate call back for
// the events they are listening for. The call backs supply the thread, method and dex_pc the event
// occurred upon. The thread may or may not be Thread::Current().
struct InstrumentationListener {
  InstrumentationListener() {}
  virtual ~InstrumentationListener() {}

  // Call-back for when a method is entered.
  virtual void MethodEntered(Thread* thread, ArtMethod* method)
      REQUIRES_SHARED(Locks::mutator_lock_) = 0;

  // Call-back for when a method is exited with its return value already handle-ized; use this
  // overload when the listener needs to go through a suspend point.
  virtual void MethodExited(Thread* thread,
                            ArtMethod* method,
                            OptionalFrame frame,
                            MutableHandle<mirror::Object>& return_value)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Call-back for when a method is exited. The implementor should either handler-ize the return
  // value (if appropriate) or use the alternate MethodExited callback instead if they need to
  // go through a suspend point.
  virtual void MethodExited(Thread* thread,
                            ArtMethod* method,
                            OptionalFrame frame,
                            JValue& return_value)
      REQUIRES_SHARED(Locks::mutator_lock_) = 0;

  // Call-back for when a method is popped due to an exception throw. A method will either cause a
  // MethodExited call-back or a MethodUnwind call-back when its activation is removed.
  virtual void MethodUnwind(Thread* thread,
                            Handle<mirror::Object> this_object,
                            ArtMethod* method,
                            uint32_t dex_pc)
      REQUIRES_SHARED(Locks::mutator_lock_) = 0;

  // Call-back for when the dex pc moves in a method.
  virtual void DexPcMoved(Thread* thread,
                          Handle<mirror::Object> this_object,
                          ArtMethod* method,
                          uint32_t new_dex_pc)
      REQUIRES_SHARED(Locks::mutator_lock_) = 0;

  // Call-back for when we read from a field.
  virtual void FieldRead(Thread* thread,
                         Handle<mirror::Object> this_object,
                         ArtMethod* method,
                         uint32_t dex_pc,
                         ArtField* field) = 0;
  // Call-back for when we write into a field with the new value already handle-ized; use this
  // overload when the listener needs to go through a suspend point.
  virtual void FieldWritten(Thread* thread,
                            Handle<mirror::Object> this_object,
                            ArtMethod* method,
                            uint32_t dex_pc,
                            ArtField* field,
                            Handle<mirror::Object> field_value)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Call-back for when we write into a field.
  virtual void FieldWritten(Thread* thread,
                            Handle<mirror::Object> this_object,
                            ArtMethod* method,
                            uint32_t dex_pc,
                            ArtField* field,
                            const JValue& field_value)
      REQUIRES_SHARED(Locks::mutator_lock_) = 0;

  // Call-back when an exception is thrown.
  virtual void ExceptionThrown(Thread* thread,
                               Handle<mirror::Throwable> exception_object)
      REQUIRES_SHARED(Locks::mutator_lock_) = 0;

  // Call-back when an exception is caught/handled by java code.
  virtual void ExceptionHandled(Thread* thread, Handle<mirror::Throwable> exception_object)
      REQUIRES_SHARED(Locks::mutator_lock_) = 0;

  // Call-back for when we execute a branch.
  virtual void Branch(Thread* thread,
                      ArtMethod* method,
                      uint32_t dex_pc,
                      int32_t dex_pc_offset)
      REQUIRES_SHARED(Locks::mutator_lock_) = 0;

  // Call-back when a shadow_frame with the needs_notify_pop_ boolean set is popped off the stack
  // by either a return or an exception. Normally instrumentation listeners should ensure that
  // there are shadow-frames by deoptimizing stacks.
  virtual void WatchedFramePop(Thread* thread ATTRIBUTE_UNUSED,
                               const ShadowFrame& frame ATTRIBUTE_UNUSED)
      REQUIRES_SHARED(Locks::mutator_lock_) = 0;
};
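
// Illustrative sketch (not part of this header): a minimal listener that only logs method entry.
// All other pure-virtual call-backs must still be overridden, typically with empty bodies.
//
//   class EntryLogger : public instrumentation::InstrumentationListener {
//    public:
//     void MethodEntered(Thread* thread, ArtMethod* method)
//         REQUIRES_SHARED(Locks::mutator_lock_) override {
//       LOG(INFO) << "Entered " << ArtMethod::PrettyMethod(method);
//     }
//     // ... empty overrides for the remaining pure-virtual call-backs ...
//   };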

class Instrumentation;
// A helper to send instrumentation events while popping the stack in a safe way.
class InstrumentationStackPopper {
 public:
  explicit InstrumentationStackPopper(Thread* self);
  ~InstrumentationStackPopper() REQUIRES_SHARED(Locks::mutator_lock_);

  // Increase the number of frames being popped up to `stack_pointer`. Return true if the
  // frames were popped without any exceptions, false otherwise. The exception that caused
  // the pop is `exception`.
  bool PopFramesTo(uintptr_t stack_pointer, /*in-out*/MutableHandle<mirror::Throwable>& exception)
      REQUIRES_SHARED(Locks::mutator_lock_);

 private:
  Thread* self_;
  Instrumentation* instrumentation_;
  // The stack pointer limit for frames to pop.
  uintptr_t pop_until_;
};
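
// Illustrative sketch (not part of this header): using the popper while an exception unwinds.
// `target_sp` and `exception_handle` are hypothetical locals.
//
//   InstrumentationStackPopper popper(self);
//   if (!popper.PopFramesTo(target_sp, exception_handle)) {
//     // A listener raised a new exception while frames were being popped;
//     // `exception_handle` now refers to it and unwinding continues with that exception.
//   }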

// Instrumentation is a catch-all for when extra information is required from the runtime. The
// typical use for instrumentation is for profiling and debugging. Instrumentation may add stubs
// to method entry and exit; it may also force execution to be switched to the interpreter and
// trigger deoptimization.
class Instrumentation {
 public:
  enum InstrumentationEvent {
    kMethodEntered = 0x1,
    kMethodExited = 0x2,
    kMethodUnwind = 0x4,
    kDexPcMoved = 0x8,
    kFieldRead = 0x10,
    kFieldWritten = 0x20,
    kExceptionThrown = 0x40,
    kBranch = 0x80,
    kWatchedFramePop = 0x200,
    kExceptionHandled = 0x400,
  };

  enum class InstrumentationLevel {
    kInstrumentNothing,                   // execute without instrumentation
    kInstrumentWithInstrumentationStubs,  // execute with instrumentation entry/exit stubs
    kInstrumentWithInterpreter            // execute with interpreter
  };

  Instrumentation();

  static constexpr MemberOffset NeedsEntryExitHooksOffset() {
    // Assert that instrumentation_stubs_installed_ is 8 bits wide. If the size changes,
    // update the compare instructions in the code generator when generating checks for
    // MethodEntryExitHooks.
    static_assert(sizeof(instrumentation_stubs_installed_) == 1,
                  "instrumentation_stubs_installed_ isn't expected size");
    return MemberOffset(OFFSETOF_MEMBER(Instrumentation, instrumentation_stubs_installed_));
  }

  // Add a listener to be notified of the masked-together set of instrumentation events. This
  // suspends the runtime to install stubs. You are expected to hold the mutator lock as a proxy
  // for saying you should have suspended all threads (installing stubs while threads are running
  // will break).
  void AddListener(InstrumentationListener* listener, uint32_t events)
      REQUIRES(Locks::mutator_lock_, !Locks::thread_list_lock_, !Locks::classlinker_classes_lock_);

  // Removes a listener, possibly removing instrumentation stubs.
  void RemoveListener(InstrumentationListener* listener, uint32_t events)
      REQUIRES(Locks::mutator_lock_, !Locks::thread_list_lock_, !Locks::classlinker_classes_lock_);
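
  // Illustrative sketch (not part of this header): registering a listener for a masked-together
  // set of events (`listener` is a hypothetical InstrumentationListener*; the caller must hold
  // the mutator lock as described above):
  //
  //   uint32_t events = Instrumentation::kMethodEntered |
  //                     Instrumentation::kMethodExited |
  //                     Instrumentation::kExceptionThrown;
  //   Runtime::Current()->GetInstrumentation()->AddListener(listener, events);
  //   ...
  //   Runtime::Current()->GetInstrumentation()->RemoveListener(listener, events);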

  // Calls UndeoptimizeEverything which may visit class linker classes through ConfigureStubs.
  void DisableDeoptimization(const char* key)
      REQUIRES(Locks::mutator_lock_, Roles::uninterruptible_)
      REQUIRES(!GetDeoptimizedMethodsLock());

  bool AreAllMethodsDeoptimized() const {
    return InterpreterStubsInstalled();
  }
  bool ShouldNotifyMethodEnterExitEvents() const REQUIRES_SHARED(Locks::mutator_lock_);

  // Executes everything with interpreter.
  void DeoptimizeEverything(const char* key)
      REQUIRES(Locks::mutator_lock_, Roles::uninterruptible_)
      REQUIRES(!Locks::thread_list_lock_,
               !Locks::classlinker_classes_lock_,
               !GetDeoptimizedMethodsLock());

  // Executes everything with compiled code (or interpreter if there is no code). May visit class
  // linker classes through ConfigureStubs.
  void UndeoptimizeEverything(const char* key)
      REQUIRES(Locks::mutator_lock_, Roles::uninterruptible_)
      REQUIRES(!Locks::thread_list_lock_,
               !Locks::classlinker_classes_lock_,
               !GetDeoptimizedMethodsLock());

  // Deoptimize a method by forcing its execution with the interpreter. Note that a static method
  // (other than a class initializer) set to the resolution trampoline will be deoptimized only
  // once its declaring class is initialized.
  void Deoptimize(ArtMethod* method)
      REQUIRES(Locks::mutator_lock_, !Locks::thread_list_lock_, !GetDeoptimizedMethodsLock());

  // Undeoptimize a method by restoring its entrypoints. Note that a static method
  // (other than a class initializer) set to the resolution trampoline will be updated only once
  // its declaring class is initialized.
  void Undeoptimize(ArtMethod* method)
      REQUIRES(Locks::mutator_lock_, !Locks::thread_list_lock_, !GetDeoptimizedMethodsLock());

  // Indicates whether the method has been deoptimized so it is executed with the interpreter.
  bool IsDeoptimized(ArtMethod* method)
      REQUIRES(!GetDeoptimizedMethodsLock()) REQUIRES_SHARED(Locks::mutator_lock_);

  // Indicates whether any method needs to be deoptimized. This is used to avoid walking the stack
  // to determine if a deoptimization is required.
  bool IsDeoptimizedMethodsEmpty() const
      REQUIRES(!GetDeoptimizedMethodsLock()) REQUIRES_SHARED(Locks::mutator_lock_);

  // Enable method tracing by installing instrumentation entry/exit stubs or the interpreter.
  void EnableMethodTracing(const char* key,
                           bool needs_interpreter = kDeoptimizeForAccurateMethodEntryExitListeners)
      REQUIRES(Locks::mutator_lock_, Roles::uninterruptible_)
      REQUIRES(!Locks::thread_list_lock_,
               !Locks::classlinker_classes_lock_,
               !GetDeoptimizedMethodsLock());

  // Disable method tracing by uninstalling instrumentation entry/exit stubs or the interpreter.
  void DisableMethodTracing(const char* key)
      REQUIRES(Locks::mutator_lock_, Roles::uninterruptible_)
      REQUIRES(!Locks::thread_list_lock_,
               !Locks::classlinker_classes_lock_,
               !GetDeoptimizedMethodsLock());

  void InstrumentQuickAllocEntryPoints() REQUIRES(!Locks::instrument_entrypoints_lock_);
  void UninstrumentQuickAllocEntryPoints() REQUIRES(!Locks::instrument_entrypoints_lock_);
  void InstrumentQuickAllocEntryPointsLocked()
      REQUIRES(Locks::instrument_entrypoints_lock_, !Locks::thread_list_lock_,
               !Locks::runtime_shutdown_lock_);
  void UninstrumentQuickAllocEntryPointsLocked()
      REQUIRES(Locks::instrument_entrypoints_lock_, !Locks::thread_list_lock_,
               !Locks::runtime_shutdown_lock_);
  void ResetQuickAllocEntryPoints() REQUIRES(Locks::runtime_shutdown_lock_);

  // Returns a string representation of the given entry point.
  static std::string EntryPointString(const void* code);

  // Initialize the entrypoint of the method. `aot_code` is the AOT code.
  void InitializeMethodsCode(ArtMethod* method, const void* aot_code)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Update the code of a method respecting any installed stubs.
  void UpdateMethodsCode(ArtMethod* method, const void* new_code)
      REQUIRES_SHARED(Locks::mutator_lock_) REQUIRES(!GetDeoptimizedMethodsLock());

  // Update the code of a native method to a JITed stub.
  void UpdateNativeMethodsCodeToJitCode(ArtMethod* method, const void* new_code)
      REQUIRES_SHARED(Locks::mutator_lock_) REQUIRES(!GetDeoptimizedMethodsLock());

  // Return the code that we can execute for an invoke, including from the JIT.
  const void* GetCodeForInvoke(ArtMethod* method) REQUIRES_SHARED(Locks::mutator_lock_);

  // Return the code that we can execute considering the current instrumentation level.
  // If interpreter stubs are installed, return the interpreter bridge. If the entry/exit stubs
  // are installed, return an instrumentation entry point. Otherwise, return the code that
  // can be executed, including from the JIT.
  const void* GetMaybeInstrumentedCodeForInvoke(ArtMethod* method)
      REQUIRES_SHARED(Locks::mutator_lock_);

  void ForceInterpretOnly() {
    forced_interpret_only_ = true;
  }

  bool EntryExitStubsInstalled() const {
    return instrumentation_level_ == InstrumentationLevel::kInstrumentWithInstrumentationStubs ||
           instrumentation_level_ == InstrumentationLevel::kInstrumentWithInterpreter;
  }

  bool InterpreterStubsInstalled() const {
    return instrumentation_level_ == InstrumentationLevel::kInstrumentWithInterpreter;
  }

  // Called by ArtMethod::Invoke to determine dispatch mechanism.
  bool InterpretOnly() const {
    return forced_interpret_only_ || InterpreterStubsInstalled();
  }
  bool InterpretOnly(ArtMethod* method) REQUIRES_SHARED(Locks::mutator_lock_);

  bool IsForcedInterpretOnly() const {
    return forced_interpret_only_;
  }

  bool AreExitStubsInstalled() const {
    return instrumentation_stubs_installed_;
  }

  bool HasMethodEntryListeners() const REQUIRES_SHARED(Locks::mutator_lock_) {
    return have_method_entry_listeners_;
  }

  bool HasMethodExitListeners() const REQUIRES_SHARED(Locks::mutator_lock_) {
    return have_method_exit_listeners_;
  }

  bool HasMethodUnwindListeners() const REQUIRES_SHARED(Locks::mutator_lock_) {
    return have_method_unwind_listeners_;
  }

  bool HasDexPcListeners() const REQUIRES_SHARED(Locks::mutator_lock_) {
    return have_dex_pc_listeners_;
  }

  bool HasFieldReadListeners() const REQUIRES_SHARED(Locks::mutator_lock_) {
    return have_field_read_listeners_;
  }

  bool HasFieldWriteListeners() const REQUIRES_SHARED(Locks::mutator_lock_) {
    return have_field_write_listeners_;
  }

  bool HasExceptionThrownListeners() const REQUIRES_SHARED(Locks::mutator_lock_) {
    return have_exception_thrown_listeners_;
  }

  bool HasBranchListeners() const REQUIRES_SHARED(Locks::mutator_lock_) {
    return have_branch_listeners_;
  }

  bool HasWatchedFramePopListeners() const REQUIRES_SHARED(Locks::mutator_lock_) {
    return have_watched_frame_pop_listeners_;
  }

  bool HasExceptionHandledListeners() const REQUIRES_SHARED(Locks::mutator_lock_) {
    return have_exception_handled_listeners_;
  }

  bool NeedsSlowInterpreterForListeners() const REQUIRES_SHARED(Locks::mutator_lock_) {
    return have_field_read_listeners_ ||
           have_field_write_listeners_ ||
           have_watched_frame_pop_listeners_ ||
           have_exception_handled_listeners_;
  }

  // Inform listeners that a method has been entered. Listeners may be installed while code is
  // executing, so we can get method enter events for methods already on the stack.
  void MethodEnterEvent(Thread* thread, ArtMethod* method) const
      REQUIRES_SHARED(Locks::mutator_lock_) {
    if (UNLIKELY(HasMethodEntryListeners())) {
      MethodEnterEventImpl(thread, method);
    }
  }

  // Inform listeners that a method has been exited.
  template<typename T>
  void MethodExitEvent(Thread* thread,
                       ArtMethod* method,
                       OptionalFrame frame,
                       T& return_value) const
      REQUIRES_SHARED(Locks::mutator_lock_) {
    if (UNLIKELY(HasMethodExitListeners())) {
      MethodExitEventImpl(thread, method, frame, return_value);
    }
  }
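
  // Illustrative sketch (not part of this header): reporting a method exit with a primitive
  // return value (`self`, `method`, and `instr` are hypothetical locals):
  //
  //   JValue result;
  //   result.SetI(42);
  //   instr->MethodExitEvent(self, method, /*frame=*/std::nullopt, result);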

  // Inform listeners that a method has been exited due to an exception.
  void MethodUnwindEvent(Thread* thread,
                         ObjPtr<mirror::Object> this_object,
                         ArtMethod* method,
                         uint32_t dex_pc) const
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Inform listeners that the dex pc has moved (only supported by the interpreter).
  void DexPcMovedEvent(Thread* thread,
                       ObjPtr<mirror::Object> this_object,
                       ArtMethod* method,
                       uint32_t dex_pc) const
      REQUIRES_SHARED(Locks::mutator_lock_) {
    if (UNLIKELY(HasDexPcListeners())) {
      DexPcMovedEventImpl(thread, this_object, method, dex_pc);
    }
  }

  // Inform listeners that a branch has been taken (only supported by the interpreter).
  void Branch(Thread* thread, ArtMethod* method, uint32_t dex_pc, int32_t offset) const
      REQUIRES_SHARED(Locks::mutator_lock_) {
    if (UNLIKELY(HasBranchListeners())) {
      BranchImpl(thread, method, dex_pc, offset);
    }
  }

  // Inform listeners that we read a field (only supported by the interpreter).
  void FieldReadEvent(Thread* thread,
                      ObjPtr<mirror::Object> this_object,
                      ArtMethod* method,
                      uint32_t dex_pc,
                      ArtField* field) const
      REQUIRES_SHARED(Locks::mutator_lock_) {
    if (UNLIKELY(HasFieldReadListeners())) {
      FieldReadEventImpl(thread, this_object, method, dex_pc, field);
    }
  }

  // Inform listeners that we write a field (only supported by the interpreter).
  void FieldWriteEvent(Thread* thread,
                       ObjPtr<mirror::Object> this_object,
                       ArtMethod* method,
                       uint32_t dex_pc,
                       ArtField* field,
                       const JValue& field_value) const
      REQUIRES_SHARED(Locks::mutator_lock_) {
    if (UNLIKELY(HasFieldWriteListeners())) {
      FieldWriteEventImpl(thread, this_object, method, dex_pc, field, field_value);
    }
  }

  // Inform listeners that a watched frame is being popped (by a return or an exception).
  void WatchedFramePopped(Thread* thread, const ShadowFrame& frame) const
      REQUIRES_SHARED(Locks::mutator_lock_) {
    if (UNLIKELY(HasWatchedFramePopListeners())) {
      WatchedFramePopImpl(thread, frame);
    }
  }

  // Inform listeners that an exception was thrown.
  void ExceptionThrownEvent(Thread* thread, ObjPtr<mirror::Throwable> exception_object) const
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Inform listeners that an exception has been handled. This is not sent for native code or for
  // exceptions which reach the end of the thread's stack.
  void ExceptionHandledEvent(Thread* thread, ObjPtr<mirror::Throwable> exception_object) const
      REQUIRES_SHARED(Locks::mutator_lock_);

  JValue GetReturnValue(Thread* self,
                        ArtMethod* method,
                        bool* is_ref,
                        uint64_t* gpr_result,
                        uint64_t* fpr_result) REQUIRES_SHARED(Locks::mutator_lock_);
  bool ShouldDeoptimizeMethod(Thread* self, const NthCallerVisitor& visitor)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Called when an instrumented method is entered. The intended link register (lr) is saved so
  // that returning causes a branch to the method exit stub. Generates method enter events.
  void PushInstrumentationStackFrame(Thread* self,
                                     ObjPtr<mirror::Object> this_object,
                                     ArtMethod* method,
                                     uintptr_t stack_pointer,
                                     uintptr_t lr,
                                     bool interpreter_entry)
      REQUIRES_SHARED(Locks::mutator_lock_);

  DeoptimizationMethodType GetDeoptimizationMethodType(ArtMethod* method)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Called when an instrumented method is exited. Removes the pushed instrumentation frame,
  // returning the intended link register. Generates method exit events. The gpr_result and
  // fpr_result pointers are pointers to the locations where the integer/pointer and floating point
  // result values of the function are stored. Both pointers must always be valid but the values
  // held there will only be meaningful if interpreted as the appropriate type given the function
  // being returned from.
  TwoWordReturn PopInstrumentationStackFrame(Thread* self,
                                             uintptr_t* return_pc_addr,
                                             uint64_t* gpr_result,
                                             uint64_t* fpr_result)
      REQUIRES_SHARED(Locks::mutator_lock_) REQUIRES(!GetDeoptimizedMethodsLock());

  // Pops instrumentation frames up to `stack_pointer` from the current thread. Returns the
  // return pc of the last instrumentation frame that's popped.
  uintptr_t PopFramesForDeoptimization(Thread* self, uintptr_t stack_pointer) const
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Call-back for ConfigureStubs.
  void InstallStubsForClass(ObjPtr<mirror::Class> klass) REQUIRES_SHARED(Locks::mutator_lock_)
      REQUIRES(!GetDeoptimizedMethodsLock());

  void InstallStubsForMethod(ArtMethod* method)
      REQUIRES_SHARED(Locks::mutator_lock_) REQUIRES(!GetDeoptimizedMethodsLock());

  // Install the instrumentation exit stub on every method of the stack of the given thread.
  // This is used by:
  //  - the debugger, to cause a deoptimization of all frames in the thread's stack (for
  //    example, after updating local variables);
  //  - tracing, to call method entry/exit hooks. For this we instrument
  //    the stack frames to run entry/exit hooks, but we don't need to deoptimize.
  // deopt_all_frames indicates whether the frames need to be deoptimized or not.
  void InstrumentThreadStack(Thread* thread, bool deopt_all_frames) REQUIRES(Locks::mutator_lock_);

  // Force all currently running frames to be deoptimized back to the interpreter. This should only
  // be used in cases where basically all compiled code has been invalidated.
  void DeoptimizeAllThreadFrames() REQUIRES(art::Locks::mutator_lock_);

  static size_t ComputeFrameId(Thread* self,
                               size_t frame_depth,
                               size_t inlined_frames_before_frame)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Does not hold lock, used to check if someone changed from not instrumented to instrumented
  // during a GC suspend point.
  bool AllocEntrypointsInstrumented() const REQUIRES_SHARED(Locks::mutator_lock_) {
    return alloc_entrypoints_instrumented_;
  }

  InstrumentationLevel GetCurrentInstrumentationLevel() const;

 private:
  // Returns true if moving to the given instrumentation level requires the installation of stubs.
  // False otherwise.
  bool RequiresInstrumentationInstallation(InstrumentationLevel new_level) const;

  // Returns true if we need the entry/exit stubs to call entry hooks. JITed code
  // calls the entry/exit hooks directly and doesn't need the stub.
  static bool CodeNeedsEntryExitStub(const void* code, ArtMethod* method)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Update the current instrumentation_level_.
  void UpdateInstrumentationLevel(InstrumentationLevel level);

  // Does the job of installing or removing instrumentation code within methods.
  // In order to support multiple clients using instrumentation at the same time,
  // the caller must pass a unique key (a string) identifying it so we remember which
  // instrumentation level it needs. Therefore the current instrumentation level
  // becomes the highest instrumentation level required by any client.
  void ConfigureStubs(const char* key, InstrumentationLevel desired_instrumentation_level)
      REQUIRES(Locks::mutator_lock_, Roles::uninterruptible_)
      REQUIRES(!GetDeoptimizedMethodsLock(),
               !Locks::thread_list_lock_,
               !Locks::classlinker_classes_lock_);
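
  // Illustrative sketch (not part of this header): with two hypothetical clients,
  //
  //   ConfigureStubs("Tracer", InstrumentationLevel::kInstrumentWithInstrumentationStubs);
  //   ConfigureStubs("Debugger", InstrumentationLevel::kInstrumentWithInterpreter);
  //
  // the effective level is kInstrumentWithInterpreter (the highest requested); it drops back
  // only once the "Debugger" key requests kInstrumentNothing.
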
  void UpdateStubs() REQUIRES(Locks::mutator_lock_, Roles::uninterruptible_)
      REQUIRES(!GetDeoptimizedMethodsLock(),
               !Locks::thread_list_lock_,
               !Locks::classlinker_classes_lock_);

  // If there are no pending deoptimizations, restores the stack to the normal state by updating
  // the return pcs to actual return addresses from the instrumentation stack and clears the
  // instrumentation stack.
  void MaybeRestoreInstrumentationStack() REQUIRES(Locks::mutator_lock_);

  // No thread safety analysis to get around SetQuickAllocEntryPointsInstrumented requiring
  // exclusive access to the mutator lock, which you can't get if the runtime isn't started.
  void SetEntrypointsInstrumented(bool instrumented) NO_THREAD_SAFETY_ANALYSIS;

  void MethodEnterEventImpl(Thread* thread, ArtMethod* method) const
      REQUIRES_SHARED(Locks::mutator_lock_);
  template <typename T>
  void MethodExitEventImpl(Thread* thread,
                           ArtMethod* method,
                           OptionalFrame frame,
                           T& return_value) const
      REQUIRES_SHARED(Locks::mutator_lock_);
  void DexPcMovedEventImpl(Thread* thread,
                           ObjPtr<mirror::Object> this_object,
                           ArtMethod* method,
                           uint32_t dex_pc) const
      REQUIRES_SHARED(Locks::mutator_lock_);
  void BranchImpl(Thread* thread, ArtMethod* method, uint32_t dex_pc, int32_t offset) const
      REQUIRES_SHARED(Locks::mutator_lock_);
  void WatchedFramePopImpl(Thread* thread, const ShadowFrame& frame) const
      REQUIRES_SHARED(Locks::mutator_lock_);
  void FieldReadEventImpl(Thread* thread,
                          ObjPtr<mirror::Object> this_object,
                          ArtMethod* method,
                          uint32_t dex_pc,
                          ArtField* field) const
      REQUIRES_SHARED(Locks::mutator_lock_);
  void FieldWriteEventImpl(Thread* thread,
                           ObjPtr<mirror::Object> this_object,
                           ArtMethod* method,
                           uint32_t dex_pc,
                           ArtField* field,
                           const JValue& field_value) const
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Read barrier-aware utility functions for accessing deoptimized_methods_.
  bool AddDeoptimizedMethod(ArtMethod* method)
      REQUIRES_SHARED(Locks::mutator_lock_) REQUIRES(GetDeoptimizedMethodsLock());
  bool IsDeoptimizedMethod(ArtMethod* method)
      REQUIRES_SHARED(Locks::mutator_lock_, GetDeoptimizedMethodsLock());
  bool RemoveDeoptimizedMethod(ArtMethod* method)
      REQUIRES_SHARED(Locks::mutator_lock_) REQUIRES(GetDeoptimizedMethodsLock());
  ArtMethod* BeginDeoptimizedMethod()
      REQUIRES_SHARED(Locks::mutator_lock_, GetDeoptimizedMethodsLock());
  bool IsDeoptimizedMethodsEmptyLocked() const
      REQUIRES_SHARED(Locks::mutator_lock_, GetDeoptimizedMethodsLock());
  void UpdateMethodsCodeImpl(ArtMethod* method, const void* new_code)
      REQUIRES_SHARED(Locks::mutator_lock_) REQUIRES(!GetDeoptimizedMethodsLock());

  ReaderWriterMutex* GetDeoptimizedMethodsLock() const {
    return deoptimized_methods_lock_.get();
  }

  // A counter that's incremented every time DeoptimizeAllThreadFrames is called. We check each
  // InstrumentationStackFrame's creation id against this number and if they differ we deopt even
  // if we could otherwise continue running.
  uint64_t current_force_deopt_id_ GUARDED_BY(Locks::mutator_lock_);

  // Have we hijacked ArtMethod::code_ so that it calls instrumentation/interpreter code?
  bool instrumentation_stubs_installed_;

  // The required level of instrumentation. This could be one of the following values:
  // kInstrumentNothing: no instrumentation support is needed.
  // kInstrumentWithInstrumentationStubs: needs support to call method entry/exit stubs.
  // kInstrumentWithInterpreter: only execute with the interpreter.
  Instrumentation::InstrumentationLevel instrumentation_level_;

  // Did the runtime request we only run in the interpreter? i.e. -Xint mode.
  bool forced_interpret_only_;

  // Do we have any listeners for method entry events? Short-cut to avoid taking the
  // instrumentation_lock_.
  bool have_method_entry_listeners_ GUARDED_BY(Locks::mutator_lock_);

  // Do we have any listeners for method exit events? Short-cut to avoid taking the
  // instrumentation_lock_.
  bool have_method_exit_listeners_ GUARDED_BY(Locks::mutator_lock_);

  // Do we have any listeners for method unwind events? Short-cut to avoid taking the
  // instrumentation_lock_.
  bool have_method_unwind_listeners_ GUARDED_BY(Locks::mutator_lock_);

  // Do we have any listeners for dex move events? Short-cut to avoid taking the
  // instrumentation_lock_.
  bool have_dex_pc_listeners_ GUARDED_BY(Locks::mutator_lock_);

  // Do we have any listeners for field read events? Short-cut to avoid taking the
  // instrumentation_lock_.
  bool have_field_read_listeners_ GUARDED_BY(Locks::mutator_lock_);

  // Do we have any listeners for field write events? Short-cut to avoid taking the
  // instrumentation_lock_.
  bool have_field_write_listeners_ GUARDED_BY(Locks::mutator_lock_);

  // Do we have any exception thrown listeners? Short-cut to avoid taking the instrumentation_lock_.
  bool have_exception_thrown_listeners_ GUARDED_BY(Locks::mutator_lock_);

  // Do we have any frame pop listeners? Short-cut to avoid taking the instrumentation_lock_.
  bool have_watched_frame_pop_listeners_ GUARDED_BY(Locks::mutator_lock_);

  // Do we have any branch listeners? Short-cut to avoid taking the instrumentation_lock_.
  bool have_branch_listeners_ GUARDED_BY(Locks::mutator_lock_);

  // Do we have any exception handled listeners? Short-cut to avoid taking the
  // instrumentation_lock_.
  bool have_exception_handled_listeners_ GUARDED_BY(Locks::mutator_lock_);

  // Contains the instrumentation level required by each client of the instrumentation identified
  // by a string key.
  using InstrumentationLevelTable = SafeMap<const char*, InstrumentationLevel>;
  InstrumentationLevelTable requested_instrumentation_levels_ GUARDED_BY(Locks::mutator_lock_);

  // The event listeners, written to with the mutator_lock_ exclusively held.
  // Mutators must be able to iterate over these lists concurrently, that is, with listeners being
  // added or removed while iterating. The modifying thread holds the exclusive lock,
  // so other threads cannot iterate (i.e. read the data of the list) at the same time but they
  // do keep iterators that need to remain valid. This is the reason these listeners are std::list
  // and not, for example, std::vector: the existing storage for a std::list does not move.
  // Note that mutators cannot make a copy of these lists before iterating, as the instrumentation
  // listeners can also be deleted concurrently.
  // As a result, these lists are never trimmed. That's acceptable given the low number of
  // listeners we have.
  std::list<InstrumentationListener*> method_entry_listeners_ GUARDED_BY(Locks::mutator_lock_);
  std::list<InstrumentationListener*> method_exit_listeners_ GUARDED_BY(Locks::mutator_lock_);
  std::list<InstrumentationListener*> method_unwind_listeners_ GUARDED_BY(Locks::mutator_lock_);
  std::list<InstrumentationListener*> branch_listeners_ GUARDED_BY(Locks::mutator_lock_);
  std::list<InstrumentationListener*> dex_pc_listeners_ GUARDED_BY(Locks::mutator_lock_);
  std::list<InstrumentationListener*> field_read_listeners_ GUARDED_BY(Locks::mutator_lock_);
  std::list<InstrumentationListener*> field_write_listeners_ GUARDED_BY(Locks::mutator_lock_);
  std::list<InstrumentationListener*> exception_thrown_listeners_ GUARDED_BY(Locks::mutator_lock_);
  std::list<InstrumentationListener*> watched_frame_pop_listeners_ GUARDED_BY(Locks::mutator_lock_);
  std::list<InstrumentationListener*> exception_handled_listeners_ GUARDED_BY(Locks::mutator_lock_);

  // The set of methods being deoptimized (by the debugger) which must be executed with the
  // interpreter only.
  mutable std::unique_ptr<ReaderWriterMutex> deoptimized_methods_lock_ BOTTOM_MUTEX_ACQUIRED_AFTER;
  std::unordered_set<ArtMethod*> deoptimized_methods_ GUARDED_BY(GetDeoptimizedMethodsLock());

  // Greater than 0 if quick alloc entry points instrumented.
  size_t quick_alloc_entry_points_instrumentation_counter_;

  // alloc_entrypoints_instrumented_ is only updated with all the threads suspended. This is done
  // to prevent races with the GC, where the GC relies on thread suspension to only see
  // alloc_entrypoints_instrumented_ change during suspend points.
  bool alloc_entrypoints_instrumented_;

  friend class InstrumentationTest;  // For GetCurrentInstrumentationLevel and ConfigureStubs.
  friend class InstrumentationStackPopper;  // For popping instrumentation frames.
  friend void InstrumentationInstallStack(Thread*, void*, bool);

  DISALLOW_COPY_AND_ASSIGN(Instrumentation);
};
std::ostream& operator<<(std::ostream& os, Instrumentation::InstrumentationEvent rhs);
std::ostream& operator<<(std::ostream& os, Instrumentation::InstrumentationLevel rhs);

// An element in the instrumentation side stack maintained in art::Thread.
struct InstrumentationStackFrame {
  InstrumentationStackFrame(mirror::Object* this_object,
                            ArtMethod* method,
                            uintptr_t return_pc,
                            bool interpreter_entry,
                            uint64_t force_deopt_id)
      : this_object_(this_object),
        method_(method),
        return_pc_(return_pc),
        interpreter_entry_(interpreter_entry),
        force_deopt_id_(force_deopt_id) {
  }

  std::string Dump() const REQUIRES_SHARED(Locks::mutator_lock_);

  mirror::Object* this_object_;
  ArtMethod* method_;
  uintptr_t return_pc_;
  bool interpreter_entry_;
  uint64_t force_deopt_id_;
};

}  // namespace instrumentation
}  // namespace art

#endif  // ART_RUNTIME_INSTRUMENTATION_H_