/*
 * Copyright (C) 2011 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ART_RUNTIME_INSTRUMENTATION_H_
#define ART_RUNTIME_INSTRUMENTATION_H_

#include <stdint.h>
#include <list>
#include <unordered_set>

#include "arch/instruction_set.h"
#include "base/macros.h"
#include "base/mutex.h"
#include "gc_root.h"
#include "safe_map.h"

namespace art {
namespace mirror {
  class Class;
  class Object;
  class Throwable;
}  // namespace mirror
class ArtField;
class ArtMethod;
union JValue;
class Thread;

namespace instrumentation {

// Interpreter handler tables.
enum InterpreterHandlerTable {
  kMainHandlerTable = 0,          // Main handler table: no suspend check, no instrumentation.
  kAlternativeHandlerTable = 1,   // Alternative handler table: suspend check and/or instrumentation
                                  // enabled.
  kNumHandlerTables
};

// Do we want to deoptimize for method entry and exit listeners or just try to intercept
// invocations? Deoptimization forces all code to run in the interpreter and considerably hurts the
// application's performance.
static constexpr bool kDeoptimizeForAccurateMethodEntryExitListeners = true;

// Instrumentation event listener API. Registered listeners will get the appropriate call back for
// the events they are listening for. The call backs supply the thread, method and dex_pc the event
// occurred upon. The thread may or may not be Thread::Current().
struct InstrumentationListener {
  InstrumentationListener() {}
  virtual ~InstrumentationListener() {}

  // Call-back for when a method is entered.
  virtual void MethodEntered(Thread* thread, mirror::Object* this_object,
                             ArtMethod* method,
                             uint32_t dex_pc) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) = 0;

  // Call-back for when a method is exited.
  virtual void MethodExited(Thread* thread, mirror::Object* this_object,
                            ArtMethod* method, uint32_t dex_pc,
                            const JValue& return_value)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) = 0;

  // Call-back for when a method is popped due to an exception throw. A method will either cause a
  // MethodExited call-back or a MethodUnwind call-back when its activation is removed.
  virtual void MethodUnwind(Thread* thread, mirror::Object* this_object,
                            ArtMethod* method, uint32_t dex_pc)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) = 0;

  // Call-back for when the dex pc moves in a method.
  virtual void DexPcMoved(Thread* thread, mirror::Object* this_object,
                          ArtMethod* method, uint32_t new_dex_pc)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) = 0;

  // Call-back for when we read from a field.
  virtual void FieldRead(Thread* thread, mirror::Object* this_object, ArtMethod* method,
                         uint32_t dex_pc, ArtField* field) = 0;

  // Call-back for when we write into a field.
  virtual void FieldWritten(Thread* thread, mirror::Object* this_object, ArtMethod* method,
                            uint32_t dex_pc, ArtField* field, const JValue& field_value) = 0;

  // Call-back when an exception is caught.
  virtual void ExceptionCaught(Thread* thread, mirror::Throwable* exception_object)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) = 0;

  // Call-back for when we get a backward branch.
  virtual void BackwardBranch(Thread* thread, ArtMethod* method, int32_t dex_pc_offset)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) = 0;
};

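// Illustrative sketch (not part of the runtime): a client that only cares about method entry
// events might implement the listener roughly as follows, overriding the remaining pure-virtual
// call-backs as no-ops. The class name and logging body are hypothetical.
//
//   class MethodEntryLogger FINAL : public InstrumentationListener {
//    public:
//     void MethodEntered(Thread* thread, mirror::Object* this_object,
//                        ArtMethod* method, uint32_t dex_pc)
//         SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) OVERRIDE {
//       LOG(INFO) << "Entered " << PrettyMethod(method) << " at dex pc " << dex_pc;
//     }
//     // ... other call-backs overridden with empty bodies ...
//   };
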
// Instrumentation is a catch-all for when extra information is required from the runtime. The
// typical use for instrumentation is for profiling and debugging. Instrumentation may add stubs
// to method entry and exit; it may also force execution to be switched to the interpreter and
// trigger deoptimization.
class Instrumentation {
 public:
  enum InstrumentationEvent {
    kMethodEntered = 0x1,
    kMethodExited = 0x2,
    kMethodUnwind = 0x4,
    kDexPcMoved = 0x8,
    kFieldRead = 0x10,
    kFieldWritten = 0x20,
    kExceptionCaught = 0x40,
    kBackwardBranch = 0x80,
  };

  enum class InstrumentationLevel {
    kInstrumentNothing,                   // execute without instrumentation
    kInstrumentWithInstrumentationStubs,  // execute with instrumentation entry/exit stubs
    kInstrumentWithInterpreter            // execute with interpreter
  };

  Instrumentation();

  // Add a listener to be notified of the masked together set of instrumentation events. This
  // suspends the runtime to install stubs. You are expected to hold the mutator lock as a proxy
  // for saying you should have suspended all threads (installing stubs while threads are running
  // will break).
  void AddListener(InstrumentationListener* listener, uint32_t events)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::mutator_lock_)
      LOCKS_EXCLUDED(Locks::thread_list_lock_, Locks::classlinker_classes_lock_);

  // Removes a listener, possibly removing instrumentation stubs.
  void RemoveListener(InstrumentationListener* listener, uint32_t events)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::mutator_lock_)
      LOCKS_EXCLUDED(Locks::thread_list_lock_, Locks::classlinker_classes_lock_);

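  // Illustrative sketch: events is a bit mask of InstrumentationEvent values, so one listener can
  // be registered for several events at once. The listener instance and the surrounding thread
  // suspension are assumed to be provided by the client.
  //
  //   uint32_t events = Instrumentation::kMethodEntered | Instrumentation::kMethodExited;
  //   // ... all threads suspended, mutator lock held exclusively ...
  //   instrumentation->AddListener(&listener, events);
  //   // ... later, with the same lock requirements ...
  //   instrumentation->RemoveListener(&listener, events);
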
  // Deoptimization.
  void EnableDeoptimization()
      EXCLUSIVE_LOCKS_REQUIRED(Locks::mutator_lock_)
      LOCKS_EXCLUDED(deoptimized_methods_lock_);
  void DisableDeoptimization(const char* key)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::mutator_lock_)
      LOCKS_EXCLUDED(deoptimized_methods_lock_);
  bool AreAllMethodsDeoptimized() const {
    return interpreter_stubs_installed_;
  }
  bool ShouldNotifyMethodEnterExitEvents() const SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  // Executes everything with interpreter.
  void DeoptimizeEverything(const char* key)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::mutator_lock_)
      LOCKS_EXCLUDED(Locks::thread_list_lock_, Locks::classlinker_classes_lock_);

  // Executes everything with compiled code (or interpreter if there is no code).
  void UndeoptimizeEverything(const char* key)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::mutator_lock_)
      LOCKS_EXCLUDED(Locks::thread_list_lock_, Locks::classlinker_classes_lock_);

  // Deoptimize a method by forcing its execution with the interpreter. However, a static
  // method (except a class initializer) set to the resolution trampoline will be deoptimized only
  // once its declaring class is initialized.
  void Deoptimize(ArtMethod* method)
      LOCKS_EXCLUDED(Locks::thread_list_lock_, deoptimized_methods_lock_)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::mutator_lock_);

  // Undeoptimize the method by restoring its entrypoints. However, a static method
  // (except a class initializer) set to the resolution trampoline will be updated only once its
  // declaring class is initialized.
  void Undeoptimize(ArtMethod* method)
      LOCKS_EXCLUDED(Locks::thread_list_lock_, deoptimized_methods_lock_)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::mutator_lock_);

  // Indicates whether the method has been deoptimized so it is executed with the interpreter.
  bool IsDeoptimized(ArtMethod* method)
      LOCKS_EXCLUDED(deoptimized_methods_lock_)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  // Enable method tracing by installing instrumentation entry/exit stubs or interpreter.
  void EnableMethodTracing(const char* key,
                           bool needs_interpreter = kDeoptimizeForAccurateMethodEntryExitListeners)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::mutator_lock_)
      LOCKS_EXCLUDED(Locks::thread_list_lock_, Locks::classlinker_classes_lock_);

  // Disable method tracing by uninstalling instrumentation entry/exit stubs or interpreter.
  void DisableMethodTracing(const char* key)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::mutator_lock_)
      LOCKS_EXCLUDED(Locks::thread_list_lock_, Locks::classlinker_classes_lock_);

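  // Illustrative sketch: a tracing client typically registers a listener for the method events it
  // needs and then enables tracing under its own key, with the same lock requirements as
  // AddListener. The key string and listener below are hypothetical.
  //
  //   static constexpr const char* kTracerKey = "MyTracer";
  //   uint32_t trace_events = Instrumentation::kMethodEntered | Instrumentation::kMethodExited |
  //                           Instrumentation::kMethodUnwind;
  //   instrumentation->AddListener(&listener, trace_events);
  //   instrumentation->EnableMethodTracing(kTracerKey);
  //   // ... tracing session ...
  //   instrumentation->DisableMethodTracing(kTracerKey);
  //   instrumentation->RemoveListener(&listener, trace_events);
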
  InterpreterHandlerTable GetInterpreterHandlerTable() const
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
    return interpreter_handler_table_;
  }

  void InstrumentQuickAllocEntryPoints() LOCKS_EXCLUDED(Locks::instrument_entrypoints_lock_);
  void UninstrumentQuickAllocEntryPoints() LOCKS_EXCLUDED(Locks::instrument_entrypoints_lock_);
  void InstrumentQuickAllocEntryPointsLocked()
      EXCLUSIVE_LOCKS_REQUIRED(Locks::instrument_entrypoints_lock_)
      LOCKS_EXCLUDED(Locks::thread_list_lock_, Locks::runtime_shutdown_lock_);
  void UninstrumentQuickAllocEntryPointsLocked()
      EXCLUSIVE_LOCKS_REQUIRED(Locks::instrument_entrypoints_lock_)
      LOCKS_EXCLUDED(Locks::thread_list_lock_, Locks::runtime_shutdown_lock_);
  void ResetQuickAllocEntryPoints() EXCLUSIVE_LOCKS_REQUIRED(Locks::runtime_shutdown_lock_);

  // Update the code of a method respecting any installed stubs.
  void UpdateMethodsCode(ArtMethod* method, const void* quick_code)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  // Get the quick code for the given method. More efficient than asking the class linker as it
  // will short-cut to GetCode if instrumentation and static method resolution stubs aren't
  // installed.
  const void* GetQuickCodeFor(ArtMethod* method, size_t pointer_size) const
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  void ForceInterpretOnly() {
    interpret_only_ = true;
    forced_interpret_only_ = true;
  }

  // Called by ArtMethod::Invoke to determine dispatch mechanism.
  bool InterpretOnly() const {
    return interpret_only_;
  }

  bool IsForcedInterpretOnly() const {
    return forced_interpret_only_;
  }

  bool AreExitStubsInstalled() const {
    return instrumentation_stubs_installed_;
  }

  bool HasMethodEntryListeners() const SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
    return have_method_entry_listeners_;
  }

  bool HasMethodExitListeners() const SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
    return have_method_exit_listeners_;
  }

  bool HasMethodUnwindListeners() const SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
    return have_method_unwind_listeners_;
  }

  bool HasDexPcListeners() const SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
    return have_dex_pc_listeners_;
  }

  bool HasFieldReadListeners() const SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
    return have_field_read_listeners_;
  }

  bool HasFieldWriteListeners() const SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
    return have_field_write_listeners_;
  }

  bool HasExceptionCaughtListeners() const SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
    return have_exception_caught_listeners_;
  }

  bool HasBackwardBranchListeners() const SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
    return have_backward_branch_listeners_;
  }

  bool IsActive() const SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
    return have_dex_pc_listeners_ || have_method_entry_listeners_ || have_method_exit_listeners_ ||
        have_field_read_listeners_ || have_field_write_listeners_ ||
        have_exception_caught_listeners_ || have_method_unwind_listeners_;
  }

  // Inform listeners that a method has been entered. A dex PC is provided as we may install
  // listeners into executing code and get method enter events for methods already on the stack.
  void MethodEnterEvent(Thread* thread, mirror::Object* this_object,
                        ArtMethod* method, uint32_t dex_pc) const
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
    if (UNLIKELY(HasMethodEntryListeners())) {
      MethodEnterEventImpl(thread, this_object, method, dex_pc);
    }
  }

  // Inform listeners that a method has been exited.
  void MethodExitEvent(Thread* thread, mirror::Object* this_object,
                       ArtMethod* method, uint32_t dex_pc,
                       const JValue& return_value) const
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
    if (UNLIKELY(HasMethodExitListeners())) {
      MethodExitEventImpl(thread, this_object, method, dex_pc, return_value);
    }
  }

  // Inform listeners that a method has been exited due to an exception.
  void MethodUnwindEvent(Thread* thread, mirror::Object* this_object,
                         ArtMethod* method, uint32_t dex_pc) const
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  // Inform listeners that the dex pc has moved (only supported by the interpreter).
  void DexPcMovedEvent(Thread* thread, mirror::Object* this_object,
                       ArtMethod* method, uint32_t dex_pc) const
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
    if (UNLIKELY(HasDexPcListeners())) {
      DexPcMovedEventImpl(thread, this_object, method, dex_pc);
    }
  }

  // Inform listeners that a backward branch has been taken (only supported by the interpreter).
  void BackwardBranch(Thread* thread, ArtMethod* method, int32_t offset) const
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
    if (UNLIKELY(HasBackwardBranchListeners())) {
      BackwardBranchImpl(thread, method, offset);
    }
  }

  // Inform listeners that we read a field (only supported by the interpreter).
  void FieldReadEvent(Thread* thread, mirror::Object* this_object,
                      ArtMethod* method, uint32_t dex_pc,
                      ArtField* field) const
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
    if (UNLIKELY(HasFieldReadListeners())) {
      FieldReadEventImpl(thread, this_object, method, dex_pc, field);
    }
  }

  // Inform listeners that we write a field (only supported by the interpreter).
  void FieldWriteEvent(Thread* thread, mirror::Object* this_object,
                       ArtMethod* method, uint32_t dex_pc,
                       ArtField* field, const JValue& field_value) const
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
    if (UNLIKELY(HasFieldWriteListeners())) {
      FieldWriteEventImpl(thread, this_object, method, dex_pc, field, field_value);
    }
  }

  // Inform listeners that an exception was caught.
  void ExceptionCaughtEvent(Thread* thread, mirror::Throwable* exception_object) const
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  // Called when an instrumented method is entered. The intended link register (lr) is saved so
  // that returning causes a branch to the method exit stub. Generates method enter events.
  void PushInstrumentationStackFrame(Thread* self, mirror::Object* this_object,
                                     ArtMethod* method, uintptr_t lr,
                                     bool interpreter_entry)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  // Called when an instrumented method is exited. Removes the pushed instrumentation frame,
  // returning the intended link register. Generates method exit events.
  TwoWordReturn PopInstrumentationStackFrame(Thread* self, uintptr_t* return_pc,
                                             uint64_t gpr_result, uint64_t fpr_result)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  // Pops an instrumentation frame from the current thread and generates an unwind event.
  void PopMethodForUnwind(Thread* self, bool is_deoptimization) const
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  // Call-back used when configuring stubs.
  void InstallStubsForClass(mirror::Class* klass) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  void InstallStubsForMethod(ArtMethod* method)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

 private:
  InstrumentationLevel GetCurrentInstrumentationLevel() const;

  // Does the job of installing or removing instrumentation code within methods.
  // In order to support multiple clients using instrumentation at the same time,
  // the caller must pass a unique key (a string) identifying it so we remember which
  // instrumentation level it needs. The current instrumentation level therefore
  // becomes the highest instrumentation level required by any client.
  void ConfigureStubs(const char* key, InstrumentationLevel desired_instrumentation_level)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::mutator_lock_)
      LOCKS_EXCLUDED(Locks::thread_list_lock_, Locks::classlinker_classes_lock_,
                     deoptimized_methods_lock_);

  void UpdateInterpreterHandlerTable() EXCLUSIVE_LOCKS_REQUIRED(Locks::mutator_lock_) {
    interpreter_handler_table_ = IsActive() ? kAlternativeHandlerTable : kMainHandlerTable;
  }

  // No thread safety analysis to get around SetQuickAllocEntryPointsInstrumented requiring
  // exclusive access to mutator lock which you can't get if the runtime isn't started.
  void SetEntrypointsInstrumented(bool instrumented) NO_THREAD_SAFETY_ANALYSIS;

  void MethodEnterEventImpl(Thread* thread, mirror::Object* this_object,
                            ArtMethod* method, uint32_t dex_pc) const
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
  void MethodExitEventImpl(Thread* thread, mirror::Object* this_object,
                           ArtMethod* method,
                           uint32_t dex_pc, const JValue& return_value) const
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
  void DexPcMovedEventImpl(Thread* thread, mirror::Object* this_object,
                           ArtMethod* method, uint32_t dex_pc) const
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
  void BackwardBranchImpl(Thread* thread, ArtMethod* method, int32_t offset) const
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
  void FieldReadEventImpl(Thread* thread, mirror::Object* this_object,
                          ArtMethod* method, uint32_t dex_pc,
                          ArtField* field) const
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
  void FieldWriteEventImpl(Thread* thread, mirror::Object* this_object,
                           ArtMethod* method, uint32_t dex_pc,
                           ArtField* field, const JValue& field_value) const
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  // Read barrier-aware utility functions for accessing deoptimized_methods_.
  bool AddDeoptimizedMethod(ArtMethod* method)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_)
      EXCLUSIVE_LOCKS_REQUIRED(deoptimized_methods_lock_);
  bool IsDeoptimizedMethod(ArtMethod* method)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_, deoptimized_methods_lock_);
  bool RemoveDeoptimizedMethod(ArtMethod* method)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_)
      EXCLUSIVE_LOCKS_REQUIRED(deoptimized_methods_lock_);
  ArtMethod* BeginDeoptimizedMethod()
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_, deoptimized_methods_lock_);
  bool IsDeoptimizedMethodsEmpty() const
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_, deoptimized_methods_lock_);

  // Have we hijacked ArtMethod::code_ so that it calls instrumentation/interpreter code?
  bool instrumentation_stubs_installed_;

  // Have we hijacked ArtMethod::code_ to reference the enter/exit stubs?
  bool entry_exit_stubs_installed_;

  // Have we hijacked ArtMethod::code_ to reference the enter interpreter stub?
  bool interpreter_stubs_installed_;

  // Do we need the fidelity of events that we only get from running within the interpreter?
  bool interpret_only_;

  // Did the runtime request we only run in the interpreter? i.e. -Xint mode.
  bool forced_interpret_only_;

  // Do we have any listeners for method entry events? Short-cut to avoid taking the
  // instrumentation_lock_.
  bool have_method_entry_listeners_ GUARDED_BY(Locks::mutator_lock_);

  // Do we have any listeners for method exit events? Short-cut to avoid taking the
  // instrumentation_lock_.
  bool have_method_exit_listeners_ GUARDED_BY(Locks::mutator_lock_);

  // Do we have any listeners for method unwind events? Short-cut to avoid taking the
  // instrumentation_lock_.
  bool have_method_unwind_listeners_ GUARDED_BY(Locks::mutator_lock_);

  // Do we have any listeners for dex move events? Short-cut to avoid taking the
  // instrumentation_lock_.
  bool have_dex_pc_listeners_ GUARDED_BY(Locks::mutator_lock_);

  // Do we have any listeners for field read events? Short-cut to avoid taking the
  // instrumentation_lock_.
  bool have_field_read_listeners_ GUARDED_BY(Locks::mutator_lock_);

  // Do we have any listeners for field write events? Short-cut to avoid taking the
  // instrumentation_lock_.
  bool have_field_write_listeners_ GUARDED_BY(Locks::mutator_lock_);

  // Do we have any exception caught listeners? Short-cut to avoid taking the instrumentation_lock_.
  bool have_exception_caught_listeners_ GUARDED_BY(Locks::mutator_lock_);

  // Do we have any backward branch listeners? Short-cut to avoid taking the instrumentation_lock_.
  bool have_backward_branch_listeners_ GUARDED_BY(Locks::mutator_lock_);

  // Contains the instrumentation level required by each client of the instrumentation identified
  // by a string key.
  typedef SafeMap<const char*, InstrumentationLevel> InstrumentationLevelTable;
  InstrumentationLevelTable requested_instrumentation_levels_ GUARDED_BY(Locks::mutator_lock_);

  // The event listeners, written to with the mutator_lock_ exclusively held.
  std::list<InstrumentationListener*> method_entry_listeners_ GUARDED_BY(Locks::mutator_lock_);
  std::list<InstrumentationListener*> method_exit_listeners_ GUARDED_BY(Locks::mutator_lock_);
  std::list<InstrumentationListener*> method_unwind_listeners_ GUARDED_BY(Locks::mutator_lock_);
  std::list<InstrumentationListener*> backward_branch_listeners_ GUARDED_BY(Locks::mutator_lock_);
  std::shared_ptr<std::list<InstrumentationListener*>> dex_pc_listeners_
      GUARDED_BY(Locks::mutator_lock_);
  std::shared_ptr<std::list<InstrumentationListener*>> field_read_listeners_
      GUARDED_BY(Locks::mutator_lock_);
  std::shared_ptr<std::list<InstrumentationListener*>> field_write_listeners_
      GUARDED_BY(Locks::mutator_lock_);
  std::shared_ptr<std::list<InstrumentationListener*>> exception_caught_listeners_
      GUARDED_BY(Locks::mutator_lock_);

  // The set of methods being deoptimized (by the debugger) which must be executed with interpreter
  // only.
  mutable ReaderWriterMutex deoptimized_methods_lock_ DEFAULT_MUTEX_ACQUIRED_AFTER;
  std::unordered_set<ArtMethod*> deoptimized_methods_ GUARDED_BY(deoptimized_methods_lock_);
  bool deoptimization_enabled_;

  // Current interpreter handler table. This is updated each time the thread state flags are
  // modified.
  InterpreterHandlerTable interpreter_handler_table_ GUARDED_BY(Locks::mutator_lock_);

  // Greater than 0 if quick alloc entry points are instrumented.
  size_t quick_alloc_entry_points_instrumentation_counter_
      GUARDED_BY(Locks::instrument_entrypoints_lock_);

  friend class InstrumentationTest;  // For GetCurrentInstrumentationLevel and ConfigureStubs.

  DISALLOW_COPY_AND_ASSIGN(Instrumentation);
};
std::ostream& operator<<(std::ostream& os, const Instrumentation::InstrumentationEvent& rhs);
std::ostream& operator<<(std::ostream& os, const Instrumentation::InstrumentationLevel& rhs);

// An element in the instrumentation side stack maintained in art::Thread.
struct InstrumentationStackFrame {
  InstrumentationStackFrame(mirror::Object* this_object, ArtMethod* method,
                            uintptr_t return_pc, size_t frame_id, bool interpreter_entry)
      : this_object_(this_object), method_(method), return_pc_(return_pc), frame_id_(frame_id),
        interpreter_entry_(interpreter_entry) {
  }

  std::string Dump() const SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  mirror::Object* this_object_;
  ArtMethod* method_;
  uintptr_t return_pc_;
  size_t frame_id_;
  bool interpreter_entry_;
};

}  // namespace instrumentation
}  // namespace art

#endif  // ART_RUNTIME_INSTRUMENTATION_H_