1 /*
2 * Copyright (C) 2011 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17 #include "instrumentation.h"
18
19 #include <sstream>
20
21 #include "arch/context.h"
22 #include "art_method-inl.h"
23 #include "art_field-inl.h"
24 #include "atomic.h"
25 #include "base/callee_save_type.h"
26 #include "class_linker.h"
27 #include "debugger.h"
28 #include "dex_file-inl.h"
29 #include "entrypoints/quick/quick_entrypoints.h"
30 #include "entrypoints/quick/quick_alloc_entrypoints.h"
31 #include "entrypoints/runtime_asm_entrypoints.h"
32 #include "gc_root-inl.h"
33 #include "interpreter/interpreter.h"
34 #include "jit/jit.h"
35 #include "jit/jit_code_cache.h"
36 #include "jvalue-inl.h"
37 #include "mirror/class-inl.h"
38 #include "mirror/dex_cache.h"
39 #include "mirror/object_array-inl.h"
40 #include "mirror/object-inl.h"
41 #include "nth_caller_visitor.h"
42 #include "oat_quick_method_header.h"
43 #include "thread.h"
44 #include "thread_list.h"
45
46 namespace art {
47 namespace instrumentation {
48
49 constexpr bool kVerboseInstrumentation = false;
50
51 void InstrumentationListener::MethodExited(Thread* thread,
52 Handle<mirror::Object> this_object,
53 ArtMethod* method,
54 uint32_t dex_pc,
55 Handle<mirror::Object> return_value) {
56 DCHECK_EQ(method->GetInterfaceMethodIfProxy(kRuntimePointerSize)->GetReturnTypePrimitive(),
57 Primitive::kPrimNot);
58 JValue v;
59 v.SetL(return_value.Get());
60 MethodExited(thread, this_object, method, dex_pc, v);
61 }
62
63 void InstrumentationListener::FieldWritten(Thread* thread,
64 Handle<mirror::Object> this_object,
65 ArtMethod* method,
66 uint32_t dex_pc,
67 ArtField* field,
68 Handle<mirror::Object> field_value) {
69 DCHECK(!field->IsPrimitiveType());
70 JValue v;
71 v.SetL(field_value.Get());
72 FieldWritten(thread, this_object, method, dex_pc, field, v);
73 }
74
75 // Instrumentation works on non-inlined frames by updating the return PCs
76 // of compiled frames.
77 static constexpr StackVisitor::StackWalkKind kInstrumentationStackWalk =
78 StackVisitor::StackWalkKind::kSkipInlinedFrames;
79
80 class InstallStubsClassVisitor : public ClassVisitor {
81 public:
82 explicit InstallStubsClassVisitor(Instrumentation* instrumentation)
83 : instrumentation_(instrumentation) {}
84
85 bool operator()(ObjPtr<mirror::Class> klass) OVERRIDE REQUIRES(Locks::mutator_lock_) {
86 instrumentation_->InstallStubsForClass(klass.Ptr());
87 return true; // we visit all classes.
88 }
89
90 private:
91 Instrumentation* const instrumentation_;
92 };
93
94
95 Instrumentation::Instrumentation()
96 : instrumentation_stubs_installed_(false),
97 entry_exit_stubs_installed_(false),
98 interpreter_stubs_installed_(false),
99 interpret_only_(false),
100 forced_interpret_only_(false),
101 have_method_entry_listeners_(false),
102 have_method_exit_listeners_(false),
103 have_method_unwind_listeners_(false),
104 have_dex_pc_listeners_(false),
105 have_field_read_listeners_(false),
106 have_field_write_listeners_(false),
107 have_exception_caught_listeners_(false),
108 have_branch_listeners_(false),
109 have_invoke_virtual_or_interface_listeners_(false),
110 deoptimized_methods_lock_("deoptimized methods lock", kDeoptimizedMethodsLock),
111 deoptimization_enabled_(false),
112 interpreter_handler_table_(kMainHandlerTable),
113 quick_alloc_entry_points_instrumentation_counter_(0),
114 alloc_entrypoints_instrumented_(false) {
115 }
116
117 void Instrumentation::InstallStubsForClass(mirror::Class* klass) {
118 if (!klass->IsResolved()) {
119 // We need the class to be resolved to install/uninstall stubs. Otherwise its methods
120 // could not be initialized or linked with regard to class inheritance.
121 } else if (klass->IsErroneousResolved()) {
122 // We can't execute code in an erroneous class: do nothing.
123 } else {
124 for (ArtMethod& method : klass->GetMethods(kRuntimePointerSize)) {
125 InstallStubsForMethod(&method);
126 }
127 }
128 }
129
130 static void UpdateEntrypoints(ArtMethod* method, const void* quick_code)
131 REQUIRES_SHARED(Locks::mutator_lock_) {
132 method->SetEntryPointFromQuickCompiledCode(quick_code);
133 }
134
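// Returns true when the method must run the debug (interpreter) version of its code: a debugger
// is active, the runtime is Java-debuggable, and the method is neither native nor a proxy method.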
135 bool Instrumentation::NeedDebugVersionFor(ArtMethod* method) const REQUIRES_SHARED(Locks::mutator_lock_) {
136 return Dbg::IsDebuggerActive() &&
137 Runtime::Current()->IsJavaDebuggable() &&
138 !method->IsNative() &&
139 !method->IsProxyMethod();
140 }
141
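// Chooses and installs the quick entry point for a single method according to the current
// instrumentation state: the interpreter bridge, the instrumentation entry stub, the resolution
// stub (for static methods of uninitialized classes), or the method's compiled code.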
142 void Instrumentation::InstallStubsForMethod(ArtMethod* method) {
143 if (!method->IsInvokable() || method->IsProxyMethod()) {
144 // Do not change stubs for these methods.
145 return;
146 }
147 // Don't stub Proxy.<init>. Note that the Proxy class itself is not a proxy class.
148 if (method->IsConstructor() &&
149 method->GetDeclaringClass()->DescriptorEquals("Ljava/lang/reflect/Proxy;")) {
150 return;
151 }
152 const void* new_quick_code;
153 bool uninstall = !entry_exit_stubs_installed_ && !interpreter_stubs_installed_;
154 Runtime* const runtime = Runtime::Current();
155 ClassLinker* const class_linker = runtime->GetClassLinker();
156 bool is_class_initialized = method->GetDeclaringClass()->IsInitialized();
157 if (uninstall) {
158 if ((forced_interpret_only_ || IsDeoptimized(method)) && !method->IsNative()) {
159 new_quick_code = GetQuickToInterpreterBridge();
160 } else if (is_class_initialized || !method->IsStatic() || method->IsConstructor()) {
161 if (NeedDebugVersionFor(method)) {
162 new_quick_code = GetQuickToInterpreterBridge();
163 } else {
164 new_quick_code = class_linker->GetQuickOatCodeFor(method);
165 }
166 } else {
167 new_quick_code = GetQuickResolutionStub();
168 }
169 } else { // !uninstall
170 if ((interpreter_stubs_installed_ || forced_interpret_only_ || IsDeoptimized(method)) &&
171 !method->IsNative()) {
172 new_quick_code = GetQuickToInterpreterBridge();
173 } else {
174 // Do not overwrite the resolution trampoline. When the trampoline initializes the method's
175 // class, the code of all its static methods will be set to the instrumentation entry point.
176 // For more details, see ClassLinker::FixupStaticTrampolines.
177 if (is_class_initialized || !method->IsStatic() || method->IsConstructor()) {
178 if (NeedDebugVersionFor(method)) {
179 // Oat code should not be used. Don't install instrumentation stub and
180 // use interpreter for instrumentation.
181 new_quick_code = GetQuickToInterpreterBridge();
182 } else if (entry_exit_stubs_installed_) {
183 new_quick_code = GetQuickInstrumentationEntryPoint();
184 } else {
185 new_quick_code = class_linker->GetQuickOatCodeFor(method);
186 }
187 } else {
188 new_quick_code = GetQuickResolutionStub();
189 }
190 }
191 }
192 UpdateEntrypoints(method, new_quick_code);
193 }
194
195 // Places the instrumentation exit pc as the return PC for every quick frame. This also allows
196 // deoptimization of quick frames to interpreter frames.
197 // Since we may already have done this previously, we need to push new instrumentation frames
198 // before the existing instrumentation frames.
199 static void InstrumentationInstallStack(Thread* thread, void* arg)
200 REQUIRES_SHARED(Locks::mutator_lock_) {
201 struct InstallStackVisitor FINAL : public StackVisitor {
202 InstallStackVisitor(Thread* thread_in, Context* context, uintptr_t instrumentation_exit_pc)
203 : StackVisitor(thread_in, context, kInstrumentationStackWalk),
204 instrumentation_stack_(thread_in->GetInstrumentationStack()),
205 instrumentation_exit_pc_(instrumentation_exit_pc),
206 reached_existing_instrumentation_frames_(false), instrumentation_stack_depth_(0),
207 last_return_pc_(0) {
208 }
209
210 bool VisitFrame() OVERRIDE REQUIRES_SHARED(Locks::mutator_lock_) {
211 ArtMethod* m = GetMethod();
212 if (m == nullptr) {
213 if (kVerboseInstrumentation) {
214 LOG(INFO) << " Skipping upcall. Frame " << GetFrameId();
215 }
216 last_return_pc_ = 0;
217 return true; // Ignore upcalls.
218 }
219 if (GetCurrentQuickFrame() == nullptr) {
220 bool interpreter_frame = true;
221 InstrumentationStackFrame instrumentation_frame(GetThisObject(), m, 0, GetFrameId(),
222 interpreter_frame);
223 if (kVerboseInstrumentation) {
224 LOG(INFO) << "Pushing shadow frame " << instrumentation_frame.Dump();
225 }
226 shadow_stack_.push_back(instrumentation_frame);
227 return true; // Continue.
228 }
229 uintptr_t return_pc = GetReturnPc();
230 if (m->IsRuntimeMethod()) {
231 if (return_pc == instrumentation_exit_pc_) {
232 if (kVerboseInstrumentation) {
233 LOG(INFO) << " Handling quick to interpreter transition. Frame " << GetFrameId();
234 }
235 CHECK_LT(instrumentation_stack_depth_, instrumentation_stack_->size());
236 const InstrumentationStackFrame& frame =
237 instrumentation_stack_->at(instrumentation_stack_depth_);
238 CHECK(frame.interpreter_entry_);
239 // This is an interpreter frame so the method enter event must already have been reported.
240 // However we need to push a DEX pc into the dex_pcs_ list to match the size of the
241 // instrumentation stack. Since we won't report method entry here, we can safely push any DEX pc.
242 dex_pcs_.push_back(0);
243 last_return_pc_ = frame.return_pc_;
244 ++instrumentation_stack_depth_;
245 return true;
246 } else {
247 if (kVerboseInstrumentation) {
248 LOG(INFO) << " Skipping runtime method. Frame " << GetFrameId();
249 }
250 last_return_pc_ = GetReturnPc();
251 return true; // Ignore unresolved methods since they will be instrumented after resolution.
252 }
253 }
254 if (kVerboseInstrumentation) {
255 LOG(INFO) << " Installing exit stub in " << DescribeLocation();
256 }
257 if (return_pc == instrumentation_exit_pc_) {
258 // We've reached a frame on which the instrumentation exit stub has already been installed.
259 // We should have already installed instrumentation on the previous frames.
260 reached_existing_instrumentation_frames_ = true;
261
262 CHECK_LT(instrumentation_stack_depth_, instrumentation_stack_->size());
263 const InstrumentationStackFrame& frame =
264 instrumentation_stack_->at(instrumentation_stack_depth_);
265 CHECK_EQ(m, frame.method_) << "Expected " << ArtMethod::PrettyMethod(m)
266 << ", Found " << ArtMethod::PrettyMethod(frame.method_);
267 return_pc = frame.return_pc_;
268 if (kVerboseInstrumentation) {
269 LOG(INFO) << "Ignoring already instrumented " << frame.Dump();
270 }
271 } else {
272 CHECK_NE(return_pc, 0U);
273 CHECK(!reached_existing_instrumentation_frames_);
274 InstrumentationStackFrame instrumentation_frame(GetThisObject(), m, return_pc, GetFrameId(),
275 false);
276 if (kVerboseInstrumentation) {
277 LOG(INFO) << "Pushing frame " << instrumentation_frame.Dump();
278 }
279
280 // Insert frame at the right position so we do not corrupt the instrumentation stack.
281 // Instrumentation stack frames are in descending frame id order.
282 auto it = instrumentation_stack_->begin();
283 for (auto end = instrumentation_stack_->end(); it != end; ++it) {
284 const InstrumentationStackFrame& current = *it;
285 if (instrumentation_frame.frame_id_ >= current.frame_id_) {
286 break;
287 }
288 }
289 instrumentation_stack_->insert(it, instrumentation_frame);
290 SetReturnPc(instrumentation_exit_pc_);
291 }
292 dex_pcs_.push_back((GetCurrentOatQuickMethodHeader() == nullptr)
293 ? DexFile::kDexNoIndex
294 : GetCurrentOatQuickMethodHeader()->ToDexPc(m, last_return_pc_));
295 last_return_pc_ = return_pc;
296 ++instrumentation_stack_depth_;
297 return true; // Continue.
298 }
299 std::deque<InstrumentationStackFrame>* const instrumentation_stack_;
300 std::vector<InstrumentationStackFrame> shadow_stack_;
301 std::vector<uint32_t> dex_pcs_;
302 const uintptr_t instrumentation_exit_pc_;
303 bool reached_existing_instrumentation_frames_;
304 size_t instrumentation_stack_depth_;
305 uintptr_t last_return_pc_;
306 };
307 if (kVerboseInstrumentation) {
308 std::string thread_name;
309 thread->GetThreadName(thread_name);
310 LOG(INFO) << "Installing exit stubs in " << thread_name;
311 }
312
313 Instrumentation* instrumentation = reinterpret_cast<Instrumentation*>(arg);
314 std::unique_ptr<Context> context(Context::Create());
315 uintptr_t instrumentation_exit_pc = reinterpret_cast<uintptr_t>(GetQuickInstrumentationExitPc());
316 InstallStackVisitor visitor(thread, context.get(), instrumentation_exit_pc);
317 visitor.WalkStack(true);
318 CHECK_EQ(visitor.dex_pcs_.size(), thread->GetInstrumentationStack()->size());
319
320 if (instrumentation->ShouldNotifyMethodEnterExitEvents()) {
321 // Create method enter events for all methods currently on the thread's stack. We only do this
322 // if no debugger is attached, to prevent posting events twice.
323 auto ssi = visitor.shadow_stack_.rbegin();
324 for (auto isi = thread->GetInstrumentationStack()->rbegin(),
325 end = thread->GetInstrumentationStack()->rend(); isi != end; ++isi) {
326 while (ssi != visitor.shadow_stack_.rend() && (*ssi).frame_id_ < (*isi).frame_id_) {
327 instrumentation->MethodEnterEvent(thread, (*ssi).this_object_, (*ssi).method_, 0);
328 ++ssi;
329 }
330 uint32_t dex_pc = visitor.dex_pcs_.back();
331 visitor.dex_pcs_.pop_back();
332 if (!isi->interpreter_entry_) {
333 instrumentation->MethodEnterEvent(thread, (*isi).this_object_, (*isi).method_, dex_pc);
334 }
335 }
336 }
337 thread->VerifyStack();
338 }
339
340 void Instrumentation::InstrumentThreadStack(Thread* thread) {
341 instrumentation_stubs_installed_ = true;
342 InstrumentationInstallStack(thread, this);
343 }
344
345 // Removes the instrumentation exit pc as the return PC for every quick frame.
346 static void InstrumentationRestoreStack(Thread* thread, void* arg)
347 REQUIRES(Locks::mutator_lock_) {
348 Locks::mutator_lock_->AssertExclusiveHeld(Thread::Current());
349
350 struct RestoreStackVisitor FINAL : public StackVisitor {
351 RestoreStackVisitor(Thread* thread_in, uintptr_t instrumentation_exit_pc,
352 Instrumentation* instrumentation)
353 : StackVisitor(thread_in, nullptr, kInstrumentationStackWalk),
354 thread_(thread_in),
355 instrumentation_exit_pc_(instrumentation_exit_pc),
356 instrumentation_(instrumentation),
357 instrumentation_stack_(thread_in->GetInstrumentationStack()),
358 frames_removed_(0) {}
359
360 bool VisitFrame() OVERRIDE REQUIRES_SHARED(Locks::mutator_lock_) {
361 if (instrumentation_stack_->size() == 0) {
362 return false; // Stop.
363 }
364 ArtMethod* m = GetMethod();
365 if (GetCurrentQuickFrame() == nullptr) {
366 if (kVerboseInstrumentation) {
367 LOG(INFO) << " Ignoring a shadow frame. Frame " << GetFrameId()
368 << " Method=" << ArtMethod::PrettyMethod(m);
369 }
370 return true; // Ignore shadow frames.
371 }
372 if (m == nullptr) {
373 if (kVerboseInstrumentation) {
374 LOG(INFO) << " Skipping upcall. Frame " << GetFrameId();
375 }
376 return true; // Ignore upcalls.
377 }
378 bool removed_stub = false;
379 // TODO: make this search more efficient?
380 const size_t frameId = GetFrameId();
381 for (const InstrumentationStackFrame& instrumentation_frame : *instrumentation_stack_) {
382 if (instrumentation_frame.frame_id_ == frameId) {
383 if (kVerboseInstrumentation) {
384 LOG(INFO) << " Removing exit stub in " << DescribeLocation();
385 }
386 if (instrumentation_frame.interpreter_entry_) {
387 CHECK(m == Runtime::Current()->GetCalleeSaveMethod(CalleeSaveType::kSaveRefsAndArgs));
388 } else {
389 CHECK(m == instrumentation_frame.method_) << ArtMethod::PrettyMethod(m);
390 }
391 SetReturnPc(instrumentation_frame.return_pc_);
392 if (instrumentation_->ShouldNotifyMethodEnterExitEvents()) {
393 // Create the method exit events. As the methods didn't really exit, the result is 0.
394 // We only do this if no debugger is attached, to prevent posting events twice.
395 instrumentation_->MethodExitEvent(thread_, instrumentation_frame.this_object_, m,
396 GetDexPc(), JValue());
397 }
398 frames_removed_++;
399 removed_stub = true;
400 break;
401 }
402 }
403 if (!removed_stub) {
404 if (kVerboseInstrumentation) {
405 LOG(INFO) << " No exit stub in " << DescribeLocation();
406 }
407 }
408 return true; // Continue.
409 }
410 Thread* const thread_;
411 const uintptr_t instrumentation_exit_pc_;
412 Instrumentation* const instrumentation_;
413 std::deque<instrumentation::InstrumentationStackFrame>* const instrumentation_stack_;
414 size_t frames_removed_;
415 };
416 if (kVerboseInstrumentation) {
417 std::string thread_name;
418 thread->GetThreadName(thread_name);
419 LOG(INFO) << "Removing exit stubs in " << thread_name;
420 }
421 std::deque<instrumentation::InstrumentationStackFrame>* stack = thread->GetInstrumentationStack();
422 if (stack->size() > 0) {
423 Instrumentation* instrumentation = reinterpret_cast<Instrumentation*>(arg);
424 uintptr_t instrumentation_exit_pc =
425 reinterpret_cast<uintptr_t>(GetQuickInstrumentationExitPc());
426 RestoreStackVisitor visitor(thread, instrumentation_exit_pc, instrumentation);
427 visitor.WalkStack(true);
428 CHECK_EQ(visitor.frames_removed_, stack->size());
429 while (stack->size() > 0) {
430 stack->pop_front();
431 }
432 }
433 }
434
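// Returns true if `expected` is set in the `events` bit mask.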
435 static bool HasEvent(Instrumentation::InstrumentationEvent expected, uint32_t events) {
436 return (events & expected) != 0;
437 }
438
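// If `event` is requested in `events`, stores `listener` in a free (null) slot of `list` (or
// appends it) and records via `has_listener` that at least one listener is now registered.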
439 static void PotentiallyAddListenerTo(Instrumentation::InstrumentationEvent event,
440 uint32_t events,
441 std::list<InstrumentationListener*>& list,
442 InstrumentationListener* listener,
443 bool* has_listener)
444 REQUIRES(Locks::mutator_lock_, !Locks::thread_list_lock_, !Locks::classlinker_classes_lock_) {
445 Locks::mutator_lock_->AssertExclusiveHeld(Thread::Current());
446 if (!HasEvent(event, events)) {
447 return;
448 }
449 // If there is a free slot in the list, we insert the listener in that slot.
450 // Otherwise we add it to the end of the list.
451 auto it = std::find(list.begin(), list.end(), nullptr);
452 if (it != list.end()) {
453 *it = listener;
454 } else {
455 list.push_back(listener);
456 }
457 *has_listener = true;
458 }
459
460 void Instrumentation::AddListener(InstrumentationListener* listener, uint32_t events) {
461 Locks::mutator_lock_->AssertExclusiveHeld(Thread::Current());
462 PotentiallyAddListenerTo(kMethodEntered,
463 events,
464 method_entry_listeners_,
465 listener,
466 &have_method_entry_listeners_);
467 PotentiallyAddListenerTo(kMethodExited,
468 events,
469 method_exit_listeners_,
470 listener,
471 &have_method_exit_listeners_);
472 PotentiallyAddListenerTo(kMethodUnwind,
473 events,
474 method_unwind_listeners_,
475 listener,
476 &have_method_unwind_listeners_);
477 PotentiallyAddListenerTo(kBranch,
478 events,
479 branch_listeners_,
480 listener,
481 &have_branch_listeners_);
482 PotentiallyAddListenerTo(kInvokeVirtualOrInterface,
483 events,
484 invoke_virtual_or_interface_listeners_,
485 listener,
486 &have_invoke_virtual_or_interface_listeners_);
487 PotentiallyAddListenerTo(kDexPcMoved,
488 events,
489 dex_pc_listeners_,
490 listener,
491 &have_dex_pc_listeners_);
492 PotentiallyAddListenerTo(kFieldRead,
493 events,
494 field_read_listeners_,
495 listener,
496 &have_field_read_listeners_);
497 PotentiallyAddListenerTo(kFieldWritten,
498 events,
499 field_write_listeners_,
500 listener,
501 &have_field_write_listeners_);
502 PotentiallyAddListenerTo(kExceptionCaught,
503 events,
504 exception_caught_listeners_,
505 listener,
506 &have_exception_caught_listeners_);
507 UpdateInterpreterHandlerTable();
508 }
509
510 static void PotentiallyRemoveListenerFrom(Instrumentation::InstrumentationEvent event,
511 uint32_t events,
512 std::list<InstrumentationListener*>& list,
513 InstrumentationListener* listener,
514 bool* has_listener)
515 REQUIRES(Locks::mutator_lock_, !Locks::thread_list_lock_, !Locks::classlinker_classes_lock_) {
516 Locks::mutator_lock_->AssertExclusiveHeld(Thread::Current());
517 if (!HasEvent(event, events)) {
518 return;
519 }
520 auto it = std::find(list.begin(), list.end(), listener);
521 if (it != list.end()) {
522 // Just update the entry, do not remove from the list. Removing entries in the list
523 // is unsafe when mutators are iterating over it.
524 *it = nullptr;
525 }
526
527 // Check if the list contains any non-null listener, and update 'has_listener'.
528 for (InstrumentationListener* l : list) {
529 if (l != nullptr) {
530 *has_listener = true;
531 return;
532 }
533 }
534 *has_listener = false;
535 }
536
537 void Instrumentation::RemoveListener(InstrumentationListener* listener, uint32_t events) {
538 Locks::mutator_lock_->AssertExclusiveHeld(Thread::Current());
539 PotentiallyRemoveListenerFrom(kMethodEntered,
540 events,
541 method_entry_listeners_,
542 listener,
543 &have_method_entry_listeners_);
544 PotentiallyRemoveListenerFrom(kMethodExited,
545 events,
546 method_exit_listeners_,
547 listener,
548 &have_method_exit_listeners_);
549 PotentiallyRemoveListenerFrom(kMethodUnwind,
550 events,
551 method_unwind_listeners_,
552 listener,
553 &have_method_unwind_listeners_);
554 PotentiallyRemoveListenerFrom(kBranch,
555 events,
556 branch_listeners_,
557 listener,
558 &have_branch_listeners_);
559 PotentiallyRemoveListenerFrom(kInvokeVirtualOrInterface,
560 events,
561 invoke_virtual_or_interface_listeners_,
562 listener,
563 &have_invoke_virtual_or_interface_listeners_);
564 PotentiallyRemoveListenerFrom(kDexPcMoved,
565 events,
566 dex_pc_listeners_,
567 listener,
568 &have_dex_pc_listeners_);
569 PotentiallyRemoveListenerFrom(kFieldRead,
570 events,
571 field_read_listeners_,
572 listener,
573 &have_field_read_listeners_);
574 PotentiallyRemoveListenerFrom(kFieldWritten,
575 events,
576 field_write_listeners_,
577 listener,
578 &have_field_write_listeners_);
579 PotentiallyRemoveListenerFrom(kExceptionCaught,
580 events,
581 exception_caught_listeners_,
582 listener,
583 &have_exception_caught_listeners_);
584 UpdateInterpreterHandlerTable();
585 }
586
587 Instrumentation::InstrumentationLevel Instrumentation::GetCurrentInstrumentationLevel() const {
588 if (interpreter_stubs_installed_) {
589 return InstrumentationLevel::kInstrumentWithInterpreter;
590 } else if (entry_exit_stubs_installed_) {
591 return InstrumentationLevel::kInstrumentWithInstrumentationStubs;
592 } else {
593 return InstrumentationLevel::kInstrumentNothing;
594 }
595 }
596
597 bool Instrumentation::RequiresInstrumentationInstallation(InstrumentationLevel new_level) const {
598 // We need to reinstall instrumentation if we go to a different level.
599 return GetCurrentInstrumentationLevel() != new_level;
600 }
601
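// Records the instrumentation level requested under `key`, recomputes the highest level needed
// across all clients, and installs or removes the corresponding stubs and per-thread
// instrumentation stack frames.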
602 void Instrumentation::ConfigureStubs(const char* key, InstrumentationLevel desired_level) {
603 // Store the instrumentation level for this key or remove it.
604 if (desired_level == InstrumentationLevel::kInstrumentNothing) {
605 // The client no longer needs instrumentation.
606 requested_instrumentation_levels_.erase(key);
607 } else {
608 // The client needs instrumentation.
609 requested_instrumentation_levels_.Overwrite(key, desired_level);
610 }
611
612 // Look for the highest required instrumentation level.
613 InstrumentationLevel requested_level = InstrumentationLevel::kInstrumentNothing;
614 for (const auto& v : requested_instrumentation_levels_) {
615 requested_level = std::max(requested_level, v.second);
616 }
617
618 interpret_only_ = (requested_level == InstrumentationLevel::kInstrumentWithInterpreter) ||
619 forced_interpret_only_;
620
621 if (!RequiresInstrumentationInstallation(requested_level)) {
622 // We're already set.
623 return;
624 }
625 Thread* const self = Thread::Current();
626 Runtime* runtime = Runtime::Current();
627 Locks::mutator_lock_->AssertExclusiveHeld(self);
628 Locks::thread_list_lock_->AssertNotHeld(self);
629 if (requested_level > InstrumentationLevel::kInstrumentNothing) {
630 if (requested_level == InstrumentationLevel::kInstrumentWithInterpreter) {
631 interpreter_stubs_installed_ = true;
632 entry_exit_stubs_installed_ = true;
633 } else {
634 CHECK_EQ(requested_level, InstrumentationLevel::kInstrumentWithInstrumentationStubs);
635 entry_exit_stubs_installed_ = true;
636 interpreter_stubs_installed_ = false;
637 }
638 InstallStubsClassVisitor visitor(this);
639 runtime->GetClassLinker()->VisitClasses(&visitor);
640 instrumentation_stubs_installed_ = true;
641 MutexLock mu(self, *Locks::thread_list_lock_);
642 runtime->GetThreadList()->ForEach(InstrumentationInstallStack, this);
643 } else {
644 interpreter_stubs_installed_ = false;
645 entry_exit_stubs_installed_ = false;
646 InstallStubsClassVisitor visitor(this);
647 runtime->GetClassLinker()->VisitClasses(&visitor);
648 // Restore stack only if there is no method currently deoptimized.
649 bool empty;
650 {
651 ReaderMutexLock mu(self, deoptimized_methods_lock_);
652 empty = IsDeoptimizedMethodsEmpty(); // Avoid lock violation.
653 }
654 if (empty) {
655 MutexLock mu(self, *Locks::thread_list_lock_);
656 Runtime::Current()->GetThreadList()->ForEach(InstrumentationRestoreStack, this);
657 // Only do this after restoring, as walking the stack when restoring will see
658 // the instrumentation exit pc.
659 instrumentation_stubs_installed_ = false;
660 }
661 }
662 }
663
664 static void ResetQuickAllocEntryPointsForThread(Thread* thread, void* arg ATTRIBUTE_UNUSED) {
665 thread->ResetQuickAllocEntryPointsForThread(kUseReadBarrier && thread->GetIsGcMarking());
666 }
667
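// Switches the quick allocation entry points between their instrumented and uninstrumented
// variants. When the runtime is already started this happens under a suspend-all so that every
// thread observes a consistent set of entry points.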
668 void Instrumentation::SetEntrypointsInstrumented(bool instrumented) {
669 Thread* self = Thread::Current();
670 Runtime* runtime = Runtime::Current();
671 Locks::mutator_lock_->AssertNotHeld(self);
672 Locks::instrument_entrypoints_lock_->AssertHeld(self);
673 if (runtime->IsStarted()) {
674 ScopedSuspendAll ssa(__FUNCTION__);
675 MutexLock mu(self, *Locks::runtime_shutdown_lock_);
676 SetQuickAllocEntryPointsInstrumented(instrumented);
677 ResetQuickAllocEntryPoints();
678 alloc_entrypoints_instrumented_ = instrumented;
679 } else {
680 MutexLock mu(self, *Locks::runtime_shutdown_lock_);
681 SetQuickAllocEntryPointsInstrumented(instrumented);
682
683 // Note: ResetQuickAllocEntryPoints only works when the runtime is started. Manually run the
684 // update for just this thread.
685 // Note: self may be null. One of those paths is setting instrumentation in the Heap
686 // constructor for gcstress mode.
687 if (self != nullptr) {
688 ResetQuickAllocEntryPointsForThread(self, nullptr);
689 }
690
691 alloc_entrypoints_instrumented_ = instrumented;
692 }
693 }
694
695 void Instrumentation::InstrumentQuickAllocEntryPoints() {
696 MutexLock mu(Thread::Current(), *Locks::instrument_entrypoints_lock_);
697 InstrumentQuickAllocEntryPointsLocked();
698 }
699
700 void Instrumentation::UninstrumentQuickAllocEntryPoints() {
701 MutexLock mu(Thread::Current(), *Locks::instrument_entrypoints_lock_);
702 UninstrumentQuickAllocEntryPointsLocked();
703 }
704
705 void Instrumentation::InstrumentQuickAllocEntryPointsLocked() {
706 Locks::instrument_entrypoints_lock_->AssertHeld(Thread::Current());
707 if (quick_alloc_entry_points_instrumentation_counter_ == 0) {
708 SetEntrypointsInstrumented(true);
709 }
710 ++quick_alloc_entry_points_instrumentation_counter_;
711 }
712
713 void Instrumentation::UninstrumentQuickAllocEntryPointsLocked() {
714 Locks::instrument_entrypoints_lock_->AssertHeld(Thread::Current());
715 CHECK_GT(quick_alloc_entry_points_instrumentation_counter_, 0U);
716 --quick_alloc_entry_points_instrumentation_counter_;
717 if (quick_alloc_entry_points_instrumentation_counter_ == 0) {
718 SetEntrypointsInstrumented(false);
719 }
720 }
721
722 void Instrumentation::ResetQuickAllocEntryPoints() {
723 Runtime* runtime = Runtime::Current();
724 if (runtime->IsStarted()) {
725 MutexLock mu(Thread::Current(), *Locks::thread_list_lock_);
726 runtime->GetThreadList()->ForEach(ResetQuickAllocEntryPointsForThread, nullptr);
727 }
728 }
729
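// Installs `quick_code` as the method's entry point unless instrumentation requires the method
// to keep running through the interpreter bridge or the instrumentation entry stub.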
730 void Instrumentation::UpdateMethodsCodeImpl(ArtMethod* method, const void* quick_code) {
731 const void* new_quick_code;
732 if (LIKELY(!instrumentation_stubs_installed_)) {
733 new_quick_code = quick_code;
734 } else {
735 if ((interpreter_stubs_installed_ || IsDeoptimized(method)) && !method->IsNative()) {
736 new_quick_code = GetQuickToInterpreterBridge();
737 } else {
738 ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
739 if (class_linker->IsQuickResolutionStub(quick_code) ||
740 class_linker->IsQuickToInterpreterBridge(quick_code)) {
741 new_quick_code = quick_code;
742 } else if (entry_exit_stubs_installed_) {
743 new_quick_code = GetQuickInstrumentationEntryPoint();
744 } else {
745 new_quick_code = quick_code;
746 }
747 }
748 }
749 UpdateEntrypoints(method, new_quick_code);
750 }
751
752 void Instrumentation::UpdateMethodsCode(ArtMethod* method, const void* quick_code) {
753 DCHECK(method->GetDeclaringClass()->IsResolved());
754 UpdateMethodsCodeImpl(method, quick_code);
755 }
756
757 void Instrumentation::UpdateMethodsCodeForJavaDebuggable(ArtMethod* method,
758 const void* quick_code) {
759 // When the runtime is set to Java debuggable, we may update the entry points of
760 // all methods of a class to the interpreter bridge. A method's declaring class
761 // might not be in resolved state yet in that case, so we bypass the DCHECK in
762 // UpdateMethodsCode.
763 UpdateMethodsCodeImpl(method, quick_code);
764 }
765
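// Bookkeeping for the set of individually deoptimized methods; callers synchronize through
// deoptimized_methods_lock_.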
766 bool Instrumentation::AddDeoptimizedMethod(ArtMethod* method) {
767 if (IsDeoptimizedMethod(method)) {
768 // Already in the map. Return.
769 return false;
770 }
771 // Not found. Add it.
772 deoptimized_methods_.insert(method);
773 return true;
774 }
775
776 bool Instrumentation::IsDeoptimizedMethod(ArtMethod* method) {
777 return deoptimized_methods_.find(method) != deoptimized_methods_.end();
778 }
779
780 ArtMethod* Instrumentation::BeginDeoptimizedMethod() {
781 if (deoptimized_methods_.empty()) {
782 // Empty.
783 return nullptr;
784 }
785 return *deoptimized_methods_.begin();
786 }
787
788 bool Instrumentation::RemoveDeoptimizedMethod(ArtMethod* method) {
789 auto it = deoptimized_methods_.find(method);
790 if (it == deoptimized_methods_.end()) {
791 return false;
792 }
793 deoptimized_methods_.erase(it);
794 return true;
795 }
796
797 bool Instrumentation::IsDeoptimizedMethodsEmpty() const {
798 return deoptimized_methods_.empty();
799 }
800
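// Forces `method` through the instrumentation entry stub so it can be deoptimized, and installs
// instrumentation exit frames on every thread so that frames already running the method are
// intercepted when they return.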
801 void Instrumentation::Deoptimize(ArtMethod* method) {
802 CHECK(!method->IsNative());
803 CHECK(!method->IsProxyMethod());
804 CHECK(method->IsInvokable());
805
806 Thread* self = Thread::Current();
807 {
808 WriterMutexLock mu(self, deoptimized_methods_lock_);
809 bool has_not_been_deoptimized = AddDeoptimizedMethod(method);
810 CHECK(has_not_been_deoptimized) << "Method " << ArtMethod::PrettyMethod(method)
811 << " is already deoptimized";
812 }
813 if (!interpreter_stubs_installed_) {
814 UpdateEntrypoints(method, GetQuickInstrumentationEntryPoint());
815
816 // Install the instrumentation exit stub and instrumentation frames. We may have already
817 // installed these previously, in which case this only covers newly created frames.
818 instrumentation_stubs_installed_ = true;
819 MutexLock mu(self, *Locks::thread_list_lock_);
820 Runtime::Current()->GetThreadList()->ForEach(InstrumentationInstallStack, this);
821 }
822 }
823
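// Reverses Deoptimize(): restores the method's code or resolution stub and, once no deoptimized
// method remains, removes the instrumentation frames from every thread's stack.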
824 void Instrumentation::Undeoptimize(ArtMethod* method) {
825 CHECK(!method->IsNative());
826 CHECK(!method->IsProxyMethod());
827 CHECK(method->IsInvokable());
828
829 Thread* self = Thread::Current();
830 bool empty;
831 {
832 WriterMutexLock mu(self, deoptimized_methods_lock_);
833 bool found_and_erased = RemoveDeoptimizedMethod(method);
834 CHECK(found_and_erased) << "Method " << ArtMethod::PrettyMethod(method)
835 << " is not deoptimized";
836 empty = IsDeoptimizedMethodsEmpty();
837 }
838
839 // Restore code and possibly stack only if we did not deoptimize everything.
840 if (!interpreter_stubs_installed_) {
841 // Restore its code or resolution trampoline.
842 ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
843 if (method->IsStatic() && !method->IsConstructor() &&
844 !method->GetDeclaringClass()->IsInitialized()) {
845 UpdateEntrypoints(method, GetQuickResolutionStub());
846 } else {
847 const void* quick_code = NeedDebugVersionFor(method)
848 ? GetQuickToInterpreterBridge()
849 : class_linker->GetQuickOatCodeFor(method);
850 UpdateEntrypoints(method, quick_code);
851 }
852
853 // If there is no deoptimized method left, we can restore the stack of each thread.
854 if (empty) {
855 MutexLock mu(self, *Locks::thread_list_lock_);
856 Runtime::Current()->GetThreadList()->ForEach(InstrumentationRestoreStack, this);
857 instrumentation_stubs_installed_ = false;
858 }
859 }
860 }
861
862 bool Instrumentation::IsDeoptimized(ArtMethod* method) {
863 DCHECK(method != nullptr);
864 ReaderMutexLock mu(Thread::Current(), deoptimized_methods_lock_);
865 return IsDeoptimizedMethod(method);
866 }
867
868 void Instrumentation::EnableDeoptimization() {
869 ReaderMutexLock mu(Thread::Current(), deoptimized_methods_lock_);
870 CHECK(IsDeoptimizedMethodsEmpty());
871 CHECK_EQ(deoptimization_enabled_, false);
872 deoptimization_enabled_ = true;
873 }
874
875 void Instrumentation::DisableDeoptimization(const char* key) {
876 CHECK_EQ(deoptimization_enabled_, true);
877 // If we deoptimized everything, undo it.
878 InstrumentationLevel level = GetCurrentInstrumentationLevel();
879 if (level == InstrumentationLevel::kInstrumentWithInterpreter) {
880 UndeoptimizeEverything(key);
881 }
882 // Undeoptimize selected methods.
883 while (true) {
884 ArtMethod* method;
885 {
886 ReaderMutexLock mu(Thread::Current(), deoptimized_methods_lock_);
887 if (IsDeoptimizedMethodsEmpty()) {
888 break;
889 }
890 method = BeginDeoptimizedMethod();
891 CHECK(method != nullptr);
892 }
893 Undeoptimize(method);
894 }
895 deoptimization_enabled_ = false;
896 }
897
898 // Indicates whether instrumentation should notify listeners of method enter/exit events.
899 bool Instrumentation::ShouldNotifyMethodEnterExitEvents() const {
900 if (!HasMethodEntryListeners() && !HasMethodExitListeners()) {
901 return false;
902 }
903 return !deoptimization_enabled_ && !interpreter_stubs_installed_;
904 }
905
906 void Instrumentation::DeoptimizeEverything(const char* key) {
907 CHECK(deoptimization_enabled_);
908 ConfigureStubs(key, InstrumentationLevel::kInstrumentWithInterpreter);
909 }
910
911 void Instrumentation::UndeoptimizeEverything(const char* key) {
912 CHECK(interpreter_stubs_installed_);
913 CHECK(deoptimization_enabled_);
914 ConfigureStubs(key, InstrumentationLevel::kInstrumentNothing);
915 }
916
917 void Instrumentation::EnableMethodTracing(const char* key, bool needs_interpreter) {
918 InstrumentationLevel level;
919 if (needs_interpreter) {
920 level = InstrumentationLevel::kInstrumentWithInterpreter;
921 } else {
922 level = InstrumentationLevel::kInstrumentWithInstrumentationStubs;
923 }
924 ConfigureStubs(key, level);
925 }
926
927 void Instrumentation::DisableMethodTracing(const char* key) {
928 ConfigureStubs(key, InstrumentationLevel::kInstrumentNothing);
929 }
930
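// Returns the quick code the method should execute ignoring any installed instrumentation stubs:
// the current entry point when it is genuine compiled code, otherwise the code known to the
// class linker.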
931 const void* Instrumentation::GetQuickCodeFor(ArtMethod* method, PointerSize pointer_size) const {
932 ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
933 if (LIKELY(!instrumentation_stubs_installed_)) {
934 const void* code = method->GetEntryPointFromQuickCompiledCodePtrSize(pointer_size);
935 DCHECK(code != nullptr);
936 if (LIKELY(!class_linker->IsQuickResolutionStub(code) &&
937 !class_linker->IsQuickToInterpreterBridge(code))) {
940 return code;
941 }
942 }
943 return class_linker->GetQuickOatCodeFor(method);
944 }
945
946 void Instrumentation::MethodEnterEventImpl(Thread* thread,
947 ObjPtr<mirror::Object> this_object,
948 ArtMethod* method,
949 uint32_t dex_pc) const {
950 if (HasMethodEntryListeners()) {
951 Thread* self = Thread::Current();
952 StackHandleScope<1> hs(self);
953 Handle<mirror::Object> thiz(hs.NewHandle(this_object));
954 for (InstrumentationListener* listener : method_entry_listeners_) {
955 if (listener != nullptr) {
956 listener->MethodEntered(thread, thiz, method, dex_pc);
957 }
958 }
959 }
960 }
961
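// Dispatches the method-exit event to all registered listeners; reference return values are
// wrapped in a Handle so they stay valid if a listener suspends the thread.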
962 void Instrumentation::MethodExitEventImpl(Thread* thread,
963 ObjPtr<mirror::Object> this_object,
964 ArtMethod* method,
965 uint32_t dex_pc,
966 const JValue& return_value) const {
967 if (HasMethodExitListeners()) {
968 Thread* self = Thread::Current();
969 StackHandleScope<2> hs(self);
970 Handle<mirror::Object> thiz(hs.NewHandle(this_object));
971 if (method->GetInterfaceMethodIfProxy(kRuntimePointerSize)
972 ->GetReturnTypePrimitive() != Primitive::kPrimNot) {
973 for (InstrumentationListener* listener : method_exit_listeners_) {
974 if (listener != nullptr) {
975 listener->MethodExited(thread, thiz, method, dex_pc, return_value);
976 }
977 }
978 } else {
979 Handle<mirror::Object> ret(hs.NewHandle(return_value.GetL()));
980 for (InstrumentationListener* listener : method_exit_listeners_) {
981 if (listener != nullptr) {
982 listener->MethodExited(thread, thiz, method, dex_pc, ret);
983 }
984 }
985 }
986 }
987 }
988
989 void Instrumentation::MethodUnwindEvent(Thread* thread,
990 mirror::Object* this_object,
991 ArtMethod* method,
992 uint32_t dex_pc) const {
993 if (HasMethodUnwindListeners()) {
994 Thread* self = Thread::Current();
995 StackHandleScope<1> hs(self);
996 Handle<mirror::Object> thiz(hs.NewHandle(this_object));
997 for (InstrumentationListener* listener : method_unwind_listeners_) {
998 if (listener != nullptr) {
999 listener->MethodUnwind(thread, thiz, method, dex_pc);
1000 }
1001 }
1002 }
1003 }
1004
1005 void Instrumentation::DexPcMovedEventImpl(Thread* thread,
1006 ObjPtr<mirror::Object> this_object,
1007 ArtMethod* method,
1008 uint32_t dex_pc) const {
1009 Thread* self = Thread::Current();
1010 StackHandleScope<1> hs(self);
1011 Handle<mirror::Object> thiz(hs.NewHandle(this_object));
1012 for (InstrumentationListener* listener : dex_pc_listeners_) {
1013 if (listener != nullptr) {
1014 listener->DexPcMoved(thread, thiz, method, dex_pc);
1015 }
1016 }
1017 }
1018
1019 void Instrumentation::BranchImpl(Thread* thread,
1020 ArtMethod* method,
1021 uint32_t dex_pc,
1022 int32_t offset) const {
1023 for (InstrumentationListener* listener : branch_listeners_) {
1024 if (listener != nullptr) {
1025 listener->Branch(thread, method, dex_pc, offset);
1026 }
1027 }
1028 }
1029
1030 void Instrumentation::InvokeVirtualOrInterfaceImpl(Thread* thread,
1031 ObjPtr<mirror::Object> this_object,
1032 ArtMethod* caller,
1033 uint32_t dex_pc,
1034 ArtMethod* callee) const {
1035 Thread* self = Thread::Current();
1036 StackHandleScope<1> hs(self);
1037 Handle<mirror::Object> thiz(hs.NewHandle(this_object));
1038 for (InstrumentationListener* listener : invoke_virtual_or_interface_listeners_) {
1039 if (listener != nullptr) {
1040 listener->InvokeVirtualOrInterface(thread, thiz, caller, dex_pc, callee);
1041 }
1042 }
1043 }
1044
1045 void Instrumentation::FieldReadEventImpl(Thread* thread,
1046 ObjPtr<mirror::Object> this_object,
1047 ArtMethod* method,
1048 uint32_t dex_pc,
1049 ArtField* field) const {
1050 Thread* self = Thread::Current();
1051 StackHandleScope<1> hs(self);
1052 Handle<mirror::Object> thiz(hs.NewHandle(this_object));
1053 for (InstrumentationListener* listener : field_read_listeners_) {
1054 if (listener != nullptr) {
1055 listener->FieldRead(thread, thiz, method, dex_pc, field);
1056 }
1057 }
1058 }
1059
1060 void Instrumentation::FieldWriteEventImpl(Thread* thread,
1061 ObjPtr<mirror::Object> this_object,
1062 ArtMethod* method,
1063 uint32_t dex_pc,
1064 ArtField* field,
1065 const JValue& field_value) const {
1066 Thread* self = Thread::Current();
1067 StackHandleScope<2> hs(self);
1068 Handle<mirror::Object> thiz(hs.NewHandle(this_object));
1069 if (field->IsPrimitiveType()) {
1070 for (InstrumentationListener* listener : field_write_listeners_) {
1071 if (listener != nullptr) {
1072 listener->FieldWritten(thread, thiz, method, dex_pc, field, field_value);
1073 }
1074 }
1075 } else {
1076 Handle<mirror::Object> val(hs.NewHandle(field_value.GetL()));
1077 for (InstrumentationListener* listener : field_write_listeners_) {
1078 if (listener != nullptr) {
1079 listener->FieldWritten(thread, thiz, method, dex_pc, field, val);
1080 }
1081 }
1082 }
1083 }
1084
1085 void Instrumentation::ExceptionCaughtEvent(Thread* thread,
1086 mirror::Throwable* exception_object) const {
1087 Thread* self = Thread::Current();
1088 StackHandleScope<1> hs(self);
1089 Handle<mirror::Throwable> h_exception(hs.NewHandle(exception_object));
1090 if (HasExceptionCaughtListeners()) {
1091 DCHECK_EQ(thread->GetException(), h_exception.Get());
1092 thread->ClearException();
1093 for (InstrumentationListener* listener : exception_caught_listeners_) {
1094 if (listener != nullptr) {
1095 listener->ExceptionCaught(thread, h_exception);
1096 }
1097 }
1098 thread->SetException(h_exception.Get());
1099 }
1100 }
1101
1102 // Computes a frame ID by ignoring inlined frames.
1103 size_t Instrumentation::ComputeFrameId(Thread* self,
1104 size_t frame_depth,
1105 size_t inlined_frames_before_frame) {
1106 CHECK_GE(frame_depth, inlined_frames_before_frame);
1107 size_t no_inline_depth = frame_depth - inlined_frames_before_frame;
1108 return StackVisitor::ComputeNumFrames(self, kInstrumentationStackWalk) - no_inline_depth;
1109 }
1110
1111 static void CheckStackDepth(Thread* self, const InstrumentationStackFrame& instrumentation_frame,
1112 int delta)
1113 REQUIRES_SHARED(Locks::mutator_lock_) {
1114 size_t frame_id = StackVisitor::ComputeNumFrames(self, kInstrumentationStackWalk) + delta;
1115 if (frame_id != instrumentation_frame.frame_id_) {
1116 LOG(ERROR) << "Expected frame_id=" << frame_id << " but found "
1117 << instrumentation_frame.frame_id_;
1118 StackVisitor::DescribeStack(self);
1119 CHECK_EQ(frame_id, instrumentation_frame.frame_id_);
1120 }
1121 }
1122
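// Called when a method is entered through the instrumentation entry stub: reports the
// method-enter event and pushes an InstrumentationStackFrame recording the real return
// address (`lr`) for the matching exit.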
1123 void Instrumentation::PushInstrumentationStackFrame(Thread* self, mirror::Object* this_object,
1124 ArtMethod* method,
1125 uintptr_t lr, bool interpreter_entry) {
1126 DCHECK(!self->IsExceptionPending());
1127 std::deque<instrumentation::InstrumentationStackFrame>* stack = self->GetInstrumentationStack();
1128 if (kVerboseInstrumentation) {
1129 LOG(INFO) << "Entering " << ArtMethod::PrettyMethod(method) << " from PC "
1130 << reinterpret_cast<void*>(lr);
1131 }
1132
1133 // We send the enter event before pushing the instrumentation frame to make cleanup easier. If the
1134 // event causes an exception we can simply send the unwind event and return.
1135 StackHandleScope<1> hs(self);
1136 Handle<mirror::Object> h_this(hs.NewHandle(this_object));
1137 if (!interpreter_entry) {
1138 MethodEnterEvent(self, h_this.Get(), method, 0);
1139 if (self->IsExceptionPending()) {
1140 MethodUnwindEvent(self, h_this.Get(), method, 0);
1141 return;
1142 }
1143 }
1144
1145 // We have a callee-save frame meaning this value is guaranteed to never be 0.
1146 DCHECK(!self->IsExceptionPending());
1147 size_t frame_id = StackVisitor::ComputeNumFrames(self, kInstrumentationStackWalk);
1148
1149 instrumentation::InstrumentationStackFrame instrumentation_frame(h_this.Get(), method, lr,
1150 frame_id, interpreter_entry);
1151 stack->push_front(instrumentation_frame);
1152 }
1153
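// Called when returning through the instrumentation exit stub: pops the instrumentation frame,
// reports the method-exit event, and either resumes at the saved return PC or requests
// deoptimization of the caller when it must continue in the interpreter.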
1154 TwoWordReturn Instrumentation::PopInstrumentationStackFrame(Thread* self,
1155 uintptr_t* return_pc,
1156 uint64_t* gpr_result,
1157 uint64_t* fpr_result) {
1158 DCHECK(gpr_result != nullptr);
1159 DCHECK(fpr_result != nullptr);
1160 // Do the pop.
1161 std::deque<instrumentation::InstrumentationStackFrame>* stack = self->GetInstrumentationStack();
1162 CHECK_GT(stack->size(), 0U);
1163 InstrumentationStackFrame instrumentation_frame = stack->front();
1164 stack->pop_front();
1165
1166 // Set return PC and check the sanity of the stack.
1167 *return_pc = instrumentation_frame.return_pc_;
1168 CheckStackDepth(self, instrumentation_frame, 0);
1169 self->VerifyStack();
1170
1171 ArtMethod* method = instrumentation_frame.method_;
1172 uint32_t length;
1173 const PointerSize pointer_size = Runtime::Current()->GetClassLinker()->GetImagePointerSize();
1174 char return_shorty = method->GetInterfaceMethodIfProxy(pointer_size)->GetShorty(&length)[0];
1175 bool is_ref = return_shorty == '[' || return_shorty == 'L';
1176 StackHandleScope<1> hs(self);
1177 MutableHandle<mirror::Object> res(hs.NewHandle<mirror::Object>(nullptr));
1178 JValue return_value;
1179 if (return_shorty == 'V') {
1180 return_value.SetJ(0);
1181 } else if (return_shorty == 'F' || return_shorty == 'D') {
1182 return_value.SetJ(*fpr_result);
1183 } else {
1184 return_value.SetJ(*gpr_result);
1185 }
1186 if (is_ref) {
1187 // Take a handle to the return value so we won't lose it if we suspend.
1188 res.Assign(return_value.GetL());
1189 }
1190 // TODO: improve the dex pc information here, requires knowledge of current PC as opposed to
1191 // return_pc.
1192 uint32_t dex_pc = DexFile::kDexNoIndex;
1193 mirror::Object* this_object = instrumentation_frame.this_object_;
1194 if (!instrumentation_frame.interpreter_entry_) {
1195 MethodExitEvent(self, this_object, instrumentation_frame.method_, dex_pc, return_value);
1196 }
1197
1198 // Deoptimize if the caller needs to continue execution in the interpreter. Do nothing if we get
1199 // back to an upcall.
1200 NthCallerVisitor visitor(self, 1, true);
1201 visitor.WalkStack(true);
1202 bool deoptimize = (visitor.caller != nullptr) &&
1203 (interpreter_stubs_installed_ || IsDeoptimized(visitor.caller) ||
1204 Dbg::IsForcedInterpreterNeededForUpcall(self, visitor.caller));
1205 if (is_ref) {
1206 // Restore the return value if it's a reference since it might have moved.
1207 *reinterpret_cast<mirror::Object**>(gpr_result) = res.Get();
1208 }
1209 if (deoptimize && Runtime::Current()->IsAsyncDeoptimizeable(*return_pc)) {
1210 if (kVerboseInstrumentation) {
1211 LOG(INFO) << "Deoptimizing "
1212 << visitor.caller->PrettyMethod()
1213 << " by returning from "
1214 << method->PrettyMethod()
1215 << " with result "
1216 << std::hex << return_value.GetJ() << std::dec
1217 << " in "
1218 << *self;
1219 }
1220 self->PushDeoptimizationContext(return_value,
1221 return_shorty == 'L',
1222 false /* from_code */,
1223 nullptr /* no pending exception */);
1224 return GetTwoWordSuccessValue(*return_pc,
1225 reinterpret_cast<uintptr_t>(GetQuickDeoptimizationEntryPoint()));
1226 } else {
1227 if (deoptimize && !Runtime::Current()->IsAsyncDeoptimizeable(*return_pc)) {
1228 LOG(WARNING) << "Got a deoptimization request on un-deoptimizable " << method->PrettyMethod()
1229 << " at PC " << reinterpret_cast<void*>(*return_pc);
1230 }
1231 if (kVerboseInstrumentation) {
1232 LOG(INFO) << "Returning from " << method->PrettyMethod()
1233 << " to PC " << reinterpret_cast<void*>(*return_pc);
1234 }
1235 return GetTwoWordSuccessValue(0, *return_pc);
1236 }
1237 }
1238
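// Pops one instrumentation frame while unwinding for an exception or a deoptimization,
// reporting the method-unwind event in the exception case.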
1239 uintptr_t Instrumentation::PopMethodForUnwind(Thread* self, bool is_deoptimization) const {
1240 // Do the pop.
1241 std::deque<instrumentation::InstrumentationStackFrame>* stack = self->GetInstrumentationStack();
1242 CHECK_GT(stack->size(), 0U);
1243 size_t idx = stack->size();
1244 InstrumentationStackFrame instrumentation_frame = stack->front();
1245
1246 ArtMethod* method = instrumentation_frame.method_;
1247 if (is_deoptimization) {
1248 if (kVerboseInstrumentation) {
1249 LOG(INFO) << "Popping for deoptimization " << ArtMethod::PrettyMethod(method);
1250 }
1251 } else {
1252 if (kVerboseInstrumentation) {
1253 LOG(INFO) << "Popping for unwind " << ArtMethod::PrettyMethod(method);
1254 }
1255
1256 // Notify listeners of method unwind.
1257 // TODO: improve the dex pc information here, requires knowledge of current PC as opposed to
1258 // return_pc.
1259 uint32_t dex_pc = DexFile::kDexNoIndex;
1260 MethodUnwindEvent(self, instrumentation_frame.this_object_, method, dex_pc);
1261 }
1262 // TODO: bring back CheckStackDepth(self, instrumentation_frame, 2);
1263 CHECK_EQ(stack->size(), idx);
1264 DCHECK(instrumentation_frame.method_ == stack->front().method_);
1265 stack->pop_front();
1266 return instrumentation_frame.return_pc_;
1267 }
1268
1269 std::string InstrumentationStackFrame::Dump() const {
1270 std::ostringstream os;
1271 os << "Frame " << frame_id_ << " " << ArtMethod::PrettyMethod(method_) << ":"
1272 << reinterpret_cast<void*>(return_pc_) << " this=" << reinterpret_cast<void*>(this_object_);
1273 return os.str();
1274 }
1275
1276 } // namespace instrumentation
1277 } // namespace art
1278