1 /*
2 * Copyright (C) 2011 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17 #include "instrumentation.h"
18
19 #include <sstream>
20
21 #include "arch/context.h"
22 #include "art_method-inl.h"
23 #include "atomic.h"
24 #include "class_linker.h"
25 #include "debugger.h"
26 #include "dex_file-inl.h"
27 #include "entrypoints/quick/quick_entrypoints.h"
28 #include "entrypoints/quick/quick_alloc_entrypoints.h"
29 #include "entrypoints/runtime_asm_entrypoints.h"
30 #include "gc_root-inl.h"
31 #include "interpreter/interpreter.h"
32 #include "jit/jit.h"
33 #include "jit/jit_code_cache.h"
34 #include "mirror/class-inl.h"
35 #include "mirror/dex_cache.h"
36 #include "mirror/object_array-inl.h"
37 #include "mirror/object-inl.h"
38 #include "nth_caller_visitor.h"
39 #include "oat_quick_method_header.h"
40 #include "thread.h"
41 #include "thread_list.h"
42
43 namespace art {
44 namespace instrumentation {
45
// Compile-time switch for extremely verbose logging of every instrumentation
// stack install/remove step. Off by default.
constexpr bool kVerboseInstrumentation = false;

// Instrumentation works on non-inlined frames by updating returned PCs
// of compiled frames.
static constexpr StackVisitor::StackWalkKind kInstrumentationStackWalk =
    StackVisitor::StackWalkKind::kSkipInlinedFrames;
52
// ClassVisitor that (re)installs the appropriate entrypoint stubs for every
// method of each visited class by delegating to
// Instrumentation::InstallStubsForClass.
class InstallStubsClassVisitor : public ClassVisitor {
 public:
  explicit InstallStubsClassVisitor(Instrumentation* instrumentation)
      : instrumentation_(instrumentation) {}

  bool operator()(mirror::Class* klass) OVERRIDE REQUIRES(Locks::mutator_lock_) {
    instrumentation_->InstallStubsForClass(klass);
    return true;  // we visit all classes.
  }

 private:
  // Not owned; must outlive this visitor.
  Instrumentation* const instrumentation_;
};
66
67
// All instrumentation state starts disabled; listener lists and the
// deoptimized-method set are populated later via AddListener()/Deoptimize().
Instrumentation::Instrumentation()
    : instrumentation_stubs_installed_(false),
      entry_exit_stubs_installed_(false),
      interpreter_stubs_installed_(false),
      interpret_only_(false),
      forced_interpret_only_(false),
      have_method_entry_listeners_(false),
      have_method_exit_listeners_(false),
      have_method_unwind_listeners_(false),
      have_dex_pc_listeners_(false),
      have_field_read_listeners_(false),
      have_field_write_listeners_(false),
      have_exception_caught_listeners_(false),
      have_branch_listeners_(false),
      have_invoke_virtual_or_interface_listeners_(false),
      deoptimized_methods_lock_("deoptimized methods lock", kDeoptimizedMethodsLock),
      deoptimization_enabled_(false),
      interpreter_handler_table_(kMainHandlerTable),
      quick_alloc_entry_points_instrumentation_counter_(0),
      alloc_entrypoints_instrumented_(false) {
}
89
InstallStubsForClass(mirror::Class * klass)90 void Instrumentation::InstallStubsForClass(mirror::Class* klass) {
91 if (klass->IsErroneous()) {
92 // We can't execute code in a erroneous class: do nothing.
93 } else if (!klass->IsResolved()) {
94 // We need the class to be resolved to install/uninstall stubs. Otherwise its methods
95 // could not be initialized or linked with regards to class inheritance.
96 } else {
97 for (ArtMethod& method : klass->GetMethods(sizeof(void*))) {
98 InstallStubsForMethod(&method);
99 }
100 }
101 }
102
// Single funnel for all entrypoint updates in this file: points |method|'s
// quick-compiled-code entrypoint at |quick_code|.
static void UpdateEntrypoints(ArtMethod* method, const void* quick_code)
    SHARED_REQUIRES(Locks::mutator_lock_) {
  method->SetEntryPointFromQuickCompiledCode(quick_code);
}
107
NeedDebugVersionForBootImageCode(ArtMethod * method,const void * code) const108 bool Instrumentation::NeedDebugVersionForBootImageCode(ArtMethod* method, const void* code) const
109 SHARED_REQUIRES(Locks::mutator_lock_) {
110 return Dbg::IsDebuggerActive() &&
111 Runtime::Current()->GetHeap()->IsInBootImageOatFile(code) &&
112 !method->IsNative() &&
113 !method->IsProxyMethod();
114 }
115
// Chooses and installs the correct quick entrypoint for |method| given the
// current instrumentation level: real oat code, the resolution stub, the
// instrumentation entry stub, or the quick-to-interpreter bridge.
void Instrumentation::InstallStubsForMethod(ArtMethod* method) {
  if (!method->IsInvokable() || method->IsProxyMethod()) {
    // Do not change stubs for these methods.
    return;
  }
  // Don't stub Proxy.<init>. Note that the Proxy class itself is not a proxy class.
  if (method->IsConstructor() &&
      method->GetDeclaringClass()->DescriptorEquals("Ljava/lang/reflect/Proxy;")) {
    return;
  }
  const void* new_quick_code;
  // "Uninstall" means neither stub kind is requested: restore normal code.
  bool uninstall = !entry_exit_stubs_installed_ && !interpreter_stubs_installed_;
  Runtime* const runtime = Runtime::Current();
  ClassLinker* const class_linker = runtime->GetClassLinker();
  bool is_class_initialized = method->GetDeclaringClass()->IsInitialized();
  if (uninstall) {
    if ((forced_interpret_only_ || IsDeoptimized(method)) && !method->IsNative()) {
      new_quick_code = GetQuickToInterpreterBridge();
    } else if (is_class_initialized || !method->IsStatic() || method->IsConstructor()) {
      new_quick_code = class_linker->GetQuickOatCodeFor(method);
      if (NeedDebugVersionForBootImageCode(method, new_quick_code)) {
        new_quick_code = GetQuickToInterpreterBridge();
      }
    } else {
      // Static method of an uninitialized class keeps the resolution stub.
      new_quick_code = GetQuickResolutionStub();
    }
  } else {  // !uninstall
    if ((interpreter_stubs_installed_ || forced_interpret_only_ || IsDeoptimized(method)) &&
        !method->IsNative()) {
      new_quick_code = GetQuickToInterpreterBridge();
    } else {
      // Do not overwrite resolution trampoline. When the trampoline initializes the method's
      // class, all its static methods code will be set to the instrumentation entry point.
      // For more details, see ClassLinker::FixupStaticTrampolines.
      if (is_class_initialized || !method->IsStatic() || method->IsConstructor()) {
        new_quick_code = class_linker->GetQuickOatCodeFor(method);
        if (NeedDebugVersionForBootImageCode(method, new_quick_code)) {
          // Oat code should not be used. Don't install instrumentation stub and
          // use interpreter for instrumentation.
          new_quick_code = GetQuickToInterpreterBridge();
        } else if (entry_exit_stubs_installed_) {
          new_quick_code = GetQuickInstrumentationEntryPoint();
        }
      } else {
        new_quick_code = GetQuickResolutionStub();
      }
    }
  }
  UpdateEntrypoints(method, new_quick_code);
}
166
167 // Places the instrumentation exit pc as the return PC for every quick frame. This also allows
168 // deoptimization of quick frames to interpreter frames.
169 // Since we may already have done this previously, we need to push new instrumentation frame before
170 // existing instrumentation frames.
// Places the instrumentation exit pc as the return PC for every quick frame. This also allows
// deoptimization of quick frames to interpreter frames.
// Since we may already have done this previously, we need to push new instrumentation frame before
// existing instrumentation frames.
static void InstrumentationInstallStack(Thread* thread, void* arg)
    SHARED_REQUIRES(Locks::mutator_lock_) {
  // Visitor that redirects each quick frame's return PC to the instrumentation
  // exit stub and mirrors the frames onto the thread's instrumentation stack.
  // Shadow (interpreter) frames are collected separately in shadow_stack_ so
  // method-entry events can be emitted for them afterwards.
  struct InstallStackVisitor FINAL : public StackVisitor {
    InstallStackVisitor(Thread* thread_in, Context* context, uintptr_t instrumentation_exit_pc)
        : StackVisitor(thread_in, context, kInstrumentationStackWalk),
          instrumentation_stack_(thread_in->GetInstrumentationStack()),
          instrumentation_exit_pc_(instrumentation_exit_pc),
          reached_existing_instrumentation_frames_(false), instrumentation_stack_depth_(0),
          last_return_pc_(0) {
    }

    bool VisitFrame() OVERRIDE SHARED_REQUIRES(Locks::mutator_lock_) {
      ArtMethod* m = GetMethod();
      if (m == nullptr) {
        if (kVerboseInstrumentation) {
          LOG(INFO) << " Skipping upcall. Frame " << GetFrameId();
        }
        last_return_pc_ = 0;
        return true;  // Ignore upcalls.
      }
      if (GetCurrentQuickFrame() == nullptr) {
        // Interpreter (shadow) frame: record it for later method-entry events;
        // there is no return PC to redirect.
        bool interpreter_frame = true;
        InstrumentationStackFrame instrumentation_frame(GetThisObject(), m, 0, GetFrameId(),
                                                        interpreter_frame);
        if (kVerboseInstrumentation) {
          LOG(INFO) << "Pushing shadow frame " << instrumentation_frame.Dump();
        }
        shadow_stack_.push_back(instrumentation_frame);
        return true;  // Continue.
      }
      uintptr_t return_pc = GetReturnPc();
      if (m->IsRuntimeMethod()) {
        if (return_pc == instrumentation_exit_pc_) {
          // Runtime method already returning to the exit stub: a previously
          // instrumented quick-to-interpreter transition.
          if (kVerboseInstrumentation) {
            LOG(INFO) << " Handling quick to interpreter transition. Frame " << GetFrameId();
          }
          CHECK_LT(instrumentation_stack_depth_, instrumentation_stack_->size());
          const InstrumentationStackFrame& frame =
              instrumentation_stack_->at(instrumentation_stack_depth_);
          CHECK(frame.interpreter_entry_);
          // This is an interpreter frame so method enter event must have been reported. However we
          // need to push a DEX pc into the dex_pcs_ list to match size of instrumentation stack.
          // Since we won't report method entry here, we can safely push any DEX pc.
          dex_pcs_.push_back(0);
          last_return_pc_ = frame.return_pc_;
          ++instrumentation_stack_depth_;
          return true;
        } else {
          if (kVerboseInstrumentation) {
            LOG(INFO) << " Skipping runtime method. Frame " << GetFrameId();
          }
          last_return_pc_ = GetReturnPc();
          return true;  // Ignore unresolved methods since they will be instrumented after resolution.
        }
      }
      if (kVerboseInstrumentation) {
        LOG(INFO) << " Installing exit stub in " << DescribeLocation();
      }
      if (return_pc == instrumentation_exit_pc_) {
        // We've reached a frame which has already been installed with instrumentation exit stub.
        // We should have already installed instrumentation on previous frames.
        reached_existing_instrumentation_frames_ = true;

        CHECK_LT(instrumentation_stack_depth_, instrumentation_stack_->size());
        const InstrumentationStackFrame& frame =
            instrumentation_stack_->at(instrumentation_stack_depth_);
        CHECK_EQ(m, frame.method_) << "Expected " << PrettyMethod(m)
            << ", Found " << PrettyMethod(frame.method_);
        return_pc = frame.return_pc_;
        if (kVerboseInstrumentation) {
          LOG(INFO) << "Ignoring already instrumented " << frame.Dump();
        }
      } else {
        CHECK_NE(return_pc, 0U);
        CHECK(!reached_existing_instrumentation_frames_);
        InstrumentationStackFrame instrumentation_frame(GetThisObject(), m, return_pc, GetFrameId(),
                                                        false);
        if (kVerboseInstrumentation) {
          LOG(INFO) << "Pushing frame " << instrumentation_frame.Dump();
        }

        // Insert frame at the right position so we do not corrupt the instrumentation stack.
        // Instrumentation stack frames are in descending frame id order.
        auto it = instrumentation_stack_->begin();
        for (auto end = instrumentation_stack_->end(); it != end; ++it) {
          const InstrumentationStackFrame& current = *it;
          if (instrumentation_frame.frame_id_ >= current.frame_id_) {
            break;
          }
        }
        instrumentation_stack_->insert(it, instrumentation_frame);
        // Redirect this frame's return to the instrumentation exit stub.
        SetReturnPc(instrumentation_exit_pc_);
      }
      // Translate last_return_pc_ to a DEX pc for this method; kDexNoIndex
      // when no quick method header is available to do the mapping.
      dex_pcs_.push_back((GetCurrentOatQuickMethodHeader() == nullptr)
          ? DexFile::kDexNoIndex
          : GetCurrentOatQuickMethodHeader()->ToDexPc(m, last_return_pc_));
      last_return_pc_ = return_pc;
      ++instrumentation_stack_depth_;
      return true;  // Continue.
    }
    std::deque<InstrumentationStackFrame>* const instrumentation_stack_;
    std::vector<InstrumentationStackFrame> shadow_stack_;
    std::vector<uint32_t> dex_pcs_;
    const uintptr_t instrumentation_exit_pc_;
    bool reached_existing_instrumentation_frames_;
    size_t instrumentation_stack_depth_;
    uintptr_t last_return_pc_;
  };
  if (kVerboseInstrumentation) {
    std::string thread_name;
    thread->GetThreadName(thread_name);
    LOG(INFO) << "Installing exit stubs in " << thread_name;
  }

  Instrumentation* instrumentation = reinterpret_cast<Instrumentation*>(arg);
  std::unique_ptr<Context> context(Context::Create());
  uintptr_t instrumentation_exit_pc = reinterpret_cast<uintptr_t>(GetQuickInstrumentationExitPc());
  InstallStackVisitor visitor(thread, context.get(), instrumentation_exit_pc);
  visitor.WalkStack(true);
  CHECK_EQ(visitor.dex_pcs_.size(), thread->GetInstrumentationStack()->size());

  if (instrumentation->ShouldNotifyMethodEnterExitEvents()) {
    // Create method enter events for all methods currently on the thread's stack. We only do this
    // if no debugger is attached to prevent from posting events twice.
    // Both stacks are walked outermost-frame first, interleaving shadow frames
    // by frame id so events are posted in call order.
    auto ssi = visitor.shadow_stack_.rbegin();
    for (auto isi = thread->GetInstrumentationStack()->rbegin(),
        end = thread->GetInstrumentationStack()->rend(); isi != end; ++isi) {
      while (ssi != visitor.shadow_stack_.rend() && (*ssi).frame_id_ < (*isi).frame_id_) {
        instrumentation->MethodEnterEvent(thread, (*ssi).this_object_, (*ssi).method_, 0);
        ++ssi;
      }
      uint32_t dex_pc = visitor.dex_pcs_.back();
      visitor.dex_pcs_.pop_back();
      if (!isi->interpreter_entry_) {
        instrumentation->MethodEnterEvent(thread, (*isi).this_object_, (*isi).method_, dex_pc);
      }
    }
  }
  thread->VerifyStack();
}
311
// Instruments a single thread's existing stack: marks stubs as installed and
// installs instrumentation frames / exit PCs for the frames already present.
void Instrumentation::InstrumentThreadStack(Thread* thread) {
  instrumentation_stubs_installed_ = true;
  InstrumentationInstallStack(thread, this);
}
316
317 // Removes the instrumentation exit pc as the return PC for every quick frame.
// Removes the instrumentation exit pc as the return PC for every quick frame.
static void InstrumentationRestoreStack(Thread* thread, void* arg)
    REQUIRES(Locks::mutator_lock_) {
  Locks::mutator_lock_->AssertExclusiveHeld(Thread::Current());

  // Visitor that, for every quick frame with a matching entry on the thread's
  // instrumentation stack, restores the original return PC saved in that
  // entry and optionally posts a method-exit event.
  struct RestoreStackVisitor FINAL : public StackVisitor {
    RestoreStackVisitor(Thread* thread_in, uintptr_t instrumentation_exit_pc,
                        Instrumentation* instrumentation)
        : StackVisitor(thread_in, nullptr, kInstrumentationStackWalk),
          thread_(thread_in),
          instrumentation_exit_pc_(instrumentation_exit_pc),
          instrumentation_(instrumentation),
          instrumentation_stack_(thread_in->GetInstrumentationStack()),
          frames_removed_(0) {}

    bool VisitFrame() OVERRIDE SHARED_REQUIRES(Locks::mutator_lock_) {
      if (instrumentation_stack_->size() == 0) {
        return false;  // Stop.
      }
      ArtMethod* m = GetMethod();
      if (GetCurrentQuickFrame() == nullptr) {
        if (kVerboseInstrumentation) {
          LOG(INFO) << " Ignoring a shadow frame. Frame " << GetFrameId()
              << " Method=" << PrettyMethod(m);
        }
        return true;  // Ignore shadow frames.
      }
      if (m == nullptr) {
        if (kVerboseInstrumentation) {
          LOG(INFO) << " Skipping upcall. Frame " << GetFrameId();
        }
        return true;  // Ignore upcalls.
      }
      bool removed_stub = false;
      // TODO: make this search more efficient?
      const size_t frameId = GetFrameId();
      for (const InstrumentationStackFrame& instrumentation_frame : *instrumentation_stack_) {
        if (instrumentation_frame.frame_id_ == frameId) {
          if (kVerboseInstrumentation) {
            LOG(INFO) << " Removing exit stub in " << DescribeLocation();
          }
          if (instrumentation_frame.interpreter_entry_) {
            // Interpreter-entry frames map to the RefsAndArgs callee-save
            // method, not the instrumented method itself.
            CHECK(m == Runtime::Current()->GetCalleeSaveMethod(Runtime::kRefsAndArgs));
          } else {
            CHECK(m == instrumentation_frame.method_) << PrettyMethod(m);
          }
          SetReturnPc(instrumentation_frame.return_pc_);
          if (instrumentation_->ShouldNotifyMethodEnterExitEvents()) {
            // Create the method exit events. As the methods didn't really exit the result is 0.
            // We only do this if no debugger is attached to prevent from posting events twice.
            instrumentation_->MethodExitEvent(thread_, instrumentation_frame.this_object_, m,
                                              GetDexPc(), JValue());
          }
          frames_removed_++;
          removed_stub = true;
          break;
        }
      }
      if (!removed_stub) {
        if (kVerboseInstrumentation) {
          LOG(INFO) << " No exit stub in " << DescribeLocation();
        }
      }
      return true;  // Continue.
    }
    Thread* const thread_;
    const uintptr_t instrumentation_exit_pc_;
    Instrumentation* const instrumentation_;
    std::deque<instrumentation::InstrumentationStackFrame>* const instrumentation_stack_;
    size_t frames_removed_;
  };
  if (kVerboseInstrumentation) {
    std::string thread_name;
    thread->GetThreadName(thread_name);
    LOG(INFO) << "Removing exit stubs in " << thread_name;
  }
  std::deque<instrumentation::InstrumentationStackFrame>* stack = thread->GetInstrumentationStack();
  if (stack->size() > 0) {
    Instrumentation* instrumentation = reinterpret_cast<Instrumentation*>(arg);
    uintptr_t instrumentation_exit_pc =
        reinterpret_cast<uintptr_t>(GetQuickInstrumentationExitPc());
    RestoreStackVisitor visitor(thread, instrumentation_exit_pc, instrumentation);
    visitor.WalkStack(true);
    // Every recorded instrumentation frame must have been matched and removed.
    CHECK_EQ(visitor.frames_removed_, stack->size());
    while (stack->size() > 0) {
      stack->pop_front();
    }
  }
}
406
HasEvent(Instrumentation::InstrumentationEvent expected,uint32_t events)407 static bool HasEvent(Instrumentation::InstrumentationEvent expected, uint32_t events) {
408 return (events & expected) != 0;
409 }
410
PotentiallyAddListenerTo(Instrumentation::InstrumentationEvent event,uint32_t events,std::list<InstrumentationListener * > & list,InstrumentationListener * listener,bool * has_listener)411 static void PotentiallyAddListenerTo(Instrumentation::InstrumentationEvent event,
412 uint32_t events,
413 std::list<InstrumentationListener*>& list,
414 InstrumentationListener* listener,
415 bool* has_listener)
416 REQUIRES(Locks::mutator_lock_, !Locks::thread_list_lock_, !Locks::classlinker_classes_lock_) {
417 Locks::mutator_lock_->AssertExclusiveHeld(Thread::Current());
418 if (!HasEvent(event, events)) {
419 return;
420 }
421 // If there is a free slot in the list, we insert the listener in that slot.
422 // Otherwise we add it to the end of the list.
423 auto it = std::find(list.begin(), list.end(), nullptr);
424 if (it != list.end()) {
425 *it = listener;
426 } else {
427 list.push_back(listener);
428 }
429 *has_listener = true;
430 }
431
// Registers |listener| for every event requested in the |events| bit mask and
// refreshes the interpreter handler table to account for the change. Must be
// called with the mutator lock held exclusively.
void Instrumentation::AddListener(InstrumentationListener* listener, uint32_t events) {
  Locks::mutator_lock_->AssertExclusiveHeld(Thread::Current());
  PotentiallyAddListenerTo(kMethodEntered,
                           events,
                           method_entry_listeners_,
                           listener,
                           &have_method_entry_listeners_);
  PotentiallyAddListenerTo(kMethodExited,
                           events,
                           method_exit_listeners_,
                           listener,
                           &have_method_exit_listeners_);
  PotentiallyAddListenerTo(kMethodUnwind,
                           events,
                           method_unwind_listeners_,
                           listener,
                           &have_method_unwind_listeners_);
  PotentiallyAddListenerTo(kBranch,
                           events,
                           branch_listeners_,
                           listener,
                           &have_branch_listeners_);
  PotentiallyAddListenerTo(kInvokeVirtualOrInterface,
                           events,
                           invoke_virtual_or_interface_listeners_,
                           listener,
                           &have_invoke_virtual_or_interface_listeners_);
  PotentiallyAddListenerTo(kDexPcMoved,
                           events,
                           dex_pc_listeners_,
                           listener,
                           &have_dex_pc_listeners_);
  PotentiallyAddListenerTo(kFieldRead,
                           events,
                           field_read_listeners_,
                           listener,
                           &have_field_read_listeners_);
  PotentiallyAddListenerTo(kFieldWritten,
                           events,
                           field_write_listeners_,
                           listener,
                           &have_field_write_listeners_);
  PotentiallyAddListenerTo(kExceptionCaught,
                           events,
                           exception_caught_listeners_,
                           listener,
                           &have_exception_caught_listeners_);
  // Recompute the interpreter handler table now that the listener set changed.
  UpdateInterpreterHandlerTable();
}
481
PotentiallyRemoveListenerFrom(Instrumentation::InstrumentationEvent event,uint32_t events,std::list<InstrumentationListener * > & list,InstrumentationListener * listener,bool * has_listener)482 static void PotentiallyRemoveListenerFrom(Instrumentation::InstrumentationEvent event,
483 uint32_t events,
484 std::list<InstrumentationListener*>& list,
485 InstrumentationListener* listener,
486 bool* has_listener)
487 REQUIRES(Locks::mutator_lock_, !Locks::thread_list_lock_, !Locks::classlinker_classes_lock_) {
488 Locks::mutator_lock_->AssertExclusiveHeld(Thread::Current());
489 if (!HasEvent(event, events)) {
490 return;
491 }
492 auto it = std::find(list.begin(), list.end(), listener);
493 if (it != list.end()) {
494 // Just update the entry, do not remove from the list. Removing entries in the list
495 // is unsafe when mutators are iterating over it.
496 *it = nullptr;
497 }
498
499 // Check if the list contains any non-null listener, and update 'has_listener'.
500 for (InstrumentationListener* l : list) {
501 if (l != nullptr) {
502 *has_listener = true;
503 return;
504 }
505 }
506 *has_listener = false;
507 }
508
// Unregisters |listener| from every event requested in the |events| bit mask
// and refreshes the interpreter handler table. Must be called with the
// mutator lock held exclusively.
void Instrumentation::RemoveListener(InstrumentationListener* listener, uint32_t events) {
  Locks::mutator_lock_->AssertExclusiveHeld(Thread::Current());
  PotentiallyRemoveListenerFrom(kMethodEntered,
                                events,
                                method_entry_listeners_,
                                listener,
                                &have_method_entry_listeners_);
  PotentiallyRemoveListenerFrom(kMethodExited,
                                events,
                                method_exit_listeners_,
                                listener,
                                &have_method_exit_listeners_);
  PotentiallyRemoveListenerFrom(kMethodUnwind,
                                events,
                                method_unwind_listeners_,
                                listener,
                                &have_method_unwind_listeners_);
  PotentiallyRemoveListenerFrom(kBranch,
                                events,
                                branch_listeners_,
                                listener,
                                &have_branch_listeners_);
  PotentiallyRemoveListenerFrom(kInvokeVirtualOrInterface,
                                events,
                                invoke_virtual_or_interface_listeners_,
                                listener,
                                &have_invoke_virtual_or_interface_listeners_);
  PotentiallyRemoveListenerFrom(kDexPcMoved,
                                events,
                                dex_pc_listeners_,
                                listener,
                                &have_dex_pc_listeners_);
  PotentiallyRemoveListenerFrom(kFieldRead,
                                events,
                                field_read_listeners_,
                                listener,
                                &have_field_read_listeners_);
  PotentiallyRemoveListenerFrom(kFieldWritten,
                                events,
                                field_write_listeners_,
                                listener,
                                &have_field_write_listeners_);
  PotentiallyRemoveListenerFrom(kExceptionCaught,
                                events,
                                exception_caught_listeners_,
                                listener,
                                &have_exception_caught_listeners_);
  // Recompute the interpreter handler table now that the listener set changed.
  UpdateInterpreterHandlerTable();
}
558
GetCurrentInstrumentationLevel() const559 Instrumentation::InstrumentationLevel Instrumentation::GetCurrentInstrumentationLevel() const {
560 if (interpreter_stubs_installed_) {
561 return InstrumentationLevel::kInstrumentWithInterpreter;
562 } else if (entry_exit_stubs_installed_) {
563 return InstrumentationLevel::kInstrumentWithInstrumentationStubs;
564 } else {
565 return InstrumentationLevel::kInstrumentNothing;
566 }
567 }
568
// Records |desired_level| for client |key|, computes the highest level
// requested by any client, and transitions the runtime to it: updates the
// stub flags, re-installs stubs for every class, and installs or restores
// instrumentation frames on every thread's stack. Requires the mutator lock
// to be held exclusively and the thread list lock not held.
void Instrumentation::ConfigureStubs(const char* key, InstrumentationLevel desired_level) {
  // Store the instrumentation level for this key or remove it.
  if (desired_level == InstrumentationLevel::kInstrumentNothing) {
    // The client no longer needs instrumentation.
    requested_instrumentation_levels_.erase(key);
  } else {
    // The client needs instrumentation.
    requested_instrumentation_levels_.Overwrite(key, desired_level);
  }

  // Look for the highest required instrumentation level.
  InstrumentationLevel requested_level = InstrumentationLevel::kInstrumentNothing;
  for (const auto& v : requested_instrumentation_levels_) {
    requested_level = std::max(requested_level, v.second);
  }

  interpret_only_ = (requested_level == InstrumentationLevel::kInstrumentWithInterpreter) ||
                    forced_interpret_only_;

  InstrumentationLevel current_level = GetCurrentInstrumentationLevel();
  if (requested_level == current_level) {
    // We're already set.
    return;
  }
  Thread* const self = Thread::Current();
  Runtime* runtime = Runtime::Current();
  Locks::mutator_lock_->AssertExclusiveHeld(self);
  Locks::thread_list_lock_->AssertNotHeld(self);
  if (requested_level > InstrumentationLevel::kInstrumentNothing) {
    if (requested_level == InstrumentationLevel::kInstrumentWithInterpreter) {
      interpreter_stubs_installed_ = true;
      entry_exit_stubs_installed_ = true;
    } else {
      CHECK_EQ(requested_level, InstrumentationLevel::kInstrumentWithInstrumentationStubs);
      entry_exit_stubs_installed_ = true;
      interpreter_stubs_installed_ = false;
    }
    InstallStubsClassVisitor visitor(this);
    runtime->GetClassLinker()->VisitClasses(&visitor);
    instrumentation_stubs_installed_ = true;
    MutexLock mu(self, *Locks::thread_list_lock_);
    runtime->GetThreadList()->ForEach(InstrumentationInstallStack, this);
  } else {
    interpreter_stubs_installed_ = false;
    entry_exit_stubs_installed_ = false;
    InstallStubsClassVisitor visitor(this);
    runtime->GetClassLinker()->VisitClasses(&visitor);
    // Restore stack only if there is no method currently deoptimized.
    bool empty;
    {
      ReaderMutexLock mu(self, deoptimized_methods_lock_);
      empty = IsDeoptimizedMethodsEmpty();  // Avoid lock violation.
    }
    if (empty) {
      MutexLock mu(self, *Locks::thread_list_lock_);
      Runtime::Current()->GetThreadList()->ForEach(InstrumentationRestoreStack, this);
      // Only do this after restoring, as walking the stack when restoring will see
      // the instrumentation exit pc.
      instrumentation_stubs_installed_ = false;
    }
  }
}
631
// Thread-list callback: makes a single thread reload its quick alloc
// entrypoints so they match the current instrumentation state.
static void ResetQuickAllocEntryPointsForThread(Thread* thread, void* arg ATTRIBUTE_UNUSED) {
  thread->ResetQuickAllocEntryPointsForThread();
}
635
// Switches the quick alloc entrypoints between their instrumented and
// uninstrumented variants and makes every thread reload them. Once the
// runtime has started, all threads are suspended for the switch;
// NOTE(review): before start-up the suspend-all is skipped — presumably no
// Java threads can be executing through the entrypoints yet; confirm.
void Instrumentation::SetEntrypointsInstrumented(bool instrumented) {
  Thread* self = Thread::Current();
  Runtime* runtime = Runtime::Current();
  Locks::mutator_lock_->AssertNotHeld(self);
  Locks::instrument_entrypoints_lock_->AssertHeld(self);
  if (runtime->IsStarted()) {
    ScopedSuspendAll ssa(__FUNCTION__);
    MutexLock mu(self, *Locks::runtime_shutdown_lock_);
    SetQuickAllocEntryPointsInstrumented(instrumented);
    ResetQuickAllocEntryPoints();
    alloc_entrypoints_instrumented_ = instrumented;
  } else {
    MutexLock mu(self, *Locks::runtime_shutdown_lock_);
    SetQuickAllocEntryPointsInstrumented(instrumented);
    ResetQuickAllocEntryPoints();
    alloc_entrypoints_instrumented_ = instrumented;
  }
}
654
// Public entry point: takes the instrument-entrypoints lock and bumps the
// alloc-entrypoint instrumentation refcount.
void Instrumentation::InstrumentQuickAllocEntryPoints() {
  MutexLock mu(Thread::Current(), *Locks::instrument_entrypoints_lock_);
  InstrumentQuickAllocEntryPointsLocked();
}
659
// Public entry point: takes the instrument-entrypoints lock and drops the
// alloc-entrypoint instrumentation refcount.
void Instrumentation::UninstrumentQuickAllocEntryPoints() {
  MutexLock mu(Thread::Current(), *Locks::instrument_entrypoints_lock_);
  UninstrumentQuickAllocEntryPointsLocked();
}
664
// Refcounted enable: only the 0 -> 1 transition actually switches the
// entrypoints to their instrumented variants.
void Instrumentation::InstrumentQuickAllocEntryPointsLocked() {
  Locks::instrument_entrypoints_lock_->AssertHeld(Thread::Current());
  if (quick_alloc_entry_points_instrumentation_counter_ == 0) {
    SetEntrypointsInstrumented(true);
  }
  ++quick_alloc_entry_points_instrumentation_counter_;
}
672
// Refcounted disable: only the 1 -> 0 transition switches the entrypoints
// back to their uninstrumented variants. Underflow is a checked failure.
void Instrumentation::UninstrumentQuickAllocEntryPointsLocked() {
  Locks::instrument_entrypoints_lock_->AssertHeld(Thread::Current());
  CHECK_GT(quick_alloc_entry_points_instrumentation_counter_, 0U);
  --quick_alloc_entry_points_instrumentation_counter_;
  if (quick_alloc_entry_points_instrumentation_counter_ == 0) {
    SetEntrypointsInstrumented(false);
  }
}
681
// Makes every live thread reload its quick alloc entrypoints. Only performed
// once the runtime has started.
void Instrumentation::ResetQuickAllocEntryPoints() {
  Runtime* runtime = Runtime::Current();
  if (runtime->IsStarted()) {
    MutexLock mu(Thread::Current(), *Locks::thread_list_lock_);
    runtime->GetThreadList()->ForEach(ResetQuickAllocEntryPointsForThread, nullptr);
  }
}
689
// Computes the entrypoint |method| should actually get when its code is
// updated to |quick_code|, taking the current instrumentation state into
// account, then installs it.
void Instrumentation::UpdateMethodsCodeImpl(ArtMethod* method, const void* quick_code) {
  const void* new_quick_code;
  if (LIKELY(!instrumentation_stubs_installed_)) {
    // Fast path: no instrumentation is active, use the provided code as-is.
    new_quick_code = quick_code;
  } else {
    if ((interpreter_stubs_installed_ || IsDeoptimized(method)) && !method->IsNative()) {
      new_quick_code = GetQuickToInterpreterBridge();
    } else {
      ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
      if (class_linker->IsQuickResolutionStub(quick_code) ||
          class_linker->IsQuickToInterpreterBridge(quick_code)) {
        // Resolution stub / interpreter bridge are kept unchanged.
        new_quick_code = quick_code;
      } else if (entry_exit_stubs_installed_) {
        new_quick_code = GetQuickInstrumentationEntryPoint();
      } else {
        new_quick_code = quick_code;
      }
    }
  }
  UpdateEntrypoints(method, new_quick_code);
}
711
// Updates |method|'s entrypoint to |quick_code|, respecting the current
// instrumentation. The declaring class must already be resolved.
void Instrumentation::UpdateMethodsCode(ArtMethod* method, const void* quick_code) {
  DCHECK(method->GetDeclaringClass()->IsResolved());
  UpdateMethodsCodeImpl(method, quick_code);
}
716
// Debugger variant of UpdateMethodsCode() that skips the resolved-class
// check.
void Instrumentation::UpdateMethodsCodeFromDebugger(ArtMethod* method, const void* quick_code) {
  // When debugger attaches, we may update the entry points of all methods of a class
  // to the interpreter bridge. A method's declaring class might not be in resolved
  // state yet in that case.
  UpdateMethodsCodeImpl(method, quick_code);
}
723
AddDeoptimizedMethod(ArtMethod * method)724 bool Instrumentation::AddDeoptimizedMethod(ArtMethod* method) {
725 if (IsDeoptimizedMethod(method)) {
726 // Already in the map. Return.
727 return false;
728 }
729 // Not found. Add it.
730 deoptimized_methods_.insert(method);
731 return true;
732 }
733
IsDeoptimizedMethod(ArtMethod * method)734 bool Instrumentation::IsDeoptimizedMethod(ArtMethod* method) {
735 return deoptimized_methods_.find(method) != deoptimized_methods_.end();
736 }
737
BeginDeoptimizedMethod()738 ArtMethod* Instrumentation::BeginDeoptimizedMethod() {
739 if (deoptimized_methods_.empty()) {
740 // Empty.
741 return nullptr;
742 }
743 return *deoptimized_methods_.begin();
744 }
745
RemoveDeoptimizedMethod(ArtMethod * method)746 bool Instrumentation::RemoveDeoptimizedMethod(ArtMethod* method) {
747 auto it = deoptimized_methods_.find(method);
748 if (it == deoptimized_methods_.end()) {
749 return false;
750 }
751 deoptimized_methods_.erase(it);
752 return true;
753 }
754
// Returns whether no method is currently deoptimized. Callers in this file
// invoke it while holding deoptimized_methods_lock_.
bool Instrumentation::IsDeoptimizedMethodsEmpty() const {
  return deoptimized_methods_.empty();
}
758
// Marks |method| as deoptimized: records it in the deoptimized set, points
// its entrypoint at the instrumentation entry stub and installs
// instrumentation frames on every thread's stack. Native, proxy and
// non-invokable methods cannot be deoptimized (checked). Deoptimizing the
// same method twice is a checked failure.
void Instrumentation::Deoptimize(ArtMethod* method) {
  CHECK(!method->IsNative());
  CHECK(!method->IsProxyMethod());
  CHECK(method->IsInvokable());

  Thread* self = Thread::Current();
  {
    WriterMutexLock mu(self, deoptimized_methods_lock_);
    bool has_not_been_deoptimized = AddDeoptimizedMethod(method);
    CHECK(has_not_been_deoptimized) << "Method " << PrettyMethod(method)
        << " is already deoptimized";
  }
  // With full interpreter stubs installed there is nothing extra to do: the
  // method already runs under the interpreter.
  if (!interpreter_stubs_installed_) {
    UpdateEntrypoints(method, GetQuickInstrumentationEntryPoint());

    // Install instrumentation exit stub and instrumentation frames. We may already have installed
    // these previously so it will only cover the newly created frames.
    instrumentation_stubs_installed_ = true;
    MutexLock mu(self, *Locks::thread_list_lock_);
    Runtime::Current()->GetThreadList()->ForEach(InstrumentationInstallStack, this);
  }
}
781
// Reverts the deoptimization of |method|: removes it from the deoptimized set
// and, unless the whole runtime runs under interpreter stubs, restores its
// entrypoint (resolution stub, oat code, or interpreter bridge as
// appropriate). When the last deoptimized method is removed, the threads'
// stacks are restored as well. Undeoptimizing a method that was not
// deoptimized is a checked failure.
void Instrumentation::Undeoptimize(ArtMethod* method) {
  CHECK(!method->IsNative());
  CHECK(!method->IsProxyMethod());
  CHECK(method->IsInvokable());

  Thread* self = Thread::Current();
  bool empty;
  {
    WriterMutexLock mu(self, deoptimized_methods_lock_);
    bool found_and_erased = RemoveDeoptimizedMethod(method);
    CHECK(found_and_erased) << "Method " << PrettyMethod(method)
        << " is not deoptimized";
    empty = IsDeoptimizedMethodsEmpty();
  }

  // Restore code and possibly stack only if we did not deoptimize everything.
  if (!interpreter_stubs_installed_) {
    // Restore its code or resolution trampoline.
    ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
    if (method->IsStatic() && !method->IsConstructor() &&
        !method->GetDeclaringClass()->IsInitialized()) {
      // Static method of an uninitialized class goes back to the resolution
      // stub (see InstallStubsForMethod for the matching logic).
      UpdateEntrypoints(method, GetQuickResolutionStub());
    } else {
      const void* quick_code = class_linker->GetQuickOatCodeFor(method);
      if (NeedDebugVersionForBootImageCode(method, quick_code)) {
        quick_code = GetQuickToInterpreterBridge();
      }
      UpdateEntrypoints(method, quick_code);
    }

    // If there is no deoptimized method left, we can restore the stack of each thread.
    if (empty) {
      MutexLock mu(self, *Locks::thread_list_lock_);
      Runtime::Current()->GetThreadList()->ForEach(InstrumentationRestoreStack, this);
      instrumentation_stubs_installed_ = false;
    }
  }
}
820
IsDeoptimized(ArtMethod * method)821 bool Instrumentation::IsDeoptimized(ArtMethod* method) {
822 DCHECK(method != nullptr);
823 ReaderMutexLock mu(Thread::Current(), deoptimized_methods_lock_);
824 return IsDeoptimizedMethod(method);
825 }
826
EnableDeoptimization()827 void Instrumentation::EnableDeoptimization() {
828 ReaderMutexLock mu(Thread::Current(), deoptimized_methods_lock_);
829 CHECK(IsDeoptimizedMethodsEmpty());
830 CHECK_EQ(deoptimization_enabled_, false);
831 deoptimization_enabled_ = true;
832 }
833
// Tears down deoptimization support registered under |key|: undoes full
// deoptimization if it is active, then undeoptimizes every individually
// deoptimized method. CHECK-fails if support was not enabled.
void Instrumentation::DisableDeoptimization(const char* key) {
  CHECK_EQ(deoptimization_enabled_, true);
  // If we deoptimized everything, undo it.
  if (interpreter_stubs_installed_) {
    UndeoptimizeEverything(key);
  }
  // Undeoptimize selected methods.
  while (true) {
    ArtMethod* method;
    {
      // Pick one method while holding the lock, but call Undeoptimize()
      // outside of it: Undeoptimize() re-acquires the lock in write mode.
      ReaderMutexLock mu(Thread::Current(), deoptimized_methods_lock_);
      if (IsDeoptimizedMethodsEmpty()) {
        break;
      }
      method = BeginDeoptimizedMethod();
      CHECK(method != nullptr);
    }
    Undeoptimize(method);
  }
  deoptimization_enabled_ = false;
}
855
856 // Indicates if instrumentation should notify method enter/exit events to the listeners.
ShouldNotifyMethodEnterExitEvents() const857 bool Instrumentation::ShouldNotifyMethodEnterExitEvents() const {
858 if (!HasMethodEntryListeners() && !HasMethodExitListeners()) {
859 return false;
860 }
861 return !deoptimization_enabled_ && !interpreter_stubs_installed_;
862 }
863
// Switches the runtime to full interpreter mode under |key|. Requires
// EnableDeoptimization() to have been called first.
void Instrumentation::DeoptimizeEverything(const char* key) {
  CHECK(deoptimization_enabled_);
  ConfigureStubs(key, InstrumentationLevel::kInstrumentWithInterpreter);
}
868
// Leaves the full interpreter mode previously entered via
// DeoptimizeEverything() under the same |key|. CHECK-fails if full
// deoptimization is not active.
void Instrumentation::UndeoptimizeEverything(const char* key) {
  CHECK(interpreter_stubs_installed_);
  CHECK(deoptimization_enabled_);
  ConfigureStubs(key, InstrumentationLevel::kInstrumentNothing);
}
874
EnableMethodTracing(const char * key,bool needs_interpreter)875 void Instrumentation::EnableMethodTracing(const char* key, bool needs_interpreter) {
876 InstrumentationLevel level;
877 if (needs_interpreter) {
878 level = InstrumentationLevel::kInstrumentWithInterpreter;
879 } else {
880 level = InstrumentationLevel::kInstrumentWithInstrumentationStubs;
881 }
882 ConfigureStubs(key, level);
883 }
884
// Drops the instrumentation level requested by EnableMethodTracing() for |key|.
void Instrumentation::DisableMethodTracing(const char* key) {
  ConfigureStubs(key, InstrumentationLevel::kInstrumentNothing);
}
888
GetQuickCodeFor(ArtMethod * method,size_t pointer_size) const889 const void* Instrumentation::GetQuickCodeFor(ArtMethod* method, size_t pointer_size) const {
890 Runtime* runtime = Runtime::Current();
891 if (LIKELY(!instrumentation_stubs_installed_)) {
892 const void* code = method->GetEntryPointFromQuickCompiledCodePtrSize(pointer_size);
893 DCHECK(code != nullptr);
894 ClassLinker* class_linker = runtime->GetClassLinker();
895 if (LIKELY(!class_linker->IsQuickResolutionStub(code) &&
896 !class_linker->IsQuickToInterpreterBridge(code)) &&
897 !class_linker->IsQuickResolutionStub(code) &&
898 !class_linker->IsQuickToInterpreterBridge(code)) {
899 return code;
900 }
901 }
902 return runtime->GetClassLinker()->GetQuickOatCodeFor(method);
903 }
904
MethodEnterEventImpl(Thread * thread,mirror::Object * this_object,ArtMethod * method,uint32_t dex_pc) const905 void Instrumentation::MethodEnterEventImpl(Thread* thread, mirror::Object* this_object,
906 ArtMethod* method,
907 uint32_t dex_pc) const {
908 if (HasMethodEntryListeners()) {
909 for (InstrumentationListener* listener : method_entry_listeners_) {
910 if (listener != nullptr) {
911 listener->MethodEntered(thread, this_object, method, dex_pc);
912 }
913 }
914 }
915 }
916
MethodExitEventImpl(Thread * thread,mirror::Object * this_object,ArtMethod * method,uint32_t dex_pc,const JValue & return_value) const917 void Instrumentation::MethodExitEventImpl(Thread* thread, mirror::Object* this_object,
918 ArtMethod* method,
919 uint32_t dex_pc, const JValue& return_value) const {
920 if (HasMethodExitListeners()) {
921 for (InstrumentationListener* listener : method_exit_listeners_) {
922 if (listener != nullptr) {
923 listener->MethodExited(thread, this_object, method, dex_pc, return_value);
924 }
925 }
926 }
927 }
928
MethodUnwindEvent(Thread * thread,mirror::Object * this_object,ArtMethod * method,uint32_t dex_pc) const929 void Instrumentation::MethodUnwindEvent(Thread* thread, mirror::Object* this_object,
930 ArtMethod* method,
931 uint32_t dex_pc) const {
932 if (HasMethodUnwindListeners()) {
933 for (InstrumentationListener* listener : method_unwind_listeners_) {
934 if (listener != nullptr) {
935 listener->MethodUnwind(thread, this_object, method, dex_pc);
936 }
937 }
938 }
939 }
940
DexPcMovedEventImpl(Thread * thread,mirror::Object * this_object,ArtMethod * method,uint32_t dex_pc) const941 void Instrumentation::DexPcMovedEventImpl(Thread* thread, mirror::Object* this_object,
942 ArtMethod* method,
943 uint32_t dex_pc) const {
944 for (InstrumentationListener* listener : dex_pc_listeners_) {
945 if (listener != nullptr) {
946 listener->DexPcMoved(thread, this_object, method, dex_pc);
947 }
948 }
949 }
950
BranchImpl(Thread * thread,ArtMethod * method,uint32_t dex_pc,int32_t offset) const951 void Instrumentation::BranchImpl(Thread* thread,
952 ArtMethod* method,
953 uint32_t dex_pc,
954 int32_t offset) const {
955 for (InstrumentationListener* listener : branch_listeners_) {
956 if (listener != nullptr) {
957 listener->Branch(thread, method, dex_pc, offset);
958 }
959 }
960 }
961
InvokeVirtualOrInterfaceImpl(Thread * thread,mirror::Object * this_object,ArtMethod * caller,uint32_t dex_pc,ArtMethod * callee) const962 void Instrumentation::InvokeVirtualOrInterfaceImpl(Thread* thread,
963 mirror::Object* this_object,
964 ArtMethod* caller,
965 uint32_t dex_pc,
966 ArtMethod* callee) const {
967 // We cannot have thread suspension since that would cause the this_object parameter to
968 // potentially become a dangling pointer. An alternative could be to put it in a handle instead.
969 ScopedAssertNoThreadSuspension ants(thread, __FUNCTION__);
970 for (InstrumentationListener* listener : invoke_virtual_or_interface_listeners_) {
971 if (listener != nullptr) {
972 listener->InvokeVirtualOrInterface(thread, this_object, caller, dex_pc, callee);
973 }
974 }
975 }
976
FieldReadEventImpl(Thread * thread,mirror::Object * this_object,ArtMethod * method,uint32_t dex_pc,ArtField * field) const977 void Instrumentation::FieldReadEventImpl(Thread* thread, mirror::Object* this_object,
978 ArtMethod* method, uint32_t dex_pc,
979 ArtField* field) const {
980 for (InstrumentationListener* listener : field_read_listeners_) {
981 if (listener != nullptr) {
982 listener->FieldRead(thread, this_object, method, dex_pc, field);
983 }
984 }
985 }
986
FieldWriteEventImpl(Thread * thread,mirror::Object * this_object,ArtMethod * method,uint32_t dex_pc,ArtField * field,const JValue & field_value) const987 void Instrumentation::FieldWriteEventImpl(Thread* thread, mirror::Object* this_object,
988 ArtMethod* method, uint32_t dex_pc,
989 ArtField* field, const JValue& field_value) const {
990 for (InstrumentationListener* listener : field_write_listeners_) {
991 if (listener != nullptr) {
992 listener->FieldWritten(thread, this_object, method, dex_pc, field, field_value);
993 }
994 }
995 }
996
// Reports a caught exception to the registered listeners. The thread's
// pending exception is cleared while listeners run and restored afterwards,
// so listeners never observe it as still pending.
void Instrumentation::ExceptionCaughtEvent(Thread* thread,
                                           mirror::Throwable* exception_object) const {
  if (HasExceptionCaughtListeners()) {
    DCHECK_EQ(thread->GetException(), exception_object);
    thread->ClearException();
    for (InstrumentationListener* listener : exception_caught_listeners_) {
      if (listener != nullptr) {
        listener->ExceptionCaught(thread, exception_object);
      }
    }
    // Re-install the exception so handling continues normally after the event.
    thread->SetException(exception_object);
  }
}
1010
1011 // Computes a frame ID by ignoring inlined frames.
ComputeFrameId(Thread * self,size_t frame_depth,size_t inlined_frames_before_frame)1012 size_t Instrumentation::ComputeFrameId(Thread* self,
1013 size_t frame_depth,
1014 size_t inlined_frames_before_frame) {
1015 CHECK_GE(frame_depth, inlined_frames_before_frame);
1016 size_t no_inline_depth = frame_depth - inlined_frames_before_frame;
1017 return StackVisitor::ComputeNumFrames(self, kInstrumentationStackWalk) - no_inline_depth;
1018 }
1019
// Sanity check: verifies that |instrumentation_frame| records the frame id
// expected at the current stack depth plus |delta|. On mismatch, dumps the
// stack and CHECK-fails.
static void CheckStackDepth(Thread* self, const InstrumentationStackFrame& instrumentation_frame,
                            int delta)
    SHARED_REQUIRES(Locks::mutator_lock_) {
  size_t frame_id = StackVisitor::ComputeNumFrames(self, kInstrumentationStackWalk) + delta;
  if (frame_id != instrumentation_frame.frame_id_) {
    LOG(ERROR) << "Expected frame_id=" << frame_id << " but found "
        << instrumentation_frame.frame_id_;
    StackVisitor::DescribeStack(self);
    CHECK_EQ(frame_id, instrumentation_frame.frame_id_);
  }
}
1031
// Records an InstrumentationStackFrame for |method| (saving the real return
// address |lr|) on the front of the thread-local instrumentation stack, then
// fires the method-enter event unless this entry came from the interpreter.
void Instrumentation::PushInstrumentationStackFrame(Thread* self, mirror::Object* this_object,
                                                    ArtMethod* method,
                                                    uintptr_t lr, bool interpreter_entry) {
  // We have a callee-save frame meaning this value is guaranteed to never be 0.
  size_t frame_id = StackVisitor::ComputeNumFrames(self, kInstrumentationStackWalk);
  std::deque<instrumentation::InstrumentationStackFrame>* stack = self->GetInstrumentationStack();
  if (kVerboseInstrumentation) {
    LOG(INFO) << "Entering " << PrettyMethod(method) << " from PC " << reinterpret_cast<void*>(lr);
  }
  instrumentation::InstrumentationStackFrame instrumentation_frame(this_object, method, lr,
                                                                   frame_id, interpreter_entry);
  stack->push_front(instrumentation_frame);

  if (!interpreter_entry) {
    MethodEnterEvent(self, this_object, method, 0);
  }
}
1049
// Pops the top InstrumentationStackFrame, fires the method-exit event, and
// decides where execution resumes: straight back to the caller via the saved
// return PC, or into the deoptimization entrypoint when the caller must
// continue in the interpreter.
// |gpr_result| and |fpr_result| carry the callee's raw return value from the
// integer and floating-point register banks; the return-type shorty selects
// which one is used. Returns a packed (PC-or-deopt, target) TwoWordReturn.
TwoWordReturn Instrumentation::PopInstrumentationStackFrame(Thread* self, uintptr_t* return_pc,
                                                            uint64_t gpr_result,
                                                            uint64_t fpr_result) {
  // Do the pop.
  std::deque<instrumentation::InstrumentationStackFrame>* stack = self->GetInstrumentationStack();
  CHECK_GT(stack->size(), 0U);
  InstrumentationStackFrame instrumentation_frame = stack->front();
  stack->pop_front();

  // Set return PC and check the sanity of the stack.
  *return_pc = instrumentation_frame.return_pc_;
  CheckStackDepth(self, instrumentation_frame, 0);
  self->VerifyStack();

  // Select the return value from the correct register bank based on the
  // method's return-type shorty character.
  ArtMethod* method = instrumentation_frame.method_;
  uint32_t length;
  const size_t pointer_size = Runtime::Current()->GetClassLinker()->GetImagePointerSize();
  char return_shorty = method->GetInterfaceMethodIfProxy(pointer_size)->GetShorty(&length)[0];
  JValue return_value;
  if (return_shorty == 'V') {
    return_value.SetJ(0);
  } else if (return_shorty == 'F' || return_shorty == 'D') {
    return_value.SetJ(fpr_result);
  } else {
    return_value.SetJ(gpr_result);
  }
  // TODO: improve the dex pc information here, requires knowledge of current PC as opposed to
  // return_pc.
  uint32_t dex_pc = DexFile::kDexNoIndex;
  mirror::Object* this_object = instrumentation_frame.this_object_;
  if (!instrumentation_frame.interpreter_entry_) {
    MethodExitEvent(self, this_object, instrumentation_frame.method_, dex_pc, return_value);
  }

  // Deoptimize if the caller needs to continue execution in the interpreter. Do nothing if we get
  // back to an upcall.
  NthCallerVisitor visitor(self, 1, true);
  visitor.WalkStack(true);
  bool deoptimize = (visitor.caller != nullptr) &&
                    (interpreter_stubs_installed_ || IsDeoptimized(visitor.caller) ||
                        Dbg::IsForcedInterpreterNeededForUpcall(self, visitor.caller));
  if (deoptimize) {
    if (kVerboseInstrumentation) {
      LOG(INFO) << StringPrintf("Deoptimizing %s by returning from %s with result %#" PRIx64 " in ",
                                PrettyMethod(visitor.caller).c_str(),
                                PrettyMethod(method).c_str(),
                                return_value.GetJ()) << *self;
    }
    // Stash the return value for the interpreter, then resume at the
    // deoptimization entrypoint instead of the original caller.
    self->PushDeoptimizationContext(return_value,
                                    return_shorty == 'L',
                                    false /* from_code */,
                                    nullptr /* no pending exception */);
    return GetTwoWordSuccessValue(*return_pc,
                                  reinterpret_cast<uintptr_t>(GetQuickDeoptimizationEntryPoint()));
  } else {
    if (kVerboseInstrumentation) {
      LOG(INFO) << "Returning from " << PrettyMethod(method)
                << " to PC " << reinterpret_cast<void*>(*return_pc);
    }
    return GetTwoWordSuccessValue(0, *return_pc);
  }
}
1112
// Pops the top instrumentation frame while the stack is being unwound —
// either for deoptimization or because an exception is propagating past the
// method. Only the unwind (exception) case notifies listeners; the
// deoptimization case merely discards the frame.
void Instrumentation::PopMethodForUnwind(Thread* self, bool is_deoptimization) const {
  // Do the pop.
  std::deque<instrumentation::InstrumentationStackFrame>* stack = self->GetInstrumentationStack();
  CHECK_GT(stack->size(), 0U);
  InstrumentationStackFrame instrumentation_frame = stack->front();
  // TODO: bring back CheckStackDepth(self, instrumentation_frame, 2);
  stack->pop_front();

  ArtMethod* method = instrumentation_frame.method_;
  if (is_deoptimization) {
    if (kVerboseInstrumentation) {
      LOG(INFO) << "Popping for deoptimization " << PrettyMethod(method);
    }
  } else {
    if (kVerboseInstrumentation) {
      LOG(INFO) << "Popping for unwind " << PrettyMethod(method);
    }

    // Notify listeners of method unwind.
    // TODO: improve the dex pc information here, requires knowledge of current PC as opposed to
    // return_pc.
    uint32_t dex_pc = DexFile::kDexNoIndex;
    MethodUnwindEvent(self, instrumentation_frame.this_object_, method, dex_pc);
  }
}
1138
Dump() const1139 std::string InstrumentationStackFrame::Dump() const {
1140 std::ostringstream os;
1141 os << "Frame " << frame_id_ << " " << PrettyMethod(method_) << ":"
1142 << reinterpret_cast<void*>(return_pc_) << " this=" << reinterpret_cast<void*>(this_object_);
1143 return os.str();
1144 }
1145
1146 } // namespace instrumentation
1147 } // namespace art
1148