/*
 * Copyright (C) 2011 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "instrumentation.h"

#include <functional>
#include <optional>
#include <sstream>

#include <android-base/logging.h>

#include "arch/context.h"
#include "art_field-inl.h"
#include "art_method-inl.h"
#include "base/atomic.h"
#include "base/callee_save_type.h"
#include "class_linker.h"
#include "debugger.h"
#include "dex/dex_file-inl.h"
#include "dex/dex_file_types.h"
#include "dex/dex_instruction-inl.h"
#include "entrypoints/quick/quick_alloc_entrypoints.h"
#include "entrypoints/quick/quick_entrypoints.h"
#include "entrypoints/runtime_asm_entrypoints.h"
#include "gc_root-inl.h"
#include "interpreter/interpreter.h"
#include "interpreter/interpreter_common.h"
#include "jit/jit.h"
#include "jit/jit_code_cache.h"
#include "jvalue-inl.h"
#include "jvalue.h"
#include "mirror/class-inl.h"
#include "mirror/dex_cache.h"
#include "mirror/object-inl.h"
#include "mirror/object_array-inl.h"
#include "nth_caller_visitor.h"
#include "oat_quick_method_header.h"
#include "runtime-inl.h"
#include "thread.h"
#include "thread_list.h"

namespace art {
namespace instrumentation {

constexpr bool kVerboseInstrumentation = false;

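// Reference-return specialization: boxes the returned reference in a JValue, forwards to the
// JValue-based MethodExited() and checks that listeners did not change the returned reference.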
void InstrumentationListener::MethodExited(
    Thread* thread,
    Handle<mirror::Object> this_object,
    ArtMethod* method,
    uint32_t dex_pc,
    OptionalFrame frame,
    MutableHandle<mirror::Object>& return_value) {
  DCHECK_EQ(method->GetInterfaceMethodIfProxy(kRuntimePointerSize)->GetReturnTypePrimitive(),
            Primitive::kPrimNot);
  const void* original_ret = return_value.Get();
  JValue v;
  v.SetL(return_value.Get());
  MethodExited(thread, this_object, method, dex_pc, frame, v);
  DCHECK(original_ret == v.GetL()) << "Return value changed";
}

void InstrumentationListener::FieldWritten(Thread* thread,
                                           Handle<mirror::Object> this_object,
                                           ArtMethod* method,
                                           uint32_t dex_pc,
                                           ArtField* field,
                                           Handle<mirror::Object> field_value) {
  DCHECK(!field->IsPrimitiveType());
  JValue v;
  v.SetL(field_value.Get());
  FieldWritten(thread, this_object, method, dex_pc, field, v);
}

// Instrumentation works on non-inlined frames by updating returned PCs
// of compiled frames.
static constexpr StackVisitor::StackWalkKind kInstrumentationStackWalk =
    StackVisitor::StackWalkKind::kSkipInlinedFrames;

class InstallStubsClassVisitor : public ClassVisitor {
 public:
  explicit InstallStubsClassVisitor(Instrumentation* instrumentation)
      : instrumentation_(instrumentation) {}

  bool operator()(ObjPtr<mirror::Class> klass) override REQUIRES(Locks::mutator_lock_) {
    instrumentation_->InstallStubsForClass(klass.Ptr());
    return true;  // we visit all classes.
  }

 private:
  Instrumentation* const instrumentation_;
};

InstrumentationStackPopper::InstrumentationStackPopper(Thread* self)
    : self_(self),
      instrumentation_(Runtime::Current()->GetInstrumentation()),
      pop_until_(0u) {}

InstrumentationStackPopper::~InstrumentationStackPopper() {
  std::map<uintptr_t, instrumentation::InstrumentationStackFrame>* stack =
      self_->GetInstrumentationStack();
  for (auto i = stack->begin(); i != stack->end() && i->first <= pop_until_;) {
    i = stack->erase(i);
  }
}

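// Pops instrumentation frames until `stack_pointer` is reached, reporting a method-unwind event
// for each Java frame. Returns false if a listener threw a new exception; in that case only the
// frames up to (and including) the offending one are marked for removal.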
bool InstrumentationStackPopper::PopFramesTo(uintptr_t stack_pointer,
                                             MutableHandle<mirror::Throwable>& exception) {
  std::map<uintptr_t, instrumentation::InstrumentationStackFrame>* stack =
      self_->GetInstrumentationStack();
  DCHECK(!self_->IsExceptionPending());
  if (!instrumentation_->HasMethodUnwindListeners()) {
    pop_until_ = stack_pointer;
    return true;
  }
  if (kVerboseInstrumentation) {
    LOG(INFO) << "Popping frames for exception " << exception->Dump();
  }
  // The instrumentation events expect the exception to be set.
  self_->SetException(exception.Get());
  bool new_exception_thrown = false;
  auto i = stack->upper_bound(pop_until_);

  // Now pop all frames until reaching stack_pointer, or a new exception is
  // thrown. Note that `stack_pointer` doesn't need to be a return PC address
  // (in fact the exception handling code passes the start of the frame where
  // the catch handler is).
  for (; i != stack->end() && i->first <= stack_pointer; i++) {
    const InstrumentationStackFrame& frame = i->second;
    ArtMethod* method = frame.method_;
    // Notify listeners of method unwind.
    // TODO: improve the dex_pc information here.
    uint32_t dex_pc = dex::kDexNoIndex;
    if (kVerboseInstrumentation) {
      LOG(INFO) << "Popping for unwind " << method->PrettyMethod();
    }
    if (!method->IsRuntimeMethod() && !frame.interpreter_entry_) {
      instrumentation_->MethodUnwindEvent(self_, frame.this_object_, method, dex_pc);
      new_exception_thrown = self_->GetException() != exception.Get();
      if (new_exception_thrown) {
        pop_until_ = i->first;
        break;
      }
    }
  }
  if (!new_exception_thrown) {
    pop_until_ = stack_pointer;
  }
  exception.Assign(self_->GetException());
  self_->ClearException();
  if (kVerboseInstrumentation && new_exception_thrown) {
    LOG(INFO) << "Did partial pop of frames due to new exception";
  }
  return !new_exception_thrown;
}

Instrumentation::Instrumentation()
    : current_force_deopt_id_(0),
      instrumentation_stubs_installed_(false),
      entry_exit_stubs_installed_(false),
      interpreter_stubs_installed_(false),
      interpret_only_(false),
      forced_interpret_only_(false),
      have_method_entry_listeners_(false),
      have_method_exit_listeners_(false),
      have_method_unwind_listeners_(false),
      have_dex_pc_listeners_(false),
      have_field_read_listeners_(false),
      have_field_write_listeners_(false),
      have_exception_thrown_listeners_(false),
      have_watched_frame_pop_listeners_(false),
      have_branch_listeners_(false),
      have_exception_handled_listeners_(false),
      deoptimized_methods_lock_(new ReaderWriterMutex("deoptimized methods lock",
                                                      kGenericBottomLock)),
      deoptimization_enabled_(false),
      interpreter_handler_table_(kMainHandlerTable),
      quick_alloc_entry_points_instrumentation_counter_(0),
      alloc_entrypoints_instrumented_(false),
      can_use_instrumentation_trampolines_(true) {
}

void Instrumentation::InstallStubsForClass(ObjPtr<mirror::Class> klass) {
  if (!klass->IsResolved()) {
    // We need the class to be resolved to install/uninstall stubs. Otherwise its methods
    // could not be initialized or linked with regard to class inheritance.
  } else if (klass->IsErroneousResolved()) {
    // We can't execute code in an erroneous class: do nothing.
  } else {
    for (ArtMethod& method : klass->GetMethods(kRuntimePointerSize)) {
      InstallStubsForMethod(&method);
    }
  }
}

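// Updates the quick-code entrypoint of `method`. In debug builds, checks that JIT code on
// arm32 keeps the thumb bit set in the entrypoint address.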
static void UpdateEntrypoints(ArtMethod* method, const void* quick_code)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  if (kIsDebugBuild) {
    jit::Jit* jit = Runtime::Current()->GetJit();
    if (jit != nullptr && jit->GetCodeCache()->ContainsPc(quick_code)) {
      // Ensure we always have the thumb entrypoint for JIT on arm32.
      if (kRuntimeISA == InstructionSet::kArm) {
        CHECK_EQ(reinterpret_cast<uintptr_t>(quick_code) & 1, 1u);
      }
    }
  }
  method->SetEntryPointFromQuickCompiledCode(quick_code);
}

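// Returns true when `method` must run the debug version of its code: a runtime callback
// requests it or the runtime is Java-debuggable, and the method is neither native nor a proxy.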
bool Instrumentation::NeedDebugVersionFor(ArtMethod* method) const
    REQUIRES_SHARED(Locks::mutator_lock_) {
  art::Runtime* runtime = Runtime::Current();
  // If anything says we need the debug version or we are debuggable we will need the debug
  // version of the method.
  return (runtime->GetRuntimeCallbacks()->MethodNeedsDebugVersion(method) ||
          runtime->IsJavaDebuggable()) &&
         !method->IsNative() &&
         !method->IsProxyMethod();
}

void Instrumentation::InstallStubsForMethod(ArtMethod* method) {
  if (!method->IsInvokable() || method->IsProxyMethod()) {
    // Do not change stubs for these methods.
    return;
  }
  // Don't stub Proxy.<init>. Note that the Proxy class itself is not a proxy class.
  // TODO: We should remove the need for this since it means we cannot always correctly detect
  // calls to Proxy.<init>.
  // Annoyingly this can be called before we have actually initialized WellKnownClasses, so we
  // also need to check this based on the declaring-class descriptor. The check is valid because
  // Proxy only has a single constructor.
  ArtMethod* well_known_proxy_init = jni::DecodeArtMethod(
      WellKnownClasses::java_lang_reflect_Proxy_init);
  if ((LIKELY(well_known_proxy_init != nullptr) && UNLIKELY(method == well_known_proxy_init)) ||
      UNLIKELY(method->IsConstructor() &&
               method->GetDeclaringClass()->DescriptorEquals("Ljava/lang/reflect/Proxy;"))) {
    return;
  }
  const void* new_quick_code;
  bool uninstall = !entry_exit_stubs_installed_ && !interpreter_stubs_installed_;
  Runtime* const runtime = Runtime::Current();
  ClassLinker* const class_linker = runtime->GetClassLinker();
  bool is_class_initialized = method->GetDeclaringClass()->IsInitialized();
  if (uninstall) {
    if ((forced_interpret_only_ || IsDeoptimized(method)) && !method->IsNative()) {
      new_quick_code = GetQuickToInterpreterBridge();
    } else if (is_class_initialized || !method->IsStatic() || method->IsConstructor()) {
      new_quick_code = GetCodeForInvoke(method);
    } else {
      new_quick_code = GetQuickResolutionStub();
    }
  } else {  // !uninstall
    if ((interpreter_stubs_installed_ || forced_interpret_only_ || IsDeoptimized(method)) &&
        !method->IsNative()) {
      new_quick_code = GetQuickToInterpreterBridge();
    } else {
      // Do not overwrite resolution trampoline. When the trampoline initializes the method's
      // class, all its static methods code will be set to the instrumentation entry point.
      // For more details, see ClassLinker::FixupStaticTrampolines.
      if (is_class_initialized || !method->IsStatic() || method->IsConstructor()) {
        if (entry_exit_stubs_installed_) {
          // This needs to be checked first since the instrumentation entrypoint will be able to
          // find the actual JIT compiled code that corresponds to this method.
          new_quick_code = GetQuickInstrumentationEntryPoint();
        } else if (NeedDebugVersionFor(method)) {
          // It would be great to search the JIT for its implementation here but we cannot due to
          // the locks we hold. Instead just set to the interpreter bridge and that code will
          // search the JIT when it gets called and replace the entrypoint then.
          new_quick_code = GetQuickToInterpreterBridge();
        } else {
          new_quick_code = class_linker->GetQuickOatCodeFor(method);
        }
      } else {
        new_quick_code = GetQuickResolutionStub();
      }
    }
  }
  UpdateEntrypoints(method, new_quick_code);
}

// Places the instrumentation exit pc as the return PC for every quick frame. This also allows
// deoptimization of quick frames to interpreter frames.
// Since we may already have done this previously, we need to push a new instrumentation frame
// before existing instrumentation frames.
void InstrumentationInstallStack(Thread* thread, void* arg)
    REQUIRES(Locks::mutator_lock_) {
  Locks::mutator_lock_->AssertExclusiveHeld(Thread::Current());
  struct InstallStackVisitor final : public StackVisitor {
    InstallStackVisitor(Thread* thread_in,
                        Context* context,
                        uintptr_t instrumentation_exit_pc,
                        uint64_t force_deopt_id)
        : StackVisitor(thread_in, context, kInstrumentationStackWalk),
          instrumentation_stack_(thread_in->GetInstrumentationStack()),
          instrumentation_exit_pc_(instrumentation_exit_pc),
          reached_existing_instrumentation_frames_(false),
          last_return_pc_(0),
          force_deopt_id_(force_deopt_id) {}

    bool VisitFrame() override REQUIRES_SHARED(Locks::mutator_lock_) {
      ArtMethod* m = GetMethod();
      if (m == nullptr) {
        if (kVerboseInstrumentation) {
          LOG(INFO) << "  Skipping upcall. Frame " << GetFrameId();
        }
        last_return_pc_ = 0;
        return true;  // Ignore upcalls.
      }
      if (GetCurrentQuickFrame() == nullptr) {
        bool interpreter_frame = true;
        InstrumentationStackFrame instrumentation_frame(GetThisObject().Ptr(),
                                                        m,
                                                        /*return_pc=*/ 0,
                                                        GetFrameId(),
                                                        interpreter_frame,
                                                        force_deopt_id_);
        if (kVerboseInstrumentation) {
          LOG(INFO) << "Pushing shadow frame " << instrumentation_frame.Dump();
        }
        shadow_stack_.push_back(instrumentation_frame);
        return true;  // Continue.
      }
      uintptr_t return_pc = GetReturnPc();
      if (kVerboseInstrumentation) {
        LOG(INFO) << "  Installing exit stub in " << DescribeLocation();
      }
      if (return_pc == instrumentation_exit_pc_) {
        auto it = instrumentation_stack_->find(GetReturnPcAddr());
        CHECK(it != instrumentation_stack_->end());
        const InstrumentationStackFrame& frame = it->second;
        if (m->IsRuntimeMethod()) {
          if (frame.interpreter_entry_) {
            // This instrumentation frame is for an interpreter bridge and is
            // pushed when executing the instrumented interpreter bridge. So method
            // enter event must have been reported. However we need to push a DEX pc
            // into the dex_pcs_ list to match size of instrumentation stack.
            uint32_t dex_pc = dex::kDexNoIndex;
            dex_pcs_.push_back(dex_pc);
            last_return_pc_ = frame.return_pc_;
            return true;
          }
        }

        // We've reached a frame which has already been installed with instrumentation exit stub.
        // We should have already installed instrumentation or be running the interpreter on
        // previous frames.
        reached_existing_instrumentation_frames_ = true;

        CHECK_EQ(m->GetNonObsoleteMethod(), frame.method_->GetNonObsoleteMethod())
            << "Expected " << ArtMethod::PrettyMethod(m)
            << ", Found " << ArtMethod::PrettyMethod(frame.method_);
        return_pc = frame.return_pc_;
        if (kVerboseInstrumentation) {
          LOG(INFO) << "Ignoring already instrumented " << frame.Dump();
        }
      } else {
        CHECK_NE(return_pc, 0U);
        if (UNLIKELY(reached_existing_instrumentation_frames_ && !m->IsRuntimeMethod())) {
          // We already saw an existing instrumentation frame so this should be a runtime-method
          // inserted by the interpreter or runtime.
          std::string thread_name;
          GetThread()->GetThreadName(thread_name);
          uint32_t dex_pc = dex::kDexNoIndex;
          if (last_return_pc_ != 0 && GetCurrentOatQuickMethodHeader() != nullptr) {
            dex_pc = GetCurrentOatQuickMethodHeader()->ToDexPc(
                GetCurrentQuickFrame(), last_return_pc_);
          }
          LOG(FATAL) << "While walking " << thread_name << " found unexpected non-runtime method"
                     << " without instrumentation exit return or interpreter frame."
                     << " method is " << GetMethod()->PrettyMethod()
                     << " return_pc is " << std::hex << return_pc
                     << " dex pc: " << dex_pc;
          UNREACHABLE();
        }
        InstrumentationStackFrame instrumentation_frame(
            m->IsRuntimeMethod() ? nullptr : GetThisObject().Ptr(),
            m,
            return_pc,
            GetFrameId(),  // A runtime method still gets a frame id.
            false,
            force_deopt_id_);
        if (kVerboseInstrumentation) {
          LOG(INFO) << "Pushing frame " << instrumentation_frame.Dump();
        }

        instrumentation_stack_->insert({GetReturnPcAddr(), instrumentation_frame});
        SetReturnPc(instrumentation_exit_pc_);
      }
      uint32_t dex_pc = dex::kDexNoIndex;
      if (last_return_pc_ != 0 && GetCurrentOatQuickMethodHeader() != nullptr) {
        dex_pc = GetCurrentOatQuickMethodHeader()->ToDexPc(GetCurrentQuickFrame(),
                                                           last_return_pc_);
      }
      dex_pcs_.push_back(dex_pc);
      last_return_pc_ = return_pc;
      return true;  // Continue.
    }
    std::map<uintptr_t, InstrumentationStackFrame>* const instrumentation_stack_;
    std::vector<InstrumentationStackFrame> shadow_stack_;
    std::vector<uint32_t> dex_pcs_;
    const uintptr_t instrumentation_exit_pc_;
    bool reached_existing_instrumentation_frames_;
    uintptr_t last_return_pc_;
    uint64_t force_deopt_id_;
  };
  if (kVerboseInstrumentation) {
    std::string thread_name;
    thread->GetThreadName(thread_name);
    LOG(INFO) << "Installing exit stubs in " << thread_name;
  }

  Instrumentation* instrumentation = reinterpret_cast<Instrumentation*>(arg);
  std::unique_ptr<Context> context(Context::Create());
  uintptr_t instrumentation_exit_pc = reinterpret_cast<uintptr_t>(GetQuickInstrumentationExitPc());
  InstallStackVisitor visitor(
      thread, context.get(), instrumentation_exit_pc, instrumentation->current_force_deopt_id_);
  visitor.WalkStack(true);
  CHECK_EQ(visitor.dex_pcs_.size(), thread->GetInstrumentationStack()->size());

  if (instrumentation->ShouldNotifyMethodEnterExitEvents()) {
    // Create method enter events for all methods currently on the thread's stack. We only do this
    // if no debugger is attached to prevent from posting events twice.
    // TODO: This is the only place we make use of frame_id_. We should create a
    // std::vector instead and populate it as we walk the stack.
    auto ssi = visitor.shadow_stack_.rbegin();
    for (auto isi = thread->GetInstrumentationStack()->rbegin(),
        end = thread->GetInstrumentationStack()->rend(); isi != end; ++isi) {
      while (ssi != visitor.shadow_stack_.rend() && (*ssi).frame_id_ < isi->second.frame_id_) {
        instrumentation->MethodEnterEvent(thread, (*ssi).this_object_, (*ssi).method_, 0);
        ++ssi;
      }
      uint32_t dex_pc = visitor.dex_pcs_.back();
      visitor.dex_pcs_.pop_back();
      if (!isi->second.interpreter_entry_ && !isi->second.method_->IsRuntimeMethod()) {
        instrumentation->MethodEnterEvent(
            thread, isi->second.this_object_, isi->second.method_, dex_pc);
      }
    }
  }
  thread->VerifyStack();
}

void Instrumentation::InstrumentThreadStack(Thread* thread) {
  instrumentation_stubs_installed_ = true;
  InstrumentationInstallStack(thread, this);
}

// Removes the instrumentation exit pc as the return PC for every quick frame.
static void InstrumentationRestoreStack(Thread* thread, void* arg)
    REQUIRES(Locks::mutator_lock_) {
  Locks::mutator_lock_->AssertExclusiveHeld(Thread::Current());

  struct RestoreStackVisitor final : public StackVisitor {
    RestoreStackVisitor(Thread* thread_in, uintptr_t instrumentation_exit_pc,
                        Instrumentation* instrumentation)
        : StackVisitor(thread_in, nullptr, kInstrumentationStackWalk),
          thread_(thread_in),
          instrumentation_exit_pc_(instrumentation_exit_pc),
          instrumentation_(instrumentation),
          instrumentation_stack_(thread_in->GetInstrumentationStack()),
          frames_removed_(0) {}

    bool VisitFrame() override REQUIRES_SHARED(Locks::mutator_lock_) {
      if (instrumentation_stack_->size() == 0) {
        return false;  // Stop.
      }
      ArtMethod* m = GetMethod();
      if (GetCurrentQuickFrame() == nullptr) {
        if (kVerboseInstrumentation) {
          LOG(INFO) << "  Ignoring a shadow frame. Frame " << GetFrameId()
                    << " Method=" << ArtMethod::PrettyMethod(m);
        }
        return true;  // Ignore shadow frames.
      }
      if (m == nullptr) {
        if (kVerboseInstrumentation) {
          LOG(INFO) << "  Skipping upcall. Frame " << GetFrameId();
        }
        return true;  // Ignore upcalls.
      }
      auto it = instrumentation_stack_->find(GetReturnPcAddr());
      if (it != instrumentation_stack_->end()) {
        const InstrumentationStackFrame& instrumentation_frame = it->second;
        if (kVerboseInstrumentation) {
          LOG(INFO) << "  Removing exit stub in " << DescribeLocation();
        }
        if (instrumentation_frame.interpreter_entry_) {
          CHECK(m == Runtime::Current()->GetCalleeSaveMethod(CalleeSaveType::kSaveRefsAndArgs));
        } else {
          CHECK_EQ(m->GetNonObsoleteMethod(),
                   instrumentation_frame.method_->GetNonObsoleteMethod())
              << ArtMethod::PrettyMethod(m)
              << " and "
              << instrumentation_frame.method_->GetNonObsoleteMethod()->PrettyMethod();
        }
        SetReturnPc(instrumentation_frame.return_pc_);
        if (instrumentation_->ShouldNotifyMethodEnterExitEvents() &&
            !m->IsRuntimeMethod()) {
          // Create the method exit events. As the methods didn't really exit, the result is 0.
          // We only do this if no debugger is attached to prevent from posting events twice.
          JValue val;
          instrumentation_->MethodExitEvent(thread_, instrumentation_frame.this_object_, m,
                                            GetDexPc(), OptionalFrame{}, val);
        }
        frames_removed_++;
      } else {
        if (kVerboseInstrumentation) {
          LOG(INFO) << "  No exit stub in " << DescribeLocation();
        }
      }
      return true;  // Continue.
    }
    Thread* const thread_;
    const uintptr_t instrumentation_exit_pc_;
    Instrumentation* const instrumentation_;
    std::map<uintptr_t, instrumentation::InstrumentationStackFrame>* const instrumentation_stack_;
    size_t frames_removed_;
  };
  if (kVerboseInstrumentation) {
    std::string thread_name;
    thread->GetThreadName(thread_name);
    LOG(INFO) << "Removing exit stubs in " << thread_name;
  }
  std::map<uintptr_t, instrumentation::InstrumentationStackFrame>* stack =
      thread->GetInstrumentationStack();
  if (stack->size() > 0) {
    Instrumentation* instrumentation = reinterpret_cast<Instrumentation*>(arg);
    uintptr_t instrumentation_exit_pc =
        reinterpret_cast<uintptr_t>(GetQuickInstrumentationExitPc());
    RestoreStackVisitor visitor(thread, instrumentation_exit_pc, instrumentation);
    visitor.WalkStack(true);
    CHECK_EQ(visitor.frames_removed_, stack->size());
    stack->clear();
  }
}

void Instrumentation::DeoptimizeAllThreadFrames() {
  Thread* self = Thread::Current();
  MutexLock mu(self, *Locks::thread_list_lock_);
  ThreadList* tl = Runtime::Current()->GetThreadList();
  tl->ForEach([&](Thread* t) {
    Locks::mutator_lock_->AssertExclusiveHeld(self);
    InstrumentThreadStack(t);
  });
  current_force_deopt_id_++;
}

static bool HasEvent(Instrumentation::InstrumentationEvent expected, uint32_t events) {
  return (events & expected) != 0;
}

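// Adds `listener` to `list` when `event` is requested in `events`. A null slot is reused when
// available so that the list never shrinks while other threads may be iterating over it.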
static void PotentiallyAddListenerTo(Instrumentation::InstrumentationEvent event,
                                     uint32_t events,
                                     std::list<InstrumentationListener*>& list,
                                     InstrumentationListener* listener,
                                     bool* has_listener)
    REQUIRES(Locks::mutator_lock_, !Locks::thread_list_lock_, !Locks::classlinker_classes_lock_) {
  Locks::mutator_lock_->AssertExclusiveHeld(Thread::Current());
  if (!HasEvent(event, events)) {
    return;
  }
  // If there is a free slot in the list, we insert the listener in that slot.
  // Otherwise we add it to the end of the list.
  auto it = std::find(list.begin(), list.end(), nullptr);
  if (it != list.end()) {
    *it = listener;
  } else {
    list.push_back(listener);
  }
  Runtime::DoAndMaybeSwitchInterpreter([=](){ *has_listener = true; });
}

void Instrumentation::AddListener(InstrumentationListener* listener, uint32_t events) {
  Locks::mutator_lock_->AssertExclusiveHeld(Thread::Current());
  PotentiallyAddListenerTo(kMethodEntered,
                           events,
                           method_entry_listeners_,
                           listener,
                           &have_method_entry_listeners_);
  PotentiallyAddListenerTo(kMethodExited,
                           events,
                           method_exit_listeners_,
                           listener,
                           &have_method_exit_listeners_);
  PotentiallyAddListenerTo(kMethodUnwind,
                           events,
                           method_unwind_listeners_,
                           listener,
                           &have_method_unwind_listeners_);
  PotentiallyAddListenerTo(kBranch,
                           events,
                           branch_listeners_,
                           listener,
                           &have_branch_listeners_);
  PotentiallyAddListenerTo(kDexPcMoved,
                           events,
                           dex_pc_listeners_,
                           listener,
                           &have_dex_pc_listeners_);
  PotentiallyAddListenerTo(kFieldRead,
                           events,
                           field_read_listeners_,
                           listener,
                           &have_field_read_listeners_);
  PotentiallyAddListenerTo(kFieldWritten,
                           events,
                           field_write_listeners_,
                           listener,
                           &have_field_write_listeners_);
  PotentiallyAddListenerTo(kExceptionThrown,
                           events,
                           exception_thrown_listeners_,
                           listener,
                           &have_exception_thrown_listeners_);
  PotentiallyAddListenerTo(kWatchedFramePop,
                           events,
                           watched_frame_pop_listeners_,
                           listener,
                           &have_watched_frame_pop_listeners_);
  PotentiallyAddListenerTo(kExceptionHandled,
                           events,
                           exception_handled_listeners_,
                           listener,
                           &have_exception_handled_listeners_);
  UpdateInterpreterHandlerTable();
}

static void PotentiallyRemoveListenerFrom(Instrumentation::InstrumentationEvent event,
                                          uint32_t events,
                                          std::list<InstrumentationListener*>& list,
                                          InstrumentationListener* listener,
                                          bool* has_listener)
    REQUIRES(Locks::mutator_lock_, !Locks::thread_list_lock_, !Locks::classlinker_classes_lock_) {
  Locks::mutator_lock_->AssertExclusiveHeld(Thread::Current());
  if (!HasEvent(event, events)) {
    return;
  }
  auto it = std::find(list.begin(), list.end(), listener);
  if (it != list.end()) {
    // Just update the entry, do not remove from the list. Removing entries in the list
    // is unsafe when mutators are iterating over it.
    *it = nullptr;
  }

  // Check if the list contains any non-null listener, and update 'has_listener'.
  for (InstrumentationListener* l : list) {
    if (l != nullptr) {
      Runtime::DoAndMaybeSwitchInterpreter([=](){ *has_listener = true; });
      return;
    }
  }
  Runtime::DoAndMaybeSwitchInterpreter([=](){ *has_listener = false; });
}

void Instrumentation::RemoveListener(InstrumentationListener* listener, uint32_t events) {
  Locks::mutator_lock_->AssertExclusiveHeld(Thread::Current());
  PotentiallyRemoveListenerFrom(kMethodEntered,
                                events,
                                method_entry_listeners_,
                                listener,
                                &have_method_entry_listeners_);
  PotentiallyRemoveListenerFrom(kMethodExited,
                                events,
                                method_exit_listeners_,
                                listener,
                                &have_method_exit_listeners_);
  PotentiallyRemoveListenerFrom(kMethodUnwind,
                                events,
                                method_unwind_listeners_,
                                listener,
                                &have_method_unwind_listeners_);
  PotentiallyRemoveListenerFrom(kBranch,
                                events,
                                branch_listeners_,
                                listener,
                                &have_branch_listeners_);
  PotentiallyRemoveListenerFrom(kDexPcMoved,
                                events,
                                dex_pc_listeners_,
                                listener,
                                &have_dex_pc_listeners_);
  PotentiallyRemoveListenerFrom(kFieldRead,
                                events,
                                field_read_listeners_,
                                listener,
                                &have_field_read_listeners_);
  PotentiallyRemoveListenerFrom(kFieldWritten,
                                events,
                                field_write_listeners_,
                                listener,
                                &have_field_write_listeners_);
  PotentiallyRemoveListenerFrom(kExceptionThrown,
                                events,
                                exception_thrown_listeners_,
                                listener,
                                &have_exception_thrown_listeners_);
  PotentiallyRemoveListenerFrom(kWatchedFramePop,
                                events,
                                watched_frame_pop_listeners_,
                                listener,
                                &have_watched_frame_pop_listeners_);
  PotentiallyRemoveListenerFrom(kExceptionHandled,
                                events,
                                exception_handled_listeners_,
                                listener,
                                &have_exception_handled_listeners_);
  UpdateInterpreterHandlerTable();
}

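// Derives the current instrumentation level from the installed stubs: interpreter stubs win
// over entry/exit stubs, which win over no instrumentation.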
Instrumentation::InstrumentationLevel Instrumentation::GetCurrentInstrumentationLevel() const {
  if (interpreter_stubs_installed_) {
    return InstrumentationLevel::kInstrumentWithInterpreter;
  } else if (entry_exit_stubs_installed_) {
    return InstrumentationLevel::kInstrumentWithInstrumentationStubs;
  } else {
    return InstrumentationLevel::kInstrumentNothing;
  }
}

bool Instrumentation::RequiresInstrumentationInstallation(InstrumentationLevel new_level) const {
  // We need to reinstall instrumentation if we go to a different level.
  return GetCurrentInstrumentationLevel() != new_level;
}

void Instrumentation::UpdateInstrumentationLevels(InstrumentationLevel level) {
  if (level == InstrumentationLevel::kInstrumentWithInterpreter) {
    can_use_instrumentation_trampolines_ = false;
  }
  if (UNLIKELY(!can_use_instrumentation_trampolines_)) {
    for (auto& p : requested_instrumentation_levels_) {
      if (p.second == InstrumentationLevel::kInstrumentWithInstrumentationStubs) {
        p.second = InstrumentationLevel::kInstrumentWithInterpreter;
      }
    }
  }
}

void Instrumentation::ConfigureStubs(const char* key, InstrumentationLevel desired_level) {
  // Store the instrumentation level for this key or remove it.
  if (desired_level == InstrumentationLevel::kInstrumentNothing) {
    // The client no longer needs instrumentation.
    requested_instrumentation_levels_.erase(key);
  } else {
    // The client needs instrumentation.
    requested_instrumentation_levels_.Overwrite(key, desired_level);
  }

  UpdateInstrumentationLevels(desired_level);
  UpdateStubs();
}

void Instrumentation::EnableSingleThreadDeopt() {
  // Single-thread deopt only uses interpreter.
  can_use_instrumentation_trampolines_ = false;
  UpdateInstrumentationLevels(InstrumentationLevel::kInstrumentWithInterpreter);
  UpdateStubs();
}

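// Recomputes the highest instrumentation level requested by any client and installs or removes
// the corresponding stubs, instrumenting or restoring every thread's stack as needed.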
void Instrumentation::UpdateStubs() {
  // Look for the highest required instrumentation level.
  InstrumentationLevel requested_level = InstrumentationLevel::kInstrumentNothing;
  for (const auto& v : requested_instrumentation_levels_) {
    requested_level = std::max(requested_level, v.second);
  }

  DCHECK(can_use_instrumentation_trampolines_ ||
         requested_level != InstrumentationLevel::kInstrumentWithInstrumentationStubs)
      << "Use trampolines: " << can_use_instrumentation_trampolines_ << " level "
      << requested_level;

  interpret_only_ = (requested_level == InstrumentationLevel::kInstrumentWithInterpreter) ||
                    forced_interpret_only_;

  if (!RequiresInstrumentationInstallation(requested_level)) {
    // We're already set.
    return;
  }
  Thread* const self = Thread::Current();
  Runtime* runtime = Runtime::Current();
  Locks::mutator_lock_->AssertExclusiveHeld(self);
  Locks::thread_list_lock_->AssertNotHeld(self);
  if (requested_level > InstrumentationLevel::kInstrumentNothing) {
    if (requested_level == InstrumentationLevel::kInstrumentWithInterpreter) {
      interpreter_stubs_installed_ = true;
      entry_exit_stubs_installed_ = true;
    } else {
      CHECK_EQ(requested_level, InstrumentationLevel::kInstrumentWithInstrumentationStubs);
      entry_exit_stubs_installed_ = true;
      interpreter_stubs_installed_ = false;
    }
    InstallStubsClassVisitor visitor(this);
    runtime->GetClassLinker()->VisitClasses(&visitor);
    instrumentation_stubs_installed_ = true;
    MutexLock mu(self, *Locks::thread_list_lock_);
    runtime->GetThreadList()->ForEach(InstrumentationInstallStack, this);
  } else {
    interpreter_stubs_installed_ = false;
    entry_exit_stubs_installed_ = false;
    InstallStubsClassVisitor visitor(this);
    runtime->GetClassLinker()->VisitClasses(&visitor);
    // Restore stack only if there is no method currently deoptimized.
    bool empty;
    {
      ReaderMutexLock mu(self, *GetDeoptimizedMethodsLock());
      empty = IsDeoptimizedMethodsEmpty();  // Avoid lock violation.
    }
    if (empty) {
      MutexLock mu(self, *Locks::thread_list_lock_);
      bool no_remaining_deopts = true;
      // Check that there are no other forced deoptimizations. Do it here so we only need to lock
      // thread_list_lock once.
      // The compiler gets confused on the thread annotations, so use
      // NO_THREAD_SAFETY_ANALYSIS. Note that we hold the mutator lock
      // exclusively at this point.
      Locks::mutator_lock_->AssertExclusiveHeld(self);
      runtime->GetThreadList()->ForEach([&](Thread* t) NO_THREAD_SAFETY_ANALYSIS {
        no_remaining_deopts =
            no_remaining_deopts && !t->IsForceInterpreter() &&
            std::all_of(t->GetInstrumentationStack()->cbegin(),
                        t->GetInstrumentationStack()->cend(),
                        [&](const auto& frame) REQUIRES_SHARED(Locks::mutator_lock_) {
                          return frame.second.force_deopt_id_ == current_force_deopt_id_;
                        });
      });
      if (no_remaining_deopts) {
        Runtime::Current()->GetThreadList()->ForEach(InstrumentationRestoreStack, this);
        // Only do this after restoring, as walking the stack when restoring will see
        // the instrumentation exit pc.
        instrumentation_stubs_installed_ = false;
      }
    }
  }
}

static void ResetQuickAllocEntryPointsForThread(Thread* thread, void* arg ATTRIBUTE_UNUSED) {
  thread->ResetQuickAllocEntryPointsForThread();
}

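// Flips the quick allocation entrypoints between instrumented and uninstrumented versions.
// Requires a suspend-all when the runtime is started, since all threads must be updated.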
void Instrumentation::SetEntrypointsInstrumented(bool instrumented) {
  Thread* self = Thread::Current();
  Runtime* runtime = Runtime::Current();
  Locks::mutator_lock_->AssertNotHeld(self);
  Locks::instrument_entrypoints_lock_->AssertHeld(self);
  if (runtime->IsStarted()) {
    ScopedSuspendAll ssa(__FUNCTION__);
    MutexLock mu(self, *Locks::runtime_shutdown_lock_);
    SetQuickAllocEntryPointsInstrumented(instrumented);
    ResetQuickAllocEntryPoints();
    alloc_entrypoints_instrumented_ = instrumented;
  } else {
    MutexLock mu(self, *Locks::runtime_shutdown_lock_);
    SetQuickAllocEntryPointsInstrumented(instrumented);

    // Note: ResetQuickAllocEntryPoints only works when the runtime is started. Manually run the
    //       update for just this thread.
    // Note: self may be null. One of those paths is setting instrumentation in the Heap
    //       constructor for gcstress mode.
    if (self != nullptr) {
      ResetQuickAllocEntryPointsForThread(self, nullptr);
    }

    alloc_entrypoints_instrumented_ = instrumented;
  }
}

void Instrumentation::InstrumentQuickAllocEntryPoints() {
  MutexLock mu(Thread::Current(), *Locks::instrument_entrypoints_lock_);
  InstrumentQuickAllocEntryPointsLocked();
}

void Instrumentation::UninstrumentQuickAllocEntryPoints() {
  MutexLock mu(Thread::Current(), *Locks::instrument_entrypoints_lock_);
  UninstrumentQuickAllocEntryPointsLocked();
}

void Instrumentation::InstrumentQuickAllocEntryPointsLocked() {
  Locks::instrument_entrypoints_lock_->AssertHeld(Thread::Current());
  if (quick_alloc_entry_points_instrumentation_counter_ == 0) {
    SetEntrypointsInstrumented(true);
  }
  ++quick_alloc_entry_points_instrumentation_counter_;
}

void Instrumentation::UninstrumentQuickAllocEntryPointsLocked() {
  Locks::instrument_entrypoints_lock_->AssertHeld(Thread::Current());
  CHECK_GT(quick_alloc_entry_points_instrumentation_counter_, 0U);
  --quick_alloc_entry_points_instrumentation_counter_;
  if (quick_alloc_entry_points_instrumentation_counter_ == 0) {
    SetEntrypointsInstrumented(false);
  }
}

void Instrumentation::ResetQuickAllocEntryPoints() {
  Runtime* runtime = Runtime::Current();
  if (runtime->IsStarted()) {
    MutexLock mu(Thread::Current(), *Locks::thread_list_lock_);
    runtime->GetThreadList()->ForEach(ResetQuickAllocEntryPointsForThread, nullptr);
  }
}

void Instrumentation::UpdateMethodsCodeImpl(ArtMethod* method, const void* quick_code) {
  const void* new_quick_code;
  if (LIKELY(!instrumentation_stubs_installed_)) {
    new_quick_code = quick_code;
  } else {
    if ((interpreter_stubs_installed_ || IsDeoptimized(method)) && !method->IsNative()) {
      new_quick_code = GetQuickToInterpreterBridge();
    } else {
      ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
      if (class_linker->IsQuickResolutionStub(quick_code) ||
          class_linker->IsQuickToInterpreterBridge(quick_code)) {
        new_quick_code = quick_code;
      } else if (entry_exit_stubs_installed_ &&
                 // We need to make sure not to replace anything that InstallStubsForMethod
                 // wouldn't. Specifically we cannot stub out Proxy.<init> since subtypes copy the
                 // implementation directly and this will confuse the instrumentation trampolines.
                 // TODO: We should remove the need for this since it makes it impossible to
                 // profile Proxy.<init> correctly in all cases.
                 method != jni::DecodeArtMethod(WellKnownClasses::java_lang_reflect_Proxy_init)) {
        new_quick_code = GetQuickInstrumentationEntryPoint();
      } else {
        new_quick_code = quick_code;
      }
    }
  }
  UpdateEntrypoints(method, new_quick_code);
}

void Instrumentation::UpdateNativeMethodsCodeToJitCode(ArtMethod* method,
                                                       const void* quick_code) {
  // We don't do any read barrier on `method`'s declaring class in this code, as the JIT might
  // enter here on a soon-to-be deleted ArtMethod. Updating the entrypoint is OK though, as
  // the ArtMethod is still in memory.
  const void* new_quick_code = quick_code;
  if (UNLIKELY(instrumentation_stubs_installed_) && entry_exit_stubs_installed_) {
    new_quick_code = GetQuickInstrumentationEntryPoint();
  }
  UpdateEntrypoints(method, new_quick_code);
}

void Instrumentation::UpdateMethodsCode(ArtMethod* method, const void* quick_code) {
  DCHECK(method->GetDeclaringClass()->IsResolved());
  UpdateMethodsCodeImpl(method, quick_code);
}

void Instrumentation::UpdateMethodsCodeToInterpreterEntryPoint(ArtMethod* method) {
  UpdateMethodsCodeImpl(method, GetQuickToInterpreterBridge());
}

void Instrumentation::UpdateMethodsCodeForJavaDebuggable(ArtMethod* method,
                                                         const void* quick_code) {
  // When the runtime is set to Java debuggable, we may update the entry points of
  // all methods of a class to the interpreter bridge. A method's declaring class
  // might not be in resolved state yet in that case, so we bypass the DCHECK in
  // UpdateMethodsCode.
  UpdateMethodsCodeImpl(method, quick_code);
}

bool Instrumentation::AddDeoptimizedMethod(ArtMethod* method) {
  if (IsDeoptimizedMethod(method)) {
    // Already in the map. Return.
    return false;
  }
  // Not found. Add it.
  deoptimized_methods_.insert(method);
  return true;
}

bool Instrumentation::IsDeoptimizedMethod(ArtMethod* method) {
  return deoptimized_methods_.find(method) != deoptimized_methods_.end();
}

ArtMethod* Instrumentation::BeginDeoptimizedMethod() {
  if (deoptimized_methods_.empty()) {
    // Empty.
    return nullptr;
  }
  return *deoptimized_methods_.begin();
}

bool Instrumentation::RemoveDeoptimizedMethod(ArtMethod* method) {
  auto it = deoptimized_methods_.find(method);
  if (it == deoptimized_methods_.end()) {
    return false;
  }
  deoptimized_methods_.erase(it);
  return true;
}

bool Instrumentation::IsDeoptimizedMethodsEmpty() const {
  return deoptimized_methods_.empty();
}

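// Marks `method` as deoptimized so it runs under the interpreter. Unless interpreter stubs are
// already installed, this redirects the method through the instrumentation entrypoint and
// installs exit stubs on every thread's stack.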
void Instrumentation::Deoptimize(ArtMethod* method) {
  CHECK(!method->IsNative());
  CHECK(!method->IsProxyMethod());
  CHECK(method->IsInvokable());

  Thread* self = Thread::Current();
  {
    WriterMutexLock mu(self, *GetDeoptimizedMethodsLock());
    bool has_not_been_deoptimized = AddDeoptimizedMethod(method);
    CHECK(has_not_been_deoptimized) << "Method " << ArtMethod::PrettyMethod(method)
                                    << " is already deoptimized";
  }
  if (!interpreter_stubs_installed_) {
    UpdateEntrypoints(method, GetQuickInstrumentationEntryPoint());

    // Install instrumentation exit stub and instrumentation frames. We may already have installed
    // these previously so it will only cover the newly created frames.
    instrumentation_stubs_installed_ = true;
    MutexLock mu(self, *Locks::thread_list_lock_);
    Runtime::Current()->GetThreadList()->ForEach(InstrumentationInstallStack, this);
  }
}

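// Reverts Deoptimize(): restores the method's code (or the resolution stub for uninitialized
// static methods) and, once no deoptimized method remains, restores every thread's stack.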
void Instrumentation::Undeoptimize(ArtMethod* method) {
  CHECK(!method->IsNative());
  CHECK(!method->IsProxyMethod());
  CHECK(method->IsInvokable());

  Thread* self = Thread::Current();
  bool empty;
  {
    WriterMutexLock mu(self, *GetDeoptimizedMethodsLock());
    bool found_and_erased = RemoveDeoptimizedMethod(method);
    CHECK(found_and_erased) << "Method " << ArtMethod::PrettyMethod(method)
                            << " is not deoptimized";
    empty = IsDeoptimizedMethodsEmpty();
  }

  // Restore code and possibly stack only if we did not deoptimize everything.
  if (!interpreter_stubs_installed_) {
    // Restore its code or resolution trampoline.
    ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
    if (method->IsStatic() && !method->IsConstructor() &&
        !method->GetDeclaringClass()->IsInitialized()) {
      UpdateEntrypoints(method, GetQuickResolutionStub());
    } else {
      const void* quick_code = NeedDebugVersionFor(method)
          ? GetQuickToInterpreterBridge()
          : class_linker->GetQuickOatCodeFor(method);
      UpdateEntrypoints(method, quick_code);
    }

    // If there is no deoptimized method left, we can restore the stack of each thread.
    if (empty && !entry_exit_stubs_installed_) {
      MutexLock mu(self, *Locks::thread_list_lock_);
      Runtime::Current()->GetThreadList()->ForEach(InstrumentationRestoreStack, this);
      instrumentation_stubs_installed_ = false;
    }
  }
}

bool Instrumentation::IsDeoptimized(ArtMethod* method) {
  DCHECK(method != nullptr);
  ReaderMutexLock mu(Thread::Current(), *GetDeoptimizedMethodsLock());
  return IsDeoptimizedMethod(method);
}

void Instrumentation::EnableDeoptimization() {
  ReaderMutexLock mu(Thread::Current(), *GetDeoptimizedMethodsLock());
  CHECK(IsDeoptimizedMethodsEmpty());
  CHECK_EQ(deoptimization_enabled_, false);
  deoptimization_enabled_ = true;
}

void Instrumentation::DisableDeoptimization(const char* key) {
  CHECK_EQ(deoptimization_enabled_, true);
  // If we deoptimized everything, undo it.
  InstrumentationLevel level = GetCurrentInstrumentationLevel();
  if (level == InstrumentationLevel::kInstrumentWithInterpreter) {
    UndeoptimizeEverything(key);
  }
  // Undeoptimize selected methods.
  while (true) {
    ArtMethod* method;
    {
      ReaderMutexLock mu(Thread::Current(), *GetDeoptimizedMethodsLock());
      if (IsDeoptimizedMethodsEmpty()) {
        break;
      }
      method = BeginDeoptimizedMethod();
      CHECK(method != nullptr);
    }
    Undeoptimize(method);
  }
  deoptimization_enabled_ = false;
}

// Indicates if instrumentation should notify method enter/exit events to the listeners.
bool Instrumentation::ShouldNotifyMethodEnterExitEvents() const {
  if (!HasMethodEntryListeners() && !HasMethodExitListeners()) {
    return false;
  }
  return !deoptimization_enabled_ && !interpreter_stubs_installed_;
}

void Instrumentation::DeoptimizeEverything(const char* key) {
  CHECK(deoptimization_enabled_);
  ConfigureStubs(key, InstrumentationLevel::kInstrumentWithInterpreter);
}

void Instrumentation::UndeoptimizeEverything(const char* key) {
  CHECK(interpreter_stubs_installed_);
  CHECK(deoptimization_enabled_);
  ConfigureStubs(key, InstrumentationLevel::kInstrumentNothing);
}

void Instrumentation::EnableMethodTracing(const char* key, bool needs_interpreter) {
  InstrumentationLevel level;
  if (needs_interpreter) {
    level = InstrumentationLevel::kInstrumentWithInterpreter;
  } else {
    level = InstrumentationLevel::kInstrumentWithInstrumentationStubs;
  }
  ConfigureStubs(key, level);
}

void Instrumentation::DisableMethodTracing(const char* key) {
  ConfigureStubs(key, InstrumentationLevel::kInstrumentNothing);
}

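// Returns the code that the instrumentation entrypoint should invoke for `method`, preferring
// the method's current entrypoint and falling back to oat/JIT code or the interpreter bridge.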
const void* Instrumentation::GetCodeForInvoke(ArtMethod* method) const {
  // This is called by instrumentation entry only and that should never be getting proxy methods.
  DCHECK(!method->IsProxyMethod()) << method->PrettyMethod();
  ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
  if (LIKELY(!instrumentation_stubs_installed_ && !interpreter_stubs_installed_)) {
    // In general we just return whatever the method thinks its entrypoint is here. The only
    // exception is if it still has the instrumentation entrypoint. That means we are racing
    // another thread getting rid of instrumentation, which is unexpected but possible. In that
    // case we want to wait and try to get it from the oat file or jit.
    const void* code = method->GetEntryPointFromQuickCompiledCodePtrSize(kRuntimePointerSize);
    DCHECK(code != nullptr);
    if (code != GetQuickInstrumentationEntryPoint()) {
      return code;
    } else if (method->IsNative()) {
      return class_linker->GetQuickOatCodeFor(method);
    }
    // We don't know what it is. Fall through to try to find the code from the JIT or Oat file.
  } else if (method->IsNative()) {
    // TODO: We could have JIT compiled native entrypoints. It might be worth it to find these.
    return class_linker->GetQuickOatCodeFor(method);
  } else if (UNLIKELY(interpreter_stubs_installed_)) {
    return GetQuickToInterpreterBridge();
  }
  // Since the method cannot be native due to the checks above, we can always fall back to the
  // interpreter bridge.
  const void* result = GetQuickToInterpreterBridge();
  if (!NeedDebugVersionFor(method)) {
    // If we don't need a debug version we should see what the oat file/class linker has to say.
    result = class_linker->GetQuickOatCodeFor(method);
  }
  return result;
}

const void* Instrumentation::GetQuickCodeFor(ArtMethod* method, PointerSize pointer_size) const {
  ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
  if (LIKELY(!instrumentation_stubs_installed_)) {
    const void* code = method->GetEntryPointFromQuickCompiledCodePtrSize(pointer_size);
    DCHECK(code != nullptr);
    if (LIKELY(!class_linker->IsQuickResolutionStub(code) &&
               !class_linker->IsQuickToInterpreterBridge(code))) {
      return code;
    }
  }
  return class_linker->GetQuickOatCodeFor(method);
}

MethodEnterEventImpl(Thread * thread,ObjPtr<mirror::Object> this_object,ArtMethod * method,uint32_t dex_pc) const1181 void Instrumentation::MethodEnterEventImpl(Thread* thread,
1182 ObjPtr<mirror::Object> this_object,
1183 ArtMethod* method,
1184 uint32_t dex_pc) const {
1185 DCHECK(!method->IsRuntimeMethod());
1186 if (HasMethodEntryListeners()) {
1187 Thread* self = Thread::Current();
1188 StackHandleScope<1> hs(self);
1189 Handle<mirror::Object> thiz(hs.NewHandle(this_object));
1190 for (InstrumentationListener* listener : method_entry_listeners_) {
1191 if (listener != nullptr) {
1192 listener->MethodEntered(thread, thiz, method, dex_pc);
1193 }
1194 }
1195 }
1196 }
1197
1198 template <>
MethodExitEventImpl(Thread * thread,ObjPtr<mirror::Object> this_object,ArtMethod * method,uint32_t dex_pc,OptionalFrame frame,MutableHandle<mirror::Object> & return_value) const1199 void Instrumentation::MethodExitEventImpl(Thread* thread,
1200 ObjPtr<mirror::Object> this_object,
1201 ArtMethod* method,
1202 uint32_t dex_pc,
1203 OptionalFrame frame,
1204 MutableHandle<mirror::Object>& return_value) const {
1205 if (HasMethodExitListeners()) {
1206 Thread* self = Thread::Current();
1207 StackHandleScope<1> hs(self);
1208 Handle<mirror::Object> thiz(hs.NewHandle(this_object));
1209 for (InstrumentationListener* listener : method_exit_listeners_) {
1210 if (listener != nullptr) {
1211 listener->MethodExited(thread, thiz, method, dex_pc, frame, return_value);
1212 }
1213 }
1214 }
1215 }
1216
MethodExitEventImpl(Thread * thread,ObjPtr<mirror::Object> this_object,ArtMethod * method,uint32_t dex_pc,OptionalFrame frame,JValue & return_value) const1217 template<> void Instrumentation::MethodExitEventImpl(Thread* thread,
1218 ObjPtr<mirror::Object> this_object,
1219 ArtMethod* method,
1220 uint32_t dex_pc,
1221 OptionalFrame frame,
1222 JValue& return_value) const {
1223 if (HasMethodExitListeners()) {
1224 Thread* self = Thread::Current();
1225 StackHandleScope<2> hs(self);
1226 Handle<mirror::Object> thiz(hs.NewHandle(this_object));
1227 if (method->GetInterfaceMethodIfProxy(kRuntimePointerSize)->GetReturnTypePrimitive() !=
1228 Primitive::kPrimNot) {
1229 for (InstrumentationListener* listener : method_exit_listeners_) {
1230 if (listener != nullptr) {
1231 listener->MethodExited(thread, thiz, method, dex_pc, frame, return_value);
1232 }
1233 }
1234 } else {
1235 MutableHandle<mirror::Object> ret(hs.NewHandle(return_value.GetL()));
1236 MethodExitEventImpl(thread, thiz.Get(), method, dex_pc, frame, ret);
1237 return_value.SetL(ret.Get());
1238 }
1239 }
1240 }
1241
MethodUnwindEvent(Thread * thread,ObjPtr<mirror::Object> this_object,ArtMethod * method,uint32_t dex_pc) const1242 void Instrumentation::MethodUnwindEvent(Thread* thread,
1243 ObjPtr<mirror::Object> this_object,
1244 ArtMethod* method,
1245 uint32_t dex_pc) const {
1246 if (HasMethodUnwindListeners()) {
1247 Thread* self = Thread::Current();
1248 StackHandleScope<1> hs(self);
1249 Handle<mirror::Object> thiz(hs.NewHandle(this_object));
1250 for (InstrumentationListener* listener : method_unwind_listeners_) {
1251 if (listener != nullptr) {
1252 listener->MethodUnwind(thread, thiz, method, dex_pc);
1253 }
1254 }
1255 }
1256 }
1257
DexPcMovedEventImpl(Thread * thread,ObjPtr<mirror::Object> this_object,ArtMethod * method,uint32_t dex_pc) const1258 void Instrumentation::DexPcMovedEventImpl(Thread* thread,
1259 ObjPtr<mirror::Object> this_object,
1260 ArtMethod* method,
1261 uint32_t dex_pc) const {
1262 Thread* self = Thread::Current();
1263 StackHandleScope<1> hs(self);
1264 Handle<mirror::Object> thiz(hs.NewHandle(this_object));
1265 for (InstrumentationListener* listener : dex_pc_listeners_) {
1266 if (listener != nullptr) {
1267 listener->DexPcMoved(thread, thiz, method, dex_pc);
1268 }
1269 }
1270 }
1271
BranchImpl(Thread * thread,ArtMethod * method,uint32_t dex_pc,int32_t offset) const1272 void Instrumentation::BranchImpl(Thread* thread,
1273 ArtMethod* method,
1274 uint32_t dex_pc,
1275 int32_t offset) const {
1276 for (InstrumentationListener* listener : branch_listeners_) {
1277 if (listener != nullptr) {
1278 listener->Branch(thread, method, dex_pc, offset);
1279 }
1280 }
1281 }
1282
WatchedFramePopImpl(Thread * thread,const ShadowFrame & frame) const1283 void Instrumentation::WatchedFramePopImpl(Thread* thread, const ShadowFrame& frame) const {
1284 for (InstrumentationListener* listener : watched_frame_pop_listeners_) {
1285 if (listener != nullptr) {
1286 listener->WatchedFramePop(thread, frame);
1287 }
1288 }
1289 }
1290
FieldReadEventImpl(Thread * thread,ObjPtr<mirror::Object> this_object,ArtMethod * method,uint32_t dex_pc,ArtField * field) const1291 void Instrumentation::FieldReadEventImpl(Thread* thread,
1292 ObjPtr<mirror::Object> this_object,
1293 ArtMethod* method,
1294 uint32_t dex_pc,
1295 ArtField* field) const {
1296 Thread* self = Thread::Current();
1297 StackHandleScope<1> hs(self);
1298 Handle<mirror::Object> thiz(hs.NewHandle(this_object));
1299 for (InstrumentationListener* listener : field_read_listeners_) {
1300 if (listener != nullptr) {
1301 listener->FieldRead(thread, thiz, method, dex_pc, field);
1302 }
1303 }
1304 }
1305
FieldWriteEventImpl(Thread * thread,ObjPtr<mirror::Object> this_object,ArtMethod * method,uint32_t dex_pc,ArtField * field,const JValue & field_value) const1306 void Instrumentation::FieldWriteEventImpl(Thread* thread,
1307 ObjPtr<mirror::Object> this_object,
1308 ArtMethod* method,
1309 uint32_t dex_pc,
1310 ArtField* field,
1311 const JValue& field_value) const {
1312 Thread* self = Thread::Current();
1313 StackHandleScope<2> hs(self);
1314 Handle<mirror::Object> thiz(hs.NewHandle(this_object));
1315 if (field->IsPrimitiveType()) {
1316 for (InstrumentationListener* listener : field_write_listeners_) {
1317 if (listener != nullptr) {
1318 listener->FieldWritten(thread, thiz, method, dex_pc, field, field_value);
1319 }
1320 }
1321 } else {
1322 Handle<mirror::Object> val(hs.NewHandle(field_value.GetL()));
1323 for (InstrumentationListener* listener : field_write_listeners_) {
1324 if (listener != nullptr) {
1325 listener->FieldWritten(thread, thiz, method, dex_pc, field, val);
1326 }
1327 }
1328 }
1329 }
1330
void Instrumentation::ExceptionThrownEvent(Thread* thread,
                                           ObjPtr<mirror::Throwable> exception_object) const {
  Thread* self = Thread::Current();
  StackHandleScope<1> hs(self);
  Handle<mirror::Throwable> h_exception(hs.NewHandle(exception_object));
  if (HasExceptionThrownListeners()) {
    DCHECK_EQ(thread->GetException(), h_exception.Get());
    thread->ClearException();
    for (InstrumentationListener* listener : exception_thrown_listeners_) {
      if (listener != nullptr) {
        listener->ExceptionThrown(thread, h_exception);
      }
    }
    // See b/65049545 for discussion about this behavior.
    thread->AssertNoPendingException();
    thread->SetException(h_exception.Get());
  }
}

void Instrumentation::ExceptionHandledEvent(Thread* thread,
                                            ObjPtr<mirror::Throwable> exception_object) const {
  Thread* self = Thread::Current();
  StackHandleScope<1> hs(self);
  Handle<mirror::Throwable> h_exception(hs.NewHandle(exception_object));
  if (HasExceptionHandledListeners()) {
    // We should have cleared the exception so that callers can detect a new one.
    DCHECK(thread->GetException() == nullptr);
    for (InstrumentationListener* listener : exception_handled_listeners_) {
      if (listener != nullptr) {
        listener->ExceptionHandled(thread, h_exception);
      }
    }
  }
}

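// Records an instrumented call in the per-thread instrumentation stack. The
// entry is keyed by `stack_ptr`, a stack address of the instrumented frame, so
// that the exit path can find the matching entry again from the address of the
// return-pc slot when the method returns.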
void Instrumentation::PushInstrumentationStackFrame(Thread* self,
                                                    ObjPtr<mirror::Object> this_object,
                                                    ArtMethod* method,
                                                    uintptr_t stack_ptr,
                                                    uintptr_t lr,
                                                    bool interpreter_entry) {
  DCHECK(!self->IsExceptionPending());
  std::map<uintptr_t, instrumentation::InstrumentationStackFrame>* stack =
      self->GetInstrumentationStack();
  if (kVerboseInstrumentation) {
    LOG(INFO) << "Entering " << ArtMethod::PrettyMethod(method) << " from PC "
              << reinterpret_cast<void*>(lr);
  }

  // We send the enter event before pushing the instrumentation frame to make cleanup easier.
  // If the event causes an exception, we can simply send the unwind event and return.
  StackHandleScope<1> hs(self);
  Handle<mirror::Object> h_this(hs.NewHandle(this_object));
  if (!interpreter_entry) {
    MethodEnterEvent(self, h_this.Get(), method, 0);
    if (self->IsExceptionPending()) {
      MethodUnwindEvent(self, h_this.Get(), method, 0);
      return;
    }
  }

  // We have a callee-save frame, so this value is guaranteed to never be 0.
  DCHECK(!self->IsExceptionPending());
  size_t frame_id = StackVisitor::ComputeNumFrames(self, kInstrumentationStackWalk);

  instrumentation::InstrumentationStackFrame instrumentation_frame(
      h_this.Get(), method, lr, frame_id, interpreter_entry, current_force_deopt_id_);
  stack->insert({stack_ptr, instrumentation_frame});
}

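// Decides whether deoptimization should resume at the current dex pc
// (kKeepDexPc) or may treat the current instruction as completed (kDefault).
// The save-everything runtime methods for clinit and suspend checks are
// entered from the middle of an instruction, so the interpreter must
// re-execute from that same dex pc: for example, a suspend check taken at a
// branch must not skip the branch itself when execution continues in the
// interpreter.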
DeoptimizationMethodType Instrumentation::GetDeoptimizationMethodType(ArtMethod* method) {
  if (method->IsRuntimeMethod()) {
    // Certain methods have strict requirements on whether the dex instruction
    // should be re-executed upon deoptimization.
    if (method == Runtime::Current()->GetCalleeSaveMethod(
        CalleeSaveType::kSaveEverythingForClinit)) {
      return DeoptimizationMethodType::kKeepDexPc;
    }
    if (method == Runtime::Current()->GetCalleeSaveMethod(
        CalleeSaveType::kSaveEverythingForSuspendCheck)) {
      return DeoptimizationMethodType::kKeepDexPc;
    }
  }
  return DeoptimizationMethodType::kDefault;
}

// Try to get the shorty of a runtime method if it's an invocation stub.
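// The shorty's first character encodes the return type: e.g. shorty "ILZ" is
// an int-returning method taking a reference and a boolean, so the stub is
// treated as returning 'I'. 'V' is the fallback when no return value is
// expected from the stub.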
static char GetRuntimeMethodShorty(Thread* thread) REQUIRES_SHARED(Locks::mutator_lock_) {
  char shorty = 'V';
  StackVisitor::WalkStack(
      [&shorty](const art::StackVisitor* stack_visitor) REQUIRES_SHARED(Locks::mutator_lock_) {
        ArtMethod* m = stack_visitor->GetMethod();
        if (m == nullptr || m->IsRuntimeMethod()) {
          return true;
        }
        // The first Java method.
        if (m->IsNative()) {
          // Use the JNI method's shorty for the JNI stub.
          shorty = m->GetShorty()[0];
        } else if (m->IsProxyMethod()) {
          // A proxy method just invokes its proxied method via
          // art_quick_proxy_invoke_handler.
          shorty = m->GetInterfaceMethodIfProxy(kRuntimePointerSize)->GetShorty()[0];
        } else {
          const Instruction& instr = m->DexInstructions().InstructionAt(stack_visitor->GetDexPc());
          if (instr.IsInvoke()) {
            uint16_t method_index = static_cast<uint16_t>(instr.VRegB());
            const DexFile* dex_file = m->GetDexFile();
            if (interpreter::IsStringInit(dex_file, method_index)) {
              // Invoking a string init constructor is turned into invoking
              // StringFactory.newStringFromChars(), which returns a string.
              shorty = 'L';
            } else {
              shorty = dex_file->GetMethodShorty(method_index)[0];
            }
          } else {
            // It could be that a non-invoke opcode invokes a stub, which in turn
            // invokes Java code. In such cases, we should never expect a return
            // value from the stub.
          }
        }
        // Stop stack walking since we've seen a Java frame.
        return false;
      },
      thread,
      /* context= */ nullptr,
      art::StackVisitor::StackWalkKind::kIncludeInlinedFrames);
  return shorty;
}

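// Removes the instrumentation frame matching `return_pc_addr`, sends the
// method exit event, and decides whether the caller must continue execution in
// the interpreter. The returned two-word pair steers the exit stub: judging
// from the two call sites below, the second word is the branch target (the
// deoptimization entry point, or the original return pc otherwise).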
TwoWordReturn Instrumentation::PopInstrumentationStackFrame(Thread* self,
                                                            uintptr_t* return_pc_addr,
                                                            uint64_t* gpr_result,
                                                            uint64_t* fpr_result) {
  DCHECK(gpr_result != nullptr);
  DCHECK(fpr_result != nullptr);
  // Do the pop.
  std::map<uintptr_t, instrumentation::InstrumentationStackFrame>* stack =
      self->GetInstrumentationStack();
  CHECK_GT(stack->size(), 0U);
  auto it = stack->find(reinterpret_cast<uintptr_t>(return_pc_addr));
  CHECK(it != stack->end());
  InstrumentationStackFrame instrumentation_frame = it->second;
  stack->erase(it);

  // Set return PC and check the consistency of the stack.
  // We don't cache the return pc value in a local as it may change after
  // sending a method exit event.
  *return_pc_addr = instrumentation_frame.return_pc_;
  self->VerifyStack();

  ArtMethod* method = instrumentation_frame.method_;
  uint32_t length;
  const PointerSize pointer_size = Runtime::Current()->GetClassLinker()->GetImagePointerSize();
  char return_shorty;

  // Runtime methods do not call into MethodExitEvent(), so there should be no
  // suspension point below.
  ScopedAssertNoThreadSuspension ants(__FUNCTION__, method->IsRuntimeMethod());
  if (method->IsRuntimeMethod()) {
    if (method != Runtime::Current()->GetCalleeSaveMethod(
        CalleeSaveType::kSaveEverythingForClinit)) {
      // If the caller is at an invocation point and the runtime method is not
      // for clinit, we need to pass return results to the caller.
      // We need the correct shorty to decide whether we need to pass the return
      // result for deoptimization below.
      return_shorty = GetRuntimeMethodShorty(self);
    } else {
      // Some runtime methods such as allocations, unresolved field getters, etc.
      // have return values. We don't need to set return_value since MethodExitEvent()
      // below isn't called for runtime methods. Deoptimization doesn't need the
      // value either since the dex instruction will be re-executed by the
      // interpreter, except in these two cases:
      // (1) For an invoke, which is handled above to get the correct shorty.
      // (2) For MONITOR_ENTER/EXIT, which cannot be re-executed since it's not
      //     idempotent. However, there is no return value for it anyway.
      return_shorty = 'V';
    }
  } else {
    return_shorty = method->GetInterfaceMethodIfProxy(pointer_size)->GetShorty(&length)[0];
  }

  bool is_ref = return_shorty == '[' || return_shorty == 'L';
  StackHandleScope<1> hs(self);
  MutableHandle<mirror::Object> res(hs.NewHandle<mirror::Object>(nullptr));
  JValue return_value;
  if (return_shorty == 'V') {
    return_value.SetJ(0);
  } else if (return_shorty == 'F' || return_shorty == 'D') {
    return_value.SetJ(*fpr_result);
  } else {
    return_value.SetJ(*gpr_result);
  }
  if (is_ref) {
    // Take a handle to the return value so we won't lose it if we suspend.
    res.Assign(return_value.GetL());
  }
  // TODO: improve the dex pc information here, requires knowledge of current PC as opposed to
  //       return_pc.
  uint32_t dex_pc = dex::kDexNoIndex;
  if (!method->IsRuntimeMethod() && !instrumentation_frame.interpreter_entry_) {
    ObjPtr<mirror::Object> this_object = instrumentation_frame.this_object_;
    // Note that sending the event may change the contents of *return_pc_addr.
    MethodExitEvent(
        self, this_object, instrumentation_frame.method_, dex_pc, OptionalFrame{}, return_value);
  }

  // Deoptimize if the caller needs to continue execution in the interpreter. Do nothing if we get
  // back to an upcall.
  NthCallerVisitor visitor(self, 1, true);
  visitor.WalkStack(true);
  bool deoptimize = (visitor.caller != nullptr) &&
                    (interpreter_stubs_installed_ || IsDeoptimized(visitor.caller) ||
                     self->IsForceInterpreter() ||
                     // NB: since structurally obsolete compiled methods might have the offsets
                     // of methods/fields compiled in, we need to go back to the interpreter
                     // whenever we hit them.
                     visitor.caller->GetDeclaringClass()->IsObsoleteObject() ||
                     // Check if we forced all threads to deoptimize in the time between this
                     // frame being created and now.
                     instrumentation_frame.force_deopt_id_ != current_force_deopt_id_ ||
                     Dbg::IsForcedInterpreterNeededForUpcall(self, visitor.caller));
  if (is_ref) {
    // Restore the return value if it's a reference since it might have moved.
    *reinterpret_cast<mirror::Object**>(gpr_result) = res.Get();
  }
  if (deoptimize && Runtime::Current()->IsAsyncDeoptimizeable(*return_pc_addr)) {
    if (kVerboseInstrumentation) {
      LOG(INFO) << "Deoptimizing "
                << visitor.caller->PrettyMethod()
                << " by returning from "
                << method->PrettyMethod()
                << " with result "
                << std::hex << return_value.GetJ() << std::dec
                << " in "
                << *self;
    }
    DeoptimizationMethodType deopt_method_type = GetDeoptimizationMethodType(method);
    self->PushDeoptimizationContext(return_value,
                                    return_shorty == 'L' || return_shorty == '[',
                                    /* exception= */ nullptr,
                                    /* from_code= */ false,
                                    deopt_method_type);
    return GetTwoWordSuccessValue(*return_pc_addr,
                                  reinterpret_cast<uintptr_t>(GetQuickDeoptimizationEntryPoint()));
  } else {
    if (deoptimize && !Runtime::Current()->IsAsyncDeoptimizeable(*return_pc_addr)) {
      VLOG(deopt) << "Got a deoptimization request on un-deoptimizable " << method->PrettyMethod()
                  << " at PC " << reinterpret_cast<void*>(*return_pc_addr);
    }
    if (kVerboseInstrumentation) {
      LOG(INFO) << "Returning from " << method->PrettyMethod()
                << " to PC " << reinterpret_cast<void*>(*return_pc_addr);
    }
    return GetTwoWordSuccessValue(0, *return_pc_addr);
  }
}

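// Instrumentation frames are keyed by stack address, so `pop_until` is
// compared against the map keys to discard every frame below it. Returns the
// return pc of the last frame popped, or 0 if no frame was popped.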
uintptr_t Instrumentation::PopFramesForDeoptimization(Thread* self, uintptr_t pop_until) const {
  std::map<uintptr_t, instrumentation::InstrumentationStackFrame>* stack =
      self->GetInstrumentationStack();
  // Pop all instrumentation frames below `pop_until`.
  uintptr_t return_pc = 0u;
  for (auto i = stack->begin(); i != stack->end() && i->first <= pop_until;) {
    auto e = i;
    ++i;
    if (kVerboseInstrumentation) {
      LOG(INFO) << "Popping for deoptimization " << e->second.method_->PrettyMethod();
    }
    return_pc = e->second.return_pc_;
    stack->erase(e);
  }
  return return_pc;
}


std::string InstrumentationStackFrame::Dump() const {
  std::ostringstream os;
  os << "Frame " << frame_id_ << " " << ArtMethod::PrettyMethod(method_) << ":"
     << reinterpret_cast<void*>(return_pc_) << " this=" << reinterpret_cast<void*>(this_object_)
     << " force_deopt_id=" << force_deopt_id_;
  return os.str();
}

}  // namespace instrumentation
}  // namespace art