1 /*
2 * Copyright (C) 2011 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17 #include "instrumentation.h"
18
19 #include <functional>
20 #include <optional>
21 #include <sstream>
22
23 #include <android-base/logging.h>
24
25 #include "arch/context.h"
26 #include "art_field-inl.h"
27 #include "art_method-inl.h"
28 #include "base/atomic.h"
29 #include "base/callee_save_type.h"
30 #include "class_linker.h"
31 #include "debugger.h"
32 #include "dex/dex_file-inl.h"
33 #include "dex/dex_file_types.h"
34 #include "dex/dex_instruction-inl.h"
35 #include "entrypoints/quick/quick_alloc_entrypoints.h"
36 #include "entrypoints/quick/quick_entrypoints.h"
37 #include "entrypoints/runtime_asm_entrypoints.h"
38 #include "gc_root-inl.h"
39 #include "interpreter/interpreter.h"
40 #include "interpreter/interpreter_common.h"
41 #include "jit/jit.h"
42 #include "jit/jit_code_cache.h"
43 #include "jvalue-inl.h"
44 #include "jvalue.h"
45 #include "mirror/class-inl.h"
46 #include "mirror/dex_cache.h"
47 #include "mirror/object-inl.h"
48 #include "mirror/object_array-inl.h"
49 #include "nterp_helpers.h"
50 #include "nth_caller_visitor.h"
51 #include "oat_file_manager.h"
52 #include "oat_quick_method_header.h"
53 #include "runtime-inl.h"
54 #include "thread.h"
55 #include "thread_list.h"
56
57 namespace art {
58 extern "C" NO_RETURN void artDeoptimize(Thread* self, bool skip_method_exit_callbacks);
59 extern "C" NO_RETURN void artDeliverPendingExceptionFromCode(Thread* self);
60
61 namespace instrumentation {
62
63 constexpr bool kVerboseInstrumentation = false;
64
65 void InstrumentationListener::MethodExited(
66 Thread* thread,
67 ArtMethod* method,
68 OptionalFrame frame,
69 MutableHandle<mirror::Object>& return_value) {
70 DCHECK_EQ(method->GetInterfaceMethodIfProxy(kRuntimePointerSize)->GetReturnTypePrimitive(),
71 Primitive::kPrimNot);
72 const void* original_ret = return_value.Get();
73 JValue v;
74 v.SetL(return_value.Get());
75 MethodExited(thread, method, frame, v);
76 DCHECK(original_ret == v.GetL()) << "Return value changed";
77 }
78
79 void InstrumentationListener::FieldWritten(Thread* thread,
80 Handle<mirror::Object> this_object,
81 ArtMethod* method,
82 uint32_t dex_pc,
83 ArtField* field,
84 Handle<mirror::Object> field_value) {
85 DCHECK(!field->IsPrimitiveType());
86 JValue v;
87 v.SetL(field_value.Get());
88 FieldWritten(thread, this_object, method, dex_pc, field, v);
89 }
90
91 // Instrumentation works on non-inlined frames by updating returned PCs
92 // of compiled frames.
93 static constexpr StackVisitor::StackWalkKind kInstrumentationStackWalk =
94 StackVisitor::StackWalkKind::kSkipInlinedFrames;
95
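// Class visitor that installs the appropriate entrypoint stubs for every method of each
// visited class (see InstallStubsForClass below); used when the instrumentation level
// changes or the runtime becomes debuggable.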
96 class InstallStubsClassVisitor : public ClassVisitor {
97 public:
98 explicit InstallStubsClassVisitor(Instrumentation* instrumentation)
99 : instrumentation_(instrumentation) {}
100
101 bool operator()(ObjPtr<mirror::Class> klass) override REQUIRES(Locks::mutator_lock_) {
102 instrumentation_->InstallStubsForClass(klass.Ptr());
103 return true; // we visit all classes.
104 }
105
106 private:
107 Instrumentation* const instrumentation_;
108 };
109
110 Instrumentation::Instrumentation()
111 : run_exit_hooks_(false),
112 instrumentation_level_(InstrumentationLevel::kInstrumentNothing),
113 forced_interpret_only_(false),
114 have_method_entry_listeners_(false),
115 have_method_exit_listeners_(false),
116 have_method_unwind_listeners_(false),
117 have_dex_pc_listeners_(false),
118 have_field_read_listeners_(false),
119 have_field_write_listeners_(false),
120 have_exception_thrown_listeners_(false),
121 have_watched_frame_pop_listeners_(false),
122 have_branch_listeners_(false),
123 have_exception_handled_listeners_(false),
124 quick_alloc_entry_points_instrumentation_counter_(0),
125 alloc_entrypoints_instrumented_(false) {}
126
127 bool Instrumentation::ProcessMethodUnwindCallbacks(Thread* self,
128 std::queue<ArtMethod*>& methods,
129 MutableHandle<mirror::Throwable>& exception) {
130 DCHECK(!self->IsExceptionPending());
131 if (!HasMethodUnwindListeners()) {
132 return true;
133 }
134 if (kVerboseInstrumentation) {
135 LOG(INFO) << "Popping frames for exception " << exception->Dump();
136 }
137 // The instrumentation events expect the exception to be set.
138 self->SetException(exception.Get());
139 bool new_exception_thrown = false;
140
141 // Process callbacks for all methods that would be unwound until a new exception is thrown.
142 while (!methods.empty()) {
143 ArtMethod* method = methods.front();
144 methods.pop();
145 if (kVerboseInstrumentation) {
146 LOG(INFO) << "Popping for unwind " << method->PrettyMethod();
147 }
148
149 if (method->IsRuntimeMethod()) {
150 continue;
151 }
152
153 // Notify listeners of method unwind.
154 // TODO: improve the dex_pc information here.
155 uint32_t dex_pc = dex::kDexNoIndex;
156 MethodUnwindEvent(self, method, dex_pc);
157 new_exception_thrown = self->GetException() != exception.Get();
158 if (new_exception_thrown) {
159 break;
160 }
161 }
162
163 exception.Assign(self->GetException());
164 self->ClearException();
165 if (kVerboseInstrumentation && new_exception_thrown) {
166 LOG(INFO) << "Did partial pop of frames due to new exception";
167 }
168 return !new_exception_thrown;
169 }
170
171 void Instrumentation::InstallStubsForClass(ObjPtr<mirror::Class> klass) {
172 if (!klass->IsResolved()) {
173 // We need the class to be resolved to install/uninstall stubs. Otherwise its methods
174 // could not be initialized or linked with regard to class inheritance.
175 } else if (klass->IsErroneousResolved()) {
176 // We can't execute code in an erroneous class: do nothing.
177 } else {
178 for (ArtMethod& method : klass->GetMethods(kRuntimePointerSize)) {
179 InstallStubsForMethod(&method);
180 }
181 }
182 }
183
184 static bool CanHandleInitializationCheck(const void* code) {
185 ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
186 return class_linker->IsQuickResolutionStub(code) ||
187 class_linker->IsQuickToInterpreterBridge(code) ||
188 class_linker->IsQuickGenericJniStub(code) ||
189 (code == interpreter::GetNterpWithClinitEntryPoint());
190 }
191
192 static bool IsProxyInit(ArtMethod* method) REQUIRES_SHARED(Locks::mutator_lock_) {
193 // Annoyingly this can be called before we have actually initialized WellKnownClasses, so
194 // we also need to check this based on the declaring-class descriptor. The check is valid because
195 // Proxy only has a single constructor.
196 ArtMethod* well_known_proxy_init = WellKnownClasses::java_lang_reflect_Proxy_init;
197 if (well_known_proxy_init == method) {
198 return true;
199 }
200
201 if (well_known_proxy_init != nullptr) {
202 return false;
203 }
204
205 return method->IsConstructor() && !method->IsStatic() &&
206 method->GetDeclaringClass()->DescriptorEquals("Ljava/lang/reflect/Proxy;");
207 }
208
209 // Returns true if the code at the given entry point supports calling method entry / exit
210 // hooks. JITed code compiled as debuggable calls the hooks directly and doesn't need a stub.
211 static bool CodeSupportsEntryExitHooks(const void* entry_point, ArtMethod* method)
212 REQUIRES_SHARED(Locks::mutator_lock_) {
213 // Proxy.init should always run with the switch interpreter where entry / exit hooks are
214 // supported.
215 if (IsProxyInit(method)) {
216 return true;
217 }
218
219 // In some tests the runtime isn't set up fully and hence the entry points could be nullptr;
220 // just be conservative and return false here.
221 if (entry_point == nullptr) {
222 return false;
223 }
224
225 ClassLinker* linker = Runtime::Current()->GetClassLinker();
226 // Interpreter supports entry / exit hooks. Resolution stubs fetch code that supports entry / exit
227 // hooks when required. So return true for both cases.
228 if (linker->IsQuickToInterpreterBridge(entry_point) ||
229 linker->IsQuickResolutionStub(entry_point)) {
230 return true;
231 }
232
233 // When JITing code for debuggable runtimes or while instrumentation is active, we generate
234 // code that calls the method entry / exit hooks when required.
235 jit::Jit* jit = Runtime::Current()->GetJit();
236 if (jit != nullptr && jit->GetCodeCache()->ContainsPc(entry_point)) {
237 // If JITed code was compiled with instrumentation support we support entry / exit hooks.
238 OatQuickMethodHeader* header = OatQuickMethodHeader::FromEntryPoint(entry_point);
239 return CodeInfo::IsDebuggable(header->GetOptimizedCodeInfoPtr());
240 }
241
242 // GenericJni trampoline can handle entry / exit hooks.
243 if (linker->IsQuickGenericJniStub(entry_point)) {
244 return true;
245 }
246
247 // The remaining cases are nterp / oat code / JIT code that isn't compiled with instrumentation
248 // support.
249 return false;
250 }
251
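// Writes `quick_code` into the method's quick entrypoint, skipping the write when the
// entrypoint is unchanged so boot-image methods are not dirtied unnecessarily. In debug
// builds, also verifies that the new entrypoint is compatible with a pending class
// initialization check and with entry / exit hooks if those are currently installed.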
252 static void UpdateEntryPoints(ArtMethod* method, const void* quick_code)
253 REQUIRES_SHARED(Locks::mutator_lock_) {
254 if (kIsDebugBuild) {
255 if (method->StillNeedsClinitCheckMayBeDead()) {
256 CHECK(CanHandleInitializationCheck(quick_code));
257 }
258 jit::Jit* jit = Runtime::Current()->GetJit();
259 if (jit != nullptr && jit->GetCodeCache()->ContainsPc(quick_code)) {
260 // Ensure we always have the thumb entrypoint for JIT on arm32.
261 if (kRuntimeISA == InstructionSet::kArm) {
262 CHECK_EQ(reinterpret_cast<uintptr_t>(quick_code) & 1, 1u);
263 }
264 }
265 const Instrumentation* instr = Runtime::Current()->GetInstrumentation();
266 if (instr->EntryExitStubsInstalled()) {
267 DCHECK(CodeSupportsEntryExitHooks(quick_code, method));
268 }
269 }
270 // If the method is from a boot image, don't dirty it if the entrypoint
271 // doesn't change.
272 if (method->GetEntryPointFromQuickCompiledCode() != quick_code) {
273 method->SetEntryPointFromQuickCompiledCode(quick_code);
274 }
275 }
276
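// Dex pc events are only needed for methods that run in the interpreter on this thread,
// and only when a DexPcMoved listener is installed.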
277 bool Instrumentation::NeedsDexPcEvents(ArtMethod* method, Thread* thread) {
278 return (InterpretOnly(method) || thread->IsForceInterpreter()) && HasDexPcListeners();
279 }
280
281 bool Instrumentation::InterpretOnly(ArtMethod* method) REQUIRES_SHARED(Locks::mutator_lock_) {
282 if (method->IsNative()) {
283 return false;
284 }
285 return InterpretOnly() || IsDeoptimized(method);
286 }
287
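// Returns whether the given AOT code may be used: never for java-debuggable runtimes, and
// only boot-image code when native-debugging with JIT-at-first-use, since application code
// is re-JITed with extra stackmaps in that mode.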
288 static bool CanUseAotCode(const void* quick_code)
289 REQUIRES_SHARED(Locks::mutator_lock_) {
290 if (quick_code == nullptr) {
291 return false;
292 }
293 Runtime* runtime = Runtime::Current();
294 // For simplicity, we never use AOT code when the runtime is java-debuggable.
295 if (runtime->IsJavaDebuggable()) {
296 return false;
297 }
298
299 if (runtime->IsNativeDebuggable()) {
300 DCHECK(runtime->UseJitCompilation() && runtime->GetJit()->JitAtFirstUse());
301 // If we are doing native debugging, ignore application's AOT code,
302 // since we want to JIT it (at first use) with extra stackmaps for native
303 // debugging. We keep however all AOT code from the boot image,
304 // since the JIT-at-first-use is blocking and would result in non-negligible
305 // startup performance impact.
306 return runtime->GetHeap()->IsInBootImageOatFile(quick_code);
307 }
308
309 return true;
310 }
311
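// Nterp can only be used when the runtime supports it, the method itself is nterp
// compatible, and the method's declaring class has been verified.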
312 static bool CanUseNterp(ArtMethod* method) REQUIRES_SHARED(Locks::mutator_lock_) {
313 return interpreter::CanRuntimeUseNterp() &&
314 CanMethodUseNterp(method) &&
315 method->IsDeclaringClassVerifiedMayBeDead();
316 }
317
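// Returns the most optimized entrypoint available for the method, in order of preference:
// the proxy invoke handler, usable AOT code, precompiled JIT code, nterp, and finally the
// GenericJNI stub or the quick-to-interpreter bridge.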
318 static const void* GetOptimizedCodeFor(ArtMethod* method) REQUIRES_SHARED(Locks::mutator_lock_) {
319 DCHECK(!Runtime::Current()->GetInstrumentation()->InterpretOnly(method));
320 CHECK(method->IsInvokable()) << method->PrettyMethod();
321 if (method->IsProxyMethod()) {
322 return GetQuickProxyInvokeHandler();
323 }
324
325 // In debuggable mode, we can only use AOT code for native methods.
326 ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
327 const void* aot_code = method->GetOatMethodQuickCode(class_linker->GetImagePointerSize());
328 if (CanUseAotCode(aot_code)) {
329 return aot_code;
330 }
331
332 // If the method has been precompiled, there can be a JIT version.
333 jit::Jit* jit = Runtime::Current()->GetJit();
334 if (jit != nullptr) {
335 const void* code = jit->GetCodeCache()->GetSavedEntryPointOfPreCompiledMethod(method);
336 if (code != nullptr) {
337 return code;
338 }
339 }
340
341 // We need to check if the class has been verified for setting up nterp, as
342 // the verifier could punt the method to the switch interpreter in case we
343 // need to do lock counting.
344 if (CanUseNterp(method)) {
345 return interpreter::GetNterpEntryPoint();
346 }
347
348 return method->IsNative() ? GetQuickGenericJniStub() : GetQuickToInterpreterBridge();
349 }
350
351 void Instrumentation::InitializeMethodsCode(ArtMethod* method, const void* aot_code)
352 REQUIRES_SHARED(Locks::mutator_lock_) {
353 if (!method->IsInvokable()) {
354 DCHECK(method->GetEntryPointFromQuickCompiledCode() == nullptr ||
355 Runtime::Current()->GetClassLinker()->IsQuickToInterpreterBridge(
356 method->GetEntryPointFromQuickCompiledCode()));
357 UpdateEntryPoints(method, GetQuickToInterpreterBridge());
358 return;
359 }
360
361 // Use instrumentation entrypoints if instrumentation is installed.
362 if (UNLIKELY(EntryExitStubsInstalled() || IsForcedInterpretOnly() || IsDeoptimized(method))) {
363 UpdateEntryPoints(
364 method, method->IsNative() ? GetQuickGenericJniStub() : GetQuickToInterpreterBridge());
365 return;
366 }
367
368 // Special case if we need an initialization check.
369 // The method and its declaring class may be dead when starting JIT GC during managed heap GC.
370 if (method->StillNeedsClinitCheckMayBeDead()) {
371 // If we have code but the method needs a class initialization check before calling
372 // that code, install the resolution stub that will perform the check.
373 // It will be replaced by the proper entry point by ClassLinker::FixupStaticTrampolines
374 // after initializing class (see ClassLinker::InitializeClass method).
375 // Note: this mimics the logic in image_writer.cc that installs the resolution
376 // stub only if we have compiled code or we can execute nterp, and the method needs a class
377 // initialization check.
378 if (aot_code != nullptr || method->IsNative() || CanUseNterp(method)) {
379 if (kIsDebugBuild && CanUseNterp(method)) {
380 // Adds some test coverage for the nterp clinit entrypoint.
381 UpdateEntryPoints(method, interpreter::GetNterpWithClinitEntryPoint());
382 } else {
383 UpdateEntryPoints(method, GetQuickResolutionStub());
384 }
385 } else {
386 UpdateEntryPoints(method, GetQuickToInterpreterBridge());
387 }
388 return;
389 }
390
391 // Use the provided AOT code if possible.
392 if (CanUseAotCode(aot_code)) {
393 UpdateEntryPoints(method, aot_code);
394 return;
395 }
396
397 // We check if the class is verified as we need the slow interpreter for lock verification.
398 // If the class is not verified, this will be updated in
399 // ClassLinker::UpdateClassAfterVerification.
400 if (CanUseNterp(method)) {
401 UpdateEntryPoints(method, interpreter::GetNterpEntryPoint());
402 return;
403 }
404
405 // Use default entrypoints.
406 UpdateEntryPoints(
407 method, method->IsNative() ? GetQuickGenericJniStub() : GetQuickToInterpreterBridge());
408 }
409
410 void Instrumentation::InstallStubsForMethod(ArtMethod* method) {
411 if (!method->IsInvokable() || method->IsProxyMethod()) {
412 // Do not change stubs for these methods.
413 return;
414 }
415 // Don't stub Proxy.<init>. Note that the Proxy class itself is not a proxy class.
416 // TODO: We should remove the need for this since it means we cannot always correctly detect
417 // calls to Proxy.<init>.
418 if (IsProxyInit(method)) {
419 return;
420 }
421
422 // If the instrumentation needs to go through the interpreter, just update the
423 // entrypoint to interpreter.
424 if (InterpretOnly(method)) {
425 UpdateEntryPoints(method, GetQuickToInterpreterBridge());
426 return;
427 }
428
429 if (EntryExitStubsInstalled()) {
430 // Install interpreter bridge / GenericJni stub if the existing code doesn't support
431 // entry / exit hooks.
432 if (!CodeSupportsEntryExitHooks(method->GetEntryPointFromQuickCompiledCode(), method)) {
433 UpdateEntryPoints(
434 method, method->IsNative() ? GetQuickGenericJniStub() : GetQuickToInterpreterBridge());
435 }
436 return;
437 }
438
439 // We're being asked to restore the entrypoints after instrumentation.
440 CHECK_EQ(instrumentation_level_, InstrumentationLevel::kInstrumentNothing);
441 // We need to have the resolution stub still if the class is not initialized.
442 if (method->StillNeedsClinitCheck()) {
443 UpdateEntryPoints(method, GetQuickResolutionStub());
444 return;
445 }
446 UpdateEntryPoints(method, GetOptimizedCodeFor(method));
447 }
448
449 void Instrumentation::UpdateEntrypointsForDebuggable() {
450 Runtime* runtime = Runtime::Current();
451 // If we are transitioning from non-debuggable to debuggable, we patch
452 // entry points of methods to remove any aot / JITed entry points.
453 InstallStubsClassVisitor visitor(this);
454 runtime->GetClassLinker()->VisitClasses(&visitor);
455 }
456
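// Returns whether method exit hooks can be called when returning from code described by
// `header`: generic JNI stubs and debuggable optimized code support them, nterp does not.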
457 bool Instrumentation::MethodSupportsExitEvents(ArtMethod* method,
458 const OatQuickMethodHeader* header) {
459 if (header == nullptr) {
460 // Header can be nullptr for runtime / proxy methods, which don't support method exit hooks,
461 // or for native methods that use generic JNI stubs. Generic JNI stubs do support method exit
462 // hooks.
463 return method->IsNative();
464 }
465
466 if (header->IsNterpMethodHeader()) {
467 // Nterp doesn't support method exit events.
468 return false;
469 }
470
471 DCHECK(header->IsOptimized());
472 if (CodeInfo::IsDebuggable(header->GetOptimizedCodeInfoPtr())) {
473 // For optimized code, we only support method entry / exit hooks if they are compiled as
474 // debuggable.
475 return true;
476 }
477
478 return false;
479 }
480
481 // Updates the frames on the stack to reflect any changes related to instrumentation.
482 // For JITed frames, the ShouldDeoptimize flag is updated to enable deoptimization of
483 // methods when necessary. Shadow frames are updated if dex pc event
484 // notification has changed. When deopt_all_frames is true, the flag is
485 // updated to force a deoptimization.
486 void InstrumentationInstallStack(Thread* thread, bool deopt_all_frames)
487 REQUIRES(Locks::mutator_lock_) {
488 Locks::mutator_lock_->AssertExclusiveHeld(Thread::Current());
489 struct InstallStackVisitor final : public StackVisitor {
490 InstallStackVisitor(Thread* thread_in,
491 Context* context,
492 bool deopt_all_frames)
493 : StackVisitor(thread_in, context, kInstrumentationStackWalk),
494 deopt_all_frames_(deopt_all_frames),
495 runtime_methods_need_deopt_check_(false) {}
496
497 bool VisitFrame() override REQUIRES_SHARED(Locks::mutator_lock_) {
498 ArtMethod* m = GetMethod();
499 if (m == nullptr || m->IsRuntimeMethod()) {
500 if (kVerboseInstrumentation) {
501 LOG(INFO) << " Skipping upcall / runtime method. Frame " << GetFrameId();
502 }
503 return true; // Ignore upcalls and runtime methods.
504 }
505
506 bool is_shadow_frame = GetCurrentQuickFrame() == nullptr;
507 if (kVerboseInstrumentation) {
508 LOG(INFO) << "Processing frame: method: " << m->PrettyMethod()
509 << " is_shadow_frame: " << is_shadow_frame;
510 }
511
512 // Handle interpreter frame.
513 if (is_shadow_frame) {
514 // Since we are updating the instrumentation-related information, we have to recalculate
515 // NeedsDexPcEvents. For example, when a new method or thread is deoptimized or interpreter
516 // stubs are installed, NeedsDexPcEvents could change for the shadow frames on the stack.
517 // If we didn't update it here, we would miss reporting dex pc events, which is incorrect.
518 ShadowFrame* shadow_frame = GetCurrentShadowFrame();
519 DCHECK(shadow_frame != nullptr);
520 shadow_frame->SetNotifyDexPcMoveEvents(
521 Runtime::Current()->GetInstrumentation()->NeedsDexPcEvents(GetMethod(), GetThread()));
522 return true; // Continue.
523 }
524
525 DCHECK(!m->IsRuntimeMethod());
526 const OatQuickMethodHeader* method_header = GetCurrentOatQuickMethodHeader();
527 // If it is a JITed frame, just set the deopt bit if required; otherwise continue.
528 // We need kForceDeoptForRedefinition to ensure we don't use any JITed code after a
529 // redefinition. We support redefinition only if the runtime has started off as a
530 // debuggable runtime which makes sure we don't use any AOT or Nterp code.
531 // The CheckCallerForDeopt is an optimization which we only do for non-native JITed code for
532 // now. We can extend it to native methods but that needs reserving an additional stack slot.
533 // We don't do it currently since that wasn't important for debugger performance.
534 if (method_header != nullptr && method_header->HasShouldDeoptimizeFlag()) {
535 if (deopt_all_frames_) {
536 runtime_methods_need_deopt_check_ = true;
537 SetShouldDeoptimizeFlag(DeoptimizeFlagValue::kForceDeoptForRedefinition);
538 }
539 SetShouldDeoptimizeFlag(DeoptimizeFlagValue::kCheckCallerForDeopt);
540 }
541
542 return true; // Continue.
543 }
544 bool deopt_all_frames_;
545 bool runtime_methods_need_deopt_check_;
546 };
547 if (kVerboseInstrumentation) {
548 std::string thread_name;
549 thread->GetThreadName(thread_name);
550 LOG(INFO) << "Installing exit stubs in " << thread_name;
551 }
552
553 std::unique_ptr<Context> context(Context::Create());
554 InstallStackVisitor visitor(thread,
555 context.get(),
556 deopt_all_frames);
557 visitor.WalkStack(true);
558
559 if (visitor.runtime_methods_need_deopt_check_) {
560 thread->SetDeoptCheckRequired(true);
561 }
562
563 thread->VerifyStack();
564 }
565
566 void UpdateNeedsDexPcEventsOnStack(Thread* thread) REQUIRES(Locks::mutator_lock_) {
567 Locks::mutator_lock_->AssertExclusiveHeld(Thread::Current());
568
569 struct InstallStackVisitor final : public StackVisitor {
570 InstallStackVisitor(Thread* thread_in, Context* context)
571 : StackVisitor(thread_in, context, kInstrumentationStackWalk) {}
572
573 bool VisitFrame() override REQUIRES_SHARED(Locks::mutator_lock_) {
574 ShadowFrame* shadow_frame = GetCurrentShadowFrame();
575 if (shadow_frame != nullptr) {
576 shadow_frame->SetNotifyDexPcMoveEvents(
577 Runtime::Current()->GetInstrumentation()->NeedsDexPcEvents(GetMethod(), GetThread()));
578 }
579 return true;
580 }
581 };
582
583 std::unique_ptr<Context> context(Context::Create());
584 InstallStackVisitor visitor(thread, context.get());
585 visitor.WalkStack(true);
586 }
587
588 void ReportMethodEntryForOnStackMethods(InstrumentationListener* listener, Thread* thread)
589 REQUIRES(Locks::mutator_lock_) {
590 Locks::mutator_lock_->AssertExclusiveHeld(Thread::Current());
591
592 struct InstallStackVisitor final : public StackVisitor {
593 InstallStackVisitor(Thread* thread_in, Context* context)
594 : StackVisitor(thread_in, context, kInstrumentationStackWalk) {}
595
596 bool VisitFrame() override REQUIRES_SHARED(Locks::mutator_lock_) {
597 ArtMethod* m = GetMethod();
598 if (m == nullptr || m->IsRuntimeMethod()) {
599 // Skip upcall / runtime methods
600 return true;
601 }
602
603 if (GetCurrentShadowFrame() != nullptr) {
604 stack_methods_.push_back(m);
605 } else {
606 const OatQuickMethodHeader* method_header = GetCurrentOatQuickMethodHeader();
607 if (Runtime::Current()->GetInstrumentation()->MethodSupportsExitEvents(m, method_header)) {
608 // It is unexpected to see a method enter event but not a method exit event so record
609 // stack methods only for frames that support method exit events. Even if we deoptimize we
610 // make sure that we only call method exit event if the frame supported it in the first
611 // place. For example, deoptimizing from JITed code with debug support calls a method exit hook
612 // but deoptimizing from nterp doesn't.
613 stack_methods_.push_back(m);
614 }
615 }
616 return true;
617 }
618
619 std::vector<ArtMethod*> stack_methods_;
620 };
621
622 if (kVerboseInstrumentation) {
623 std::string thread_name;
624 thread->GetThreadName(thread_name);
625 LOG(INFO) << "Reporting method entry for on-stack methods on thread " << thread_name;
626 }
627
628 std::unique_ptr<Context> context(Context::Create());
629 InstallStackVisitor visitor(thread, context.get());
630 visitor.WalkStack(true);
631
632 // Create method enter events for all methods currently on the thread's stack.
633 for (auto smi = visitor.stack_methods_.rbegin(); smi != visitor.stack_methods_.rend(); smi++) {
634 listener->MethodEntered(thread, *smi);
635 }
636 }
637
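// Instruments a single thread's stack so exit hooks run as its frames unwind; when
// force_deopt is true the JITed frames are also marked for forced deoptimization.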
638 void Instrumentation::InstrumentThreadStack(Thread* thread, bool force_deopt) {
639 run_exit_hooks_ = true;
640 InstrumentationInstallStack(thread, force_deopt);
641 }
642
643 void Instrumentation::InstrumentAllThreadStacks(bool force_deopt) {
644 run_exit_hooks_ = true;
645 MutexLock mu(Thread::Current(), *Locks::thread_list_lock_);
646 for (Thread* thread : Runtime::Current()->GetThreadList()->GetList()) {
647 InstrumentThreadStack(thread, force_deopt);
648 }
649 }
650
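// Walks the thread's stack and clears the kCheckCallerForDeopt flag from JITed frames,
// undoing the per-frame state installed by InstrumentationInstallStack.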
651 static void InstrumentationRestoreStack(Thread* thread) REQUIRES(Locks::mutator_lock_) {
652 Locks::mutator_lock_->AssertExclusiveHeld(Thread::Current());
653
654 struct RestoreStackVisitor final : public StackVisitor {
655 RestoreStackVisitor(Thread* thread)
656 : StackVisitor(thread, nullptr, kInstrumentationStackWalk), thread_(thread) {}
657
658 bool VisitFrame() override REQUIRES_SHARED(Locks::mutator_lock_) {
659 if (GetCurrentQuickFrame() == nullptr) {
660 return true;
661 }
662
663 const OatQuickMethodHeader* method_header = GetCurrentOatQuickMethodHeader();
664 if (method_header != nullptr && method_header->HasShouldDeoptimizeFlag()) {
665 // We shouldn't restore the stack if any of the frames need a force deopt.
666 DCHECK(!ShouldForceDeoptForRedefinition());
667 UnsetShouldDeoptimizeFlag(DeoptimizeFlagValue::kCheckCallerForDeopt);
668 }
669 return true; // Continue.
670 }
671 Thread* const thread_;
672 };
673
674 if (kVerboseInstrumentation) {
675 std::string thread_name;
676 thread->GetThreadName(thread_name);
677 LOG(INFO) << "Restoring stack for " << thread_name;
678 }
679 DCHECK(!thread->IsDeoptCheckRequired());
680 RestoreStackVisitor visitor(thread);
681 visitor.WalkStack(true);
682 }
683
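// Returns whether any JITed frame on the thread's stack is still marked with
// kForceDeoptForRedefinition and therefore must be deoptimized before instrumentation
// state can be fully reset.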
684 static bool HasFramesNeedingForceDeopt(Thread* thread) REQUIRES(Locks::mutator_lock_) {
685 Locks::mutator_lock_->AssertExclusiveHeld(Thread::Current());
686
687 struct CheckForForceDeoptStackVisitor final : public StackVisitor {
688 CheckForForceDeoptStackVisitor(Thread* thread)
689 : StackVisitor(thread, nullptr, kInstrumentationStackWalk),
690 thread_(thread),
691 force_deopt_check_needed_(false) {}
692
693 bool VisitFrame() override REQUIRES_SHARED(Locks::mutator_lock_) {
694 if (GetCurrentQuickFrame() == nullptr) {
695 return true;
696 }
697
698 const OatQuickMethodHeader* method_header = GetCurrentOatQuickMethodHeader();
699 if (method_header != nullptr && method_header->HasShouldDeoptimizeFlag()) {
700 if (ShouldForceDeoptForRedefinition()) {
701 force_deopt_check_needed_ = true;
702 return false;
703 }
704 }
705 return true; // Continue.
706 }
707 Thread* const thread_;
708 bool force_deopt_check_needed_;
709 };
710
711 CheckForForceDeoptStackVisitor visitor(thread);
712 visitor.WalkStack(true);
713 // If there is a frame that requires a force deopt we should have set the IsDeoptCheckRequired
714 // bit. We don't check if the bit needs to be reset on every method exit / deoptimization. We
715 // only check when we no longer need instrumentation support. So it is possible that the bit is
716 // set but we don't find any frames that need a force deopt on the stack, so the reverse
717 // implication doesn't hold.
718 DCHECK_IMPLIES(visitor.force_deopt_check_needed_, thread->IsDeoptCheckRequired());
719 return visitor.force_deopt_check_needed_;
720 }
721
722 void Instrumentation::DeoptimizeAllThreadFrames() {
723 InstrumentAllThreadStacks(/* force_deopt= */ true);
724 }
725
726 static bool HasEvent(Instrumentation::InstrumentationEvent expected, uint32_t events) {
727 return (events & expected) != 0;
728 }
729
730 static void PotentiallyAddListenerTo(Instrumentation::InstrumentationEvent event,
731 uint32_t events,
732 std::list<InstrumentationListener*>& list,
733 InstrumentationListener* listener,
734 bool* has_listener)
735 REQUIRES(Locks::mutator_lock_, !Locks::thread_list_lock_, !Locks::classlinker_classes_lock_) {
736 Locks::mutator_lock_->AssertExclusiveHeld(Thread::Current());
737 if (!HasEvent(event, events)) {
738 return;
739 }
740 // If there is a free slot in the list, we insert the listener in that slot.
741 // Otherwise we add it to the end of the list.
742 auto it = std::find(list.begin(), list.end(), nullptr);
743 if (it != list.end()) {
744 *it = listener;
745 } else {
746 list.push_back(listener);
747 }
748 *has_listener = true;
749 }
750
751 void Instrumentation::AddListener(InstrumentationListener* listener, uint32_t events) {
752 Locks::mutator_lock_->AssertExclusiveHeld(Thread::Current());
753 PotentiallyAddListenerTo(kMethodEntered,
754 events,
755 method_entry_listeners_,
756 listener,
757 &have_method_entry_listeners_);
758 PotentiallyAddListenerTo(kMethodExited,
759 events,
760 method_exit_listeners_,
761 listener,
762 &have_method_exit_listeners_);
763 PotentiallyAddListenerTo(kMethodUnwind,
764 events,
765 method_unwind_listeners_,
766 listener,
767 &have_method_unwind_listeners_);
768 PotentiallyAddListenerTo(kBranch,
769 events,
770 branch_listeners_,
771 listener,
772 &have_branch_listeners_);
773 PotentiallyAddListenerTo(kDexPcMoved,
774 events,
775 dex_pc_listeners_,
776 listener,
777 &have_dex_pc_listeners_);
778 PotentiallyAddListenerTo(kFieldRead,
779 events,
780 field_read_listeners_,
781 listener,
782 &have_field_read_listeners_);
783 PotentiallyAddListenerTo(kFieldWritten,
784 events,
785 field_write_listeners_,
786 listener,
787 &have_field_write_listeners_);
788 PotentiallyAddListenerTo(kExceptionThrown,
789 events,
790 exception_thrown_listeners_,
791 listener,
792 &have_exception_thrown_listeners_);
793 PotentiallyAddListenerTo(kWatchedFramePop,
794 events,
795 watched_frame_pop_listeners_,
796 listener,
797 &have_watched_frame_pop_listeners_);
798 PotentiallyAddListenerTo(kExceptionHandled,
799 events,
800 exception_handled_listeners_,
801 listener,
802 &have_exception_handled_listeners_);
803 if (HasEvent(kDexPcMoved, events)) {
804 MutexLock mu(Thread::Current(), *Locks::thread_list_lock_);
805 for (Thread* thread : Runtime::Current()->GetThreadList()->GetList()) {
806 UpdateNeedsDexPcEventsOnStack(thread);
807 }
808 }
809 }
810
811 static void PotentiallyRemoveListenerFrom(Instrumentation::InstrumentationEvent event,
812 uint32_t events,
813 std::list<InstrumentationListener*>& list,
814 InstrumentationListener* listener,
815 bool* has_listener)
816 REQUIRES(Locks::mutator_lock_, !Locks::thread_list_lock_, !Locks::classlinker_classes_lock_) {
817 Locks::mutator_lock_->AssertExclusiveHeld(Thread::Current());
818 if (!HasEvent(event, events)) {
819 return;
820 }
821 auto it = std::find(list.begin(), list.end(), listener);
822 if (it != list.end()) {
823 // Just update the entry, do not remove from the list. Removing entries in the list
824 // is unsafe when mutators are iterating over it.
825 *it = nullptr;
826 }
827
828 // Check if the list contains any non-null listener, and update 'has_listener'.
829 for (InstrumentationListener* l : list) {
830 if (l != nullptr) {
831 *has_listener = true;
832 return;
833 }
834 }
835 *has_listener = false;
836 }
837
838 void Instrumentation::RemoveListener(InstrumentationListener* listener, uint32_t events) {
839 Locks::mutator_lock_->AssertExclusiveHeld(Thread::Current());
840 PotentiallyRemoveListenerFrom(kMethodEntered,
841 events,
842 method_entry_listeners_,
843 listener,
844 &have_method_entry_listeners_);
845 PotentiallyRemoveListenerFrom(kMethodExited,
846 events,
847 method_exit_listeners_,
848 listener,
849 &have_method_exit_listeners_);
850 PotentiallyRemoveListenerFrom(kMethodUnwind,
851 events,
852 method_unwind_listeners_,
853 listener,
854 &have_method_unwind_listeners_);
855 PotentiallyRemoveListenerFrom(kBranch,
856 events,
857 branch_listeners_,
858 listener,
859 &have_branch_listeners_);
860 PotentiallyRemoveListenerFrom(kDexPcMoved,
861 events,
862 dex_pc_listeners_,
863 listener,
864 &have_dex_pc_listeners_);
865 PotentiallyRemoveListenerFrom(kFieldRead,
866 events,
867 field_read_listeners_,
868 listener,
869 &have_field_read_listeners_);
870 PotentiallyRemoveListenerFrom(kFieldWritten,
871 events,
872 field_write_listeners_,
873 listener,
874 &have_field_write_listeners_);
875 PotentiallyRemoveListenerFrom(kExceptionThrown,
876 events,
877 exception_thrown_listeners_,
878 listener,
879 &have_exception_thrown_listeners_);
880 PotentiallyRemoveListenerFrom(kWatchedFramePop,
881 events,
882 watched_frame_pop_listeners_,
883 listener,
884 &have_watched_frame_pop_listeners_);
885 PotentiallyRemoveListenerFrom(kExceptionHandled,
886 events,
887 exception_handled_listeners_,
888 listener,
889 &have_exception_handled_listeners_);
890 if (HasEvent(kDexPcMoved, events)) {
891 MutexLock mu(Thread::Current(), *Locks::thread_list_lock_);
892 for (Thread* thread : Runtime::Current()->GetThreadList()->GetList()) {
893 UpdateNeedsDexPcEventsOnStack(thread);
894 }
895 }
896 }
897
898 Instrumentation::InstrumentationLevel Instrumentation::GetCurrentInstrumentationLevel() const {
899 return instrumentation_level_;
900 }
901
902 bool Instrumentation::RequiresInstrumentationInstallation(InstrumentationLevel new_level) const {
903 // We need to reinstall instrumentation if we go to a different level.
904 return GetCurrentInstrumentationLevel() != new_level;
905 }
906
907 void Instrumentation::ConfigureStubs(const char* key, InstrumentationLevel desired_level) {
908 // Store the instrumentation level for this key or remove it.
909 if (desired_level == InstrumentationLevel::kInstrumentNothing) {
910 // The client no longer needs instrumentation.
911 requested_instrumentation_levels_.erase(key);
912 } else {
913 // The client needs instrumentation.
914 requested_instrumentation_levels_.Overwrite(key, desired_level);
915 }
916
917 UpdateStubs();
918 }
919
920 void Instrumentation::UpdateInstrumentationLevel(InstrumentationLevel requested_level) {
921 instrumentation_level_ = requested_level;
922 }
923
924 void Instrumentation::EnableEntryExitHooks(const char* key) {
925 DCHECK(Runtime::Current()->IsJavaDebuggable());
926 ConfigureStubs(key, InstrumentationLevel::kInstrumentWithEntryExitHooks);
927 }
928
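// Restores the per-frame instrumentation state on all threads, but only when no method is
// deoptimized and no thread still needs forced interpretation, debugger shadow frames, or
// a forced deoptimization.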
929 void Instrumentation::MaybeRestoreInstrumentationStack() {
930 // Restore stack only if there is no method currently deoptimized.
931 if (!IsDeoptimizedMethodsEmpty()) {
932 return;
933 }
934
935 Thread* self = Thread::Current();
936 MutexLock mu(self, *Locks::thread_list_lock_);
937 bool no_remaining_deopts = true;
938 // Check that there are no other forced deoptimizations. Do it here so we only need to lock
939 // thread_list_lock once.
940 // The compiler gets confused on the thread annotations, so use
941 // NO_THREAD_SAFETY_ANALYSIS. Note that we hold the mutator lock
942 // exclusively at this point.
943 Locks::mutator_lock_->AssertExclusiveHeld(self);
944 Runtime::Current()->GetThreadList()->ForEach([&](Thread* t) NO_THREAD_SAFETY_ANALYSIS {
945 bool has_force_deopt_frames = HasFramesNeedingForceDeopt(t);
946 if (!has_force_deopt_frames) {
947 // We no longer have any frames that require a force deopt check. If the bit was true then we
948 // had some frames earlier but they already got deoptimized and are no longer on stack.
949 t->SetDeoptCheckRequired(false);
950 }
951 no_remaining_deopts =
952 no_remaining_deopts &&
953 !t->IsForceInterpreter() &&
954 !t->HasDebuggerShadowFrames() &&
955 !has_force_deopt_frames;
956 });
957 if (no_remaining_deopts) {
958 Runtime::Current()->GetThreadList()->ForEach(InstrumentationRestoreStack);
959 run_exit_hooks_ = false;
960 }
961 }
962
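// Recomputes the highest instrumentation level requested by any client and, if it differs
// from the current level, reinstalls method entrypoints and instruments or restores the
// thread stacks accordingly.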
963 void Instrumentation::UpdateStubs() {
964 // Look for the highest required instrumentation level.
965 InstrumentationLevel requested_level = InstrumentationLevel::kInstrumentNothing;
966 for (const auto& v : requested_instrumentation_levels_) {
967 requested_level = std::max(requested_level, v.second);
968 }
969
970 if (!RequiresInstrumentationInstallation(requested_level)) {
971 // We're already set.
972 return;
973 }
974 Thread* const self = Thread::Current();
975 Runtime* runtime = Runtime::Current();
976 Locks::mutator_lock_->AssertExclusiveHeld(self);
977 Locks::thread_list_lock_->AssertNotHeld(self);
978 UpdateInstrumentationLevel(requested_level);
979 InstallStubsClassVisitor visitor(this);
980 runtime->GetClassLinker()->VisitClasses(&visitor);
981 if (requested_level > InstrumentationLevel::kInstrumentNothing) {
982 InstrumentAllThreadStacks(/* force_deopt= */ false);
983 } else {
984 MaybeRestoreInstrumentationStack();
985 }
986 }
987
988 static void ResetQuickAllocEntryPointsForThread(Thread* thread, void* arg ATTRIBUTE_UNUSED) {
989 thread->ResetQuickAllocEntryPointsForThread();
990 }
991
992 void Instrumentation::SetEntrypointsInstrumented(bool instrumented) {
993 Thread* self = Thread::Current();
994 Runtime* runtime = Runtime::Current();
995 Locks::mutator_lock_->AssertNotHeld(self);
996 Locks::instrument_entrypoints_lock_->AssertHeld(self);
997 if (runtime->IsStarted()) {
998 ScopedSuspendAll ssa(__FUNCTION__);
999 MutexLock mu(self, *Locks::runtime_shutdown_lock_);
1000 SetQuickAllocEntryPointsInstrumented(instrumented);
1001 ResetQuickAllocEntryPoints();
1002 alloc_entrypoints_instrumented_ = instrumented;
1003 } else {
1004 MutexLock mu(self, *Locks::runtime_shutdown_lock_);
1005 SetQuickAllocEntryPointsInstrumented(instrumented);
1006
1007 // Note: ResetQuickAllocEntryPoints only works when the runtime is started. Manually run the
1008 // update for just this thread.
1009 // Note: self may be null. One of those paths is setting instrumentation in the Heap
1010 // constructor for gcstress mode.
1011 if (self != nullptr) {
1012 ResetQuickAllocEntryPointsForThread(self, nullptr);
1013 }
1014
1015 alloc_entrypoints_instrumented_ = instrumented;
1016 }
1017 }
1018
1019 void Instrumentation::InstrumentQuickAllocEntryPoints() {
1020 MutexLock mu(Thread::Current(), *Locks::instrument_entrypoints_lock_);
1021 InstrumentQuickAllocEntryPointsLocked();
1022 }
1023
1024 void Instrumentation::UninstrumentQuickAllocEntryPoints() {
1025 MutexLock mu(Thread::Current(), *Locks::instrument_entrypoints_lock_);
1026 UninstrumentQuickAllocEntryPointsLocked();
1027 }
1028
1029 void Instrumentation::InstrumentQuickAllocEntryPointsLocked() {
1030 Locks::instrument_entrypoints_lock_->AssertHeld(Thread::Current());
1031 if (quick_alloc_entry_points_instrumentation_counter_ == 0) {
1032 SetEntrypointsInstrumented(true);
1033 }
1034 ++quick_alloc_entry_points_instrumentation_counter_;
1035 }
1036
1037 void Instrumentation::UninstrumentQuickAllocEntryPointsLocked() {
1038 Locks::instrument_entrypoints_lock_->AssertHeld(Thread::Current());
1039 CHECK_GT(quick_alloc_entry_points_instrumentation_counter_, 0U);
1040 --quick_alloc_entry_points_instrumentation_counter_;
1041 if (quick_alloc_entry_points_instrumentation_counter_ == 0) {
1042 SetEntrypointsInstrumented(false);
1043 }
1044 }
1045
1046 void Instrumentation::ResetQuickAllocEntryPoints() {
1047 Runtime* runtime = Runtime::Current();
1048 if (runtime->IsStarted()) {
1049 MutexLock mu(Thread::Current(), *Locks::thread_list_lock_);
1050 runtime->GetThreadList()->ForEach(ResetQuickAllocEntryPointsForThread, nullptr);
1051 }
1052 }
1053
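// Returns a short human-readable name for the given entrypoint, used in log and check
// failure messages.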
1054 std::string Instrumentation::EntryPointString(const void* code) {
1055 ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
1056 jit::Jit* jit = Runtime::Current()->GetJit();
1057 if (class_linker->IsQuickToInterpreterBridge(code)) {
1058 return "interpreter";
1059 } else if (class_linker->IsQuickResolutionStub(code)) {
1060 return "resolution";
1061 } else if (jit != nullptr && jit->GetCodeCache()->ContainsPc(code)) {
1062 return "jit";
1063 } else if (code == GetInvokeObsoleteMethodStub()) {
1064 return "obsolete";
1065 } else if (code == interpreter::GetNterpEntryPoint()) {
1066 return "nterp";
1067 } else if (code == interpreter::GetNterpWithClinitEntryPoint()) {
1068 return "nterp with clinit";
1069 } else if (class_linker->IsQuickGenericJniStub(code)) {
1070 return "generic jni";
1071 } else if (Runtime::Current()->GetOatFileManager().ContainsPc(code)) {
1072 return "oat";
1073 }
1074 return "unknown";
1075 }
1076
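// Updates the method's entrypoint to `new_code` unless instrumentation requires otherwise:
// deoptimized methods stay on the interpreter bridge, and code that doesn't support
// entry / exit hooks is skipped while entry / exit stubs are installed.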
1077 void Instrumentation::UpdateMethodsCodeImpl(ArtMethod* method, const void* new_code) {
1078 if (!EntryExitStubsInstalled()) {
1079 // Fast path: no instrumentation.
1080 DCHECK(!IsDeoptimized(method));
1081 UpdateEntryPoints(method, new_code);
1082 return;
1083 }
1084
1085 ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
1086 if (class_linker->IsQuickToInterpreterBridge(new_code)) {
1087 // It's always OK to update to the interpreter.
1088 UpdateEntryPoints(method, new_code);
1089 return;
1090 }
1091
1092 if (IsDeoptimized(method)) {
1093 DCHECK(class_linker->IsQuickToInterpreterBridge(method->GetEntryPointFromQuickCompiledCode()))
1094 << EntryPointString(method->GetEntryPointFromQuickCompiledCode());
1095 // Don't update, stay deoptimized.
1096 return;
1097 }
1098
1099 if (EntryExitStubsInstalled() && !CodeSupportsEntryExitHooks(new_code, method)) {
1100 DCHECK(CodeSupportsEntryExitHooks(method->GetEntryPointFromQuickCompiledCode(), method))
1101 << EntryPointString(method->GetEntryPointFromQuickCompiledCode()) << " "
1102 << method->PrettyMethod();
1103 // If we need entry / exit stubs but the new_code doesn't support entry / exit hooks just skip.
1104 return;
1105 }
1106
1107 // At this point, we can update as asked.
1108 UpdateEntryPoints(method, new_code);
1109 }
1110
1111 void Instrumentation::UpdateNativeMethodsCodeToJitCode(ArtMethod* method, const void* new_code) {
1112 // We don't do any read barrier on `method`'s declaring class in this code, as the JIT might
1113 // enter here on a soon-to-be deleted ArtMethod. Updating the entrypoint is OK though, as
1114 // the ArtMethod is still in memory.
1115 if (EntryExitStubsInstalled() && !CodeSupportsEntryExitHooks(new_code, method)) {
1116 // If the new code doesn't support entry exit hooks but we need them don't update with the new
1117 // code.
1118 return;
1119 }
1120 UpdateEntryPoints(method, new_code);
1121 }
1122
1123 void Instrumentation::UpdateMethodsCode(ArtMethod* method, const void* new_code) {
1124 DCHECK(method->GetDeclaringClass()->IsResolved());
1125 UpdateMethodsCodeImpl(method, new_code);
1126 }
1127
1128 bool Instrumentation::AddDeoptimizedMethod(ArtMethod* method) {
1129 if (IsDeoptimizedMethod(method)) {
1130 // Already in the set. Return.
1131 return false;
1132 }
1133 // Not found. Add it.
1134 deoptimized_methods_.insert(method);
1135 return true;
1136 }
1137
1138 bool Instrumentation::IsDeoptimizedMethod(ArtMethod* method) {
1139 return deoptimized_methods_.find(method) != deoptimized_methods_.end();
1140 }
1141
1142 bool Instrumentation::RemoveDeoptimizedMethod(ArtMethod* method) {
1143 auto it = deoptimized_methods_.find(method);
1144 if (it == deoptimized_methods_.end()) {
1145 return false;
1146 }
1147 deoptimized_methods_.erase(it);
1148 return true;
1149 }
1150
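// Forces the method to run under the switch interpreter: records it in the
// deoptimized-methods set, redirects its entrypoint to the interpreter bridge, and
// instruments thread stacks so callers check whether they need to deoptimize.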
1151 void Instrumentation::Deoptimize(ArtMethod* method) {
1152 CHECK(!method->IsNative());
1153 CHECK(!method->IsProxyMethod());
1154 CHECK(method->IsInvokable());
1155
1156 {
1157 Locks::mutator_lock_->AssertExclusiveHeld(Thread::Current());
1158 bool has_not_been_deoptimized = AddDeoptimizedMethod(method);
1159 CHECK(has_not_been_deoptimized) << "Method " << ArtMethod::PrettyMethod(method)
1160 << " is already deoptimized";
1161 }
1162 if (!InterpreterStubsInstalled()) {
1163 UpdateEntryPoints(method, GetQuickToInterpreterBridge());
1164
1165 // Instrument thread stacks to request a check if the caller needs a deoptimization.
1166 // This isn't a strong deopt. We deopt this method if it is still in the deopt methods list.
1167 // If by the time we hit this frame we no longer need a deopt it is safe to continue.
1168 InstrumentAllThreadStacks(/* force_deopt= */ false);
1169 }
1170 }
1171
1172 void Instrumentation::Undeoptimize(ArtMethod* method) {
1173 CHECK(!method->IsNative());
1174 CHECK(!method->IsProxyMethod());
1175 CHECK(method->IsInvokable());
1176
1177 {
1178 Locks::mutator_lock_->AssertExclusiveHeld(Thread::Current());
1179 bool found_and_erased = RemoveDeoptimizedMethod(method);
1180 CHECK(found_and_erased) << "Method " << ArtMethod::PrettyMethod(method)
1181 << " is not deoptimized";
1182 }
1183
1184 // If interpreter stubs are still needed nothing to do.
1185 if (InterpreterStubsInstalled()) {
1186 return;
1187 }
1188
1189 if (method->IsObsolete()) {
1190 // Don't update entry points for obsolete methods. The entrypoint should
1191 // have been set to InvokeObsoleteMethodStub.
1192 DCHECK_EQ(method->GetEntryPointFromQuickCompiledCodePtrSize(kRuntimePointerSize),
1193 GetInvokeObsoleteMethodStub());
1194 return;
1195 }
1196
1197 // We are not using interpreter stubs for deoptimization. Restore the code of the method.
1198 // We still retain interpreter bridge if we need it for other reasons.
1199 if (InterpretOnly(method)) {
1200 UpdateEntryPoints(method, GetQuickToInterpreterBridge());
1201 } else if (method->StillNeedsClinitCheck()) {
1202 UpdateEntryPoints(method, GetQuickResolutionStub());
1203 } else {
1204 UpdateEntryPoints(method, GetMaybeInstrumentedCodeForInvoke(method));
1205 }
1206
1207 // If there is no deoptimized method left, we can restore the stack of each thread.
1208 if (!EntryExitStubsInstalled()) {
1209 MaybeRestoreInstrumentationStack();
1210 }
1211 }
1212
1213 bool Instrumentation::IsDeoptimizedMethodsEmpty() const {
1214 return deoptimized_methods_.empty();
1215 }
1216
1217 bool Instrumentation::IsDeoptimized(ArtMethod* method) {
1218 DCHECK(method != nullptr);
1219 return IsDeoptimizedMethod(method);
1220 }
1221
1222 void Instrumentation::DisableDeoptimization(const char* key) {
1223 // Remove any instrumentation support added for deoptimization.
1224 ConfigureStubs(key, InstrumentationLevel::kInstrumentNothing);
1225 Locks::mutator_lock_->AssertExclusiveHeld(Thread::Current());
1226 // Undeoptimize selected methods.
1227 while (true) {
1228 ArtMethod* method;
1229 {
1230 if (deoptimized_methods_.empty()) {
1231 break;
1232 }
1233 method = *deoptimized_methods_.begin();
1234 CHECK(method != nullptr);
1235 }
1236 Undeoptimize(method);
1237 }
1238 }
1239
1240 void Instrumentation::MaybeSwitchRuntimeDebugState(Thread* self) {
1241 Runtime* runtime = Runtime::Current();
1242 // Return early if runtime is shutting down.
1243 if (runtime->IsShuttingDown(self)) {
1244 return;
1245 }
1246
1247 // Don't switch the state if we started off as JavaDebuggable or if we still need entry / exit
1248 // hooks for other reasons.
1249 if (EntryExitStubsInstalled() || runtime->IsJavaDebuggableAtInit()) {
1250 return;
1251 }
1252
1253 art::jit::Jit* jit = runtime->GetJit();
1254 if (jit != nullptr) {
1255 jit->GetCodeCache()->InvalidateAllCompiledCode();
1256 jit->GetJitCompiler()->SetDebuggableCompilerOption(false);
1257 }
1258 runtime->SetRuntimeDebugState(art::Runtime::RuntimeDebugState::kNonJavaDebuggable);
1259 }
1260
1261 void Instrumentation::DeoptimizeEverything(const char* key) {
1262 ConfigureStubs(key, InstrumentationLevel::kInstrumentWithInterpreter);
1263 }
1264
1265 void Instrumentation::UndeoptimizeEverything(const char* key) {
1266 CHECK(InterpreterStubsInstalled());
1267 ConfigureStubs(key, InstrumentationLevel::kInstrumentNothing);
1268 }
1269
1270 void Instrumentation::EnableMethodTracing(const char* key,
1271 InstrumentationListener* listener,
1272 bool needs_interpreter) {
1273 InstrumentationLevel level;
1274 if (needs_interpreter) {
1275 level = InstrumentationLevel::kInstrumentWithInterpreter;
1276 } else {
1277 level = InstrumentationLevel::kInstrumentWithEntryExitHooks;
1278 }
1279 ConfigureStubs(key, level);
1280
1281 MutexLock mu(Thread::Current(), *Locks::thread_list_lock_);
1282 for (Thread* thread : Runtime::Current()->GetThreadList()->GetList()) {
1283 ReportMethodEntryForOnStackMethods(listener, thread);
1284 }
1285 }
1286
1287 void Instrumentation::DisableMethodTracing(const char* key) {
1288 ConfigureStubs(key, InstrumentationLevel::kInstrumentNothing);
1289 }
1290
1291 const void* Instrumentation::GetCodeForInvoke(ArtMethod* method) {
1292 // This is called by instrumentation and resolution trampolines,
1293 // which should never be getting proxy methods.
1294 DCHECK(!method->IsProxyMethod()) << method->PrettyMethod();
1295 ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
1296 const void* code = method->GetEntryPointFromQuickCompiledCodePtrSize(kRuntimePointerSize);
1297 // If the current entrypoint is neither the resolution stub nor the
1298 // interpreter bridge, just return it,
1299 // assuming it's the most optimized.
1300 if (!class_linker->IsQuickResolutionStub(code) &&
1301 !class_linker->IsQuickToInterpreterBridge(code)) {
1302 return code;
1303 }
1304
1305 if (InterpretOnly(method)) {
1306 // If we're forced into interpreter just use it.
1307 return GetQuickToInterpreterBridge();
1308 }
1309
1310 return GetOptimizedCodeFor(method);
1311 }
1312
1313 const void* Instrumentation::GetMaybeInstrumentedCodeForInvoke(ArtMethod* method) {
1314 // This is called by resolution trampolines and that should never be getting proxy methods.
1315 DCHECK(!method->IsProxyMethod()) << method->PrettyMethod();
1316 const void* code = GetCodeForInvoke(method);
1317 if (EntryExitStubsInstalled() && !CodeSupportsEntryExitHooks(code, method)) {
1318 return method->IsNative() ? GetQuickGenericJniStub() : GetQuickToInterpreterBridge();
1319 }
1320 return code;
1321 }
1322
1323 void Instrumentation::MethodEnterEventImpl(Thread* thread, ArtMethod* method) const {
1324 DCHECK(!method->IsRuntimeMethod());
1325 if (HasMethodEntryListeners()) {
1326 for (InstrumentationListener* listener : method_entry_listeners_) {
1327 if (listener != nullptr) {
1328 listener->MethodEntered(thread, method);
1329 }
1330 }
1331 }
1332 }
1333
1334 template <>
1335 void Instrumentation::MethodExitEventImpl(Thread* thread,
1336 ArtMethod* method,
1337 OptionalFrame frame,
1338 MutableHandle<mirror::Object>& return_value) const {
1339 if (HasMethodExitListeners()) {
1340 for (InstrumentationListener* listener : method_exit_listeners_) {
1341 if (listener != nullptr) {
1342 listener->MethodExited(thread, method, frame, return_value);
1343 }
1344 }
1345 }
1346 }
1347
1348 template<> void Instrumentation::MethodExitEventImpl(Thread* thread,
1349 ArtMethod* method,
1350 OptionalFrame frame,
1351 JValue& return_value) const {
1352 if (HasMethodExitListeners()) {
1353 Thread* self = Thread::Current();
1354 StackHandleScope<1> hs(self);
1355 if (method->GetInterfaceMethodIfProxy(kRuntimePointerSize)->GetReturnTypePrimitive() !=
1356 Primitive::kPrimNot) {
1357 for (InstrumentationListener* listener : method_exit_listeners_) {
1358 if (listener != nullptr) {
1359 listener->MethodExited(thread, method, frame, return_value);
1360 }
1361 }
1362 } else {
1363 MutableHandle<mirror::Object> ret(hs.NewHandle(return_value.GetL()));
1364 MethodExitEventImpl(thread, method, frame, ret);
1365 return_value.SetL(ret.Get());
1366 }
1367 }
1368 }
1369
1370 void Instrumentation::MethodUnwindEvent(Thread* thread,
1371 ArtMethod* method,
1372 uint32_t dex_pc) const {
1373 if (HasMethodUnwindListeners()) {
1374 for (InstrumentationListener* listener : method_unwind_listeners_) {
1375 if (listener != nullptr) {
1376 listener->MethodUnwind(thread, method, dex_pc);
1377 }
1378 }
1379 }
1380 }
1381
1382 void Instrumentation::DexPcMovedEventImpl(Thread* thread,
1383 ObjPtr<mirror::Object> this_object,
1384 ArtMethod* method,
1385 uint32_t dex_pc) const {
1386 Thread* self = Thread::Current();
1387 StackHandleScope<1> hs(self);
1388 Handle<mirror::Object> thiz(hs.NewHandle(this_object));
1389 for (InstrumentationListener* listener : dex_pc_listeners_) {
1390 if (listener != nullptr) {
1391 listener->DexPcMoved(thread, thiz, method, dex_pc);
1392 }
1393 }
1394 }
1395
1396 void Instrumentation::BranchImpl(Thread* thread,
1397 ArtMethod* method,
1398 uint32_t dex_pc,
1399 int32_t offset) const {
1400 for (InstrumentationListener* listener : branch_listeners_) {
1401 if (listener != nullptr) {
1402 listener->Branch(thread, method, dex_pc, offset);
1403 }
1404 }
1405 }
1406
1407 void Instrumentation::WatchedFramePopImpl(Thread* thread, const ShadowFrame& frame) const {
1408 for (InstrumentationListener* listener : watched_frame_pop_listeners_) {
1409 if (listener != nullptr) {
1410 listener->WatchedFramePop(thread, frame);
1411 }
1412 }
1413 }
1414
1415 void Instrumentation::FieldReadEventImpl(Thread* thread,
1416 ObjPtr<mirror::Object> this_object,
1417 ArtMethod* method,
1418 uint32_t dex_pc,
1419 ArtField* field) const {
1420 Thread* self = Thread::Current();
1421 StackHandleScope<1> hs(self);
1422 Handle<mirror::Object> thiz(hs.NewHandle(this_object));
1423 for (InstrumentationListener* listener : field_read_listeners_) {
1424 if (listener != nullptr) {
1425 listener->FieldRead(thread, thiz, method, dex_pc, field);
1426 }
1427 }
1428 }
1429
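// Reports a field write to all field-write listeners. Primitive values are delivered as a raw
// JValue; reference values are wrapped in a Handle and delivered through the Handle-based
// FieldWritten overload.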
1430 void Instrumentation::FieldWriteEventImpl(Thread* thread,
1431 ObjPtr<mirror::Object> this_object,
1432 ArtMethod* method,
1433 uint32_t dex_pc,
1434 ArtField* field,
1435 const JValue& field_value) const {
1436 Thread* self = Thread::Current();
1437 StackHandleScope<2> hs(self);
1438 Handle<mirror::Object> thiz(hs.NewHandle(this_object));
1439 if (field->IsPrimitiveType()) {
1440 for (InstrumentationListener* listener : field_write_listeners_) {
1441 if (listener != nullptr) {
1442 listener->FieldWritten(thread, thiz, method, dex_pc, field, field_value);
1443 }
1444 }
1445 } else {
1446 Handle<mirror::Object> val(hs.NewHandle(field_value.GetL()));
1447 for (InstrumentationListener* listener : field_write_listeners_) {
1448 if (listener != nullptr) {
1449 listener->FieldWritten(thread, thiz, method, dex_pc, field, val);
1450 }
1451 }
1452 }
1453 }
1454
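// Reports a thrown exception to all exception-thrown listeners. The pending exception is
// cleared while the listeners run and restored afterwards (see the bug reference below).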
1455 void Instrumentation::ExceptionThrownEvent(Thread* thread,
1456 ObjPtr<mirror::Throwable> exception_object) const {
1457 Thread* self = Thread::Current();
1458 StackHandleScope<1> hs(self);
1459 Handle<mirror::Throwable> h_exception(hs.NewHandle(exception_object));
1460 if (HasExceptionThrownListeners()) {
1461 DCHECK_EQ(thread->GetException(), h_exception.Get());
1462 thread->ClearException();
1463 for (InstrumentationListener* listener : exception_thrown_listeners_) {
1464 if (listener != nullptr) {
1465 listener->ExceptionThrown(thread, h_exception);
1466 }
1467 }
1468 // See b/65049545 for discussion about this behavior.
1469 thread->AssertNoPendingException();
1470 thread->SetException(h_exception.Get());
1471 }
1472 }
1473
1474 void Instrumentation::ExceptionHandledEvent(Thread* thread,
1475 ObjPtr<mirror::Throwable> exception_object) const {
1476 Thread* self = Thread::Current();
1477 StackHandleScope<1> hs(self);
1478 Handle<mirror::Throwable> h_exception(hs.NewHandle(exception_object));
1479 if (HasExceptionHandledListeners()) {
1480 // We should have cleared the exception so that callers can detect a new one.
1481 DCHECK(thread->GetException() == nullptr);
1482 for (InstrumentationListener* listener : exception_handled_listeners_) {
1483 if (listener != nullptr) {
1484 listener->ExceptionHandled(thread, h_exception);
1485 }
1486 }
1487 }
1488 }
1489
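// Picks the deoptimization method type for the given method. For the clinit and suspend-check
// callee-save runtime methods the dex pc recorded in the frame must be kept as-is
// (kKeepDexPc); all other methods use the default type.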
1490 DeoptimizationMethodType Instrumentation::GetDeoptimizationMethodType(ArtMethod* method) {
1491 if (method->IsRuntimeMethod()) {
1492 // Certain methods have strict requirements on whether the dex instruction
1493 // should be re-executed upon deoptimization.
1494 if (method == Runtime::Current()->GetCalleeSaveMethod(
1495 CalleeSaveType::kSaveEverythingForClinit)) {
1496 return DeoptimizationMethodType::kKeepDexPc;
1497 }
1498 if (method == Runtime::Current()->GetCalleeSaveMethod(
1499 CalleeSaveType::kSaveEverythingForSuspendCheck)) {
1500 return DeoptimizationMethodType::kKeepDexPc;
1501 }
1502 }
1503 return DeoptimizationMethodType::kDefault;
1504 }
1505
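// Reconstructs the callee's return value from the raw register results. The first character of
// the shorty selects the source: 'F'/'D' read the FPR result, 'V' yields zero, and everything
// else reads the GPR result. *is_ref is set for 'L' (object) and '[' (array) returns.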
1506 JValue Instrumentation::GetReturnValue(ArtMethod* method,
1507 bool* is_ref,
1508 uint64_t* gpr_result,
1509 uint64_t* fpr_result) {
1510 uint32_t length;
1511 const PointerSize pointer_size = Runtime::Current()->GetClassLinker()->GetImagePointerSize();
1512
1513 // A runtime method does not call into MethodExitEvent(), so there should not be a
1514 // suspension point below.
1515 ScopedAssertNoThreadSuspension ants(__FUNCTION__, method->IsRuntimeMethod());
1516 DCHECK(!method->IsRuntimeMethod());
1517 char return_shorty = method->GetInterfaceMethodIfProxy(pointer_size)->GetShorty(&length)[0];
1518
1519 *is_ref = return_shorty == '[' || return_shorty == 'L';
1520 JValue return_value;
1521 if (return_shorty == 'V') {
1522 return_value.SetJ(0);
1523 } else if (return_shorty == 'F' || return_shorty == 'D') {
1524 return_value.SetJ(*fpr_result);
1525 } else {
1526 return_value.SetJ(*gpr_result);
1527 }
1528 return return_value;
1529 }
1530
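// Called with a runtime method on top of the stack. If the caller of that runtime frame needs
// an instrumentation deoptimization, pushes the return value into a deoptimization context and
// sets the special deoptimization exception, then returns true; otherwise returns false.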
1531 bool Instrumentation::PushDeoptContextIfNeeded(Thread* self,
1532 DeoptimizationMethodType deopt_type,
1533 bool is_ref,
1534 const JValue& return_value)
1535 REQUIRES_SHARED(Locks::mutator_lock_) {
1536 if (self->IsExceptionPending()) {
1537 return false;
1538 }
1539
1540 ArtMethod** sp = self->GetManagedStack()->GetTopQuickFrame();
1541 DCHECK(sp != nullptr && (*sp)->IsRuntimeMethod());
1542 if (!ShouldDeoptimizeCaller(self, sp)) {
1543 return false;
1544 }
1545
1546 // TODO(mythria): The current deopt behaviour is that we just re-execute the
1547 // alloc instruction, so we don't need the return value here. For instrumentation
1548 // related deopts we don't actually need to re-execute and could reuse the result
1549 // we already have. Since this is a debug-only feature it is not very important,
1550 // but consider reusing the result in the future.
1551 self->PushDeoptimizationContext(
1552 return_value, is_ref, nullptr, /* from_code= */ false, deopt_type);
1553 self->SetException(Thread::GetDeoptimizationException());
1554 return true;
1555 }
1556
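// Immediately deoptimizes the caller (via artDeoptimize) when an async exception is pending or
// the caller requires an instrumentation deoptimization; otherwise returns normally.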
1557 void Instrumentation::DeoptimizeIfNeeded(Thread* self,
1558 ArtMethod** sp,
1559 DeoptimizationMethodType type,
1560 JValue return_value,
1561 bool is_reference) {
1562 if (self->IsAsyncExceptionPending() || ShouldDeoptimizeCaller(self, sp)) {
1563 self->PushDeoptimizationContext(return_value,
1564 is_reference,
1565 nullptr,
1566 /* from_code= */ false,
1567 type);
1568 // This is requested from suspend points or when returning from runtime methods, so the exit
1569 // callbacks wouldn't have run yet; don't skip the method exit callbacks here.
1570 artDeoptimize(self, /* skip_method_exit_callbacks= */ false);
1571 }
1572 }
1573
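// Returns true when the method must run under the slow interpreter, e.g. interpreter stubs are
// installed, the method is deoptimized, its declaring class is obsolete, or the thread or
// debugger forces interpretation.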
1574 bool Instrumentation::NeedsSlowInterpreterForMethod(Thread* self, ArtMethod* method) {
1575 return (method != nullptr) &&
1576 (InterpreterStubsInstalled() ||
1577 IsDeoptimized(method) ||
1578 self->IsForceInterpreter() ||
1579 // NB Since structurally obsolete compiled methods might have the offsets of
1580 // methods/fields compiled in, we need to go back to the interpreter whenever we
1581 // hit them.
1582 method->GetDeclaringClass()->IsObsoleteObject() ||
1583 Dbg::IsForcedInterpreterNeededForUpcall(self, method));
1584 }
1585
1586 bool Instrumentation::ShouldDeoptimizeCaller(Thread* self, ArtMethod** sp) {
1587 // When exit stubs aren't called we don't need to check for any instrumentation-related
1588 // deoptimizations.
1589 if (!RunExitHooks()) {
1590 return false;
1591 }
1592
1593 ArtMethod* runtime_method = *sp;
1594 DCHECK(runtime_method->IsRuntimeMethod());
1595 QuickMethodFrameInfo frame_info = Runtime::Current()->GetRuntimeMethodFrameInfo(runtime_method);
1596 return ShouldDeoptimizeCaller(self, sp, frame_info.FrameSizeInBytes());
1597 }
1598
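// Variant taking the runtime frame size explicitly. Locates the caller's frame and return pc
// just above the runtime frame, then decides whether that caller must be deoptimized, either
// because it needs the slow interpreter or because a redefinition set its should-deoptimize flag.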
1599 bool Instrumentation::ShouldDeoptimizeCaller(Thread* self, ArtMethod** sp, size_t frame_size) {
1600 uintptr_t caller_sp = reinterpret_cast<uintptr_t>(sp) + frame_size;
1601 ArtMethod* caller = *(reinterpret_cast<ArtMethod**>(caller_sp));
1602 uintptr_t caller_pc_addr = reinterpret_cast<uintptr_t>(sp) + (frame_size - sizeof(void*));
1603 uintptr_t caller_pc = *reinterpret_cast<uintptr_t*>(caller_pc_addr);
1604
1605 if (caller == nullptr ||
1606 caller->IsNative() ||
1607 caller->IsRuntimeMethod()) {
1608 // We need to check for a deoptimization here because, when a redefinition happens, it is
1609 // not safe to use any compiled code since the field offsets might change. For native
1610 // methods we don't embed any field offsets, so there is no need to check for a deoptimization.
1611 // If the caller is null we don't need to do anything; this can happen when the caller
1612 // is being interpreted by the switch interpreter (when called from
1613 // artQuickToInterpreterBridge), during shutdown, or during early startup.
1614 return false;
1615 }
1616
1617 bool needs_deopt = NeedsSlowInterpreterForMethod(self, caller);
1618
1619 // Non-java-debuggable apps don't support redefinition, so there is no need to check whether
1620 // the frame needs to be deoptimized. Even in debuggable apps we only need this check when a
1621 // redefinition has actually happened, which is indicated by the IsDeoptCheckRequired flag. We
1622 // also want to avoid fetching the method header when we already know a deopt is needed.
1623 if (Runtime::Current()->IsJavaDebuggable() && !needs_deopt && self->IsDeoptCheckRequired()) {
1624 const OatQuickMethodHeader* header = caller->GetOatQuickMethodHeader(caller_pc);
1625 if (header != nullptr && header->HasShouldDeoptimizeFlag()) {
1626 DCHECK(header->IsOptimized());
1627 uint8_t* should_deopt_flag_addr =
1628 reinterpret_cast<uint8_t*>(caller_sp) + header->GetShouldDeoptimizeFlagOffset();
1629 if ((*should_deopt_flag_addr &
1630 static_cast<uint8_t>(DeoptimizeFlagValue::kForceDeoptForRedefinition)) != 0) {
1631 needs_deopt = true;
1632 }
1633 }
1634 }
1635
1636 if (needs_deopt) {
1637 if (!Runtime::Current()->IsAsyncDeoptimizeable(caller, caller_pc)) {
1638 LOG(WARNING) << "Got a deoptimization request on un-deoptimizable method "
1639 << caller->PrettyMethod();
1640 return false;
1641 }
1642 return true;
1643 }
1644
1645 return false;
1646 }
1647
1648 } // namespace instrumentation
1649 } // namespace art
1650