/*
 * Copyright (C) 2011 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "instrumentation.h"

#include <functional>
#include <optional>
#include <sstream>

#include <android-base/logging.h>

#include "arch/context.h"
#include "art_field-inl.h"
#include "art_method-inl.h"
#include "base/atomic.h"
#include "base/callee_save_type.h"
#include "class_linker.h"
#include "debugger.h"
#include "dex/dex_file-inl.h"
#include "dex/dex_file_types.h"
#include "dex/dex_instruction-inl.h"
#include "entrypoints/quick/quick_alloc_entrypoints.h"
#include "entrypoints/quick/quick_entrypoints.h"
#include "entrypoints/runtime_asm_entrypoints.h"
#include "gc_root-inl.h"
#include "interpreter/interpreter.h"
#include "interpreter/interpreter_common.h"
#include "jit/jit.h"
#include "jit/jit_code_cache.h"
#include "jvalue-inl.h"
#include "jvalue.h"
#include "mirror/class-inl.h"
#include "mirror/dex_cache.h"
#include "mirror/object-inl.h"
#include "mirror/object_array-inl.h"
#include "nterp_helpers.h"
#include "nth_caller_visitor.h"
#include "oat_file_manager.h"
#include "oat_quick_method_header.h"
#include "runtime-inl.h"
#include "thread.h"
#include "thread_list.h"

namespace art {
namespace instrumentation {

constexpr bool kVerboseInstrumentation = false;

void InstrumentationListener::MethodExited(
    Thread* thread,
    ArtMethod* method,
    OptionalFrame frame,
    MutableHandle<mirror::Object>& return_value) {
  DCHECK_EQ(method->GetInterfaceMethodIfProxy(kRuntimePointerSize)->GetReturnTypePrimitive(),
            Primitive::kPrimNot);
  const void* original_ret = return_value.Get();
  JValue v;
  v.SetL(return_value.Get());
  MethodExited(thread, method, frame, v);
  DCHECK(original_ret == v.GetL()) << "Return value changed";
}

void InstrumentationListener::FieldWritten(Thread* thread,
                                           Handle<mirror::Object> this_object,
                                           ArtMethod* method,
                                           uint32_t dex_pc,
                                           ArtField* field,
                                           Handle<mirror::Object> field_value) {
  DCHECK(!field->IsPrimitiveType());
  JValue v;
  v.SetL(field_value.Get());
  FieldWritten(thread, this_object, method, dex_pc, field, v);
}

// Instrumentation works on non-inlined frames by updating the return PCs
// of compiled frames.
static constexpr StackVisitor::StackWalkKind kInstrumentationStackWalk =
    StackVisitor::StackWalkKind::kSkipInlinedFrames;

class InstallStubsClassVisitor : public ClassVisitor {
 public:
  explicit InstallStubsClassVisitor(Instrumentation* instrumentation)
      : instrumentation_(instrumentation) {}

  bool operator()(ObjPtr<mirror::Class> klass) override REQUIRES(Locks::mutator_lock_) {
    instrumentation_->InstallStubsForClass(klass.Ptr());
    return true;  // we visit all classes.
  }

 private:
  Instrumentation* const instrumentation_;
};

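// RAII helper used during exception delivery: PopFramesTo records how far the
// instrumentation stack should be unwound, and the destructor erases all
// instrumentation frames up to that point.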
InstrumentationStackPopper::InstrumentationStackPopper(Thread* self)
    : self_(self),
      instrumentation_(Runtime::Current()->GetInstrumentation()),
      pop_until_(0u) {}

InstrumentationStackPopper::~InstrumentationStackPopper() {
  std::map<uintptr_t, instrumentation::InstrumentationStackFrame>* stack =
      self_->GetInstrumentationStack();
  for (auto i = stack->begin(); i != stack->end() && i->first <= pop_until_;) {
    i = stack->erase(i);
  }
}

bool InstrumentationStackPopper::PopFramesTo(uintptr_t stack_pointer,
                                             MutableHandle<mirror::Throwable>& exception) {
  std::map<uintptr_t, instrumentation::InstrumentationStackFrame>* stack =
      self_->GetInstrumentationStack();
  DCHECK(!self_->IsExceptionPending());
  if (!instrumentation_->HasMethodUnwindListeners()) {
    pop_until_ = stack_pointer;
    return true;
  }
  if (kVerboseInstrumentation) {
    LOG(INFO) << "Popping frames for exception " << exception->Dump();
  }
  // The instrumentation events expect the exception to be set.
  self_->SetException(exception.Get());
  bool new_exception_thrown = false;
  auto i = stack->upper_bound(pop_until_);

  // Now pop all frames until reaching stack_pointer, or a new exception is
  // thrown. Note that `stack_pointer` doesn't need to be a return PC address
  // (in fact the exception handling code passes the start of the frame where
  // the catch handler is).
  for (; i != stack->end() && i->first <= stack_pointer; i++) {
    const InstrumentationStackFrame& frame = i->second;
    ArtMethod* method = frame.method_;
    // Notify listeners of method unwind.
    // TODO: improve the dex_pc information here.
    uint32_t dex_pc = dex::kDexNoIndex;
    if (kVerboseInstrumentation) {
      LOG(INFO) << "Popping for unwind " << method->PrettyMethod();
    }
    if (!method->IsRuntimeMethod() && !frame.interpreter_entry_) {
      instrumentation_->MethodUnwindEvent(self_, frame.this_object_, method, dex_pc);
      new_exception_thrown = self_->GetException() != exception.Get();
      if (new_exception_thrown) {
        pop_until_ = i->first;
        break;
      }
    }
  }
  if (!new_exception_thrown) {
    pop_until_ = stack_pointer;
  }
  exception.Assign(self_->GetException());
  self_->ClearException();
  if (kVerboseInstrumentation && new_exception_thrown) {
    LOG(INFO) << "Did partial pop of frames due to new exception";
  }
  return !new_exception_thrown;
}

Instrumentation::Instrumentation()
    : current_force_deopt_id_(0),
      instrumentation_stubs_installed_(false),
      instrumentation_level_(InstrumentationLevel::kInstrumentNothing),
      forced_interpret_only_(false),
      have_method_entry_listeners_(false),
      have_method_exit_listeners_(false),
      have_method_unwind_listeners_(false),
      have_dex_pc_listeners_(false),
      have_field_read_listeners_(false),
      have_field_write_listeners_(false),
      have_exception_thrown_listeners_(false),
      have_watched_frame_pop_listeners_(false),
      have_branch_listeners_(false),
      have_exception_handled_listeners_(false),
      deoptimized_methods_lock_(new ReaderWriterMutex("deoptimized methods lock",
                                                      kGenericBottomLock)),
      quick_alloc_entry_points_instrumentation_counter_(0),
      alloc_entrypoints_instrumented_(false) {
}

void Instrumentation::InstallStubsForClass(ObjPtr<mirror::Class> klass) {
  if (!klass->IsResolved()) {
    // We need the class to be resolved to install/uninstall stubs. Otherwise its methods
    // could not be initialized or linked with regard to class inheritance.
  } else if (klass->IsErroneousResolved()) {
    // We can't execute code in an erroneous class: do nothing.
  } else {
    for (ArtMethod& method : klass->GetMethods(kRuntimePointerSize)) {
      InstallStubsForMethod(&method);
    }
  }
}

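// Returns true if `code` is an entrypoint that can handle a call to a method
// whose declaring class is not yet visibly initialized: such entrypoints
// either resolve the method first or run it through the interpreter.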
static bool CanHandleInitializationCheck(const void* code) {
  ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
  return class_linker->IsQuickResolutionStub(code) ||
         class_linker->IsQuickToInterpreterBridge(code) ||
         class_linker->IsQuickGenericJniStub(code) ||
         (code == GetQuickInstrumentationEntryPoint());
}

static bool IsProxyInit(ArtMethod* method) REQUIRES_SHARED(Locks::mutator_lock_) {
  // Annoyingly this can be called before we have actually initialized WellKnownClasses,
  // so we also need to check this based on the declaring-class descriptor. The check is
  // valid because Proxy only has a single constructor.
  ArtMethod* well_known_proxy_init = jni::DecodeArtMethod(
      WellKnownClasses::java_lang_reflect_Proxy_init);
  if (well_known_proxy_init == method) {
    return true;
  }

  if (well_known_proxy_init != nullptr) {
    return false;
  }

  return method->IsConstructor() && !method->IsStatic() &&
      method->GetDeclaringClass()->DescriptorEquals("Ljava/lang/reflect/Proxy;");
}

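// Updates the method's entrypoint, with consistency checks on debug builds.
// The write is skipped when the entrypoint is unchanged to avoid dirtying
// pages of methods in the boot image.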
static void UpdateEntryPoints(ArtMethod* method, const void* quick_code)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  if (kIsDebugBuild) {
    if (NeedsClinitCheckBeforeCall(method) &&
        !method->GetDeclaringClass()->IsVisiblyInitialized()) {
      CHECK(CanHandleInitializationCheck(quick_code));
    }
    jit::Jit* jit = Runtime::Current()->GetJit();
    if (jit != nullptr && jit->GetCodeCache()->ContainsPc(quick_code)) {
      // Ensure we always have the thumb entrypoint for JIT on arm32.
      if (kRuntimeISA == InstructionSet::kArm) {
        CHECK_EQ(reinterpret_cast<uintptr_t>(quick_code) & 1, 1u);
      }
    }
    if (IsProxyInit(method)) {
      CHECK_NE(quick_code, GetQuickInstrumentationEntryPoint());
    }
  }
  // If the method is from a boot image, don't dirty it if the entrypoint
  // doesn't change.
  if (method->GetEntryPointFromQuickCompiledCode() != quick_code) {
    method->SetEntryPointFromQuickCompiledCode(quick_code);
  }
}

bool Instrumentation::CodeNeedsEntryExitStub(const void* code, ArtMethod* method)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  // Proxy.init should never have entry/exit stubs.
  if (IsProxyInit(method)) {
    return false;
  }

  // In some tests the runtime isn't set up fully and hence the entry points could
  // be nullptr.
  if (code == nullptr) {
    return true;
  }

  // Code running in the interpreter doesn't need entry/exit stubs.
  if (Runtime::Current()->GetClassLinker()->IsQuickToInterpreterBridge(code)) {
    return false;
  }

  // When JITing code for debuggable apps we generate the code to call method
  // entry / exit hooks when required. Hence there is no need to update to the
  // instrumentation entry point for JITed code in debuggable mode.
  if (!Runtime::Current()->IsJavaDebuggable()) {
    return true;
  }

  // Native functions can have JITed entry points but we don't include support
  // for calling entry / exit hooks directly from the JITed code for native
  // functions. So we still have to install entry / exit stubs for such cases.
  if (method->IsNative()) {
    return true;
  }

  jit::Jit* jit = Runtime::Current()->GetJit();
  if (jit != nullptr && jit->GetCodeCache()->ContainsPc(code)) {
    return false;
  }
  return true;
}

bool Instrumentation::InterpretOnly(ArtMethod* method) REQUIRES_SHARED(Locks::mutator_lock_) {
  if (method->IsNative()) {
    return false;
  }
  return InterpretOnly() ||
         IsDeoptimized(method) ||
         Runtime::Current()->GetRuntimeCallbacks()->IsMethodBeingInspected(method);
}

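// Returns whether the given AOT code can be used for `method`. Debuggable
// runtimes never use AOT code, and native-debuggable runtimes only keep AOT
// code from the boot image (see the comments below for the rationale).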
static bool CanUseAotCode(ArtMethod* method, const void* quick_code)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  if (quick_code == nullptr) {
    return false;
  }
  if (method->IsNative()) {
    // AOT code for native methods can always be used.
    return true;
  }

  Runtime* runtime = Runtime::Current();
  // For simplicity, we never use AOT code for debuggable runtimes.
  if (runtime->IsJavaDebuggable()) {
    return false;
  }

  if (runtime->IsNativeDebuggable()) {
    DCHECK(runtime->UseJitCompilation() && runtime->GetJit()->JitAtFirstUse());
    // If we are doing native debugging, ignore application's AOT code,
    // since we want to JIT it (at first use) with extra stackmaps for native
    // debugging. We keep however all AOT code from the boot image,
    // since the JIT-at-first-use is blocking and would result in non-negligible
    // startup performance impact.
    return runtime->GetHeap()->IsInBootImageOatFile(quick_code);
  }

  return true;
}

static bool CanUseNterp(ArtMethod* method) REQUIRES_SHARED(Locks::mutator_lock_) {
  return interpreter::CanRuntimeUseNterp() &&
      CanMethodUseNterp(method) &&
      method->GetDeclaringClass()->IsVerified();
}

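// Returns the most optimized entrypoint currently available for `method`:
// AOT code, precompiled JIT code, nterp, or as a last resort the generic
// JNI stub / interpreter bridge.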
static const void* GetOptimizedCodeFor(ArtMethod* method) REQUIRES_SHARED(Locks::mutator_lock_) {
  DCHECK(!Runtime::Current()->GetInstrumentation()->InterpretOnly(method));
  CHECK(method->IsInvokable()) << method->PrettyMethod();
  if (method->IsProxyMethod()) {
    return GetQuickProxyInvokeHandler();
  }

  // In debuggable mode, we can only use AOT code for native methods.
  ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
  const void* aot_code = method->GetOatMethodQuickCode(class_linker->GetImagePointerSize());
  if (CanUseAotCode(method, aot_code)) {
    return aot_code;
  }

  // If the method has been precompiled, there can be a JIT version.
  jit::Jit* jit = Runtime::Current()->GetJit();
  if (jit != nullptr) {
    const void* code = jit->GetCodeCache()->GetSavedEntryPointOfPreCompiledMethod(method);
    if (code != nullptr) {
      return code;
    }
  }

  // We need to check if the class has been verified for setting up nterp, as
  // the verifier could punt the method to the switch interpreter in case we
  // need to do lock counting.
  if (CanUseNterp(method)) {
    return interpreter::GetNterpEntryPoint();
  }

  return method->IsNative() ? GetQuickGenericJniStub() : GetQuickToInterpreterBridge();
}

void Instrumentation::InitializeMethodsCode(ArtMethod* method, const void* aot_code)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  // Use instrumentation entrypoints if instrumentation is installed.
  if (UNLIKELY(EntryExitStubsInstalled()) && !IsProxyInit(method)) {
    if (!method->IsNative() && InterpretOnly(method)) {
      UpdateEntryPoints(method, GetQuickToInterpreterBridge());
    } else {
      UpdateEntryPoints(method, GetQuickInstrumentationEntryPoint());
    }
    return;
  }

  if (UNLIKELY(IsForcedInterpretOnly() || IsDeoptimized(method))) {
    UpdateEntryPoints(
        method, method->IsNative() ? GetQuickGenericJniStub() : GetQuickToInterpreterBridge());
    return;
  }

  // Special case if we need an initialization check.
  if (NeedsClinitCheckBeforeCall(method) && !method->GetDeclaringClass()->IsVisiblyInitialized()) {
    // If we have code but the method needs a class initialization check before calling
    // that code, install the resolution stub that will perform the check.
    // It will be replaced by the proper entry point by ClassLinker::FixupStaticTrampolines
    // after the class is initialized (see ClassLinker::InitializeClass).
    // Note: this mimics the logic in image_writer.cc that installs the resolution
    // stub only if we have compiled code or we can execute nterp, and the method needs a class
    // initialization check.
    if (aot_code != nullptr || method->IsNative() || CanUseNterp(method)) {
      UpdateEntryPoints(method, GetQuickResolutionStub());
    } else {
      UpdateEntryPoints(method, GetQuickToInterpreterBridge());
    }
    return;
  }

  // Use the provided AOT code if possible.
  if (CanUseAotCode(method, aot_code)) {
    UpdateEntryPoints(method, aot_code);
    return;
  }

  // We check if the class is verified as we need the slow interpreter for lock verification.
  // If the class is not verified, this entrypoint will be updated in
  // ClassLinker::UpdateClassAfterVerification.
  if (CanUseNterp(method)) {
    UpdateEntryPoints(method, interpreter::GetNterpEntryPoint());
    return;
  }

  // Use default entrypoints.
  UpdateEntryPoints(
      method, method->IsNative() ? GetQuickGenericJniStub() : GetQuickToInterpreterBridge());
}

void Instrumentation::InstallStubsForMethod(ArtMethod* method) {
  if (!method->IsInvokable() || method->IsProxyMethod()) {
    // Do not change stubs for these methods.
    return;
  }
  // Don't stub Proxy.<init>. Note that the Proxy class itself is not a proxy class.
  // TODO: We should remove the need for this since it means we cannot always correctly detect
  // calls to Proxy.<init>.
  if (IsProxyInit(method)) {
    return;
  }

  // If the instrumentation needs to go through the interpreter, just update the
  // entrypoint to the interpreter.
  if (InterpretOnly(method)) {
    UpdateEntryPoints(method, GetQuickToInterpreterBridge());
    return;
  }

  if (EntryExitStubsInstalled()) {
    // Install the instrumentation entry point if needed.
    if (CodeNeedsEntryExitStub(method->GetEntryPointFromQuickCompiledCode(), method)) {
      UpdateEntryPoints(method, GetQuickInstrumentationEntryPoint());
    }
    return;
  }

  // We're being asked to restore the entrypoints after instrumentation.
  CHECK_EQ(instrumentation_level_, InstrumentationLevel::kInstrumentNothing);
  // We need to have the resolution stub still if the class is not initialized.
  if (NeedsClinitCheckBeforeCall(method) && !method->GetDeclaringClass()->IsVisiblyInitialized()) {
    UpdateEntryPoints(method, GetQuickResolutionStub());
    return;
  }
  UpdateEntryPoints(method, GetOptimizedCodeFor(method));
}

// Places the instrumentation exit pc as the return PC for every quick frame. This also allows
// deoptimization of quick frames to interpreter frames. When force_deopt is
// true the frames have to be deoptimized. If the frame has a deoptimization
// stack slot (all JITed frames), it is set to true to indicate this. For frames
// that do not have this slot, the force_deopt_id on the InstrumentationStack is
// used to check if the frame needs to be deoptimized. When force_deopt is false
// we just instrument the stack for method entry / exit hooks.
// Since we may already have done this previously, we need to push new instrumentation frames
// before existing instrumentation frames.
void InstrumentationInstallStack(Thread* thread, void* arg, bool deopt_all_frames)
    REQUIRES(Locks::mutator_lock_) {
  Locks::mutator_lock_->AssertExclusiveHeld(Thread::Current());
  struct InstallStackVisitor final : public StackVisitor {
    InstallStackVisitor(Thread* thread_in,
                        Context* context,
                        uintptr_t instrumentation_exit_pc,
                        uint64_t force_deopt_id,
                        bool deopt_all_frames)
        : StackVisitor(thread_in, context, kInstrumentationStackWalk),
          instrumentation_stack_(thread_in->GetInstrumentationStack()),
          instrumentation_exit_pc_(instrumentation_exit_pc),
          reached_existing_instrumentation_frames_(false),
          force_deopt_id_(force_deopt_id),
          deopt_all_frames_(deopt_all_frames) {}

    bool VisitFrame() override REQUIRES_SHARED(Locks::mutator_lock_) {
      ArtMethod* m = GetMethod();
      if (m == nullptr) {
        if (kVerboseInstrumentation) {
          LOG(INFO) << "  Skipping upcall. Frame " << GetFrameId();
        }
        return true;  // Ignore upcalls.
      }
      if (GetCurrentQuickFrame() == nullptr) {
        if (kVerboseInstrumentation) {
          LOG(INFO) << "Pushing shadow frame method " << m->PrettyMethod();
        }
        stack_methods_.push_back(m);
        return true;  // Continue.
      }
      uintptr_t return_pc = GetReturnPc();
      if (kVerboseInstrumentation) {
        LOG(INFO) << "  Installing exit stub in " << DescribeLocation();
      }
      if (return_pc == instrumentation_exit_pc_) {
        auto it = instrumentation_stack_->find(GetReturnPcAddr());
        CHECK(it != instrumentation_stack_->end());
        const InstrumentationStackFrame& frame = it->second;
        if (m->IsRuntimeMethod()) {
          if (frame.interpreter_entry_) {
            return true;
          }
        }

        // We've reached a frame which has already been installed with instrumentation exit stub.
        // We should have already installed instrumentation or been running the interpreter on
        // previous frames.
        reached_existing_instrumentation_frames_ = true;

        // Trampolines get replaced with their actual method in the stack,
        // so don't do the check below for runtime methods.
        if (!frame.method_->IsRuntimeMethod()) {
          CHECK_EQ(m->GetNonObsoleteMethod(), frame.method_->GetNonObsoleteMethod())
              << "Expected " << ArtMethod::PrettyMethod(m)
              << ", Found " << ArtMethod::PrettyMethod(frame.method_);
        }
        return_pc = frame.return_pc_;
        if (kVerboseInstrumentation) {
          LOG(INFO) << "Ignoring already instrumented " << frame.Dump();
        }
      } else {
        // If it is a JITed frame then just set the deopt bit if required;
        // otherwise continue.
        const OatQuickMethodHeader* method_header = GetCurrentOatQuickMethodHeader();
        if (method_header != nullptr && method_header->HasShouldDeoptimizeFlag()) {
          if (deopt_all_frames_) {
            SetShouldDeoptimizeFlag(DeoptimizeFlagValue::kDebug);
          }
          return true;
        }
        CHECK_NE(return_pc, 0U);
        if (UNLIKELY(reached_existing_instrumentation_frames_ && !m->IsRuntimeMethod())) {
          // We already saw an existing instrumentation frame so this should be a runtime-method
          // inserted by the interpreter or runtime.
          std::string thread_name;
          GetThread()->GetThreadName(thread_name);
          LOG(FATAL) << "While walking " << thread_name << " found unexpected non-runtime method"
                     << " without instrumentation exit return or interpreter frame."
                     << " method is " << GetMethod()->PrettyMethod()
                     << " return_pc is " << std::hex << return_pc;
          UNREACHABLE();
        }
        if (m->IsRuntimeMethod()) {
          size_t frame_size = GetCurrentQuickFrameInfo().FrameSizeInBytes();
          ArtMethod** caller_frame = reinterpret_cast<ArtMethod**>(
              reinterpret_cast<uint8_t*>(GetCurrentQuickFrame()) + frame_size);
          if (*caller_frame != nullptr && (*caller_frame)->IsNative()) {
            // Do not install instrumentation exit on return to JNI stubs.
            return true;
          }
        }
        InstrumentationStackFrame instrumentation_frame(
            m->IsRuntimeMethod() ? nullptr : GetThisObject().Ptr(),
            m,
            return_pc,
            false,
            force_deopt_id_);
        if (kVerboseInstrumentation) {
          LOG(INFO) << "Pushing frame " << instrumentation_frame.Dump();
        }

        if (!m->IsRuntimeMethod()) {
          // Runtime methods don't need to run method entry callbacks.
          stack_methods_.push_back(m);
        }
        instrumentation_stack_->insert({GetReturnPcAddr(), instrumentation_frame});
        SetReturnPc(instrumentation_exit_pc_);
      }
      return true;  // Continue.
    }
    std::map<uintptr_t, InstrumentationStackFrame>* const instrumentation_stack_;
    std::vector<ArtMethod*> stack_methods_;
    const uintptr_t instrumentation_exit_pc_;
    bool reached_existing_instrumentation_frames_;
    uint64_t force_deopt_id_;
    bool deopt_all_frames_;
  };
  if (kVerboseInstrumentation) {
    std::string thread_name;
    thread->GetThreadName(thread_name);
    LOG(INFO) << "Installing exit stubs in " << thread_name;
  }

  Instrumentation* instrumentation = reinterpret_cast<Instrumentation*>(arg);
  std::unique_ptr<Context> context(Context::Create());
  uintptr_t instrumentation_exit_pc = reinterpret_cast<uintptr_t>(GetQuickInstrumentationExitPc());
  InstallStackVisitor visitor(thread,
                              context.get(),
                              instrumentation_exit_pc,
                              instrumentation->current_force_deopt_id_,
                              deopt_all_frames);
  visitor.WalkStack(true);

  if (instrumentation->ShouldNotifyMethodEnterExitEvents()) {
    // Create method enter events for all methods currently on the thread's stack. We only do this
    // if we haven't already processed the method enter events.
    for (auto smi = visitor.stack_methods_.rbegin(); smi != visitor.stack_methods_.rend(); smi++) {
      instrumentation->MethodEnterEvent(thread, *smi);
    }
  }
  thread->VerifyStack();
}

void Instrumentation::InstrumentThreadStack(Thread* thread, bool force_deopt) {
  instrumentation_stubs_installed_ = true;
  InstrumentationInstallStack(thread, this, force_deopt);
}

// Removes the instrumentation exit pc as the return PC for every quick frame.
static void InstrumentationRestoreStack(Thread* thread, void* arg)
    REQUIRES(Locks::mutator_lock_) {
  Locks::mutator_lock_->AssertExclusiveHeld(Thread::Current());

  struct RestoreStackVisitor final : public StackVisitor {
    RestoreStackVisitor(Thread* thread_in, uintptr_t instrumentation_exit_pc,
                        Instrumentation* instrumentation)
        : StackVisitor(thread_in, nullptr, kInstrumentationStackWalk),
          thread_(thread_in),
          instrumentation_exit_pc_(instrumentation_exit_pc),
          instrumentation_(instrumentation),
          instrumentation_stack_(thread_in->GetInstrumentationStack()),
          frames_removed_(0) {}

    bool VisitFrame() override REQUIRES_SHARED(Locks::mutator_lock_) {
      if (instrumentation_stack_->size() == 0) {
        return false;  // Stop.
      }
      ArtMethod* m = GetMethod();
      if (GetCurrentQuickFrame() == nullptr) {
        if (kVerboseInstrumentation) {
          LOG(INFO) << "  Ignoring a shadow frame. Frame " << GetFrameId()
                    << " Method=" << ArtMethod::PrettyMethod(m);
        }
        return true;  // Ignore shadow frames.
      }
      if (m == nullptr) {
        if (kVerboseInstrumentation) {
          LOG(INFO) << "  Skipping upcall. Frame " << GetFrameId();
        }
        return true;  // Ignore upcalls.
      }
      auto it = instrumentation_stack_->find(GetReturnPcAddr());
      if (it != instrumentation_stack_->end()) {
        const InstrumentationStackFrame& instrumentation_frame = it->second;
        if (kVerboseInstrumentation) {
          LOG(INFO) << "  Removing exit stub in " << DescribeLocation();
        }
        if (instrumentation_frame.interpreter_entry_) {
          CHECK(m == Runtime::Current()->GetCalleeSaveMethod(CalleeSaveType::kSaveRefsAndArgs));
        } else {
          CHECK_EQ(m->GetNonObsoleteMethod(),
                   instrumentation_frame.method_->GetNonObsoleteMethod())
              << ArtMethod::PrettyMethod(m)
              << " and " << instrumentation_frame.method_->GetNonObsoleteMethod()->PrettyMethod();
        }
        SetReturnPc(instrumentation_frame.return_pc_);
        if (instrumentation_->ShouldNotifyMethodEnterExitEvents() &&
            !m->IsRuntimeMethod()) {
          // Create the method exit events. As the methods didn't really exit the result is 0.
          // We only do this if no debugger is attached to prevent from posting events twice.
          JValue val;
          instrumentation_->MethodExitEvent(thread_, m, OptionalFrame{}, val);
        }
        frames_removed_++;
      } else {
        if (kVerboseInstrumentation) {
          LOG(INFO) << "  No exit stub in " << DescribeLocation();
        }
      }
      return true;  // Continue.
    }
    Thread* const thread_;
    const uintptr_t instrumentation_exit_pc_;
    Instrumentation* const instrumentation_;
    std::map<uintptr_t, instrumentation::InstrumentationStackFrame>* const instrumentation_stack_;
    size_t frames_removed_;
  };
  if (kVerboseInstrumentation) {
    std::string thread_name;
    thread->GetThreadName(thread_name);
    LOG(INFO) << "Removing exit stubs in " << thread_name;
  }
  std::map<uintptr_t, instrumentation::InstrumentationStackFrame>* stack =
      thread->GetInstrumentationStack();
  if (stack->size() > 0) {
    Instrumentation* instrumentation = reinterpret_cast<Instrumentation*>(arg);
    uintptr_t instrumentation_exit_pc =
        reinterpret_cast<uintptr_t>(GetQuickInstrumentationExitPc());
    RestoreStackVisitor visitor(thread, instrumentation_exit_pc, instrumentation);
    visitor.WalkStack(true);
    CHECK_EQ(visitor.frames_removed_, stack->size());
    stack->clear();
  }
}

void Instrumentation::DeoptimizeAllThreadFrames() {
  Thread* self = Thread::Current();
  MutexLock mu(self, *Locks::thread_list_lock_);
  ThreadList* tl = Runtime::Current()->GetThreadList();
  tl->ForEach([&](Thread* t) {
    Locks::mutator_lock_->AssertExclusiveHeld(self);
    InstrumentThreadStack(t, /* deopt_all_frames= */ true);
  });
  current_force_deopt_id_++;
}

static bool HasEvent(Instrumentation::InstrumentationEvent expected, uint32_t events) {
  return (events & expected) != 0;
}

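// Adds `listener` to `list` if `event` is requested in `events`. A slot
// previously nulled out by PotentiallyRemoveListenerFrom is reused when
// available, since entries are never erased while mutators may be iterating.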
static void PotentiallyAddListenerTo(Instrumentation::InstrumentationEvent event,
                                     uint32_t events,
                                     std::list<InstrumentationListener*>& list,
                                     InstrumentationListener* listener,
                                     bool* has_listener)
    REQUIRES(Locks::mutator_lock_, !Locks::thread_list_lock_, !Locks::classlinker_classes_lock_) {
  Locks::mutator_lock_->AssertExclusiveHeld(Thread::Current());
  if (!HasEvent(event, events)) {
    return;
  }
  // If there is a free slot in the list, we insert the listener in that slot.
  // Otherwise we add it to the end of the list.
  auto it = std::find(list.begin(), list.end(), nullptr);
  if (it != list.end()) {
    *it = listener;
  } else {
    list.push_back(listener);
  }
  *has_listener = true;
}

void Instrumentation::AddListener(InstrumentationListener* listener, uint32_t events) {
  Locks::mutator_lock_->AssertExclusiveHeld(Thread::Current());
  PotentiallyAddListenerTo(kMethodEntered,
                           events,
                           method_entry_listeners_,
                           listener,
                           &have_method_entry_listeners_);
  PotentiallyAddListenerTo(kMethodExited,
                           events,
                           method_exit_listeners_,
                           listener,
                           &have_method_exit_listeners_);
  PotentiallyAddListenerTo(kMethodUnwind,
                           events,
                           method_unwind_listeners_,
                           listener,
                           &have_method_unwind_listeners_);
  PotentiallyAddListenerTo(kBranch,
                           events,
                           branch_listeners_,
                           listener,
                           &have_branch_listeners_);
  PotentiallyAddListenerTo(kDexPcMoved,
                           events,
                           dex_pc_listeners_,
                           listener,
                           &have_dex_pc_listeners_);
  PotentiallyAddListenerTo(kFieldRead,
                           events,
                           field_read_listeners_,
                           listener,
                           &have_field_read_listeners_);
  PotentiallyAddListenerTo(kFieldWritten,
                           events,
                           field_write_listeners_,
                           listener,
                           &have_field_write_listeners_);
  PotentiallyAddListenerTo(kExceptionThrown,
                           events,
                           exception_thrown_listeners_,
                           listener,
                           &have_exception_thrown_listeners_);
  PotentiallyAddListenerTo(kWatchedFramePop,
                           events,
                           watched_frame_pop_listeners_,
                           listener,
                           &have_watched_frame_pop_listeners_);
  PotentiallyAddListenerTo(kExceptionHandled,
                           events,
                           exception_handled_listeners_,
                           listener,
                           &have_exception_handled_listeners_);
}

static void PotentiallyRemoveListenerFrom(Instrumentation::InstrumentationEvent event,
                                          uint32_t events,
                                          std::list<InstrumentationListener*>& list,
                                          InstrumentationListener* listener,
                                          bool* has_listener)
    REQUIRES(Locks::mutator_lock_, !Locks::thread_list_lock_, !Locks::classlinker_classes_lock_) {
  Locks::mutator_lock_->AssertExclusiveHeld(Thread::Current());
  if (!HasEvent(event, events)) {
    return;
  }
  auto it = std::find(list.begin(), list.end(), listener);
  if (it != list.end()) {
    // Just update the entry, do not remove from the list. Removing entries in the list
    // is unsafe when mutators are iterating over it.
    *it = nullptr;
  }

  // Check if the list contains any non-null listener, and update 'has_listener'.
  for (InstrumentationListener* l : list) {
    if (l != nullptr) {
      *has_listener = true;
      return;
    }
  }
  *has_listener = false;
}

void Instrumentation::RemoveListener(InstrumentationListener* listener, uint32_t events) {
  Locks::mutator_lock_->AssertExclusiveHeld(Thread::Current());
  PotentiallyRemoveListenerFrom(kMethodEntered,
                                events,
                                method_entry_listeners_,
                                listener,
                                &have_method_entry_listeners_);
  PotentiallyRemoveListenerFrom(kMethodExited,
                                events,
                                method_exit_listeners_,
                                listener,
                                &have_method_exit_listeners_);
  PotentiallyRemoveListenerFrom(kMethodUnwind,
                                events,
                                method_unwind_listeners_,
                                listener,
                                &have_method_unwind_listeners_);
  PotentiallyRemoveListenerFrom(kBranch,
                                events,
                                branch_listeners_,
                                listener,
                                &have_branch_listeners_);
  PotentiallyRemoveListenerFrom(kDexPcMoved,
                                events,
                                dex_pc_listeners_,
                                listener,
                                &have_dex_pc_listeners_);
  PotentiallyRemoveListenerFrom(kFieldRead,
                                events,
                                field_read_listeners_,
                                listener,
                                &have_field_read_listeners_);
  PotentiallyRemoveListenerFrom(kFieldWritten,
                                events,
                                field_write_listeners_,
                                listener,
                                &have_field_write_listeners_);
  PotentiallyRemoveListenerFrom(kExceptionThrown,
                                events,
                                exception_thrown_listeners_,
                                listener,
                                &have_exception_thrown_listeners_);
  PotentiallyRemoveListenerFrom(kWatchedFramePop,
                                events,
                                watched_frame_pop_listeners_,
                                listener,
                                &have_watched_frame_pop_listeners_);
  PotentiallyRemoveListenerFrom(kExceptionHandled,
                                events,
                                exception_handled_listeners_,
                                listener,
                                &have_exception_handled_listeners_);
}

Instrumentation::InstrumentationLevel Instrumentation::GetCurrentInstrumentationLevel() const {
  return instrumentation_level_;
}

bool Instrumentation::RequiresInstrumentationInstallation(InstrumentationLevel new_level) const {
  // We need to reinstall instrumentation if we go to a different level.
  return GetCurrentInstrumentationLevel() != new_level;
}

void Instrumentation::ConfigureStubs(const char* key, InstrumentationLevel desired_level) {
  // Store the instrumentation level for this key or remove it.
  if (desired_level == InstrumentationLevel::kInstrumentNothing) {
    // The client no longer needs instrumentation.
    requested_instrumentation_levels_.erase(key);
  } else {
    // The client needs instrumentation.
    requested_instrumentation_levels_.Overwrite(key, desired_level);
  }

  UpdateStubs();
}

void Instrumentation::UpdateInstrumentationLevel(InstrumentationLevel requested_level) {
  instrumentation_level_ = requested_level;
}

void Instrumentation::MaybeRestoreInstrumentationStack() {
  // Restore stack only if there is no method currently deoptimized.
  if (!IsDeoptimizedMethodsEmpty()) {
    return;
  }

  Thread* self = Thread::Current();
  MutexLock mu(self, *Locks::thread_list_lock_);
  bool no_remaining_deopts = true;
  // Check that there are no other forced deoptimizations. Do it here so we only need to lock
  // thread_list_lock once.
  // The compiler gets confused on the thread annotations, so use
  // NO_THREAD_SAFETY_ANALYSIS. Note that we hold the mutator lock
  // exclusively at this point.
  Locks::mutator_lock_->AssertExclusiveHeld(self);
  Runtime::Current()->GetThreadList()->ForEach([&](Thread* t) NO_THREAD_SAFETY_ANALYSIS {
    no_remaining_deopts =
        no_remaining_deopts &&
        !t->IsForceInterpreter() &&
        !t->HasDebuggerShadowFrames() &&
        std::all_of(t->GetInstrumentationStack()->cbegin(),
                    t->GetInstrumentationStack()->cend(),
                    [&](const auto& frame) REQUIRES_SHARED(Locks::mutator_lock_) {
                      return frame.second.force_deopt_id_ == current_force_deopt_id_;
                    });
  });
  if (no_remaining_deopts) {
    Runtime::Current()->GetThreadList()->ForEach(InstrumentationRestoreStack, this);
    // Only do this after restoring, as walking the stack when restoring will see
    // the instrumentation exit pc.
    instrumentation_stubs_installed_ = false;
  }
}

void Instrumentation::UpdateStubs() {
  // Look for the highest required instrumentation level.
  InstrumentationLevel requested_level = InstrumentationLevel::kInstrumentNothing;
  for (const auto& v : requested_instrumentation_levels_) {
    requested_level = std::max(requested_level, v.second);
  }

  if (!RequiresInstrumentationInstallation(requested_level)) {
    // We're already set.
    return;
  }
  Thread* const self = Thread::Current();
  Runtime* runtime = Runtime::Current();
  Locks::mutator_lock_->AssertExclusiveHeld(self);
  Locks::thread_list_lock_->AssertNotHeld(self);
  UpdateInstrumentationLevel(requested_level);
  if (requested_level > InstrumentationLevel::kInstrumentNothing) {
    InstallStubsClassVisitor visitor(this);
    runtime->GetClassLinker()->VisitClasses(&visitor);
    instrumentation_stubs_installed_ = true;
    MutexLock mu(self, *Locks::thread_list_lock_);
    for (Thread* thread : Runtime::Current()->GetThreadList()->GetList()) {
      InstrumentThreadStack(thread, /* deopt_all_frames= */ false);
    }
  } else {
    InstallStubsClassVisitor visitor(this);
    runtime->GetClassLinker()->VisitClasses(&visitor);
    MaybeRestoreInstrumentationStack();
  }
}

static void ResetQuickAllocEntryPointsForThread(Thread* thread, void* arg ATTRIBUTE_UNUSED) {
  thread->ResetQuickAllocEntryPointsForThread();
}

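// Switches the allocation entrypoints between instrumented and default
// versions on all threads. When the runtime is started this requires a
// suspend-all, since every thread caches its own copy of the allocation
// entrypoints.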
void Instrumentation::SetEntrypointsInstrumented(bool instrumented) {
  Thread* self = Thread::Current();
  Runtime* runtime = Runtime::Current();
  Locks::mutator_lock_->AssertNotHeld(self);
  Locks::instrument_entrypoints_lock_->AssertHeld(self);
  if (runtime->IsStarted()) {
    ScopedSuspendAll ssa(__FUNCTION__);
    MutexLock mu(self, *Locks::runtime_shutdown_lock_);
    SetQuickAllocEntryPointsInstrumented(instrumented);
    ResetQuickAllocEntryPoints();
    alloc_entrypoints_instrumented_ = instrumented;
  } else {
    MutexLock mu(self, *Locks::runtime_shutdown_lock_);
    SetQuickAllocEntryPointsInstrumented(instrumented);

    // Note: ResetQuickAllocEntryPoints only works when the runtime is started. Manually run the
    // update for just this thread.
    // Note: self may be null. One of those paths is setting instrumentation in the Heap
    // constructor for gcstress mode.
    if (self != nullptr) {
      ResetQuickAllocEntryPointsForThread(self, nullptr);
    }

    alloc_entrypoints_instrumented_ = instrumented;
  }
}

void Instrumentation::InstrumentQuickAllocEntryPoints() {
  MutexLock mu(Thread::Current(), *Locks::instrument_entrypoints_lock_);
  InstrumentQuickAllocEntryPointsLocked();
}

void Instrumentation::UninstrumentQuickAllocEntryPoints() {
  MutexLock mu(Thread::Current(), *Locks::instrument_entrypoints_lock_);
  UninstrumentQuickAllocEntryPointsLocked();
}

void Instrumentation::InstrumentQuickAllocEntryPointsLocked() {
  Locks::instrument_entrypoints_lock_->AssertHeld(Thread::Current());
  if (quick_alloc_entry_points_instrumentation_counter_ == 0) {
    SetEntrypointsInstrumented(true);
  }
  ++quick_alloc_entry_points_instrumentation_counter_;
}

void Instrumentation::UninstrumentQuickAllocEntryPointsLocked() {
  Locks::instrument_entrypoints_lock_->AssertHeld(Thread::Current());
  CHECK_GT(quick_alloc_entry_points_instrumentation_counter_, 0U);
  --quick_alloc_entry_points_instrumentation_counter_;
  if (quick_alloc_entry_points_instrumentation_counter_ == 0) {
    SetEntrypointsInstrumented(false);
  }
}

void Instrumentation::ResetQuickAllocEntryPoints() {
  Runtime* runtime = Runtime::Current();
  if (runtime->IsStarted()) {
    MutexLock mu(Thread::Current(), *Locks::thread_list_lock_);
    runtime->GetThreadList()->ForEach(ResetQuickAllocEntryPointsForThread, nullptr);
  }
}

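// Describes the given entrypoint in human-readable form, for logging and
// debug output.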
std::string Instrumentation::EntryPointString(const void* code) {
  ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
  jit::Jit* jit = Runtime::Current()->GetJit();
  if (class_linker->IsQuickToInterpreterBridge(code)) {
    return "interpreter";
  } else if (class_linker->IsQuickResolutionStub(code)) {
    return "resolution";
  } else if (code == GetQuickInstrumentationEntryPoint()) {
    return "instrumentation";
  } else if (jit != nullptr && jit->GetCodeCache()->ContainsPc(code)) {
    return "jit";
  } else if (code == GetInvokeObsoleteMethodStub()) {
    return "obsolete";
  } else if (code == interpreter::GetNterpEntryPoint()) {
    return "nterp";
  } else if (class_linker->IsQuickGenericJniStub(code)) {
    return "generic jni";
  } else if (Runtime::Current()->GetOatFileManager().ContainsPc(code)) {
    return "oat";
  }
  return "unknown";
}

void Instrumentation::UpdateMethodsCodeImpl(ArtMethod* method, const void* new_code) {
  if (!AreExitStubsInstalled()) {
    // Fast path: no instrumentation.
    DCHECK(!IsDeoptimized(method));
    UpdateEntryPoints(method, new_code);
    return;
  }

  ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
  if (class_linker->IsQuickToInterpreterBridge(new_code)) {
    // It's always OK to update to the interpreter.
    UpdateEntryPoints(method, new_code);
    return;
  }

  if (IsDeoptimized(method)) {
    DCHECK(class_linker->IsQuickToInterpreterBridge(method->GetEntryPointFromQuickCompiledCode()))
        << EntryPointString(method->GetEntryPointFromQuickCompiledCode());
    // Don't update, stay deoptimized.
    return;
  }

  if (EntryExitStubsInstalled() && CodeNeedsEntryExitStub(new_code, method)) {
    DCHECK(method->GetEntryPointFromQuickCompiledCode() == GetQuickInstrumentationEntryPoint() ||
           class_linker->IsQuickToInterpreterBridge(method->GetEntryPointFromQuickCompiledCode()))
        << EntryPointString(method->GetEntryPointFromQuickCompiledCode())
        << " " << method->PrettyMethod();
    // If the code we want to update the method with still needs entry/exit stubs, just skip.
    return;
  }

  // At this point, we can update as asked.
  UpdateEntryPoints(method, new_code);
}

void Instrumentation::UpdateNativeMethodsCodeToJitCode(ArtMethod* method, const void* new_code) {
  // We don't do any read barrier on `method`'s declaring class in this code, as the JIT might
  // enter here on a soon-to-be deleted ArtMethod. Updating the entrypoint is OK though, as
  // the ArtMethod is still in memory.
  if (EntryExitStubsInstalled()) {
    // If stubs are installed don't update.
    return;
  }
  UpdateEntryPoints(method, new_code);
}

void Instrumentation::UpdateMethodsCode(ArtMethod* method, const void* new_code) {
  DCHECK(method->GetDeclaringClass()->IsResolved());
  UpdateMethodsCodeImpl(method, new_code);
}

bool Instrumentation::AddDeoptimizedMethod(ArtMethod* method) {
  if (IsDeoptimizedMethod(method)) {
    // Already in the map. Return.
    return false;
  }
  // Not found. Add it.
  deoptimized_methods_.insert(method);
  return true;
}

bool Instrumentation::IsDeoptimizedMethod(ArtMethod* method) {
  return deoptimized_methods_.find(method) != deoptimized_methods_.end();
}

ArtMethod* Instrumentation::BeginDeoptimizedMethod() {
  if (deoptimized_methods_.empty()) {
    // Empty.
    return nullptr;
  }
  return *deoptimized_methods_.begin();
}

bool Instrumentation::RemoveDeoptimizedMethod(ArtMethod* method) {
  auto it = deoptimized_methods_.find(method);
  if (it == deoptimized_methods_.end()) {
    return false;
  }
  deoptimized_methods_.erase(it);
  return true;
}

bool Instrumentation::IsDeoptimizedMethodsEmptyLocked() const {
  return deoptimized_methods_.empty();
}

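// Forces `method` to be executed by the interpreter: its entrypoint is set to
// the interpreter bridge, and instrumentation frames are installed so that
// invocations already on the stack can be deoptimized when they return.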
void Instrumentation::Deoptimize(ArtMethod* method) {
  CHECK(!method->IsNative());
  CHECK(!method->IsProxyMethod());
  CHECK(method->IsInvokable());

  Thread* self = Thread::Current();
  {
    WriterMutexLock mu(self, *GetDeoptimizedMethodsLock());
    bool has_not_been_deoptimized = AddDeoptimizedMethod(method);
    CHECK(has_not_been_deoptimized) << "Method " << ArtMethod::PrettyMethod(method)
                                    << " is already deoptimized";
  }
  if (!InterpreterStubsInstalled()) {
    UpdateEntryPoints(method, GetQuickToInterpreterBridge());

    // Install instrumentation exit stub and instrumentation frames. We may already have installed
    // these previously so it will only cover the newly created frames.
    instrumentation_stubs_installed_ = true;
    MutexLock mu(self, *Locks::thread_list_lock_);
    for (Thread* thread : Runtime::Current()->GetThreadList()->GetList()) {
      // This isn't a strong deopt. We deopt this method if it is still in the
      // deopt methods list. If by the time we hit this frame we no longer need
      // a deopt it is safe to continue. So we don't mark the frame.
      InstrumentThreadStack(thread, /* deopt_all_frames= */ false);
    }
  }
}
1170
Undeoptimize(ArtMethod * method)1171 void Instrumentation::Undeoptimize(ArtMethod* method) {
1172 CHECK(!method->IsNative());
1173 CHECK(!method->IsProxyMethod());
1174 CHECK(method->IsInvokable());
1175
1176 Thread* self = Thread::Current();
1177 {
1178 WriterMutexLock mu(self, *GetDeoptimizedMethodsLock());
1179 bool found_and_erased = RemoveDeoptimizedMethod(method);
1180 CHECK(found_and_erased) << "Method " << ArtMethod::PrettyMethod(method)
1181 << " is not deoptimized";
1182 }
1183
1184 // If interpreter stubs are still needed nothing to do.
1185 if (InterpreterStubsInstalled()) {
1186 return;
1187 }
1188
1189 // We are not using interpreter stubs for deoptimization. Restore the code of the method.
1190 // We still retain interpreter bridge if we need it for other reasons.
1191 if (InterpretOnly(method)) {
1192 UpdateEntryPoints(method, GetQuickToInterpreterBridge());
1193 } else if (NeedsClinitCheckBeforeCall(method) &&
1194 !method->GetDeclaringClass()->IsVisiblyInitialized()) {
1195 UpdateEntryPoints(method, GetQuickResolutionStub());
1196 } else {
1197 UpdateEntryPoints(method, GetMaybeInstrumentedCodeForInvoke(method));
1198 }
1199
1200 // If there is no deoptimized method left, we can restore the stack of each thread.
1201 if (!EntryExitStubsInstalled()) {
1202 MaybeRestoreInstrumentationStack();
1203 }
1204 }
1205
bool Instrumentation::IsDeoptimizedMethodsEmpty() const {
  ReaderMutexLock mu(Thread::Current(), *GetDeoptimizedMethodsLock());
  return deoptimized_methods_.empty();
}

bool Instrumentation::IsDeoptimized(ArtMethod* method) {
  DCHECK(method != nullptr);
  ReaderMutexLock mu(Thread::Current(), *GetDeoptimizedMethodsLock());
  return IsDeoptimizedMethod(method);
}

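// Removes the instrumentation support registered under `key` and
// undeoptimizes every method that is still individually deoptimized.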
void Instrumentation::DisableDeoptimization(const char* key) {
  // Remove any instrumentation support added for deoptimization.
  ConfigureStubs(key, InstrumentationLevel::kInstrumentNothing);
  // Undeoptimize selected methods.
  while (true) {
    ArtMethod* method;
    {
      ReaderMutexLock mu(Thread::Current(), *GetDeoptimizedMethodsLock());
      if (IsDeoptimizedMethodsEmptyLocked()) {
        break;
      }
      method = BeginDeoptimizedMethod();
      CHECK(method != nullptr);
    }
    Undeoptimize(method);
  }
}

// Indicates if instrumentation should notify method enter/exit events to the listeners.
bool Instrumentation::ShouldNotifyMethodEnterExitEvents() const {
  if (!HasMethodEntryListeners() && !HasMethodExitListeners()) {
    return false;
  }
  return !InterpreterStubsInstalled();
}

void Instrumentation::DeoptimizeEverything(const char* key) {
  ConfigureStubs(key, InstrumentationLevel::kInstrumentWithInterpreter);
}

void Instrumentation::UndeoptimizeEverything(const char* key) {
  CHECK(InterpreterStubsInstalled());
  ConfigureStubs(key, InstrumentationLevel::kInstrumentNothing);
}

void Instrumentation::EnableMethodTracing(const char* key, bool needs_interpreter) {
  InstrumentationLevel level;
  if (needs_interpreter) {
    level = InstrumentationLevel::kInstrumentWithInterpreter;
  } else {
    level = InstrumentationLevel::kInstrumentWithInstrumentationStubs;
  }
  ConfigureStubs(key, level);
}

void Instrumentation::DisableMethodTracing(const char* key) {
  ConfigureStubs(key, InstrumentationLevel::kInstrumentNothing);
}

const void* Instrumentation::GetCodeForInvoke(ArtMethod* method) {
  // This is called by instrumentation and resolution trampolines
  // and that should never be getting proxy methods.
  DCHECK(!method->IsProxyMethod()) << method->PrettyMethod();
  ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
  const void* code = method->GetEntryPointFromQuickCompiledCodePtrSize(kRuntimePointerSize);
  // If we don't have the instrumentation, the resolution stub, or the
  // interpreter as entrypoint, just return the current entrypoint, assuming
  // it's the most optimized.
  if (code != GetQuickInstrumentationEntryPoint() &&
      !class_linker->IsQuickResolutionStub(code) &&
      !class_linker->IsQuickToInterpreterBridge(code)) {
    return code;
  }

  if (InterpretOnly(method)) {
    // If we're forced into interpreter just use it.
    return GetQuickToInterpreterBridge();
  }

  return GetOptimizedCodeFor(method);
}

const void* Instrumentation::GetMaybeInstrumentedCodeForInvoke(ArtMethod* method) {
  // This is called by resolution trampolines and that should never be getting proxy methods.
  DCHECK(!method->IsProxyMethod()) << method->PrettyMethod();
  const void* code = GetCodeForInvoke(method);
  if (EntryExitStubsInstalled() && CodeNeedsEntryExitStub(code, method)) {
    return GetQuickInstrumentationEntryPoint();
  }
  return code;
}

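// Event-reporting implementations. Each loop skips listener slots that
// RemoveListener has nulled out in place.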
void Instrumentation::MethodEnterEventImpl(Thread* thread, ArtMethod* method) const {
  DCHECK(!method->IsRuntimeMethod());
  if (HasMethodEntryListeners()) {
    for (InstrumentationListener* listener : method_entry_listeners_) {
      if (listener != nullptr) {
        listener->MethodEntered(thread, method);
      }
    }
  }
}

template <>
void Instrumentation::MethodExitEventImpl(Thread* thread,
                                          ArtMethod* method,
                                          OptionalFrame frame,
                                          MutableHandle<mirror::Object>& return_value) const {
  if (HasMethodExitListeners()) {
    for (InstrumentationListener* listener : method_exit_listeners_) {
      if (listener != nullptr) {
        listener->MethodExited(thread, method, frame, return_value);
      }
    }
  }
}

template <>
void Instrumentation::MethodExitEventImpl(Thread* thread,
                                          ArtMethod* method,
                                          OptionalFrame frame,
                                          JValue& return_value) const {
  if (HasMethodExitListeners()) {
    Thread* self = Thread::Current();
    StackHandleScope<1> hs(self);
    if (method->GetInterfaceMethodIfProxy(kRuntimePointerSize)->GetReturnTypePrimitive() !=
        Primitive::kPrimNot) {
      for (InstrumentationListener* listener : method_exit_listeners_) {
        if (listener != nullptr) {
          listener->MethodExited(thread, method, frame, return_value);
        }
      }
    } else {
      MutableHandle<mirror::Object> ret(hs.NewHandle(return_value.GetL()));
      MethodExitEventImpl(thread, method, frame, ret);
      return_value.SetL(ret.Get());
    }
  }
}

MethodUnwindEvent(Thread * thread,ObjPtr<mirror::Object> this_object,ArtMethod * method,uint32_t dex_pc) const1347 void Instrumentation::MethodUnwindEvent(Thread* thread,
1348 ObjPtr<mirror::Object> this_object,
1349 ArtMethod* method,
1350 uint32_t dex_pc) const {
1351 if (HasMethodUnwindListeners()) {
1352 Thread* self = Thread::Current();
1353 StackHandleScope<1> hs(self);
1354 Handle<mirror::Object> thiz(hs.NewHandle(this_object));
1355 for (InstrumentationListener* listener : method_unwind_listeners_) {
1356 if (listener != nullptr) {
1357 listener->MethodUnwind(thread, thiz, method, dex_pc);
1358 }
1359 }
1360 }
1361 }
1362
DexPcMovedEventImpl(Thread * thread,ObjPtr<mirror::Object> this_object,ArtMethod * method,uint32_t dex_pc) const1363 void Instrumentation::DexPcMovedEventImpl(Thread* thread,
1364 ObjPtr<mirror::Object> this_object,
1365 ArtMethod* method,
1366 uint32_t dex_pc) const {
1367 Thread* self = Thread::Current();
1368 StackHandleScope<1> hs(self);
1369 Handle<mirror::Object> thiz(hs.NewHandle(this_object));
1370 for (InstrumentationListener* listener : dex_pc_listeners_) {
1371 if (listener != nullptr) {
1372 listener->DexPcMoved(thread, thiz, method, dex_pc);
1373 }
1374 }
1375 }
1376
BranchImpl(Thread * thread,ArtMethod * method,uint32_t dex_pc,int32_t offset) const1377 void Instrumentation::BranchImpl(Thread* thread,
1378 ArtMethod* method,
1379 uint32_t dex_pc,
1380 int32_t offset) const {
1381 for (InstrumentationListener* listener : branch_listeners_) {
1382 if (listener != nullptr) {
1383 listener->Branch(thread, method, dex_pc, offset);
1384 }
1385 }
1386 }
1387
WatchedFramePopImpl(Thread * thread,const ShadowFrame & frame) const1388 void Instrumentation::WatchedFramePopImpl(Thread* thread, const ShadowFrame& frame) const {
1389 for (InstrumentationListener* listener : watched_frame_pop_listeners_) {
1390 if (listener != nullptr) {
1391 listener->WatchedFramePop(thread, frame);
1392 }
1393 }
1394 }

void Instrumentation::FieldReadEventImpl(Thread* thread,
                                         ObjPtr<mirror::Object> this_object,
                                         ArtMethod* method,
                                         uint32_t dex_pc,
                                         ArtField* field) const {
  Thread* self = Thread::Current();
  StackHandleScope<1> hs(self);
  Handle<mirror::Object> thiz(hs.NewHandle(this_object));
  for (InstrumentationListener* listener : field_read_listeners_) {
    if (listener != nullptr) {
      listener->FieldRead(thread, thiz, method, dex_pc, field);
    }
  }
}

void Instrumentation::FieldWriteEventImpl(Thread* thread,
                                          ObjPtr<mirror::Object> this_object,
                                          ArtMethod* method,
                                          uint32_t dex_pc,
                                          ArtField* field,
                                          const JValue& field_value) const {
  Thread* self = Thread::Current();
  StackHandleScope<2> hs(self);
  Handle<mirror::Object> thiz(hs.NewHandle(this_object));
  if (field->IsPrimitiveType()) {
    for (InstrumentationListener* listener : field_write_listeners_) {
      if (listener != nullptr) {
        listener->FieldWritten(thread, thiz, method, dex_pc, field, field_value);
      }
    }
  } else {
    Handle<mirror::Object> val(hs.NewHandle(field_value.GetL()));
    for (InstrumentationListener* listener : field_write_listeners_) {
      if (listener != nullptr) {
        listener->FieldWritten(thread, thiz, method, dex_pc, field, val);
      }
    }
  }
}
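
// FieldWriteEventImpl splits on the field type because listeners take
// primitive values as a JValue and reference values as a Handle; only
// references can move under the GC while a listener suspends, so only the
// reference path pays for the extra handle. A sketch of packing a reference
// write into the JValue this function expects (hypothetical `new_value`):
//
//   JValue v;
//   v.SetL(new_value);  // the object being stored into the field
//   // ... dispatch the field-write event with `v` as field_value ...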

void Instrumentation::ExceptionThrownEvent(Thread* thread,
                                           ObjPtr<mirror::Throwable> exception_object) const {
  Thread* self = Thread::Current();
  StackHandleScope<1> hs(self);
  Handle<mirror::Throwable> h_exception(hs.NewHandle(exception_object));
  if (HasExceptionThrownListeners()) {
    DCHECK_EQ(thread->GetException(), h_exception.Get());
    thread->ClearException();
    for (InstrumentationListener* listener : exception_thrown_listeners_) {
      if (listener != nullptr) {
        listener->ExceptionThrown(thread, h_exception);
      }
    }
    // See b/65049545 for discussion about this behavior.
    thread->AssertNoPendingException();
    thread->SetException(h_exception.Get());
  }
}
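
// ExceptionThrownEvent briefly clears the pending exception so listeners run
// with a clean slate, then asserts that no listener left a new exception
// pending and restores the original one. The event is therefore purely
// observational: a listener cannot replace the in-flight exception here. The
// Handle also keeps the Throwable valid if a listener suspends the thread.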

void Instrumentation::ExceptionHandledEvent(Thread* thread,
                                            ObjPtr<mirror::Throwable> exception_object) const {
  Thread* self = Thread::Current();
  StackHandleScope<1> hs(self);
  Handle<mirror::Throwable> h_exception(hs.NewHandle(exception_object));
  if (HasExceptionHandledListeners()) {
    // We should have cleared the exception so that callers can detect a new one.
    DCHECK(thread->GetException() == nullptr);
    for (InstrumentationListener* listener : exception_handled_listeners_) {
      if (listener != nullptr) {
        listener->ExceptionHandled(thread, h_exception);
      }
    }
  }
}

void Instrumentation::PushInstrumentationStackFrame(Thread* self,
                                                    ObjPtr<mirror::Object> this_object,
                                                    ArtMethod* method,
                                                    uintptr_t stack_ptr,
                                                    uintptr_t lr,
                                                    bool interpreter_entry) {
  DCHECK(!self->IsExceptionPending());
  std::map<uintptr_t, instrumentation::InstrumentationStackFrame>* stack =
      self->GetInstrumentationStack();
  if (kVerboseInstrumentation) {
    LOG(INFO) << "Entering " << ArtMethod::PrettyMethod(method) << " from PC "
              << reinterpret_cast<void*>(lr);
  }

  // We send the enter event before pushing the instrumentation frame to make cleanup easier.
  // If the event causes an exception, we can simply send the unwind event and return.
  StackHandleScope<1> hs(self);
  Handle<mirror::Object> h_this(hs.NewHandle(this_object));
  if (!interpreter_entry) {
    MethodEnterEvent(self, method);
    if (self->IsExceptionPending()) {
      MethodUnwindEvent(self, h_this.Get(), method, 0);
      return;
    }
  }

  // We have a callee-save frame, meaning this value is guaranteed to never be 0.
  DCHECK(!self->IsExceptionPending());

  instrumentation::InstrumentationStackFrame instrumentation_frame(
      h_this.Get(), method, lr, interpreter_entry, current_force_deopt_id_);
  stack->insert({stack_ptr, instrumentation_frame});
}
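
// The instrumentation stack is a per-thread std::map keyed by the stack
// pointer of the instrumented frame; the value records the original return
// PC that the entry stub overwrote. The exit path later finds the entry by
// the address of the slot holding the patched return PC, roughly:
//
//   auto it = self->GetInstrumentationStack()->find(
//       reinterpret_cast<uintptr_t>(return_pc_addr));
//   uintptr_t original_return_pc = it->second.return_pc_;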

DeoptimizationMethodType Instrumentation::GetDeoptimizationMethodType(ArtMethod* method) {
  if (method->IsRuntimeMethod()) {
    // Certain methods have strict requirements on whether the dex instruction
    // should be re-executed upon deoptimization.
    if (method == Runtime::Current()->GetCalleeSaveMethod(
        CalleeSaveType::kSaveEverythingForClinit)) {
      return DeoptimizationMethodType::kKeepDexPc;
    }
    if (method == Runtime::Current()->GetCalleeSaveMethod(
        CalleeSaveType::kSaveEverythingForSuspendCheck)) {
      return DeoptimizationMethodType::kKeepDexPc;
    }
  }
  return DeoptimizationMethodType::kDefault;
}
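
// Roughly: kKeepDexPc makes deoptimization resume interpretation at the
// current dex pc, re-executing the interrupted instruction, which is what the
// clinit and suspend-check save-everything methods need since they interrupt
// an instruction mid-flight. kDefault instead treats the instruction (an
// invoke) as completed and resumes past it, consuming the return value.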

// Try to get the shorty of a runtime method if it's an invocation stub.
static char GetRuntimeMethodShorty(Thread* thread) REQUIRES_SHARED(Locks::mutator_lock_) {
  char shorty = 'V';
  StackVisitor::WalkStack(
      [&shorty](const art::StackVisitor* stack_visitor) REQUIRES_SHARED(Locks::mutator_lock_) {
        ArtMethod* m = stack_visitor->GetMethod();
        if (m == nullptr || m->IsRuntimeMethod()) {
          return true;
        }
        // This is the first Java method.
        if (m->IsNative()) {
          // Use the JNI method's shorty for the JNI stub.
          shorty = m->GetShorty()[0];
        } else if (m->IsProxyMethod()) {
          // A proxy method just invokes its proxied method via
          // art_quick_proxy_invoke_handler.
          shorty = m->GetInterfaceMethodIfProxy(kRuntimePointerSize)->GetShorty()[0];
        } else {
          const Instruction& instr = m->DexInstructions().InstructionAt(stack_visitor->GetDexPc());
          if (instr.IsInvoke()) {
            uint16_t method_index = static_cast<uint16_t>(instr.VRegB());
            const DexFile* dex_file = m->GetDexFile();
            if (interpreter::IsStringInit(dex_file, method_index)) {
              // Invoking a string init constructor is turned into invoking
              // StringFactory.newStringFromChars(), which returns a string.
              shorty = 'L';
            } else {
              shorty = dex_file->GetMethodShorty(method_index)[0];
            }
          } else {
            // It could be that a non-invoke opcode invokes a stub, which in turn
            // invokes Java code. In such cases, we should never expect a return
            // value from the stub.
          }
        }
        // Stop stack walking since we've seen a Java frame.
        return false;
      },
      thread,
      /* context= */ nullptr,
      art::StackVisitor::StackWalkKind::kIncludeInlinedFrames);
  return shorty;
}
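
// The walk above stops at the first non-runtime Java frame and derives the
// return shorty of whatever call that frame was suspended at. Shorty
// characters follow the dex convention: 'V' void, 'Z' boolean, 'I' int,
// 'J' long, 'F' float, 'D' double, and 'L' for any reference (arrays
// included). For example, a caller suspended at an invoke-virtual of
// java.lang.Object.toString() would yield 'L' (hypothetical example).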

JValue Instrumentation::GetReturnValue(
    Thread* self, ArtMethod* method, bool* is_ref, uint64_t* gpr_result, uint64_t* fpr_result) {
  uint32_t length;
  const PointerSize pointer_size = Runtime::Current()->GetClassLinker()->GetImagePointerSize();
  char return_shorty;

  // A runtime method does not call into MethodExitEvent(), so there should be
  // no suspension point below.
  ScopedAssertNoThreadSuspension ants(__FUNCTION__, method->IsRuntimeMethod());
  if (method->IsRuntimeMethod()) {
    Runtime* runtime = Runtime::Current();
    if (method != runtime->GetCalleeSaveMethod(CalleeSaveType::kSaveEverythingForClinit) &&
        method != runtime->GetCalleeSaveMethod(CalleeSaveType::kSaveEverythingForSuspendCheck)) {
      // If the caller is at an invocation point and the runtime method is not
      // for clinit, we need to pass return results to the caller.
      // We need the correct shorty to decide whether we need to pass the return
      // result for deoptimization below.
      return_shorty = GetRuntimeMethodShorty(self);
    } else {
      // Some runtime methods such as allocations, unresolved field getters, etc.
      // have return values. We don't need to set return_value since MethodExitEvent()
      // below isn't called for runtime methods. Deoptimization doesn't need the
      // value either, since the dex instruction will be re-executed by the
      // interpreter, except in these two cases:
      // (1) For an invoke, which is handled above to get the correct shorty.
      // (2) For MONITOR_ENTER/EXIT, which cannot be re-executed since it's not
      //     idempotent. However, there is no return value for it anyway.
      return_shorty = 'V';
    }
  } else {
    return_shorty = method->GetInterfaceMethodIfProxy(pointer_size)->GetShorty(&length)[0];
  }

  *is_ref = return_shorty == '[' || return_shorty == 'L';
  JValue return_value;
  if (return_shorty == 'V') {
    return_value.SetJ(0);
  } else if (return_shorty == 'F' || return_shorty == 'D') {
    return_value.SetJ(*fpr_result);
  } else {
    return_value.SetJ(*gpr_result);
  }
  return return_value;
}
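
// The shorty's first character selects which saved register file holds the
// result: floating-point results ('F'/'D') are read from *fpr_result,
// everything else from *gpr_result, and 'V' yields a zeroed JValue. Worked
// example: for a method returning double, return_shorty is 'D', so the raw
// 64 bits from *fpr_result are installed via SetJ() and reinterpreted as a
// double by whoever consumes the JValue.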

bool Instrumentation::ShouldDeoptimizeMethod(Thread* self, const NthCallerVisitor& visitor) {
  bool should_deoptimize_frame = false;
  const OatQuickMethodHeader* header = visitor.GetCurrentOatQuickMethodHeader();
  if (header != nullptr && header->HasShouldDeoptimizeFlag()) {
    uint8_t should_deopt_flag = visitor.GetShouldDeoptimizeFlag();
    // The deoptimize flag could be set for debugging or for CHA invalidations.
    // Deoptimize here only if it was requested for debugging. CHA
    // invalidations are handled in the JITed code.
    if ((should_deopt_flag & static_cast<uint8_t>(DeoptimizeFlagValue::kDebug)) != 0) {
      should_deoptimize_frame = true;
    }
  }
  return (visitor.caller != nullptr) &&
         (InterpreterStubsInstalled() || IsDeoptimized(visitor.caller) ||
          self->IsForceInterpreter() ||
          // NB: Since structurally obsolete compiled methods might have the offsets of
          // methods/fields compiled in, we need to go back to the interpreter whenever
          // we hit them.
          visitor.caller->GetDeclaringClass()->IsObsoleteObject() ||
          Dbg::IsForcedInterpreterNeededForUpcall(self, visitor.caller) ||
          should_deoptimize_frame);
}
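
// Summarizing the return expression: the caller frame is deoptimized when any
// of these holds: interpreter stubs are installed globally, the caller itself
// has been deoptimized, the thread is forced to interpret, the caller's class
// is structurally obsolete, the debugger needs interpretation for the upcall,
// or the frame's should-deoptimize flag was set for debugging.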

TwoWordReturn Instrumentation::PopInstrumentationStackFrame(Thread* self,
                                                            uintptr_t* return_pc_addr,
                                                            uint64_t* gpr_result,
                                                            uint64_t* fpr_result) {
  DCHECK(gpr_result != nullptr);
  DCHECK(fpr_result != nullptr);
  // Do the pop.
  std::map<uintptr_t, instrumentation::InstrumentationStackFrame>* stack =
      self->GetInstrumentationStack();
  CHECK_GT(stack->size(), 0U);
  auto it = stack->find(reinterpret_cast<uintptr_t>(return_pc_addr));
  CHECK(it != stack->end());
  InstrumentationStackFrame instrumentation_frame = it->second;
  stack->erase(it);

  // Set the return PC and check the consistency of the stack.
  // We don't cache the return pc value in a local as it may change after
  // sending a method exit event.
  *return_pc_addr = instrumentation_frame.return_pc_;
  self->VerifyStack();

  ArtMethod* method = instrumentation_frame.method_;

  bool is_ref;
  JValue return_value = GetReturnValue(self, method, &is_ref, gpr_result, fpr_result);
  StackHandleScope<1> hs(self);
  MutableHandle<mirror::Object> res(hs.NewHandle<mirror::Object>(nullptr));
  if (is_ref) {
    // Take a handle to the return value so we won't lose it if we suspend.
    // FIXME: The `is_ref` is often guessed wrong, so even the object alignment
    // assertion would fail for some tests. See b/204766614.
    // DCHECK_ALIGNED(return_value.GetL(), kObjectAlignment);
    res.Assign(return_value.GetL());
  }
  if (!method->IsRuntimeMethod() && !instrumentation_frame.interpreter_entry_) {
    // Note that sending the event may change the contents of *return_pc_addr.
    MethodExitEvent(self, instrumentation_frame.method_, OptionalFrame{}, return_value);
  }

  // Deoptimize if the caller needs to continue execution in the interpreter. Do nothing if we get
  // back to an upcall.
  NthCallerVisitor visitor(self, 1, true);
  visitor.WalkStack(true);
  // Check if we forced all threads to deoptimize in the time between this frame being created and
  // now.
  bool should_deoptimize_frame = instrumentation_frame.force_deopt_id_ != current_force_deopt_id_;
  bool deoptimize = ShouldDeoptimizeMethod(self, visitor) || should_deoptimize_frame;

  if (is_ref) {
    // Restore the return value if it's a reference, since it might have moved.
    *reinterpret_cast<mirror::Object**>(gpr_result) = res.Get();
  }
  if (deoptimize && Runtime::Current()->IsAsyncDeoptimizeable(*return_pc_addr)) {
    if (kVerboseInstrumentation) {
      LOG(INFO) << "Deoptimizing "
                << visitor.caller->PrettyMethod()
                << " by returning from "
                << method->PrettyMethod()
                << " with result "
                << std::hex << return_value.GetJ() << std::dec
                << " in "
                << *self;
    }
    DeoptimizationMethodType deopt_method_type = GetDeoptimizationMethodType(method);
    self->PushDeoptimizationContext(return_value,
                                    is_ref,
                                    /* exception= */ nullptr,
                                    /* from_code= */ false,
                                    deopt_method_type);
    return GetTwoWordSuccessValue(*return_pc_addr,
                                  reinterpret_cast<uintptr_t>(GetQuickDeoptimizationEntryPoint()));
  } else {
    if (deoptimize && !Runtime::Current()->IsAsyncDeoptimizeable(*return_pc_addr)) {
      VLOG(deopt) << "Got a deoptimization request on un-deoptimizable " << method->PrettyMethod()
                  << " at PC " << reinterpret_cast<void*>(*return_pc_addr);
    }
    if (kVerboseInstrumentation) {
      LOG(INFO) << "Returning from " << method->PrettyMethod()
                << " to PC " << reinterpret_cast<void*>(*return_pc_addr);
    }
    return GetTwoWordSuccessValue(0, *return_pc_addr);
  }
}
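
// The TwoWordReturn tells the exit assembly stub where to resume. On the
// deoptimization path it carries the restored return PC together with the
// quick deoptimization entry point, so control transfers into the
// deoptimizer; on the normal path it carries (0, original return PC) and the
// stub simply returns to the caller. A reference result is also written back
// through *gpr_result so the stub reloads the possibly-moved object.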

uintptr_t Instrumentation::PopFramesForDeoptimization(Thread* self, uintptr_t pop_until) const {
  std::map<uintptr_t, instrumentation::InstrumentationStackFrame>* stack =
      self->GetInstrumentationStack();
  // Pop all instrumentation frames below `pop_until`.
  uintptr_t return_pc = 0u;
  for (auto i = stack->begin(); i != stack->end() && i->first <= pop_until;) {
    auto e = i;
    ++i;
    if (kVerboseInstrumentation) {
      LOG(INFO) << "Popping for deoptimization " << e->second.method_->PrettyMethod();
    }
    return_pc = e->second.return_pc_;
    stack->erase(e);
  }
  return return_pc;
}
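
// Since the map is ordered by stack pointer, iterating from begin() while
// `i->first <= pop_until` erases exactly the instrumentation frames below the
// deoptimization target, and the return PC of the last frame popped is handed
// back so the deoptimizer knows where execution would have resumed.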

std::string InstrumentationStackFrame::Dump() const {
  std::ostringstream os;
  os << ArtMethod::PrettyMethod(method_) << ":"
     << reinterpret_cast<void*>(return_pc_) << " this=" << reinterpret_cast<void*>(this_object_)
     << " force_deopt_id=" << force_deopt_id_;
  return os.str();
}

} // namespace instrumentation
} // namespace art