/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "quick_exception_handler.h"

#include <ios>
#include <queue>
#include <sstream>

#include "arch/context.h"
#include "art_method-inl.h"
#include "base/array_ref.h"
#include "base/globals.h"
#include "base/logging.h"  // For VLOG_IS_ON.
#include "base/pointer_size.h"
#include "base/systrace.h"
#include "dex/dex_file_types.h"
#include "dex/dex_instruction.h"
#include "dex/dex_instruction-inl.h"
#include "entrypoints/entrypoint_utils.h"
#include "entrypoints/quick/quick_entrypoints_enum.h"
#include "entrypoints/runtime_asm_entrypoints.h"
#include "handle_scope-inl.h"
#include "instrumentation.h"
#include "interpreter/shadow_frame-inl.h"
#include "jit/jit.h"
#include "jit/jit_code_cache.h"
#include "mirror/class-inl.h"
#include "mirror/class_loader.h"
#include "mirror/throwable.h"
#include "nterp_helpers.h"
#include "oat/oat_quick_method_header.h"
#include "oat/stack_map.h"
#include "stack.h"

namespace art HIDDEN {

static constexpr bool kDebugExceptionDelivery = false;
static constexpr size_t kInvalidFrameDepth = 0xffffffff;

QuickExceptionHandler::QuickExceptionHandler(Thread* self, bool is_deoptimization)
    : self_(self),
      context_(Context::Create()),
      is_deoptimization_(is_deoptimization),
      handler_quick_frame_(nullptr),
      handler_quick_frame_pc_(0),
      handler_method_header_(nullptr),
      handler_quick_arg0_(0),
      clear_exception_(false),
      handler_frame_depth_(kInvalidFrameDepth),
      full_fragment_done_(false) {}

// Finds catch handler.
class CatchBlockStackVisitor final : public StackVisitor {
 public:
  CatchBlockStackVisitor(Thread* self,
                         Context* context,
                         Handle<mirror::Throwable>* exception,
                         QuickExceptionHandler* exception_handler,
                         uint32_t skip_frames,
                         bool skip_top_unwind_callback)
      REQUIRES_SHARED(Locks::mutator_lock_)
      : StackVisitor(self, context, StackVisitor::StackWalkKind::kIncludeInlinedFrames),
        exception_(exception),
        exception_handler_(exception_handler),
        skip_frames_(skip_frames),
        skip_unwind_callback_(skip_top_unwind_callback) {
    DCHECK_IMPLIES(skip_unwind_callback_, skip_frames_ == 0);
  }

  bool VisitFrame() override REQUIRES_SHARED(Locks::mutator_lock_) {
    ArtMethod* method = GetMethod();
    exception_handler_->SetHandlerFrameDepth(GetFrameDepth());
    if (method == nullptr) {
      DCHECK_EQ(skip_frames_, 0u)
          << "We tried to skip an upcall! We should have returned to the upcall to finish delivery";
      // This is the upcall; we remember the frame and last pc so that we may long jump to them.
      exception_handler_->SetHandlerQuickFramePc(GetCurrentQuickFramePc());
      exception_handler_->SetHandlerQuickFrame(GetCurrentQuickFrame());
      return false;  // End stack walk.
    }
    if (skip_frames_ != 0) {
      skip_frames_--;
      return true;
    }
    if (method->IsRuntimeMethod()) {
      // Ignore callee save method.
      DCHECK(method->IsCalleeSaveMethod());
      return true;
    }
    bool continue_stack_walk = HandleTryItems(method);
    // Collect methods for which the MethodUnwind callback needs to be invoked. MethodUnwind
    // callbacks can potentially throw, so we want to call them after we find the catch block.
    // We stop the stack walk when we find the catch block. If we are ending the stack walk, we
    // don't have to unwind this method, so don't record it.
    if (continue_stack_walk && !skip_unwind_callback_) {
      // Skip unwind callback is only used when the method exit callback has thrown an exception.
      // In that case, we should have the runtime method (artMethodExitHook) on top of the stack
      // and the second frame should be the method for which method exit was called.
      DCHECK_IMPLIES(skip_unwind_callback_, GetFrameDepth() == 2);
      unwound_methods_.push(method);
    }
    skip_unwind_callback_ = false;
    return continue_stack_walk;
  }

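  // Returns the methods whose frames were unwound during the stack walk, so that the caller can
  // invoke MethodUnwind callbacks for them once the catch block has been found.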
  std::queue<ArtMethod*>& GetUnwoundMethods() {
    return unwound_methods_;
  }

 private:
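  // Looks for a catch handler in `method` that matches the exception type at the current dex pc.
  // If one is found, records the handler frame, pc and stack map row in the exception handler and
  // returns false to end the stack walk; otherwise returns true to continue walking.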
  bool HandleTryItems(ArtMethod* method)
      REQUIRES_SHARED(Locks::mutator_lock_) {
    uint32_t dex_pc = dex::kDexNoIndex;
    if (!method->IsNative()) {
      dex_pc = GetDexPc();
    }
    if (dex_pc != dex::kDexNoIndex) {
      bool clear_exception = false;
      StackHandleScope<1> hs(GetThread());
      Handle<mirror::Class> to_find(hs.NewHandle((*exception_)->GetClass()));
      uint32_t found_dex_pc = method->FindCatchBlock(to_find, dex_pc, &clear_exception);
      exception_handler_->SetClearException(clear_exception);
      if (found_dex_pc != dex::kDexNoIndex) {
        exception_handler_->SetHandlerDexPcList(ComputeDexPcList(found_dex_pc));
        uint32_t stack_map_row = -1;
        exception_handler_->SetHandlerQuickFramePc(
            GetCurrentOatQuickMethodHeader()->ToNativeQuickPcForCatchHandlers(
                method, exception_handler_->GetHandlerDexPcList(), &stack_map_row));
        exception_handler_->SetCatchStackMapRow(stack_map_row);
        exception_handler_->SetHandlerQuickFrame(GetCurrentQuickFrame());
        exception_handler_->SetHandlerMethodHeader(GetCurrentOatQuickMethodHeader());
        return false;  // End stack walk.
      } else if (UNLIKELY(GetThread()->HasDebuggerShadowFrames())) {
        // We are going to unwind this frame. Did we prepare a shadow frame for debugging?
        size_t frame_id = GetFrameId();
        ShadowFrame* frame = GetThread()->FindDebuggerShadowFrame(frame_id);
        if (frame != nullptr) {
          // We will not execute this shadow frame so we can safely deallocate it.
          GetThread()->RemoveDebuggerShadowFrameMapping(frame_id);
          ShadowFrame::DeleteDeoptimizedFrame(frame);
        }
      }
    }
    return true;  // Continue stack walk.
  }

  // The exception we're looking for the catch block of.
  Handle<mirror::Throwable>* exception_;
  // The quick exception handler we're visiting for.
  QuickExceptionHandler* const exception_handler_;
  // The number of frames to skip searching for catches in.
  uint32_t skip_frames_;
  // The list of methods we would skip to reach the catch block. We record these to call
  // MethodUnwind callbacks.
  std::queue<ArtMethod*> unwound_methods_;
  // Specifies if the unwind callback should be ignored for the method at the top of the stack.
  bool skip_unwind_callback_;

  DISALLOW_COPY_AND_ASSIGN(CatchBlockStackVisitor);
};

// Finds the appropriate exception catch after calling all method exit instrumentation functions.
// Note that this might change the exception being thrown. If is_method_exit_exception is true,
// skip the method unwind call for the method on top of the stack, as the exception was thrown by
// the method exit callback.
void QuickExceptionHandler::FindCatch(ObjPtr<mirror::Throwable> exception,
                                      bool is_method_exit_exception) {
  DCHECK(!is_deoptimization_);
  instrumentation::Instrumentation* instr = Runtime::Current()->GetInstrumentation();
  // The number of total frames we have so far popped.
  uint32_t already_popped = 0;
  bool popped_to_top = true;
  StackHandleScope<1> hs(self_);
  MutableHandle<mirror::Throwable> exception_ref(hs.NewHandle(exception));
  bool skip_top_unwind_callback = is_method_exit_exception;
  // Sending the instrumentation events (done by the InstrumentationStackPopper) can cause new
  // exceptions to be thrown which will override the current exception. Therefore we need to
  // perform the search for a catch in a loop until we have successfully popped all the way to a
  // catch or the top of the stack.
  do {
    if (kDebugExceptionDelivery) {
      ObjPtr<mirror::String> msg = exception_ref->GetDetailMessage();
      std::string str_msg(msg != nullptr ? msg->ToModifiedUtf8() : "");
      self_->DumpStack(LOG_STREAM(INFO) << "Delivering exception: " << exception_ref->PrettyTypeOf()
                                        << ": " << str_msg << "\n");
    }

    // Walk the stack to find catch handler.
    CatchBlockStackVisitor visitor(self_,
                                   context_.get(),
                                   &exception_ref,
                                   this,
                                   /*skip_frames=*/already_popped,
                                   skip_top_unwind_callback);
    visitor.WalkStack(true);
    skip_top_unwind_callback = false;

    uint32_t new_pop_count = handler_frame_depth_;
    DCHECK_GE(new_pop_count, already_popped);
    already_popped = new_pop_count;

    if (kDebugExceptionDelivery) {
      if (*handler_quick_frame_ == nullptr) {
        LOG(INFO) << "Handler is upcall";
      }
      if (GetHandlerMethod() != nullptr) {
        const DexFile* dex_file = GetHandlerMethod()->GetDexFile();
        DCHECK(handler_dex_pc_list_.has_value());
        DCHECK_GE(handler_dex_pc_list_->size(), 1u);
        int line_number = annotations::GetLineNumFromPC(
            dex_file, GetHandlerMethod(), handler_dex_pc_list_->front());

        // We may have an inlined method. If so, we can add some extra logging.
        std::stringstream ss;
        ArtMethod* maybe_inlined_method = visitor.GetMethod();
        if (maybe_inlined_method != GetHandlerMethod()) {
          const DexFile* inlined_dex_file = maybe_inlined_method->GetDexFile();
          DCHECK_GE(handler_dex_pc_list_->size(), 2u);
          int inlined_line_number = annotations::GetLineNumFromPC(
              inlined_dex_file, maybe_inlined_method, handler_dex_pc_list_->back());
          ss << " which ends up calling inlined method " << maybe_inlined_method->PrettyMethod()
             << " (line: " << inlined_line_number << ")";
        }

        LOG(INFO) << "Handler: " << GetHandlerMethod()->PrettyMethod() << " (line: "
                  << line_number << ")" << ss.str();
      }
    }
    // Exception was cleared as part of delivery.
    DCHECK(!self_->IsExceptionPending());
    // If the handler is in optimized code, we need to set the catch environment.
    if (*handler_quick_frame_ != nullptr &&
        handler_method_header_ != nullptr &&
        handler_method_header_->IsOptimized()) {
      SetCatchEnvironmentForOptimizedHandler(&visitor);
    }
    popped_to_top = instr->ProcessMethodUnwindCallbacks(self_,
                                                        visitor.GetUnwoundMethods(),
                                                        exception_ref);
  } while (!popped_to_top);

  if (!clear_exception_) {
    // Put exception back in root set with clear throw location.
    self_->SetException(exception_ref.Get());
  }
}

static VRegKind ToVRegKind(DexRegisterLocation::Kind kind) {
  // Slightly hacky since we cannot map DexRegisterLocationKind and VRegKind
  // one to one. However, StackVisitor::GetVRegFromOptimizedCode only needs to
  // distinguish between core/FPU registers and low/high bits on 64-bit.
  switch (kind) {
    case DexRegisterLocation::Kind::kConstant:
    case DexRegisterLocation::Kind::kInStack:
      // VRegKind is ignored.
      return VRegKind::kUndefined;

    case DexRegisterLocation::Kind::kInRegister:
      // Selects core register. For 64-bit registers, selects low 32 bits.
      return VRegKind::kLongLoVReg;

    case DexRegisterLocation::Kind::kInRegisterHigh:
      // Selects core register. For 64-bit registers, selects high 32 bits.
      return VRegKind::kLongHiVReg;

    case DexRegisterLocation::Kind::kInFpuRegister:
      // Selects FPU register. For 64-bit registers, selects low 32 bits.
      return VRegKind::kDoubleLoVReg;

    case DexRegisterLocation::Kind::kInFpuRegisterHigh:
      // Selects FPU register. For 64-bit registers, selects high 32 bits.
      return VRegKind::kDoubleHiVReg;

    default:
      LOG(FATAL) << "Unexpected vreg location " << kind;
      UNREACHABLE();
  }
}

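// Copies the values of the dex registers that are live in the catch block's environment from
// their locations at the throwing instruction into the stack slots where the catch block's phis
// expect them. Only called when the handler is in optimized code.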
void QuickExceptionHandler::SetCatchEnvironmentForOptimizedHandler(StackVisitor* stack_visitor) {
  DCHECK(!is_deoptimization_);
  DCHECK(*handler_quick_frame_ != nullptr) << "Method should not be called on upcall exceptions";
  DCHECK(GetHandlerMethod() != nullptr && handler_method_header_->IsOptimized());

  if (kDebugExceptionDelivery) {
    self_->DumpStack(LOG_STREAM(INFO) << "Setting catch phis: ");
  }

  CodeInfo code_info(handler_method_header_);

  // Find stack map of the catch block.
  ArrayRef<const uint32_t> dex_pc_list = GetHandlerDexPcList();
  DCHECK_GE(dex_pc_list.size(), 1u);
  StackMap catch_stack_map = code_info.GetStackMapAt(GetCatchStackMapRow());
  DCHECK(catch_stack_map.IsValid());
  DCHECK_EQ(catch_stack_map.Row(), code_info.GetCatchStackMapForDexPc(dex_pc_list).Row());
  const uint32_t catch_depth = dex_pc_list.size() - 1;
  const size_t number_of_registers = stack_visitor->GetNumberOfRegisters(&code_info, catch_depth);
  DexRegisterMap catch_vreg_map =
      code_info.GetDexRegisterMapOf(catch_stack_map, /* first= */ 0, number_of_registers);

  if (!catch_vreg_map.HasAnyLiveDexRegisters()) {
    return;
  }

  // Find stack map of the throwing instruction.
  StackMap throw_stack_map =
      code_info.GetStackMapForNativePcOffset(stack_visitor->GetNativePcOffset());
  DCHECK(throw_stack_map.IsValid());
  const uint32_t throw_depth = stack_visitor->InlineDepth();
  DCHECK_EQ(throw_depth, catch_depth);
  DexRegisterMap throw_vreg_map =
      code_info.GetDexRegisterMapOf(throw_stack_map, /* first= */ 0, number_of_registers);
  DCHECK_EQ(throw_vreg_map.size(), catch_vreg_map.size());

  // First vreg that is part of the catch's environment.
  const size_t catch_vreg_start = catch_depth == 0
    ? 0
    : stack_visitor->GetNumberOfRegisters(&code_info, catch_depth - 1);

  // We don't need to copy anything in the parent's environment.
  for (size_t vreg = 0; vreg < catch_vreg_start; ++vreg) {
    DexRegisterLocation::Kind catch_location_kind = catch_vreg_map[vreg].GetKind();
    DCHECK(catch_location_kind == DexRegisterLocation::Kind::kNone ||
           catch_location_kind == DexRegisterLocation::Kind::kConstant ||
           catch_location_kind == DexRegisterLocation::Kind::kInStack)
        << "Unexpected catch_location_kind: " << catch_location_kind;
  }

  // Copy values between the throw and the catch.
  for (size_t vreg = catch_vreg_start; vreg < catch_vreg_map.size(); ++vreg) {
    DexRegisterLocation::Kind catch_location_kind = catch_vreg_map[vreg].GetKind();
    if (catch_location_kind == DexRegisterLocation::Kind::kNone) {
      continue;
    }

    // Consistency checks.
    DCHECK_EQ(catch_location_kind, DexRegisterLocation::Kind::kInStack);
    uint32_t vreg_value;
    VRegKind vreg_kind = ToVRegKind(throw_vreg_map[vreg].GetKind());
    DCHECK_NE(vreg_kind, kReferenceVReg)
        << "The fast path in GetVReg doesn't expect a kReferenceVReg.";

    // Get vreg value from its current location.
    bool get_vreg_success = stack_visitor->GetVReg(stack_visitor->GetMethod(),
                                                   vreg,
                                                   vreg_kind,
                                                   &vreg_value,
                                                   throw_vreg_map[vreg],
                                                   /* need_full_register_list= */ true);
    CHECK(get_vreg_success) << "VReg " << vreg << " was optimized out ("
                            << "method=" << ArtMethod::PrettyMethod(stack_visitor->GetMethod())
                            << ", dex_pc=" << stack_visitor->GetDexPc() << ", "
                            << "native_pc_offset=" << stack_visitor->GetNativePcOffset() << ")";

    // Copy value to the catch phi's stack slot.
    int32_t slot_offset = catch_vreg_map[vreg].GetStackOffsetInBytes();
    ArtMethod** frame_top = stack_visitor->GetCurrentQuickFrame();
    uint8_t* slot_address = reinterpret_cast<uint8_t*>(frame_top) + slot_offset;
    uint32_t* slot_ptr = reinterpret_cast<uint32_t*>(slot_address);
    *slot_ptr = vreg_value;
  }
}

// Prepares deoptimization.
class DeoptimizeStackVisitor final : public StackVisitor {
 public:
  DeoptimizeStackVisitor(Thread* self,
                         Context* context,
                         QuickExceptionHandler* exception_handler,
                         bool single_frame,
                         bool skip_method_exit_callbacks) REQUIRES_SHARED(Locks::mutator_lock_)
      : StackVisitor(self, context, StackVisitor::StackWalkKind::kIncludeInlinedFrames),
        exception_handler_(exception_handler),
        prev_shadow_frame_(nullptr),
        bottom_shadow_frame_(nullptr),
        stacked_shadow_frame_pushed_(false),
        single_frame_deopt_(single_frame),
        single_frame_done_(false),
        single_frame_deopt_method_(nullptr),
        single_frame_deopt_quick_method_header_(nullptr),
        callee_method_(nullptr),
        skip_method_exit_callbacks_(skip_method_exit_callbacks) {}

  ArtMethod* GetSingleFrameDeoptMethod() const {
    return single_frame_deopt_method_;
  }

  const OatQuickMethodHeader* GetSingleFrameDeoptQuickMethodHeader() const {
    return single_frame_deopt_quick_method_header_;
  }

  ShadowFrame* GetBottomShadowFrame() const {
    return bottom_shadow_frame_;
  }

  const std::vector<uint32_t>& GetDexPcs() const {
    return dex_pcs_;
  }

  void FinishStackWalk() REQUIRES_SHARED(Locks::mutator_lock_) {
    // This is the upcall, or the next full frame in single-frame deopt, or the
    // code isn't deoptimizeable. We remember the frame and last pc so that we
    // may long jump to them.
    exception_handler_->SetHandlerQuickFramePc(GetCurrentQuickFramePc());
    exception_handler_->SetHandlerQuickFrame(GetCurrentQuickFrame());
    exception_handler_->SetHandlerMethodHeader(GetCurrentOatQuickMethodHeader());
    if (!stacked_shadow_frame_pushed_) {
      // In case there is no deoptimized shadow frame for this upcall, we still
      // need to push a nullptr to the stack since there is always a matching pop after
      // the long jump.
      GetThread()->PushStackedShadowFrame(nullptr,
                                          StackedShadowFrameType::kDeoptimizationShadowFrame);
      stacked_shadow_frame_pushed_ = true;
    }
    if (GetMethod() == nullptr) {
      exception_handler_->SetFullFragmentDone(true);
    } else {
      CHECK(callee_method_ != nullptr) << GetMethod()->PrettyMethod(false);
      exception_handler_->SetHandlerQuickArg0(reinterpret_cast<uintptr_t>(callee_method_));
    }
  }

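  // Builds a ShadowFrame for each compiled frame being deoptimized so the interpreter can resume
  // there. Stops at the upcall, at non-deoptimizeable code, or, for single-frame deoptimization,
  // at the first non-inlined frame.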
  bool VisitFrame() override REQUIRES_SHARED(Locks::mutator_lock_) {
    exception_handler_->SetHandlerFrameDepth(GetFrameDepth());
    ArtMethod* method = GetMethod();
    VLOG(deopt) << "Deoptimizing stack: depth: " << GetFrameDepth()
                << " at method " << ArtMethod::PrettyMethod(method);

    if (method == nullptr || single_frame_done_) {
      FinishStackWalk();
      return false;  // End stack walk.
    }

    // Update if method exit event needs to be reported. We should report exit event only if we
    // have reported an entry event. So tell the interpreter if an entry event was reported.
    bool supports_exit_events = Runtime::Current()->GetInstrumentation()->MethodSupportsExitEvents(
        method, GetCurrentOatQuickMethodHeader());

    if (method->IsRuntimeMethod()) {
      // Ignore callee save method.
      DCHECK(method->IsCalleeSaveMethod());
      return true;
    } else if (method->IsNative()) {
      // If we return from JNI with a pending exception and want to deoptimize, we need to skip
      // the native method. The top method is a runtime method, the native method comes next.
      // We also deoptimize due to method instrumentation reasons from method exit callbacks.
      // In these cases the native method is at the top of the stack.
      CHECK((GetFrameDepth() == 1U) || (GetFrameDepth() == 0U));
      // We see a native frame when:
      // 1. returning from JNI with a pending exception
      // 2. deopting from method exit callbacks (with or without a pending exception).
      //    skip_method_exit_callbacks_ is set in this case
      // 3. handling async exception on suspend points for fast native methods.
      // We only need to call method unwind event in the first case.
      if (supports_exit_events &&
          !skip_method_exit_callbacks_ &&
          GetThread()->IsExceptionPending()) {
        // An exception has occurred in a native method and we are deoptimizing past the native
        // method. So report method unwind event here.
        Runtime::Current()->GetInstrumentation()->MethodUnwindEvent(
            GetThread(), method, dex::kDexNoIndex);
      }
      callee_method_ = method;
      return true;
    } else if (!single_frame_deopt_ &&
               !Runtime::Current()->IsAsyncDeoptimizeable(GetOuterMethod(),
                                                          GetCurrentQuickFramePc())) {
      // We hit some code that's not deoptimizeable. However, single-frame deoptimization triggered
      // from compiled code is always allowed since HDeoptimize always saves the full environment.
      LOG(WARNING) << "Got request to deoptimize un-deoptimizable method "
                   << method->PrettyMethod();
      FinishStackWalk();
      return false;  // End stack walk.
    } else {
      // Check if a shadow frame already exists for debugger's set-local-value purpose.
      const size_t frame_id = GetFrameId();
      ShadowFrame* new_frame = GetThread()->FindDebuggerShadowFrame(frame_id);
      const bool* updated_vregs;
      CodeItemDataAccessor accessor(method->DexInstructionData());
      const size_t num_regs = accessor.RegistersSize();
      if (new_frame == nullptr) {
        new_frame = ShadowFrame::CreateDeoptimizedFrame(num_regs, method, GetDexPc());
        updated_vregs = nullptr;
      } else {
        updated_vregs = GetThread()->GetUpdatedVRegFlags(frame_id);
        DCHECK(updated_vregs != nullptr);
      }
      if (GetCurrentOatQuickMethodHeader()->IsNterpMethodHeader()) {
        HandleNterpDeoptimization(method, new_frame, updated_vregs);
      } else {
        HandleOptimizingDeoptimization(method, new_frame, updated_vregs);
      }
      new_frame->SetSkipMethodExitEvents(!supports_exit_events);
      // If we are deoptimizing after method exit callback we shouldn't call the method exit
      // callbacks again for the top frame. We may have to deopt after the callback if the callback
      // either throws or performs other actions that require a deopt.
      // We only need to skip for the top frame and the rest of the frames should still run the
      // callbacks. So only do this check for the top frame.
      if (GetFrameDepth() == 0U && skip_method_exit_callbacks_) {
        new_frame->SetSkipMethodExitEvents(true);
        // This exception was raised by method exit callbacks and we shouldn't report it to
        // listeners for these exceptions.
        if (GetThread()->IsExceptionPending()) {
          new_frame->SetSkipNextExceptionEvent(true);
        }
      }
      if (updated_vregs != nullptr) {
        // Calling Thread::RemoveDebuggerShadowFrameMapping will also delete the updated_vregs
        // array so this must come after we processed the frame.
        GetThread()->RemoveDebuggerShadowFrameMapping(frame_id);
        DCHECK(GetThread()->FindDebuggerShadowFrame(frame_id) == nullptr);
      }
      if (prev_shadow_frame_ != nullptr) {
        prev_shadow_frame_->SetLink(new_frame);
      } else {
        // Will be popped after the long jump after DeoptimizeStack(),
        // right before interpreter::EnterInterpreterFromDeoptimize().
        stacked_shadow_frame_pushed_ = true;
        bottom_shadow_frame_ = new_frame;
        GetThread()->PushStackedShadowFrame(
            new_frame, StackedShadowFrameType::kDeoptimizationShadowFrame);
      }
      prev_shadow_frame_ = new_frame;

      if (single_frame_deopt_) {
        dex_pcs_.push_back(GetDexPc());
        if (!IsInInlinedFrame()) {
          // Single-frame deopt ends at the first non-inlined frame and needs to store that method.
          single_frame_done_ = true;
          single_frame_deopt_method_ = method;
          single_frame_deopt_quick_method_header_ = GetCurrentOatQuickMethodHeader();
        }
      }
      callee_method_ = method;
      return true;
    }
  }

 private:
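  // Fills the shadow frame from an nterp frame by reading each dex register out of nterp's
  // register and reference arrays.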
  void HandleNterpDeoptimization(ArtMethod* m,
                                 ShadowFrame* new_frame,
                                 const bool* updated_vregs)
      REQUIRES_SHARED(Locks::mutator_lock_) {
    ArtMethod** cur_quick_frame = GetCurrentQuickFrame();
    StackReference<mirror::Object>* vreg_ref_base =
        reinterpret_cast<StackReference<mirror::Object>*>(NterpGetReferenceArray(cur_quick_frame));
    int32_t* vreg_int_base =
        reinterpret_cast<int32_t*>(NterpGetRegistersArray(cur_quick_frame));
    CodeItemDataAccessor accessor(m->DexInstructionData());
    const uint16_t num_regs = accessor.RegistersSize();
    // An nterp frame has two arrays: a dex register array and a reference array
    // that shadows the dex register array but only contains references
    // (non-reference dex registers have nulls). See nterp_helpers.cc.
    for (size_t reg = 0; reg < num_regs; ++reg) {
      if (updated_vregs != nullptr && updated_vregs[reg]) {
        // Keep the value set by debugger.
        continue;
      }
      StackReference<mirror::Object>* ref_addr = vreg_ref_base + reg;
      mirror::Object* ref = ref_addr->AsMirrorPtr();
      if (ref != nullptr) {
        new_frame->SetVRegReference(reg, ref);
      } else {
        new_frame->SetVReg(reg, vreg_int_base[reg]);
      }
    }
  }

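  // Fills the shadow frame from an optimized frame by resolving each dex register's location
  // (stack slot, machine register or constant) through the stack map, and marking values that
  // the register/stack masks identify as references.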
  void HandleOptimizingDeoptimization(ArtMethod* m,
                                      ShadowFrame* new_frame,
                                      const bool* updated_vregs)
      REQUIRES_SHARED(Locks::mutator_lock_) {
    const OatQuickMethodHeader* method_header = GetCurrentOatQuickMethodHeader();
    CodeInfo code_info(method_header);
    uintptr_t native_pc_offset = method_header->NativeQuickPcOffset(GetCurrentQuickFramePc());
    StackMap stack_map = code_info.GetStackMapForNativePcOffset(native_pc_offset);
    CodeItemDataAccessor accessor(m->DexInstructionData());
    const size_t number_of_vregs = accessor.RegistersSize();
    uint32_t register_mask = code_info.GetRegisterMaskOf(stack_map);
    BitMemoryRegion stack_mask = code_info.GetStackMaskOf(stack_map);
    DexRegisterMap vreg_map = IsInInlinedFrame()
        ? code_info.GetInlineDexRegisterMapOf(stack_map, GetCurrentInlinedFrame())
        : code_info.GetDexRegisterMapOf(stack_map);

    if (kIsDebugBuild || UNLIKELY(Runtime::Current()->IsJavaDebuggable())) {
      CHECK_EQ(vreg_map.size(), number_of_vregs) << *Thread::Current()
                                                 << "Deopting: " << m->PrettyMethod()
                                                 << " inlined? "
                                                 << std::boolalpha << IsInInlinedFrame();
    }
    if (vreg_map.empty()) {
      return;
    }

    for (uint16_t vreg = 0; vreg < number_of_vregs; ++vreg) {
      if (updated_vregs != nullptr && updated_vregs[vreg]) {
        // Keep the value set by debugger.
        continue;
      }

      DexRegisterLocation::Kind location = vreg_map[vreg].GetKind();
      static constexpr uint32_t kDeadValue = 0xEBADDE09;
      uint32_t value = kDeadValue;
      bool is_reference = false;

      switch (location) {
        case DexRegisterLocation::Kind::kInStack: {
          const int32_t offset = vreg_map[vreg].GetStackOffsetInBytes();
          const uint8_t* addr = reinterpret_cast<const uint8_t*>(GetCurrentQuickFrame()) + offset;
          value = *reinterpret_cast<const uint32_t*>(addr);
          uint32_t bit = (offset >> 2);
          if (bit < stack_mask.size_in_bits() && stack_mask.LoadBit(bit)) {
            is_reference = true;
          }
          break;
        }
        case DexRegisterLocation::Kind::kInRegister:
        case DexRegisterLocation::Kind::kInRegisterHigh:
        case DexRegisterLocation::Kind::kInFpuRegister:
        case DexRegisterLocation::Kind::kInFpuRegisterHigh: {
          uint32_t reg = vreg_map[vreg].GetMachineRegister();
          bool result = GetRegisterIfAccessible(reg, location, &value);
          CHECK(result);
          if (location == DexRegisterLocation::Kind::kInRegister) {
            if (((1u << reg) & register_mask) != 0) {
              is_reference = true;
            }
          }
          break;
        }
        case DexRegisterLocation::Kind::kConstant: {
          value = vreg_map[vreg].GetConstant();
          if (value == 0) {
            // Make it a reference for extra safety.
            is_reference = true;
          }
          break;
        }
        case DexRegisterLocation::Kind::kNone: {
          break;
        }
        default: {
          LOG(FATAL) << "Unexpected location kind " << vreg_map[vreg].GetKind();
          UNREACHABLE();
        }
      }
      if (is_reference) {
        new_frame->SetVRegReference(vreg, reinterpret_cast<mirror::Object*>(value));
      } else {
        new_frame->SetVReg(vreg, value);
      }
    }
  }

  static VRegKind GetVRegKind(uint16_t reg, const std::vector<int32_t>& kinds) {
    return static_cast<VRegKind>(kinds[reg * 2]);
  }

  QuickExceptionHandler* const exception_handler_;
  ShadowFrame* prev_shadow_frame_;
  ShadowFrame* bottom_shadow_frame_;
  bool stacked_shadow_frame_pushed_;
  const bool single_frame_deopt_;
  bool single_frame_done_;
  ArtMethod* single_frame_deopt_method_;
  const OatQuickMethodHeader* single_frame_deopt_quick_method_header_;
  ArtMethod* callee_method_;
  // This specifies if method exit callbacks should be skipped for the top frame. We may request
  // a deopt after running method exit callbacks if the callback throws or requests events that
  // need a deopt.
  bool skip_method_exit_callbacks_;
  std::vector<uint32_t> dex_pcs_;

  DISALLOW_COPY_AND_ASSIGN(DeoptimizeStackVisitor);
};

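// Sets up the return pc for the long jump: either back into the invoke stub (with the special
// deoptimization exception set) or into the quick-to-interpreter bridge.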
void QuickExceptionHandler::PrepareForLongJumpToInvokeStubOrInterpreterBridge() {
  if (full_fragment_done_) {
    // Restore deoptimization exception. When returning from the invoke stub,
    // ArtMethod::Invoke() will see the special exception to know deoptimization
    // is needed.
    self_->SetException(Thread::GetDeoptimizationException());
  } else {
    // PC needs to be of the quick-to-interpreter bridge.
    int32_t offset;
    offset = GetThreadOffset<kRuntimePointerSize>(kQuickQuickToInterpreterBridge).Int32Value();
    handler_quick_frame_pc_ = *reinterpret_cast<uintptr_t*>(
        reinterpret_cast<uint8_t*>(self_) + offset);
  }
}

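// Deoptimizes all compiled frames of the current stack fragment down to the upcall and prepares
// the long jump into the interpreter.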
void QuickExceptionHandler::DeoptimizeStack(bool skip_method_exit_callbacks) {
  DCHECK(is_deoptimization_);
  if (kDebugExceptionDelivery) {
    self_->DumpStack(LOG_STREAM(INFO) << "Deoptimizing: ");
  }

  DeoptimizeStackVisitor visitor(self_, context_.get(), this, false, skip_method_exit_callbacks);
  visitor.WalkStack(true);
  PrepareForLongJumpToInvokeStubOrInterpreterBridge();
}

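// Deoptimizes only the method at the top of the stack (including its inlined frames), e.g. when
// compiled code explicitly requested a deoptimization.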
void QuickExceptionHandler::DeoptimizeSingleFrame(DeoptimizationKind kind) {
  DCHECK(is_deoptimization_);

  // This deopt is requested while still executing the method. We haven't run method exit callbacks
  // yet, so don't skip them.
  DeoptimizeStackVisitor visitor(
      self_, context_.get(), this, true, /* skip_method_exit_callbacks= */ false);
  visitor.WalkStack(true);

  // Compiled code made an explicit deoptimization.
  ArtMethod* deopt_method = visitor.GetSingleFrameDeoptMethod();
  SCOPED_TRACE << "Deoptimizing "
               <<  deopt_method->PrettyMethod()
               << ": " << GetDeoptimizationKindName(kind);

  DCHECK(deopt_method != nullptr);
  if (VLOG_IS_ON(deopt) || kDebugExceptionDelivery) {
    LOG(INFO) << "Single-frame deopting: "
              << deopt_method->PrettyMethod()
              << " due to "
              << GetDeoptimizationKindName(kind);
    DumpFramesWithType(self_, /* details= */ true);
  }
  // When deoptimizing for debug support, the optimized code is still valid and
  // can be reused when debugging support (like breakpoints) is no longer
  // needed for this method.
  Runtime* runtime = Runtime::Current();
  if (runtime->UseJitCompilation() && (kind != DeoptimizationKind::kDebugging)) {
    runtime->GetJit()->GetCodeCache()->InvalidateCompiledCodeFor(
        deopt_method, visitor.GetSingleFrameDeoptQuickMethodHeader());
  } else {
    runtime->GetInstrumentation()->ReinitializeMethodsCode(deopt_method);
  }

  // If the deoptimization is due to an inline cache, update it with the type
  // that made us deoptimize. This avoids pathological cases of never seeing
  // that type while executing baseline generated code.
  if (kind == DeoptimizationKind::kJitInlineCache || kind == DeoptimizationKind::kJitSameTarget) {
    DCHECK(runtime->UseJitCompilation());
    ShadowFrame* shadow_frame = visitor.GetBottomShadowFrame();
    uint32_t dex_pc = shadow_frame->GetDexPC();
    CodeItemDataAccessor accessor(shadow_frame->GetMethod()->DexInstructionData());
    const uint16_t* const insns = accessor.Insns();
    const Instruction* inst = Instruction::At(insns + dex_pc);
    switch (inst->Opcode()) {
      case Instruction::INVOKE_INTERFACE:
      case Instruction::INVOKE_VIRTUAL:
      case Instruction::INVOKE_INTERFACE_RANGE:
      case Instruction::INVOKE_VIRTUAL_RANGE: {
        uint32_t encoded_dex_pc = InlineCache::EncodeDexPc(
            visitor.GetSingleFrameDeoptMethod(),
            visitor.GetDexPcs(),
            runtime->GetJit()->GetJitCompiler()->GetInlineMaxCodeUnits());
        if (encoded_dex_pc != static_cast<uint32_t>(-1)) {
          // The inline cache comes from the top-level method.
          runtime->GetJit()->GetCodeCache()->MaybeUpdateInlineCache(
              visitor.GetSingleFrameDeoptMethod(),
              encoded_dex_pc,
              shadow_frame->GetVRegReference(inst->VRegC())->GetClass(),
              self_);
        } else {
          // If the top-level inline cache did not exist, update the one for the
          // bottom method; we know it's the one that was used for compilation.
          runtime->GetJit()->GetCodeCache()->MaybeUpdateInlineCache(
              shadow_frame->GetMethod(),
              dex_pc,
              shadow_frame->GetVRegReference(inst->VRegC())->GetClass(),
              self_);
        }
        break;
      }
      default: {
        LOG(FATAL) << "Unexpected instruction for inline cache: " << inst->Name();
      }
    }
  }

  PrepareForLongJumpToInvokeStubOrInterpreterBridge();
}

void QuickExceptionHandler::DeoptimizePartialFragmentFixup() {
  CHECK(handler_quick_frame_ != nullptr);
  // Architecture-dependent work. This is to get the LR right for x86 and x86-64.
  if (kRuntimeQuickCodeISA == InstructionSet::kX86 ||
      kRuntimeQuickCodeISA == InstructionSet::kX86_64) {
    // On x86, the return address is on the stack, so just reuse it. Otherwise we would have to
    // change how longjump works.
    handler_quick_frame_ = reinterpret_cast<ArtMethod**>(
        reinterpret_cast<uintptr_t>(handler_quick_frame_) - sizeof(void*));
  }
}

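// Fills in and returns the context used to long-jump to the catch handler or to the
// deoptimization entry point.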
std::unique_ptr<Context> QuickExceptionHandler::PrepareLongJump(bool smash_caller_saves) {
  // Prepare and return the context.
  context_->SetSP(reinterpret_cast<uintptr_t>(handler_quick_frame_));
  CHECK_NE(handler_quick_frame_pc_, 0u);
  context_->SetPC(handler_quick_frame_pc_);
  context_->SetArg0(handler_quick_arg0_);
  if (smash_caller_saves) {
    context_->SmashCallerSaves();
  }
  if (!is_deoptimization_ &&
      handler_method_header_ != nullptr &&
      handler_method_header_->IsNterpMethodHeader()) {
    // The interpreter processes one method at a time, i.e. it does not inline.
    DCHECK(handler_dex_pc_list_.has_value());
    DCHECK_EQ(handler_dex_pc_list_->size(), 1u) << "We shouldn't have any inlined frames.";
    context_->SetNterpDexPC(reinterpret_cast<uintptr_t>(
        GetHandlerMethod()->DexInstructions().Insns() + handler_dex_pc_list_->front()));
  }
  // Clear the dex_pc list so as not to leak memory.
  handler_dex_pc_list_.reset();
  return std::move(context_);
}

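// Logs every frame on the stack, annotated as shadow (S), quick (Q), inlined (i), runtime (R) or
// transition (N) frame; used when debugging deoptimization.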
void QuickExceptionHandler::DumpFramesWithType(Thread* self, bool details) {
  StackVisitor::WalkStack(
      [&](const art::StackVisitor* stack_visitor) REQUIRES_SHARED(Locks::mutator_lock_) {
        ArtMethod* method = stack_visitor->GetMethod();
        if (details) {
          LOG(INFO) << "|> pc   = " << std::hex << stack_visitor->GetCurrentQuickFramePc();
          LOG(INFO) << "|> addr = " << std::hex
              << reinterpret_cast<uintptr_t>(stack_visitor->GetCurrentQuickFrame());
          if (stack_visitor->GetCurrentQuickFrame() != nullptr && method != nullptr) {
            LOG(INFO) << "|> ret  = " << std::hex << stack_visitor->GetReturnPc();
          }
        }
        if (method == nullptr) {
          // Transition, do go on, we want to unwind over bridges, all the way.
          if (details) {
            LOG(INFO) << "N  <transition>";
          }
          return true;
        } else if (method->IsRuntimeMethod()) {
          if (details) {
            LOG(INFO) << "R  " << method->PrettyMethod(true);
          }
          return true;
        } else {
          bool is_shadow = stack_visitor->GetCurrentShadowFrame() != nullptr;
          LOG(INFO) << (is_shadow ? "S" : "Q")
                    << ((!is_shadow && stack_visitor->IsInInlinedFrame()) ? "i" : " ")
                    << " "
                    << method->PrettyMethod(true);
          return true;  // Go on.
        }
      },
      self,
      /* context= */ nullptr,
      art::StackVisitor::StackWalkKind::kIncludeInlinedFrames);
}

}  // namespace art