/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "quick_exception_handler.h"

#include "arch/context.h"
#include "art_method-inl.h"
#include "base/enums.h"
#include "base/logging.h"  // For VLOG_IS_ON.
#include "base/systrace.h"
#include "dex/dex_file_types.h"
#include "dex/dex_instruction.h"
#include "entrypoints/entrypoint_utils.h"
#include "entrypoints/quick/quick_entrypoints_enum.h"
#include "entrypoints/runtime_asm_entrypoints.h"
#include "handle_scope-inl.h"
#include "interpreter/shadow_frame-inl.h"
#include "jit/jit.h"
#include "jit/jit_code_cache.h"
#include "mirror/class-inl.h"
#include "mirror/class_loader.h"
#include "mirror/throwable.h"
#include "oat_quick_method_header.h"
#include "stack.h"
#include "stack_map.h"

namespace art {

static constexpr bool kDebugExceptionDelivery = false;
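// Sentinel stored in handler_frame_depth_ until a real handler frame depth has been recorded.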
static constexpr size_t kInvalidFrameDepth = 0xffffffff;

QuickExceptionHandler::QuickExceptionHandler(Thread* self, bool is_deoptimization)
    : self_(self),
      context_(self->GetLongJumpContext()),
      is_deoptimization_(is_deoptimization),
      method_tracing_active_(is_deoptimization ||
                             Runtime::Current()->GetInstrumentation()->AreExitStubsInstalled()),
      handler_quick_frame_(nullptr),
      handler_quick_frame_pc_(0),
      handler_method_header_(nullptr),
      handler_quick_arg0_(0),
      handler_method_(nullptr),
      handler_dex_pc_(0),
      clear_exception_(false),
      handler_frame_depth_(kInvalidFrameDepth),
      full_fragment_done_(false) {}

// Finds the catch handler for a pending exception.
class CatchBlockStackVisitor final : public StackVisitor {
 public:
  CatchBlockStackVisitor(Thread* self,
                         Context* context,
                         Handle<mirror::Throwable>* exception,
                         QuickExceptionHandler* exception_handler,
                         uint32_t skip_frames)
      REQUIRES_SHARED(Locks::mutator_lock_)
      : StackVisitor(self, context, StackVisitor::StackWalkKind::kIncludeInlinedFrames),
        exception_(exception),
        exception_handler_(exception_handler),
        skip_frames_(skip_frames) {
  }

  bool VisitFrame() override REQUIRES_SHARED(Locks::mutator_lock_) {
    ArtMethod* method = GetMethod();
    exception_handler_->SetHandlerFrameDepth(GetFrameDepth());
    if (method == nullptr) {
      DCHECK_EQ(skip_frames_, 0u)
          << "We tried to skip an upcall! We should have returned to the upcall to finish delivery";
      // This is the upcall; we remember the frame and last pc so that we may long jump to them.
      exception_handler_->SetHandlerQuickFramePc(GetCurrentQuickFramePc());
      exception_handler_->SetHandlerQuickFrame(GetCurrentQuickFrame());
      exception_handler_->SetHandlerMethodHeader(GetCurrentOatQuickMethodHeader());
      uint32_t next_dex_pc;
      ArtMethod* next_art_method;
      bool has_next = GetNextMethodAndDexPc(&next_art_method, &next_dex_pc);
      // Report the method that did the down call as the handler.
      exception_handler_->SetHandlerDexPc(next_dex_pc);
      exception_handler_->SetHandlerMethod(next_art_method);
      if (!has_next) {
        // No next method? Check that the exception handler is set up for the
        // unhandled-exception case.
        DCHECK_EQ(0U, exception_handler_->GetHandlerDexPc());
        DCHECK(nullptr == exception_handler_->GetHandlerMethod());
      }
      return false;  // End stack walk.
    }
    if (skip_frames_ != 0) {
      skip_frames_--;
      return true;
    }
    if (method->IsRuntimeMethod()) {
      // Ignore callee save method.
      DCHECK(method->IsCalleeSaveMethod());
      return true;
    }
    return HandleTryItems(method);
  }

 private:
  bool HandleTryItems(ArtMethod* method)
      REQUIRES_SHARED(Locks::mutator_lock_) {
    uint32_t dex_pc = dex::kDexNoIndex;
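    // Native frames have no dex pc and no try items to search; leaving dex_pc as kDexNoIndex
    // simply unwinds past them.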
116 if (!method->IsNative()) {
117 dex_pc = GetDexPc();
118 }
119 if (dex_pc != dex::kDexNoIndex) {
120 bool clear_exception = false;
121 StackHandleScope<1> hs(GetThread());
122 Handle<mirror::Class> to_find(hs.NewHandle((*exception_)->GetClass()));
123 uint32_t found_dex_pc = method->FindCatchBlock(to_find, dex_pc, &clear_exception);
124 exception_handler_->SetClearException(clear_exception);
125 if (found_dex_pc != dex::kDexNoIndex) {
126 exception_handler_->SetHandlerMethod(method);
127 exception_handler_->SetHandlerDexPc(found_dex_pc);
128 exception_handler_->SetHandlerQuickFramePc(
129 GetCurrentOatQuickMethodHeader()->ToNativeQuickPc(
130 method, found_dex_pc, /* is_for_catch_handler= */ true));
131 exception_handler_->SetHandlerQuickFrame(GetCurrentQuickFrame());
132 exception_handler_->SetHandlerMethodHeader(GetCurrentOatQuickMethodHeader());
133 return false; // End stack walk.
134 } else if (UNLIKELY(GetThread()->HasDebuggerShadowFrames())) {
135 // We are going to unwind this frame. Did we prepare a shadow frame for debugging?
136 size_t frame_id = GetFrameId();
137 ShadowFrame* frame = GetThread()->FindDebuggerShadowFrame(frame_id);
138 if (frame != nullptr) {
139 // We will not execute this shadow frame so we can safely deallocate it.
140 GetThread()->RemoveDebuggerShadowFrameMapping(frame_id);
141 ShadowFrame::DeleteDeoptimizedFrame(frame);
142 }
143 }
144 }
145 return true; // Continue stack walk.
146 }
147
  // The exception for which we're searching for a catch block.
  Handle<mirror::Throwable>* exception_;
  // The quick exception handler on whose behalf we visit frames.
  QuickExceptionHandler* const exception_handler_;
  // The number of frames to skip before searching for catch handlers.
  uint32_t skip_frames_;

  DISALLOW_COPY_AND_ASSIGN(CatchBlockStackVisitor);
};

static size_t GetInstrumentationFramesToPop(Thread* self, size_t frame_depth)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  CHECK_NE(frame_depth, kInvalidFrameDepth);
  size_t instrumentation_frames_to_pop = 0;
  StackVisitor::WalkStack(
      [&](art::StackVisitor* stack_visitor) REQUIRES_SHARED(Locks::mutator_lock_) {
        size_t current_frame_depth = stack_visitor->GetFrameDepth();
        if (current_frame_depth < frame_depth) {
          CHECK(stack_visitor->GetMethod() != nullptr);
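          // An instrumented frame is recognized by its return pc having been patched to point
          // at the instrumentation exit stub.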
          if (UNLIKELY(reinterpret_cast<uintptr_t>(GetQuickInstrumentationExitPc()) ==
                           stack_visitor->GetReturnPc())) {
            if (!stack_visitor->IsInInlinedFrame()) {
              // We do not count inlined frames, because we do not instrument them. The reason we
              // include them in the stack walking is the check against `frame_depth_`, which is
              // given to us by a visitor that visits inlined frames.
              ++instrumentation_frames_to_pop;
            }
          }
          return true;
        }
        // We reached the frame of the catch handler or the upcall.
        return false;
      },
      self,
      /* context= */ nullptr,
      art::StackVisitor::StackWalkKind::kIncludeInlinedFrames,
      /* check_suspended= */ true,
      /* include_transitions= */ true);
  return instrumentation_frames_to_pop;
}

// Finds the appropriate exception catch after calling all method exit instrumentation functions.
// Note that this might change the exception being thrown.
void QuickExceptionHandler::FindCatch(ObjPtr<mirror::Throwable> exception) {
  DCHECK(!is_deoptimization_);
  instrumentation::InstrumentationStackPopper popper(self_);
  // The total number of frames we have popped so far.
  uint32_t already_popped = 0;
  bool popped_to_top = true;
197 StackHandleScope<1> hs(self_);
198 MutableHandle<mirror::Throwable> exception_ref(hs.NewHandle(exception));
199 // Sending the instrumentation events (done by the InstrumentationStackPopper) can cause new
200 // exceptions to be thrown which will override the current exception. Therefore we need to perform
201 // the search for a catch in a loop until we have successfully popped all the way to a catch or
202 // the top of the stack.
203 do {
204 if (kDebugExceptionDelivery) {
205 ObjPtr<mirror::String> msg = exception_ref->GetDetailMessage();
206 std::string str_msg(msg != nullptr ? msg->ToModifiedUtf8() : "");
207 self_->DumpStack(LOG_STREAM(INFO) << "Delivering exception: " << exception_ref->PrettyTypeOf()
208 << ": " << str_msg << "\n");
209 }
210
211 // Walk the stack to find catch handler.
    CatchBlockStackVisitor visitor(self_,
                                   context_,
                                   &exception_ref,
                                   this,
                                   /*skip_frames=*/already_popped);
    visitor.WalkStack(true);
    uint32_t new_pop_count = handler_frame_depth_;
    DCHECK_GE(new_pop_count, already_popped);
    already_popped = new_pop_count;

    // Figure out how many of those frames have instrumentation we need to remove. (This should
    // exactly match new_pop_count if there are no inlined frames.)
    size_t instrumentation_frames_to_pop =
        GetInstrumentationFramesToPop(self_, handler_frame_depth_);

    if (kDebugExceptionDelivery) {
      if (*handler_quick_frame_ == nullptr) {
        LOG(INFO) << "Handler is upcall";
      }
      if (handler_method_ != nullptr) {
        const DexFile* dex_file = handler_method_->GetDexFile();
        int line_number = annotations::GetLineNumFromPC(dex_file, handler_method_, handler_dex_pc_);
        LOG(INFO) << "Handler: " << handler_method_->PrettyMethod() << " (line: "
                  << line_number << ")";
      }
      LOG(INFO) << "Will attempt to pop " << instrumentation_frames_to_pop
                << " off of the instrumentation stack";
    }
    // Exception was cleared as part of delivery.
    DCHECK(!self_->IsExceptionPending());
    // If the handler is in optimized code, we need to set the catch environment.
    if (*handler_quick_frame_ != nullptr &&
        handler_method_header_ != nullptr &&
        handler_method_header_->IsOptimized()) {
      SetCatchEnvironmentForOptimizedHandler(&visitor);
    }
    popped_to_top = popper.PopFramesTo(instrumentation_frames_to_pop, exception_ref);
  } while (!popped_to_top);
  if (!clear_exception_) {
    // Put exception back in root set with clear throw location.
    self_->SetException(exception_ref.Get());
  }
}

static VRegKind ToVRegKind(DexRegisterLocation::Kind kind) {
  // Slightly hacky since we cannot map DexRegisterLocation::Kind and VRegKind
  // one to one. However, StackVisitor::GetVRegFromOptimizedCode only needs to
  // distinguish between core/FPU registers and low/high bits on 64-bit.
  switch (kind) {
    case DexRegisterLocation::Kind::kConstant:
    case DexRegisterLocation::Kind::kInStack:
      // VRegKind is ignored.
      return VRegKind::kUndefined;

    case DexRegisterLocation::Kind::kInRegister:
      // Selects core register. For 64-bit registers, selects low 32 bits.
      return VRegKind::kLongLoVReg;

    case DexRegisterLocation::Kind::kInRegisterHigh:
      // Selects core register. For 64-bit registers, selects high 32 bits.
      return VRegKind::kLongHiVReg;

    case DexRegisterLocation::Kind::kInFpuRegister:
      // Selects FPU register. For 64-bit registers, selects low 32 bits.
      return VRegKind::kDoubleLoVReg;

    case DexRegisterLocation::Kind::kInFpuRegisterHigh:
      // Selects FPU register. For 64-bit registers, selects high 32 bits.
      return VRegKind::kDoubleHiVReg;

    default:
      LOG(FATAL) << "Unexpected vreg location " << kind;
      UNREACHABLE();
  }
}

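// Copies the live dex register values at the throwing instruction into the stack slots that the
// optimizing compiler reserved for the catch block's phi inputs, so that the handler resumes
// with a consistent environment.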
void QuickExceptionHandler::SetCatchEnvironmentForOptimizedHandler(StackVisitor* stack_visitor) {
  DCHECK(!is_deoptimization_);
  DCHECK(*handler_quick_frame_ != nullptr) << "Method should not be called on upcall exceptions";
  DCHECK(handler_method_ != nullptr && handler_method_header_->IsOptimized());

  if (kDebugExceptionDelivery) {
    self_->DumpStack(LOG_STREAM(INFO) << "Setting catch phis: ");
  }

  CodeItemDataAccessor accessor(handler_method_->DexInstructionData());
  const size_t number_of_vregs = accessor.RegistersSize();
  CodeInfo code_info(handler_method_header_);

  // Find stack map of the catch block.
  StackMap catch_stack_map = code_info.GetCatchStackMapForDexPc(GetHandlerDexPc());
  DCHECK(catch_stack_map.IsValid());
  DexRegisterMap catch_vreg_map = code_info.GetDexRegisterMapOf(catch_stack_map);
  if (!catch_vreg_map.HasAnyLiveDexRegisters()) {
    return;
  }
  DCHECK_EQ(catch_vreg_map.size(), number_of_vregs);

  // Find stack map of the throwing instruction.
  StackMap throw_stack_map =
      code_info.GetStackMapForNativePcOffset(stack_visitor->GetNativePcOffset());
  DCHECK(throw_stack_map.IsValid());
  DexRegisterMap throw_vreg_map = code_info.GetDexRegisterMapOf(throw_stack_map);
  DCHECK_EQ(throw_vreg_map.size(), number_of_vregs);

  // Copy values between them.
  for (uint16_t vreg = 0; vreg < number_of_vregs; ++vreg) {
    DexRegisterLocation::Kind catch_location = catch_vreg_map[vreg].GetKind();
    if (catch_location == DexRegisterLocation::Kind::kNone) {
      continue;
    }
    DCHECK(catch_location == DexRegisterLocation::Kind::kInStack);

    // Get vreg value from its current location.
    uint32_t vreg_value;
    VRegKind vreg_kind = ToVRegKind(throw_vreg_map[vreg].GetKind());
    bool get_vreg_success = stack_visitor->GetVReg(stack_visitor->GetMethod(),
                                                   vreg,
                                                   vreg_kind,
                                                   &vreg_value);
    CHECK(get_vreg_success) << "VReg " << vreg << " was optimized out ("
                            << "method=" << ArtMethod::PrettyMethod(stack_visitor->GetMethod())
                            << ", dex_pc=" << stack_visitor->GetDexPc() << ", "
                            << "native_pc_offset=" << stack_visitor->GetNativePcOffset() << ")";

    // Copy value to the catch phi's stack slot.
    int32_t slot_offset = catch_vreg_map[vreg].GetStackOffsetInBytes();
    ArtMethod** frame_top = stack_visitor->GetCurrentQuickFrame();
    uint8_t* slot_address = reinterpret_cast<uint8_t*>(frame_top) + slot_offset;
    uint32_t* slot_ptr = reinterpret_cast<uint32_t*>(slot_address);
    *slot_ptr = vreg_value;
  }
}

// Prepares deoptimization.
class DeoptimizeStackVisitor final : public StackVisitor {
 public:
  DeoptimizeStackVisitor(Thread* self,
                         Context* context,
                         QuickExceptionHandler* exception_handler,
                         bool single_frame)
      REQUIRES_SHARED(Locks::mutator_lock_)
      : StackVisitor(self, context, StackVisitor::StackWalkKind::kIncludeInlinedFrames),
        exception_handler_(exception_handler),
        prev_shadow_frame_(nullptr),
        stacked_shadow_frame_pushed_(false),
        single_frame_deopt_(single_frame),
        single_frame_done_(false),
        single_frame_deopt_method_(nullptr),
        single_frame_deopt_quick_method_header_(nullptr),
        callee_method_(nullptr) {
  }

  ArtMethod* GetSingleFrameDeoptMethod() const {
    return single_frame_deopt_method_;
  }

  const OatQuickMethodHeader* GetSingleFrameDeoptQuickMethodHeader() const {
    return single_frame_deopt_quick_method_header_;
  }

  void FinishStackWalk() REQUIRES_SHARED(Locks::mutator_lock_) {
    // This is the upcall, or the next full frame in single-frame deopt, or the
    // code isn't deoptimizeable. We remember the frame and last pc so that we
    // may long jump to them.
    exception_handler_->SetHandlerQuickFramePc(GetCurrentQuickFramePc());
    exception_handler_->SetHandlerQuickFrame(GetCurrentQuickFrame());
    exception_handler_->SetHandlerMethodHeader(GetCurrentOatQuickMethodHeader());
    if (!stacked_shadow_frame_pushed_) {
      // In case there is no deoptimized shadow frame for this upcall, we still
      // need to push a nullptr to the stack since there is always a matching pop after
      // the long jump.
      GetThread()->PushStackedShadowFrame(nullptr,
                                          StackedShadowFrameType::kDeoptimizationShadowFrame);
      stacked_shadow_frame_pushed_ = true;
    }
    if (GetMethod() == nullptr) {
      exception_handler_->SetFullFragmentDone(true);
    } else {
      CHECK(callee_method_ != nullptr) << GetMethod()->PrettyMethod(false);
      exception_handler_->SetHandlerQuickArg0(reinterpret_cast<uintptr_t>(callee_method_));
    }
  }

  bool VisitFrame() override REQUIRES_SHARED(Locks::mutator_lock_) {
    exception_handler_->SetHandlerFrameDepth(GetFrameDepth());
    ArtMethod* method = GetMethod();
    VLOG(deopt) << "Deoptimizing stack: depth: " << GetFrameDepth()
                << " at method " << ArtMethod::PrettyMethod(method);
    if (method == nullptr || single_frame_done_) {
      FinishStackWalk();
      return false;  // End stack walk.
    } else if (method->IsRuntimeMethod()) {
      // Ignore callee save method.
      DCHECK(method->IsCalleeSaveMethod());
      return true;
    } else if (method->IsNative()) {
      // If we return from JNI with a pending exception and want to deoptimize, we need to skip
      // the native method.
      // The top method is a runtime method, the native method comes next.
      CHECK_EQ(GetFrameDepth(), 1U);
      callee_method_ = method;
      return true;
    } else if (!single_frame_deopt_ &&
               !Runtime::Current()->IsAsyncDeoptimizeable(GetCurrentQuickFramePc())) {
      // We hit some code that's not deoptimizeable. However, single-frame deoptimization
      // triggered from compiled code is always allowed, since HDeoptimize always saves the
      // full environment.
      LOG(WARNING) << "Got request to deoptimize un-deoptimizable method "
                   << method->PrettyMethod();
      FinishStackWalk();
      return false;  // End stack walk.
    } else {
      // Check if a shadow frame already exists for debugger's set-local-value purpose.
      const size_t frame_id = GetFrameId();
      ShadowFrame* new_frame = GetThread()->FindDebuggerShadowFrame(frame_id);
      const bool* updated_vregs;
      CodeItemDataAccessor accessor(method->DexInstructionData());
      const size_t num_regs = accessor.RegistersSize();
      if (new_frame == nullptr) {
        new_frame = ShadowFrame::CreateDeoptimizedFrame(num_regs, nullptr, method, GetDexPc());
        updated_vregs = nullptr;
      } else {
        updated_vregs = GetThread()->GetUpdatedVRegFlags(frame_id);
        DCHECK(updated_vregs != nullptr);
      }
      HandleOptimizingDeoptimization(method, new_frame, updated_vregs);
      if (updated_vregs != nullptr) {
        // Calling Thread::RemoveDebuggerShadowFrameMapping will also delete the updated_vregs
        // array, so this must come after we have processed the frame.
        GetThread()->RemoveDebuggerShadowFrameMapping(frame_id);
        DCHECK(GetThread()->FindDebuggerShadowFrame(frame_id) == nullptr);
      }
      if (prev_shadow_frame_ != nullptr) {
        prev_shadow_frame_->SetLink(new_frame);
      } else {
        // Will be popped after the long jump following DeoptimizeStack(),
        // right before interpreter::EnterInterpreterFromDeoptimize().
        stacked_shadow_frame_pushed_ = true;
        GetThread()->PushStackedShadowFrame(
            new_frame, StackedShadowFrameType::kDeoptimizationShadowFrame);
      }
      prev_shadow_frame_ = new_frame;

      if (single_frame_deopt_ && !IsInInlinedFrame()) {
        // Single-frame deopt ends at the first non-inlined frame and needs to store that method.
        single_frame_done_ = true;
        single_frame_deopt_method_ = method;
        single_frame_deopt_quick_method_header_ = GetCurrentOatQuickMethodHeader();
      }
      callee_method_ = method;
      return true;
    }
  }

 private:
  void HandleOptimizingDeoptimization(ArtMethod* m,
                                      ShadowFrame* new_frame,
                                      const bool* updated_vregs)
      REQUIRES_SHARED(Locks::mutator_lock_) {
    const OatQuickMethodHeader* method_header = GetCurrentOatQuickMethodHeader();
    CodeInfo code_info(method_header);
    uintptr_t native_pc_offset = method_header->NativeQuickPcOffset(GetCurrentQuickFramePc());
    StackMap stack_map = code_info.GetStackMapForNativePcOffset(native_pc_offset);
    CodeItemDataAccessor accessor(m->DexInstructionData());
    const size_t number_of_vregs = accessor.RegistersSize();
    uint32_t register_mask = code_info.GetRegisterMaskOf(stack_map);
    BitMemoryRegion stack_mask = code_info.GetStackMaskOf(stack_map);
    DexRegisterMap vreg_map = IsInInlinedFrame()
        ? code_info.GetInlineDexRegisterMapOf(stack_map, GetCurrentInlinedFrame())
        : code_info.GetDexRegisterMapOf(stack_map);
    if (vreg_map.empty()) {
      return;
    }
    DCHECK_EQ(vreg_map.size(), number_of_vregs);

    for (uint16_t vreg = 0; vreg < number_of_vregs; ++vreg) {
      if (updated_vregs != nullptr && updated_vregs[vreg]) {
        // Keep the value set by debugger.
        continue;
      }

      DexRegisterLocation::Kind location = vreg_map[vreg].GetKind();
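      // Poison value that remains in any vreg whose location is unknown or kNone.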
      static constexpr uint32_t kDeadValue = 0xEBADDE09;
      uint32_t value = kDeadValue;
      bool is_reference = false;

      switch (location) {
        case DexRegisterLocation::Kind::kInStack: {
          const int32_t offset = vreg_map[vreg].GetStackOffsetInBytes();
          const uint8_t* addr = reinterpret_cast<const uint8_t*>(GetCurrentQuickFrame()) + offset;
          value = *reinterpret_cast<const uint32_t*>(addr);
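          // The stack mask records one bit per 32-bit stack slot, so convert the byte offset to
          // a slot index before checking whether this slot holds a reference.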
          uint32_t bit = (offset >> 2);
          if (bit < stack_mask.size_in_bits() && stack_mask.LoadBit(bit)) {
            is_reference = true;
          }
          break;
        }
        case DexRegisterLocation::Kind::kInRegister:
        case DexRegisterLocation::Kind::kInRegisterHigh:
        case DexRegisterLocation::Kind::kInFpuRegister:
        case DexRegisterLocation::Kind::kInFpuRegisterHigh: {
          uint32_t reg = vreg_map[vreg].GetMachineRegister();
          bool result = GetRegisterIfAccessible(reg, ToVRegKind(location), &value);
          CHECK(result);
          if (location == DexRegisterLocation::Kind::kInRegister) {
            if (((1u << reg) & register_mask) != 0) {
              is_reference = true;
            }
          }
          break;
        }
        case DexRegisterLocation::Kind::kConstant: {
          value = vreg_map[vreg].GetConstant();
          if (value == 0) {
            // Make it a reference for extra safety.
            is_reference = true;
          }
          break;
        }
        case DexRegisterLocation::Kind::kNone: {
          break;
        }
        default: {
          LOG(FATAL) << "Unexpected location kind " << vreg_map[vreg].GetKind();
          UNREACHABLE();
        }
      }
      if (is_reference) {
        new_frame->SetVRegReference(vreg, reinterpret_cast<mirror::Object*>(value));
      } else {
        new_frame->SetVReg(vreg, value);
      }
    }
  }

  static VRegKind GetVRegKind(uint16_t reg, const std::vector<int32_t>& kinds) {
    return static_cast<VRegKind>(kinds[reg * 2]);
  }

  QuickExceptionHandler* const exception_handler_;
  ShadowFrame* prev_shadow_frame_;
  bool stacked_shadow_frame_pushed_;
  const bool single_frame_deopt_;
  bool single_frame_done_;
  ArtMethod* single_frame_deopt_method_;
  const OatQuickMethodHeader* single_frame_deopt_quick_method_header_;
  ArtMethod* callee_method_;

  DISALLOW_COPY_AND_ASSIGN(DeoptimizeStackVisitor);
};

void QuickExceptionHandler::PrepareForLongJumpToInvokeStubOrInterpreterBridge() {
  if (full_fragment_done_) {
    // Restore the deoptimization exception. When returning from the invoke stub,
    // ArtMethod::Invoke() will see the special exception and know that deoptimization
    // is needed.
    self_->SetException(Thread::GetDeoptimizationException());
  } else {
    // The PC needs to be that of the quick-to-interpreter bridge.
    int32_t offset =
        GetThreadOffset<kRuntimePointerSize>(kQuickQuickToInterpreterBridge).Int32Value();
    handler_quick_frame_pc_ = *reinterpret_cast<uintptr_t*>(
        reinterpret_cast<uint8_t*>(self_) + offset);
  }
}

void QuickExceptionHandler::DeoptimizeStack() {
  DCHECK(is_deoptimization_);
  if (kDebugExceptionDelivery) {
    self_->DumpStack(LOG_STREAM(INFO) << "Deoptimizing: ");
  }

  DeoptimizeStackVisitor visitor(self_, context_, this, false);
  visitor.WalkStack(true);
  PrepareForLongJumpToInvokeStubOrInterpreterBridge();
}

void QuickExceptionHandler::DeoptimizeSingleFrame(DeoptimizationKind kind) {
  DCHECK(is_deoptimization_);

  DeoptimizeStackVisitor visitor(self_, context_, this, true);
  visitor.WalkStack(true);

  // Compiled code made an explicit deoptimization.
  ArtMethod* deopt_method = visitor.GetSingleFrameDeoptMethod();
  SCOPED_TRACE << "Deoptimizing "
               << deopt_method->PrettyMethod()
               << ": " << GetDeoptimizationKindName(kind);

  DCHECK(deopt_method != nullptr);
  if (VLOG_IS_ON(deopt) || kDebugExceptionDelivery) {
    LOG(INFO) << "Single-frame deopting: "
              << deopt_method->PrettyMethod()
              << " due to "
              << GetDeoptimizationKindName(kind);
    DumpFramesWithType(self_, /* details= */ true);
  }
  if (Runtime::Current()->UseJitCompilation()) {
    Runtime::Current()->GetJit()->GetCodeCache()->InvalidateCompiledCodeFor(
        deopt_method, visitor.GetSingleFrameDeoptQuickMethodHeader());
  } else {
    // Transfer the method's code to the interpreter bridge.
    Runtime::Current()->GetInstrumentation()->UpdateMethodsCode(
        deopt_method, GetQuickToInterpreterBridge());
  }

  PrepareForLongJumpToInvokeStubOrInterpreterBridge();
}

void QuickExceptionHandler::DeoptimizePartialFragmentFixup(uintptr_t return_pc) {
  // At this point, the instrumentation stack has been updated. We need to install
  // the real return pc on the stack, in case the instrumentation exit stub's address
  // is stored there, so that the interpreter bridge code can return to the right place.
  if (return_pc != 0) {
    uintptr_t* pc_addr = reinterpret_cast<uintptr_t*>(handler_quick_frame_);
    CHECK(pc_addr != nullptr);
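    // The return pc occupies the stack slot immediately below the frame's ArtMethod* slot.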
    pc_addr--;
    *reinterpret_cast<uintptr_t*>(pc_addr) = return_pc;
  }

  // Architecture-dependent work. This is to get the return address right on x86 and x86-64,
  // which have no link register.
  if (kRuntimeISA == InstructionSet::kX86 || kRuntimeISA == InstructionSet::kX86_64) {
    // On x86, the return address is on the stack, so just reuse it. Otherwise we would have to
    // change how longjump works.
    handler_quick_frame_ = reinterpret_cast<ArtMethod**>(
        reinterpret_cast<uintptr_t>(handler_quick_frame_) - sizeof(void*));
  }
}

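// Pops the instrumentation frames covering the deoptimized fragment and returns the return pc
// that the instrumentation had replaced on the stack (0 if method tracing is not active).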
uintptr_t QuickExceptionHandler::UpdateInstrumentationStack() {
  DCHECK(is_deoptimization_) << "Non-deoptimization handlers should use FindCatch";
  uintptr_t return_pc = 0;
  if (method_tracing_active_) {
    size_t instrumentation_frames_to_pop =
        GetInstrumentationFramesToPop(self_, handler_frame_depth_);
    instrumentation::Instrumentation* instrumentation = Runtime::Current()->GetInstrumentation();
    return_pc = instrumentation->PopFramesForDeoptimization(self_, instrumentation_frames_to_pop);
  }
  return return_pc;
}

void QuickExceptionHandler::DoLongJump(bool smash_caller_saves) {
  // Place context back on thread so it will be available when we continue.
  self_->ReleaseLongJumpContext(context_);
  context_->SetSP(reinterpret_cast<uintptr_t>(handler_quick_frame_));
  CHECK_NE(handler_quick_frame_pc_, 0u);
  context_->SetPC(handler_quick_frame_pc_);
658 context_->SetArg0(handler_quick_arg0_);
659 if (smash_caller_saves) {
660 context_->SmashCallerSaves();
661 }
662 context_->DoLongJump();
663 UNREACHABLE();
664 }
665
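// Logs one line per frame, prefixed with N (transition), R (runtime method), S (shadow frame),
// or Q (quick frame); a Q frame gets an 'i' suffix when it is an inlined frame.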
void QuickExceptionHandler::DumpFramesWithType(Thread* self, bool details) {
  StackVisitor::WalkStack(
      [&](const art::StackVisitor* stack_visitor) REQUIRES_SHARED(Locks::mutator_lock_) {
        ArtMethod* method = stack_visitor->GetMethod();
        if (details) {
          LOG(INFO) << "|> pc = " << std::hex << stack_visitor->GetCurrentQuickFramePc();
          LOG(INFO) << "|> addr = " << std::hex
                    << reinterpret_cast<uintptr_t>(stack_visitor->GetCurrentQuickFrame());
          if (stack_visitor->GetCurrentQuickFrame() != nullptr && method != nullptr) {
            LOG(INFO) << "|> ret = " << std::hex << stack_visitor->GetReturnPc();
          }
        }
        if (method == nullptr) {
          // Transition frame; keep going, we want to unwind over bridges all the way.
          if (details) {
            LOG(INFO) << "N <transition>";
          }
          return true;
        } else if (method->IsRuntimeMethod()) {
          if (details) {
            LOG(INFO) << "R " << method->PrettyMethod(true);
          }
          return true;
        } else {
          bool is_shadow = stack_visitor->GetCurrentShadowFrame() != nullptr;
          LOG(INFO) << (is_shadow ? "S" : "Q")
                    << ((!is_shadow && stack_visitor->IsInInlinedFrame()) ? "i" : " ")
                    << " "
                    << method->PrettyMethod(true);
          return true;  // Go on.
        }
      },
      self,
      /* context= */ nullptr,
      art::StackVisitor::StackWalkKind::kIncludeInlinedFrames);
}

}  // namespace art