/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "quick_exception_handler.h"

#include "arch/context.h"
#include "art_method-inl.h"
#include "dex_instruction.h"
#include "entrypoints/entrypoint_utils.h"
#include "entrypoints/quick/quick_entrypoints_enum.h"
#include "entrypoints/runtime_asm_entrypoints.h"
#include "handle_scope-inl.h"
#include "jit/jit.h"
#include "jit/jit_code_cache.h"
#include "mirror/class-inl.h"
#include "mirror/class_loader.h"
#include "mirror/throwable.h"
#include "oat_quick_method_header.h"
#include "stack_map.h"
#include "verifier/method_verifier.h"

namespace art {

static constexpr bool kDebugExceptionDelivery = false;
static constexpr size_t kInvalidFrameDepth = 0xffffffff;

QuickExceptionHandler::QuickExceptionHandler(Thread* self, bool is_deoptimization)
    : self_(self),
      context_(self->GetLongJumpContext()),
      is_deoptimization_(is_deoptimization),
      method_tracing_active_(is_deoptimization ||
                             Runtime::Current()->GetInstrumentation()->AreExitStubsInstalled()),
      handler_quick_frame_(nullptr),
      handler_quick_frame_pc_(0),
      handler_method_header_(nullptr),
      handler_quick_arg0_(0),
      handler_method_(nullptr),
      handler_dex_pc_(0),
      clear_exception_(false),
      handler_frame_depth_(kInvalidFrameDepth) {}

// Finds the catch handler.
class CatchBlockStackVisitor FINAL : public StackVisitor {
 public:
  CatchBlockStackVisitor(Thread* self, Context* context, Handle<mirror::Throwable>* exception,
                         QuickExceptionHandler* exception_handler)
      SHARED_REQUIRES(Locks::mutator_lock_)
      : StackVisitor(self, context, StackVisitor::StackWalkKind::kIncludeInlinedFrames),
        exception_(exception),
        exception_handler_(exception_handler) {
  }

  bool VisitFrame() OVERRIDE SHARED_REQUIRES(Locks::mutator_lock_) {
    ArtMethod* method = GetMethod();
    exception_handler_->SetHandlerFrameDepth(GetFrameDepth());
    if (method == nullptr) {
      // This is the upcall; remember the frame and last pc so that we may long jump to them.
      exception_handler_->SetHandlerQuickFramePc(GetCurrentQuickFramePc());
      exception_handler_->SetHandlerQuickFrame(GetCurrentQuickFrame());
      exception_handler_->SetHandlerMethodHeader(GetCurrentOatQuickMethodHeader());
      uint32_t next_dex_pc;
      ArtMethod* next_art_method;
      bool has_next = GetNextMethodAndDexPc(&next_art_method, &next_dex_pc);
      // Report the method that did the down call as the handler.
      exception_handler_->SetHandlerDexPc(next_dex_pc);
      exception_handler_->SetHandlerMethod(next_art_method);
      if (!has_next) {
        // No next method? Check that the exception handler is set up for the
        // unhandled exception case.
        DCHECK_EQ(0U, exception_handler_->GetHandlerDexPc());
        DCHECK(nullptr == exception_handler_->GetHandlerMethod());
      }
      return false;  // End stack walk.
    }
    if (method->IsRuntimeMethod()) {
      // Ignore callee save method.
      DCHECK(method->IsCalleeSaveMethod());
      return true;
    }
    return HandleTryItems(method);
  }

 private:
  bool HandleTryItems(ArtMethod* method)
      SHARED_REQUIRES(Locks::mutator_lock_) {
    uint32_t dex_pc = DexFile::kDexNoIndex;
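    // Native methods have no Dex bytecode and therefore no try items; leave dex_pc invalid
    // so the catch lookup below is skipped for them.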
    if (!method->IsNative()) {
      dex_pc = GetDexPc();
    }
    if (dex_pc != DexFile::kDexNoIndex) {
      bool clear_exception = false;
      StackHandleScope<1> hs(GetThread());
      Handle<mirror::Class> to_find(hs.NewHandle((*exception_)->GetClass()));
      uint32_t found_dex_pc = method->FindCatchBlock(to_find, dex_pc, &clear_exception);
      exception_handler_->SetClearException(clear_exception);
      if (found_dex_pc != DexFile::kDexNoIndex) {
        exception_handler_->SetHandlerMethod(method);
        exception_handler_->SetHandlerDexPc(found_dex_pc);
        exception_handler_->SetHandlerQuickFramePc(
            GetCurrentOatQuickMethodHeader()->ToNativeQuickPc(
                method, found_dex_pc, /* is_catch_handler */ true));
        exception_handler_->SetHandlerQuickFrame(GetCurrentQuickFrame());
        exception_handler_->SetHandlerMethodHeader(GetCurrentOatQuickMethodHeader());
        return false;  // End stack walk.
      } else if (UNLIKELY(GetThread()->HasDebuggerShadowFrames())) {
        // We are going to unwind this frame. Did we prepare a shadow frame for debugging?
        size_t frame_id = GetFrameId();
        ShadowFrame* frame = GetThread()->FindDebuggerShadowFrame(frame_id);
        if (frame != nullptr) {
          // We will not execute this shadow frame so we can safely deallocate it.
          GetThread()->RemoveDebuggerShadowFrameMapping(frame_id);
          ShadowFrame::DeleteDeoptimizedFrame(frame);
        }
      }
    }
    return true;  // Continue stack walk.
  }

  // The exception whose catch block we are looking for.
  Handle<mirror::Throwable>* exception_;
  // The quick exception handler into which we record the handler we find.
  QuickExceptionHandler* const exception_handler_;

  DISALLOW_COPY_AND_ASSIGN(CatchBlockStackVisitor);
};

void QuickExceptionHandler::FindCatch(mirror::Throwable* exception) {
  DCHECK(!is_deoptimization_);
  if (kDebugExceptionDelivery) {
    mirror::String* msg = exception->GetDetailMessage();
    std::string str_msg(msg != nullptr ? msg->ToModifiedUtf8() : "");
    self_->DumpStack(LOG(INFO) << "Delivering exception: " << PrettyTypeOf(exception)
                     << ": " << str_msg << "\n");
  }
  StackHandleScope<1> hs(self_);
  Handle<mirror::Throwable> exception_ref(hs.NewHandle(exception));

  // Walk the stack to find the catch handler.
  CatchBlockStackVisitor visitor(self_, context_, &exception_ref, this);
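  // Pass true so that transition frames (method == nullptr) are visited too; that is how the
  // upcall boundary is detected by the visitor.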
  visitor.WalkStack(true);

  if (kDebugExceptionDelivery) {
    if (*handler_quick_frame_ == nullptr) {
      LOG(INFO) << "Handler is upcall";
    }
    if (handler_method_ != nullptr) {
      const DexFile& dex_file = *handler_method_->GetDeclaringClass()->GetDexCache()->GetDexFile();
      int line_number = dex_file.GetLineNumFromPC(handler_method_, handler_dex_pc_);
      LOG(INFO) << "Handler: " << PrettyMethod(handler_method_) << " (line: " << line_number << ")";
    }
  }
  if (clear_exception_) {
    // Exception was cleared as part of delivery.
    DCHECK(!self_->IsExceptionPending());
  } else {
    // Put the exception back in the root set with a clear throw location.
    self_->SetException(exception_ref.Get());
  }
  // If the handler is in optimized code, we need to set the catch environment.
  if (*handler_quick_frame_ != nullptr &&
      handler_method_header_ != nullptr &&
      handler_method_header_->IsOptimized()) {
    SetCatchEnvironmentForOptimizedHandler(&visitor);
  }
}

static VRegKind ToVRegKind(DexRegisterLocation::Kind kind) {
  // Slightly hacky since we cannot map DexRegisterLocationKind and VRegKind
  // one to one. However, StackVisitor::GetVRegFromOptimizedCode only needs to
  // distinguish between core/FPU registers and low/high bits on 64-bit.
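  // For example, a 64-bit core register value is read in two 32-bit halves: the low bits via
  // kLongLoVReg and the high bits via kLongHiVReg.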
  switch (kind) {
    case DexRegisterLocation::Kind::kConstant:
    case DexRegisterLocation::Kind::kInStack:
      // VRegKind is ignored.
      return VRegKind::kUndefined;

    case DexRegisterLocation::Kind::kInRegister:
      // Selects core register. For 64-bit registers, selects low 32 bits.
      return VRegKind::kLongLoVReg;

    case DexRegisterLocation::Kind::kInRegisterHigh:
      // Selects core register. For 64-bit registers, selects high 32 bits.
      return VRegKind::kLongHiVReg;

    case DexRegisterLocation::Kind::kInFpuRegister:
      // Selects FPU register. For 64-bit registers, selects low 32 bits.
      return VRegKind::kDoubleLoVReg;

    case DexRegisterLocation::Kind::kInFpuRegisterHigh:
      // Selects FPU register. For 64-bit registers, selects high 32 bits.
      return VRegKind::kDoubleHiVReg;

    default:
      LOG(FATAL) << "Unexpected vreg location " << kind;
      UNREACHABLE();
  }
}

void QuickExceptionHandler::SetCatchEnvironmentForOptimizedHandler(StackVisitor* stack_visitor) {
  DCHECK(!is_deoptimization_);
  DCHECK(*handler_quick_frame_ != nullptr) << "Method should not be called on upcall exceptions";
  DCHECK(handler_method_ != nullptr && handler_method_header_->IsOptimized());

  if (kDebugExceptionDelivery) {
    self_->DumpStack(LOG(INFO) << "Setting catch phis: ");
  }

  const size_t number_of_vregs = handler_method_->GetCodeItem()->registers_size_;
  CodeInfo code_info = handler_method_header_->GetOptimizedCodeInfo();
  CodeInfoEncoding encoding = code_info.ExtractEncoding();

  // Find stack map of the catch block.
  StackMap catch_stack_map = code_info.GetCatchStackMapForDexPc(GetHandlerDexPc(), encoding);
  DCHECK(catch_stack_map.IsValid());
  DexRegisterMap catch_vreg_map =
      code_info.GetDexRegisterMapOf(catch_stack_map, encoding, number_of_vregs);
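  // If the catch block's stack map carries no Dex register information, there are no live
  // vregs (and hence no catch phis) to populate.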
  if (!catch_vreg_map.IsValid()) {
    return;
  }

  // Find stack map of the throwing instruction.
  StackMap throw_stack_map =
      code_info.GetStackMapForNativePcOffset(stack_visitor->GetNativePcOffset(), encoding);
  DCHECK(throw_stack_map.IsValid());
  DexRegisterMap throw_vreg_map =
      code_info.GetDexRegisterMapOf(throw_stack_map, encoding, number_of_vregs);
  DCHECK(throw_vreg_map.IsValid());

  // Copy values between them.
  for (uint16_t vreg = 0; vreg < number_of_vregs; ++vreg) {
    DexRegisterLocation::Kind catch_location =
        catch_vreg_map.GetLocationKind(vreg, number_of_vregs, code_info, encoding);
    if (catch_location == DexRegisterLocation::Kind::kNone) {
      continue;
    }
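    // The optimizing compiler assigns stack slots to catch phis, so the destination is
    // always a stack location.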
    DCHECK(catch_location == DexRegisterLocation::Kind::kInStack);

    // Get vreg value from its current location.
    uint32_t vreg_value;
    VRegKind vreg_kind = ToVRegKind(throw_vreg_map.GetLocationKind(vreg,
                                                                   number_of_vregs,
                                                                   code_info,
                                                                   encoding));
    bool get_vreg_success = stack_visitor->GetVReg(stack_visitor->GetMethod(),
                                                   vreg,
                                                   vreg_kind,
                                                   &vreg_value);
    CHECK(get_vreg_success) << "VReg " << vreg << " was optimized out ("
                            << "method=" << PrettyMethod(stack_visitor->GetMethod()) << ", "
                            << "dex_pc=" << stack_visitor->GetDexPc() << ", "
                            << "native_pc_offset=" << stack_visitor->GetNativePcOffset() << ")";

    // Copy value to the catch phi's stack slot.
    int32_t slot_offset = catch_vreg_map.GetStackOffsetInBytes(vreg,
                                                               number_of_vregs,
                                                               code_info,
                                                               encoding);
    ArtMethod** frame_top = stack_visitor->GetCurrentQuickFrame();
    uint8_t* slot_address = reinterpret_cast<uint8_t*>(frame_top) + slot_offset;
    uint32_t* slot_ptr = reinterpret_cast<uint32_t*>(slot_address);
    *slot_ptr = vreg_value;
  }
}

// Prepares deoptimization.
class DeoptimizeStackVisitor FINAL : public StackVisitor {
 public:
  DeoptimizeStackVisitor(Thread* self,
                         Context* context,
                         QuickExceptionHandler* exception_handler,
                         bool single_frame)
      SHARED_REQUIRES(Locks::mutator_lock_)
      : StackVisitor(self, context, StackVisitor::StackWalkKind::kIncludeInlinedFrames),
        exception_handler_(exception_handler),
        prev_shadow_frame_(nullptr),
        stacked_shadow_frame_pushed_(false),
        single_frame_deopt_(single_frame),
        single_frame_done_(false),
        single_frame_deopt_method_(nullptr),
        single_frame_deopt_quick_method_header_(nullptr) {
  }

  ArtMethod* GetSingleFrameDeoptMethod() const {
    return single_frame_deopt_method_;
  }

  const OatQuickMethodHeader* GetSingleFrameDeoptQuickMethodHeader() const {
    return single_frame_deopt_quick_method_header_;
  }

  bool VisitFrame() OVERRIDE SHARED_REQUIRES(Locks::mutator_lock_) {
    exception_handler_->SetHandlerFrameDepth(GetFrameDepth());
    ArtMethod* method = GetMethod();
    if (method == nullptr || single_frame_done_) {
      // This is the upcall (or the next full frame in single-frame deopt); remember the frame
      // and last pc so that we may long jump to them.
      exception_handler_->SetHandlerQuickFramePc(GetCurrentQuickFramePc());
      exception_handler_->SetHandlerQuickFrame(GetCurrentQuickFrame());
      exception_handler_->SetHandlerMethodHeader(GetCurrentOatQuickMethodHeader());
      if (!stacked_shadow_frame_pushed_) {
        // In case there is no deoptimized shadow frame for this upcall, we still
        // need to push a nullptr to the stack since there is always a matching pop after
        // the long jump.
        GetThread()->PushStackedShadowFrame(nullptr,
                                            StackedShadowFrameType::kDeoptimizationShadowFrame);
        stacked_shadow_frame_pushed_ = true;
      }
      return false;  // End stack walk.
    } else if (method->IsRuntimeMethod()) {
      // Ignore callee save method.
      DCHECK(method->IsCalleeSaveMethod());
      return true;
    } else if (method->IsNative()) {
      // If we return from JNI with a pending exception and want to deoptimize, we need to skip
      // the native method. The top method is a runtime method; the native method comes next.
      CHECK_EQ(GetFrameDepth(), 1U);
      return true;
    } else {
      // Check if a shadow frame already exists for debugger's set-local-value purpose.
      const size_t frame_id = GetFrameId();
      ShadowFrame* new_frame = GetThread()->FindDebuggerShadowFrame(frame_id);
      const bool* updated_vregs;
      const size_t num_regs = method->GetCodeItem()->registers_size_;
      if (new_frame == nullptr) {
        new_frame = ShadowFrame::CreateDeoptimizedFrame(num_regs, nullptr, method, GetDexPc());
        updated_vregs = nullptr;
      } else {
        updated_vregs = GetThread()->GetUpdatedVRegFlags(frame_id);
        DCHECK(updated_vregs != nullptr);
      }
      HandleOptimizingDeoptimization(method, new_frame, updated_vregs);
      if (updated_vregs != nullptr) {
        // Calling Thread::RemoveDebuggerShadowFrameMapping will also delete the updated_vregs
        // array, so this must come after we have processed the frame.
        GetThread()->RemoveDebuggerShadowFrameMapping(frame_id);
        DCHECK(GetThread()->FindDebuggerShadowFrame(frame_id) == nullptr);
      }
      if (prev_shadow_frame_ != nullptr) {
        prev_shadow_frame_->SetLink(new_frame);
      } else {
        // Will be popped after the long jump after DeoptimizeStack(),
        // right before interpreter::EnterInterpreterFromDeoptimize().
        stacked_shadow_frame_pushed_ = true;
        GetThread()->PushStackedShadowFrame(
            new_frame,
            single_frame_deopt_
                ? StackedShadowFrameType::kSingleFrameDeoptimizationShadowFrame
                : StackedShadowFrameType::kDeoptimizationShadowFrame);
      }
      prev_shadow_frame_ = new_frame;

      if (single_frame_deopt_ && !IsInInlinedFrame()) {
        // Single-frame deopt ends at the first non-inlined frame and needs to store that method.
        exception_handler_->SetHandlerQuickArg0(reinterpret_cast<uintptr_t>(method));
        single_frame_done_ = true;
        single_frame_deopt_method_ = method;
        single_frame_deopt_quick_method_header_ = GetCurrentOatQuickMethodHeader();
      }
      return true;
    }
  }

 private:
  void HandleOptimizingDeoptimization(ArtMethod* m,
                                      ShadowFrame* new_frame,
                                      const bool* updated_vregs)
      SHARED_REQUIRES(Locks::mutator_lock_) {
    const OatQuickMethodHeader* method_header = GetCurrentOatQuickMethodHeader();
    CodeInfo code_info = method_header->GetOptimizedCodeInfo();
    uintptr_t native_pc_offset = method_header->NativeQuickPcOffset(GetCurrentQuickFramePc());
    CodeInfoEncoding encoding = code_info.ExtractEncoding();
    StackMap stack_map = code_info.GetStackMapForNativePcOffset(native_pc_offset, encoding);
    const size_t number_of_vregs = m->GetCodeItem()->registers_size_;
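    // The register mask records which core registers hold object references at this stack map;
    // it is consulted below to decide whether a register value is a reference.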
    uint32_t register_mask = stack_map.GetRegisterMask(encoding.stack_map_encoding);
    DexRegisterMap vreg_map = IsInInlinedFrame()
        ? code_info.GetDexRegisterMapAtDepth(GetCurrentInliningDepth() - 1,
                                             code_info.GetInlineInfoOf(stack_map, encoding),
                                             encoding,
                                             number_of_vregs)
        : code_info.GetDexRegisterMapOf(stack_map, encoding, number_of_vregs);

    if (!vreg_map.IsValid()) {
      return;
    }

    for (uint16_t vreg = 0; vreg < number_of_vregs; ++vreg) {
      if (updated_vregs != nullptr && updated_vregs[vreg]) {
        // Keep the value set by debugger.
        continue;
      }

      DexRegisterLocation::Kind location =
          vreg_map.GetLocationKind(vreg, number_of_vregs, code_info, encoding);
      static constexpr uint32_t kDeadValue = 0xEBADDE09;
      uint32_t value = kDeadValue;
      bool is_reference = false;

      switch (location) {
        case DexRegisterLocation::Kind::kInStack: {
          const int32_t offset = vreg_map.GetStackOffsetInBytes(vreg,
                                                                number_of_vregs,
                                                                code_info,
                                                                encoding);
          const uint8_t* addr = reinterpret_cast<const uint8_t*>(GetCurrentQuickFrame()) + offset;
          value = *reinterpret_cast<const uint32_t*>(addr);
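          // The stack mask has one bit per 4-byte stack slot, so convert the byte offset to a
          // slot index before testing it.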
          uint32_t bit = (offset >> 2);
          if (stack_map.GetNumberOfStackMaskBits(encoding.stack_map_encoding) > bit &&
              stack_map.GetStackMaskBit(encoding.stack_map_encoding, bit)) {
            is_reference = true;
          }
          break;
        }
        case DexRegisterLocation::Kind::kInRegister:
        case DexRegisterLocation::Kind::kInRegisterHigh:
        case DexRegisterLocation::Kind::kInFpuRegister:
        case DexRegisterLocation::Kind::kInFpuRegisterHigh: {
          uint32_t reg = vreg_map.GetMachineRegister(vreg, number_of_vregs, code_info, encoding);
          bool result = GetRegisterIfAccessible(reg, ToVRegKind(location), &value);
          CHECK(result);
          if (location == DexRegisterLocation::Kind::kInRegister) {
            if (((1u << reg) & register_mask) != 0) {
              is_reference = true;
            }
          }
          break;
        }
        case DexRegisterLocation::Kind::kConstant: {
          value = vreg_map.GetConstant(vreg, number_of_vregs, code_info, encoding);
          if (value == 0) {
            // Make it a reference for extra safety.
            is_reference = true;
          }
          break;
        }
        case DexRegisterLocation::Kind::kNone: {
          break;
        }
        default: {
          LOG(FATAL)
              << "Unexpected location kind "
              << vreg_map.GetLocationInternalKind(vreg,
                                                  number_of_vregs,
                                                  code_info,
                                                  encoding);
          UNREACHABLE();
        }
      }
      if (is_reference) {
        new_frame->SetVRegReference(vreg, reinterpret_cast<mirror::Object*>(value));
      } else {
        new_frame->SetVReg(vreg, value);
      }
    }
  }

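  // The verifier's vreg description stores two entries per register (kind and payload),
  // hence the reg * 2 indexing below.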
  static VRegKind GetVRegKind(uint16_t reg, const std::vector<int32_t>& kinds) {
    return static_cast<VRegKind>(kinds.at(reg * 2));
  }

  QuickExceptionHandler* const exception_handler_;
  ShadowFrame* prev_shadow_frame_;
  bool stacked_shadow_frame_pushed_;
  const bool single_frame_deopt_;
  bool single_frame_done_;
  ArtMethod* single_frame_deopt_method_;
  const OatQuickMethodHeader* single_frame_deopt_quick_method_header_;

  DISALLOW_COPY_AND_ASSIGN(DeoptimizeStackVisitor);
};

void QuickExceptionHandler::DeoptimizeStack() {
  DCHECK(is_deoptimization_);
  if (kDebugExceptionDelivery) {
    self_->DumpStack(LOG(INFO) << "Deoptimizing: ");
  }

  DeoptimizeStackVisitor visitor(self_, context_, this, false);
  visitor.WalkStack(true);

  // Restore the deoptimization exception.
  self_->SetException(Thread::GetDeoptimizationException());
}

void QuickExceptionHandler::DeoptimizeSingleFrame() {
  DCHECK(is_deoptimization_);

  if (VLOG_IS_ON(deopt) || kDebugExceptionDelivery) {
    LOG(INFO) << "Single-frame deopting:";
    DumpFramesWithType(self_, true);
  }

  DeoptimizeStackVisitor visitor(self_, context_, this, true);
  visitor.WalkStack(true);

  // Compiled code made an explicit deoptimization.
  ArtMethod* deopt_method = visitor.GetSingleFrameDeoptMethod();
  DCHECK(deopt_method != nullptr);
  if (Runtime::Current()->UseJitCompilation()) {
    Runtime::Current()->GetJit()->GetCodeCache()->InvalidateCompiledCodeFor(
        deopt_method, visitor.GetSingleFrameDeoptQuickMethodHeader());
  } else {
    // Transfer the code to the interpreter.
    Runtime::Current()->GetInstrumentation()->UpdateMethodsCode(
        deopt_method, GetQuickToInterpreterBridge());
  }

  // The PC needs to be that of the quick-to-interpreter bridge.
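  // Read the bridge's address out of the thread-local entrypoint table; the offset depends on
  // the pointer size, hence the 32/64-bit split below.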
  int32_t offset;
#ifdef __LP64__
  offset = GetThreadOffset<8>(kQuickQuickToInterpreterBridge).Int32Value();
#else
  offset = GetThreadOffset<4>(kQuickQuickToInterpreterBridge).Int32Value();
#endif
  handler_quick_frame_pc_ = *reinterpret_cast<uintptr_t*>(
      reinterpret_cast<uint8_t*>(self_) + offset);
}

void QuickExceptionHandler::DeoptimizeSingleFrameArchDependentFixup() {
  // Architecture-dependent work. This is to get the return address (the LR equivalent) right
  // for x86 and x86-64.

  if (kRuntimeISA == InstructionSet::kX86 || kRuntimeISA == InstructionSet::kX86_64) {
    // On x86, the return address is on the stack, so just reuse it. Otherwise we would have to
    // change how longjump works.
    handler_quick_frame_ = reinterpret_cast<ArtMethod**>(
        reinterpret_cast<uintptr_t>(handler_quick_frame_) - sizeof(void*));
  }
}

// Counts the instrumentation stack frames that need to be unwound prior to the catch handler
// or upcall.
class InstrumentationStackVisitor : public StackVisitor {
 public:
  InstrumentationStackVisitor(Thread* self, size_t frame_depth)
      SHARED_REQUIRES(Locks::mutator_lock_)
      : StackVisitor(self, nullptr, StackVisitor::StackWalkKind::kIncludeInlinedFrames),
        frame_depth_(frame_depth),
        instrumentation_frames_to_pop_(0) {
    CHECK_NE(frame_depth_, kInvalidFrameDepth);
  }

  bool VisitFrame() SHARED_REQUIRES(Locks::mutator_lock_) {
    size_t current_frame_depth = GetFrameDepth();
    if (current_frame_depth < frame_depth_) {
      CHECK(GetMethod() != nullptr);
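      // A return PC pointing at the instrumentation exit stub identifies an instrumented frame.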
      if (UNLIKELY(reinterpret_cast<uintptr_t>(GetQuickInstrumentationExitPc()) == GetReturnPc())) {
        if (!IsInInlinedFrame()) {
          // We do not count inlined frames, because we do not instrument them. The reason we
          // include them in the stack walking is the check against `frame_depth_`, which is
          // given to us by a visitor that visits inlined frames.
          ++instrumentation_frames_to_pop_;
        }
      }
      return true;
    } else {
      // We reached the frame of the catch handler or the upcall.
      return false;
    }
  }

  size_t GetInstrumentationFramesToPop() const {
    return instrumentation_frames_to_pop_;
  }

 private:
  const size_t frame_depth_;
  size_t instrumentation_frames_to_pop_;

  DISALLOW_COPY_AND_ASSIGN(InstrumentationStackVisitor);
};

void QuickExceptionHandler::UpdateInstrumentationStack() {
  if (method_tracing_active_) {
    InstrumentationStackVisitor visitor(self_, handler_frame_depth_);
    visitor.WalkStack(true);

    size_t instrumentation_frames_to_pop = visitor.GetInstrumentationFramesToPop();
    instrumentation::Instrumentation* instrumentation = Runtime::Current()->GetInstrumentation();
    for (size_t i = 0; i < instrumentation_frames_to_pop; ++i) {
      instrumentation->PopMethodForUnwind(self_, is_deoptimization_);
    }
  }
}

void QuickExceptionHandler::DoLongJump(bool smash_caller_saves) {
  // Place context back on thread so it will be available when we continue.
  self_->ReleaseLongJumpContext(context_);
  context_->SetSP(reinterpret_cast<uintptr_t>(handler_quick_frame_));
  CHECK_NE(handler_quick_frame_pc_, 0u);
  context_->SetPC(handler_quick_frame_pc_);
  context_->SetArg0(handler_quick_arg0_);
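  // When requested, deliberately clobber the caller-save registers so the code we jump to
  // cannot rely on stale values.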
  if (smash_caller_saves) {
    context_->SmashCallerSaves();
  }
  context_->DoLongJump();
  UNREACHABLE();
}

// Prints out methods with their type of frame.
class DumpFramesWithTypeStackVisitor FINAL : public StackVisitor {
 public:
  DumpFramesWithTypeStackVisitor(Thread* self, bool show_details = false)
      SHARED_REQUIRES(Locks::mutator_lock_)
      : StackVisitor(self, nullptr, StackVisitor::StackWalkKind::kIncludeInlinedFrames),
        show_details_(show_details) {}

  bool VisitFrame() OVERRIDE SHARED_REQUIRES(Locks::mutator_lock_) {
    ArtMethod* method = GetMethod();
    if (show_details_) {
      LOG(INFO) << "|> pc = " << std::hex << GetCurrentQuickFramePc();
      LOG(INFO) << "|> addr = " << std::hex << reinterpret_cast<uintptr_t>(GetCurrentQuickFrame());
      if (GetCurrentQuickFrame() != nullptr && method != nullptr) {
        LOG(INFO) << "|> ret = " << std::hex << GetReturnPc();
      }
    }
    if (method == nullptr) {
      // Transition frame; do go on, we want to unwind over bridges, all the way.
      if (show_details_) {
        LOG(INFO) << "N <transition>";
      }
      return true;
    } else if (method->IsRuntimeMethod()) {
      if (show_details_) {
        LOG(INFO) << "R " << PrettyMethod(method, true);
      }
      return true;
    } else {
      bool is_shadow = GetCurrentShadowFrame() != nullptr;
      LOG(INFO) << (is_shadow ? "S" : "Q")
                << ((!is_shadow && IsInInlinedFrame()) ? "i" : " ")
                << " "
                << PrettyMethod(method, true);
      return true;  // Go on.
    }
  }

 private:
  bool show_details_;

  DISALLOW_COPY_AND_ASSIGN(DumpFramesWithTypeStackVisitor);
};

void QuickExceptionHandler::DumpFramesWithType(Thread* self, bool details) {
  DumpFramesWithTypeStackVisitor visitor(self, details);
  visitor.WalkStack(true);
}

}  // namespace art