/*
 * Copyright (C) 2011 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "stack.h"

#include "android-base/stringprintf.h"

#include "arch/context.h"
#include "art_method-inl.h"
#include "base/callee_save_type.h"
#include "base/enums.h"
#include "base/hex_dump.h"
#include "dex/dex_file_types.h"
#include "entrypoints/entrypoint_utils-inl.h"
#include "entrypoints/runtime_asm_entrypoints.h"
#include "gc/space/image_space.h"
#include "gc/space/space-inl.h"
#include "interpreter/shadow_frame.h"
#include "jit/jit.h"
#include "jit/jit_code_cache.h"
#include "linear_alloc.h"
#include "managed_stack.h"
#include "mirror/class-inl.h"
#include "mirror/object-inl.h"
#include "mirror/object_array-inl.h"
#include "oat_quick_method_header.h"
#include "quick/quick_method_frame_info.h"
#include "runtime.h"
#include "thread.h"
#include "thread_list.h"

namespace art {

using android::base::StringPrintf;

static constexpr bool kDebugStackWalk = false;

StackVisitor::StackVisitor(Thread* thread,
                           Context* context,
                           StackWalkKind walk_kind,
                           bool check_suspended)
    : StackVisitor(thread, context, walk_kind, 0, check_suspended) {}

StackVisitor::StackVisitor(Thread* thread,
                           Context* context,
                           StackWalkKind walk_kind,
                           size_t num_frames,
                           bool check_suspended)
    : thread_(thread),
      walk_kind_(walk_kind),
      cur_shadow_frame_(nullptr),
      cur_quick_frame_(nullptr),
      cur_quick_frame_pc_(0),
      cur_oat_quick_method_header_(nullptr),
      num_frames_(num_frames),
      cur_depth_(0),
      current_inlining_depth_(0),
      context_(context),
      check_suspended_(check_suspended) {
  if (check_suspended_) {
    DCHECK(thread == Thread::Current() || thread->IsSuspended()) << *thread;
  }
}

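// Looks up the stack map covering the given native pc in the method header's optimized code
// info and returns its inline info. Only valid for optimized (stack-map based) code.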
static InlineInfo GetCurrentInlineInfo(const OatQuickMethodHeader* method_header,
                                       uintptr_t cur_quick_frame_pc)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  uint32_t native_pc_offset = method_header->NativeQuickPcOffset(cur_quick_frame_pc);
  CodeInfo code_info = method_header->GetOptimizedCodeInfo();
  CodeInfoEncoding encoding = code_info.ExtractEncoding();
  StackMap stack_map = code_info.GetStackMapForNativePcOffset(native_pc_offset, encoding);
  DCHECK(stack_map.IsValid());
  return code_info.GetInlineInfoOf(stack_map, encoding);
}

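// Returns the method executing in the current frame: the shadow frame's method for interpreted
// frames, the method resolved from the inline info for inlined frames, or the ArtMethod* stored
// at the top of the quick frame otherwise.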
ArtMethod* StackVisitor::GetMethod() const {
  if (cur_shadow_frame_ != nullptr) {
    return cur_shadow_frame_->GetMethod();
  } else if (cur_quick_frame_ != nullptr) {
    if (IsInInlinedFrame()) {
      size_t depth_in_stack_map = current_inlining_depth_ - 1;
      InlineInfo inline_info = GetCurrentInlineInfo(GetCurrentOatQuickMethodHeader(),
                                                    cur_quick_frame_pc_);
      const OatQuickMethodHeader* method_header = GetCurrentOatQuickMethodHeader();
      CodeInfoEncoding encoding = method_header->GetOptimizedCodeInfo().ExtractEncoding();
      MethodInfo method_info = method_header->GetOptimizedMethodInfo();
      DCHECK(walk_kind_ != StackWalkKind::kSkipInlinedFrames);
      return GetResolvedMethod(*GetCurrentQuickFrame(),
                               method_info,
                               inline_info,
                               encoding.inline_info.encoding,
                               depth_in_stack_map);
    } else {
      return *cur_quick_frame_;
    }
  }
  return nullptr;
}

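// Returns the dex pc of the current frame. Inlined frames read it from the inline info; quick
// frames map the native return pc back to a dex pc via the method header.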
uint32_t StackVisitor::GetDexPc(bool abort_on_failure) const {
  if (cur_shadow_frame_ != nullptr) {
    return cur_shadow_frame_->GetDexPC();
  } else if (cur_quick_frame_ != nullptr) {
    if (IsInInlinedFrame()) {
      size_t depth_in_stack_map = current_inlining_depth_ - 1;
      const OatQuickMethodHeader* method_header = GetCurrentOatQuickMethodHeader();
      CodeInfoEncoding encoding = method_header->GetOptimizedCodeInfo().ExtractEncoding();
      return GetCurrentInlineInfo(GetCurrentOatQuickMethodHeader(), cur_quick_frame_pc_).
          GetDexPcAtDepth(encoding.inline_info.encoding, depth_in_stack_map);
    } else if (cur_oat_quick_method_header_ == nullptr) {
      return dex::kDexNoIndex;
    } else {
      return cur_oat_quick_method_header_->ToDexPc(
          GetMethod(), cur_quick_frame_pc_, abort_on_failure);
    }
  } else {
    return 0;
  }
}

extern "C" mirror::Object* artQuickGetProxyThisObject(ArtMethod** sp)
    REQUIRES_SHARED(Locks::mutator_lock_);

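// Returns the 'this' reference of the current frame's method, or null for static methods and for
// methods whose receiver cannot be determined (e.g. no code item).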
mirror::Object* StackVisitor::GetThisObject() const {
  DCHECK_EQ(Runtime::Current()->GetClassLinker()->GetImagePointerSize(), kRuntimePointerSize);
  ArtMethod* m = GetMethod();
  if (m->IsStatic()) {
    return nullptr;
  } else if (m->IsNative()) {
    if (cur_quick_frame_ != nullptr) {
      HandleScope* hs = reinterpret_cast<HandleScope*>(
          reinterpret_cast<char*>(cur_quick_frame_) + sizeof(ArtMethod*));
      return hs->GetReference(0);
    } else {
      return cur_shadow_frame_->GetVRegReference(0);
    }
  } else if (m->IsProxyMethod()) {
    if (cur_quick_frame_ != nullptr) {
      return artQuickGetProxyThisObject(cur_quick_frame_);
    } else {
      return cur_shadow_frame_->GetVRegReference(0);
    }
  } else {
    CodeItemDataAccessor accessor(m->DexInstructionData());
    if (!accessor.HasCodeItem()) {
      UNIMPLEMENTED(ERROR) << "Failed to determine this object of abstract or proxy method: "
          << ArtMethod::PrettyMethod(m);
      return nullptr;
    } else {
      uint16_t reg = accessor.RegistersSize() - accessor.InsSize();
      uint32_t value = 0;
      bool success = GetVReg(m, reg, kReferenceVReg, &value);
      // We currently always guarantee the `this` object is live throughout the method.
      CHECK(success) << "Failed to read the this object in " << ArtMethod::PrettyMethod(m);
      return reinterpret_cast<mirror::Object*>(value);
    }
  }
}

size_t StackVisitor::GetNativePcOffset() const {
  DCHECK(!IsShadowFrame());
  return GetCurrentOatQuickMethodHeader()->NativeQuickPcOffset(cur_quick_frame_pc_);
}

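// Reads a vreg from the debugger-installed shadow frame, if the debugger has overridden its
// value. Returns false when there is no such shadow frame or the vreg was not updated.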
bool StackVisitor::GetVRegFromDebuggerShadowFrame(uint16_t vreg,
                                                  VRegKind kind,
                                                  uint32_t* val) const {
  size_t frame_id = const_cast<StackVisitor*>(this)->GetFrameId();
  ShadowFrame* shadow_frame = thread_->FindDebuggerShadowFrame(frame_id);
  if (shadow_frame != nullptr) {
    bool* updated_vreg_flags = thread_->GetUpdatedVRegFlags(frame_id);
    DCHECK(updated_vreg_flags != nullptr);
    if (updated_vreg_flags[vreg]) {
      // Value is set by the debugger.
      if (kind == kReferenceVReg) {
        *val = static_cast<uint32_t>(reinterpret_cast<uintptr_t>(
            shadow_frame->GetVRegReference(vreg)));
      } else {
        *val = shadow_frame->GetVReg(vreg);
      }
      return true;
    }
  }
  // No value is set by the debugger.
  return false;
}

bool StackVisitor::GetVReg(ArtMethod* m, uint16_t vreg, VRegKind kind, uint32_t* val) const {
  if (cur_quick_frame_ != nullptr) {
    DCHECK(context_ != nullptr);  // You can't reliably read registers without a context.
    DCHECK(m == GetMethod());
    // Check if there is value set by the debugger.
    if (GetVRegFromDebuggerShadowFrame(vreg, kind, val)) {
      return true;
    }
    DCHECK(cur_oat_quick_method_header_->IsOptimized());
    return GetVRegFromOptimizedCode(m, vreg, kind, val);
  } else {
    DCHECK(cur_shadow_frame_ != nullptr);
    if (kind == kReferenceVReg) {
      *val = static_cast<uint32_t>(reinterpret_cast<uintptr_t>(
          cur_shadow_frame_->GetVRegReference(vreg)));
    } else {
      *val = cur_shadow_frame_->GetVReg(vreg);
    }
    return true;
  }
}

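// Reads a vreg of a compiled frame by looking up its location in the dex register map of the
// stack map covering the current native pc: the value may live on the stack, in a machine
// register, or be encoded as a constant.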
bool StackVisitor::GetVRegFromOptimizedCode(ArtMethod* m, uint16_t vreg, VRegKind kind,
                                            uint32_t* val) const {
  DCHECK_EQ(m, GetMethod());
  // Can't be null or how would we compile its instructions?
  DCHECK(m->GetCodeItem() != nullptr) << m->PrettyMethod();
  CodeItemDataAccessor accessor(m->DexInstructionData());
  uint16_t number_of_dex_registers = accessor.RegistersSize();
  DCHECK_LT(vreg, number_of_dex_registers);
  const OatQuickMethodHeader* method_header = GetCurrentOatQuickMethodHeader();
  CodeInfo code_info = method_header->GetOptimizedCodeInfo();
  CodeInfoEncoding encoding = code_info.ExtractEncoding();

  uint32_t native_pc_offset = method_header->NativeQuickPcOffset(cur_quick_frame_pc_);
  StackMap stack_map = code_info.GetStackMapForNativePcOffset(native_pc_offset, encoding);
  DCHECK(stack_map.IsValid());
  size_t depth_in_stack_map = current_inlining_depth_ - 1;

  DexRegisterMap dex_register_map = IsInInlinedFrame()
      ? code_info.GetDexRegisterMapAtDepth(depth_in_stack_map,
                                           code_info.GetInlineInfoOf(stack_map, encoding),
                                           encoding,
                                           number_of_dex_registers)
      : code_info.GetDexRegisterMapOf(stack_map, encoding, number_of_dex_registers);

  if (!dex_register_map.IsValid()) {
    return false;
  }
  DexRegisterLocation::Kind location_kind =
      dex_register_map.GetLocationKind(vreg, number_of_dex_registers, code_info, encoding);
  switch (location_kind) {
    case DexRegisterLocation::Kind::kInStack: {
      const int32_t offset = dex_register_map.GetStackOffsetInBytes(vreg,
                                                                    number_of_dex_registers,
                                                                    code_info,
                                                                    encoding);
      const uint8_t* addr = reinterpret_cast<const uint8_t*>(cur_quick_frame_) + offset;
      *val = *reinterpret_cast<const uint32_t*>(addr);
      return true;
    }
    case DexRegisterLocation::Kind::kInRegister:
    case DexRegisterLocation::Kind::kInRegisterHigh:
    case DexRegisterLocation::Kind::kInFpuRegister:
    case DexRegisterLocation::Kind::kInFpuRegisterHigh: {
      uint32_t reg =
          dex_register_map.GetMachineRegister(vreg, number_of_dex_registers, code_info, encoding);
      return GetRegisterIfAccessible(reg, kind, val);
    }
    case DexRegisterLocation::Kind::kConstant:
      *val = dex_register_map.GetConstant(vreg, number_of_dex_registers, code_info, encoding);
      return true;
    case DexRegisterLocation::Kind::kNone:
      return false;
    default:
      LOG(FATAL)
          << "Unexpected location kind "
          << dex_register_map.GetLocationInternalKind(vreg,
                                                      number_of_dex_registers,
                                                      code_info,
                                                      encoding);
      UNREACHABLE();
  }
}

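// Reads a physical register from the context, adjusting the register number for ISA-specific
// float register pairing and extracting the requested 32-bit half on 64-bit targets.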
bool StackVisitor::GetRegisterIfAccessible(uint32_t reg, VRegKind kind, uint32_t* val) const {
  const bool is_float = (kind == kFloatVReg) || (kind == kDoubleLoVReg) || (kind == kDoubleHiVReg);

  if (kRuntimeISA == InstructionSet::kX86 && is_float) {
    // X86 float registers are 64-bit and each XMM register is provided as two separate
    // 32-bit registers by the context.
    reg = (kind == kDoubleHiVReg) ? (2 * reg + 1) : (2 * reg);
  }

  // MIPS32 float registers are used as 64-bit (for MIPS32r2 it is pair
  // F(2n)-F(2n+1), and for MIPS32r6 it is 64-bit register F(2n)). When
  // accessing upper 32-bits from double, reg + 1 should be used.
  if ((kRuntimeISA == InstructionSet::kMips) && (kind == kDoubleHiVReg)) {
    DCHECK_ALIGNED(reg, 2);
    reg++;
  }

  if (!IsAccessibleRegister(reg, is_float)) {
    return false;
  }
  uintptr_t ptr_val = GetRegister(reg, is_float);
  const bool target64 = Is64BitInstructionSet(kRuntimeISA);
  if (target64) {
    const bool wide_lo = (kind == kLongLoVReg) || (kind == kDoubleLoVReg);
    const bool wide_hi = (kind == kLongHiVReg) || (kind == kDoubleHiVReg);
    int64_t value_long = static_cast<int64_t>(ptr_val);
    if (wide_lo) {
      ptr_val = static_cast<uintptr_t>(Low32Bits(value_long));
    } else if (wide_hi) {
      ptr_val = static_cast<uintptr_t>(High32Bits(value_long));
    }
  }
  *val = ptr_val;
  return true;
}

bool StackVisitor::GetVRegPairFromDebuggerShadowFrame(uint16_t vreg,
                                                      VRegKind kind_lo,
                                                      VRegKind kind_hi,
                                                      uint64_t* val) const {
  uint32_t low_32bits;
  uint32_t high_32bits;
  bool success = GetVRegFromDebuggerShadowFrame(vreg, kind_lo, &low_32bits);
  success &= GetVRegFromDebuggerShadowFrame(vreg + 1, kind_hi, &high_32bits);
  if (success) {
    *val = (static_cast<uint64_t>(high_32bits) << 32) | static_cast<uint64_t>(low_32bits);
  }
  return success;
}

bool StackVisitor::GetVRegPair(ArtMethod* m, uint16_t vreg, VRegKind kind_lo,
                               VRegKind kind_hi, uint64_t* val) const {
  if (kind_lo == kLongLoVReg) {
    DCHECK_EQ(kind_hi, kLongHiVReg);
  } else if (kind_lo == kDoubleLoVReg) {
    DCHECK_EQ(kind_hi, kDoubleHiVReg);
  } else {
    LOG(FATAL) << "Expected long or double: kind_lo=" << kind_lo << ", kind_hi=" << kind_hi;
    UNREACHABLE();
  }
  // Check if there is value set by the debugger.
  if (GetVRegPairFromDebuggerShadowFrame(vreg, kind_lo, kind_hi, val)) {
    return true;
  }
  if (cur_quick_frame_ != nullptr) {
    DCHECK(context_ != nullptr);  // You can't reliably read registers without a context.
    DCHECK(m == GetMethod());
    DCHECK(cur_oat_quick_method_header_->IsOptimized());
    return GetVRegPairFromOptimizedCode(m, vreg, kind_lo, kind_hi, val);
  } else {
    DCHECK(cur_shadow_frame_ != nullptr);
    *val = cur_shadow_frame_->GetVRegLong(vreg);
    return true;
  }
}

bool StackVisitor::GetVRegPairFromOptimizedCode(ArtMethod* m, uint16_t vreg,
                                                VRegKind kind_lo, VRegKind kind_hi,
                                                uint64_t* val) const {
  uint32_t low_32bits;
  uint32_t high_32bits;
  bool success = GetVRegFromOptimizedCode(m, vreg, kind_lo, &low_32bits);
  success &= GetVRegFromOptimizedCode(m, vreg + 1, kind_hi, &high_32bits);
  if (success) {
    *val = (static_cast<uint64_t>(high_32bits) << 32) | static_cast<uint64_t>(low_32bits);
  }
  return success;
}

bool StackVisitor::GetRegisterPairIfAccessible(uint32_t reg_lo, uint32_t reg_hi,
                                               VRegKind kind_lo, uint64_t* val) const {
  const bool is_float = (kind_lo == kDoubleLoVReg);
  if (!IsAccessibleRegister(reg_lo, is_float) || !IsAccessibleRegister(reg_hi, is_float)) {
    return false;
  }
  uintptr_t ptr_val_lo = GetRegister(reg_lo, is_float);
  uintptr_t ptr_val_hi = GetRegister(reg_hi, is_float);
  bool target64 = Is64BitInstructionSet(kRuntimeISA);
  if (target64) {
    int64_t value_long_lo = static_cast<int64_t>(ptr_val_lo);
    int64_t value_long_hi = static_cast<int64_t>(ptr_val_hi);
    ptr_val_lo = static_cast<uintptr_t>(Low32Bits(value_long_lo));
    ptr_val_hi = static_cast<uintptr_t>(High32Bits(value_long_hi));
  }
  *val = (static_cast<uint64_t>(ptr_val_hi) << 32) | static_cast<uint32_t>(ptr_val_lo);
  return true;
}

bool StackVisitor::SetVReg(ArtMethod* m,
                           uint16_t vreg,
                           uint32_t new_value,
                           VRegKind kind) {
  CodeItemDataAccessor accessor(m->DexInstructionData());
  if (!accessor.HasCodeItem()) {
    return false;
  }
  ShadowFrame* shadow_frame = GetCurrentShadowFrame();
  if (shadow_frame == nullptr) {
    // This is a compiled frame: we must prepare and update a shadow frame that will
    // be executed by the interpreter after deoptimization of the stack.
    const size_t frame_id = GetFrameId();
    const uint16_t num_regs = accessor.RegistersSize();
    shadow_frame = thread_->FindOrCreateDebuggerShadowFrame(frame_id, num_regs, m, GetDexPc());
    CHECK(shadow_frame != nullptr);
    // Remember the vreg has been set for debugging and must not be overwritten by the
    // original value during deoptimization of the stack.
    thread_->GetUpdatedVRegFlags(frame_id)[vreg] = true;
  }
  if (kind == kReferenceVReg) {
    shadow_frame->SetVRegReference(vreg, reinterpret_cast<mirror::Object*>(new_value));
  } else {
    shadow_frame->SetVReg(vreg, new_value);
  }
  return true;
}

bool StackVisitor::SetVRegPair(ArtMethod* m,
                               uint16_t vreg,
                               uint64_t new_value,
                               VRegKind kind_lo,
                               VRegKind kind_hi) {
  if (kind_lo == kLongLoVReg) {
    DCHECK_EQ(kind_hi, kLongHiVReg);
  } else if (kind_lo == kDoubleLoVReg) {
    DCHECK_EQ(kind_hi, kDoubleHiVReg);
  } else {
    LOG(FATAL) << "Expected long or double: kind_lo=" << kind_lo << ", kind_hi=" << kind_hi;
    UNREACHABLE();
  }
  CodeItemDataAccessor accessor(m->DexInstructionData());
  if (!accessor.HasCodeItem()) {
    return false;
  }
  ShadowFrame* shadow_frame = GetCurrentShadowFrame();
  if (shadow_frame == nullptr) {
    // This is a compiled frame: we must prepare for deoptimization (see SetVRegFromDebugger).
    const size_t frame_id = GetFrameId();
    const uint16_t num_regs = accessor.RegistersSize();
    shadow_frame = thread_->FindOrCreateDebuggerShadowFrame(frame_id, num_regs, m, GetDexPc());
    CHECK(shadow_frame != nullptr);
    // Remember the vreg pair has been set for debugging and must not be overwritten by the
    // original value during deoptimization of the stack.
    thread_->GetUpdatedVRegFlags(frame_id)[vreg] = true;
    thread_->GetUpdatedVRegFlags(frame_id)[vreg + 1] = true;
  }
  shadow_frame->SetVRegLong(vreg, new_value);
  return true;
}

bool StackVisitor::IsAccessibleGPR(uint32_t reg) const {
  DCHECK(context_ != nullptr);
  return context_->IsAccessibleGPR(reg);
}

uintptr_t* StackVisitor::GetGPRAddress(uint32_t reg) const {
  DCHECK(cur_quick_frame_ != nullptr) << "This is a quick frame routine";
  DCHECK(context_ != nullptr);
  return context_->GetGPRAddress(reg);
}

uintptr_t StackVisitor::GetGPR(uint32_t reg) const {
  DCHECK(cur_quick_frame_ != nullptr) << "This is a quick frame routine";
  DCHECK(context_ != nullptr);
  return context_->GetGPR(reg);
}

bool StackVisitor::IsAccessibleFPR(uint32_t reg) const {
  DCHECK(context_ != nullptr);
  return context_->IsAccessibleFPR(reg);
}

uintptr_t StackVisitor::GetFPR(uint32_t reg) const {
  DCHECK(cur_quick_frame_ != nullptr) << "This is a quick frame routine";
  DCHECK(context_ != nullptr);
  return context_->GetFPR(reg);
}

uintptr_t StackVisitor::GetReturnPc() const {
  uint8_t* sp = reinterpret_cast<uint8_t*>(GetCurrentQuickFrame());
  DCHECK(sp != nullptr);
  uint8_t* pc_addr = sp + GetCurrentQuickFrameInfo().GetReturnPcOffset();
  return *reinterpret_cast<uintptr_t*>(pc_addr);
}

void StackVisitor::SetReturnPc(uintptr_t new_ret_pc) {
  uint8_t* sp = reinterpret_cast<uint8_t*>(GetCurrentQuickFrame());
  CHECK(sp != nullptr);
  uint8_t* pc_addr = sp + GetCurrentQuickFrameInfo().GetReturnPcOffset();
  *reinterpret_cast<uintptr_t*>(pc_addr) = new_ret_pc;
}

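// Walks the whole stack once with a counting visitor to determine the number of frames.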
size_t StackVisitor::ComputeNumFrames(Thread* thread, StackWalkKind walk_kind) {
  struct NumFramesVisitor : public StackVisitor {
    NumFramesVisitor(Thread* thread_in, StackWalkKind walk_kind_in)
        : StackVisitor(thread_in, nullptr, walk_kind_in), frames(0) {}

    bool VisitFrame() OVERRIDE {
      frames++;
      return true;
    }

    size_t frames;
  };
  NumFramesVisitor visitor(thread, walk_kind);
  visitor.WalkStack(true);
  return visitor.frames;
}

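// Walks the stack to find the first non-runtime method beyond the frame at the current height
// and reports its method and dex pc. Returns false if there is no such frame.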
bool StackVisitor::GetNextMethodAndDexPc(ArtMethod** next_method, uint32_t* next_dex_pc) {
  struct HasMoreFramesVisitor : public StackVisitor {
    HasMoreFramesVisitor(Thread* thread,
                         StackWalkKind walk_kind,
                         size_t num_frames,
                         size_t frame_height)
        : StackVisitor(thread, nullptr, walk_kind, num_frames),
          frame_height_(frame_height),
          found_frame_(false),
          has_more_frames_(false),
          next_method_(nullptr),
          next_dex_pc_(0) {
    }

    bool VisitFrame() OVERRIDE REQUIRES_SHARED(Locks::mutator_lock_) {
      if (found_frame_) {
        ArtMethod* method = GetMethod();
        if (method != nullptr && !method->IsRuntimeMethod()) {
          has_more_frames_ = true;
          next_method_ = method;
          next_dex_pc_ = GetDexPc();
          return false;  // End stack walk once next method is found.
        }
      } else if (GetFrameHeight() == frame_height_) {
        found_frame_ = true;
      }
      return true;
    }

    size_t frame_height_;
    bool found_frame_;
    bool has_more_frames_;
    ArtMethod* next_method_;
    uint32_t next_dex_pc_;
  };
  HasMoreFramesVisitor visitor(thread_, walk_kind_, GetNumFrames(), GetFrameHeight());
  visitor.WalkStack(true);
  *next_method = visitor.next_method_;
  *next_dex_pc = visitor.next_dex_pc_;
  return visitor.has_more_frames_;
}

void StackVisitor::DescribeStack(Thread* thread) {
  struct DescribeStackVisitor : public StackVisitor {
    explicit DescribeStackVisitor(Thread* thread_in)
        : StackVisitor(thread_in, nullptr, StackVisitor::StackWalkKind::kIncludeInlinedFrames) {}

    bool VisitFrame() OVERRIDE REQUIRES_SHARED(Locks::mutator_lock_) {
      LOG(INFO) << "Frame Id=" << GetFrameId() << " " << DescribeLocation();
      return true;
    }
  };
  DescribeStackVisitor visitor(thread);
  visitor.WalkStack(true);
}

std::string StackVisitor::DescribeLocation() const {
  std::string result("Visiting method '");
  ArtMethod* m = GetMethod();
  if (m == nullptr) {
    return "upcall";
  }
  result += m->PrettyMethod();
  result += StringPrintf("' at dex PC 0x%04x", GetDexPc());
  if (!IsShadowFrame()) {
    result += StringPrintf(" (native PC %p)", reinterpret_cast<void*>(GetCurrentQuickFramePc()));
  }
  return result;
}

void StackVisitor::SetMethod(ArtMethod* method) {
  DCHECK(GetMethod() != nullptr);
  if (cur_shadow_frame_ != nullptr) {
    cur_shadow_frame_->SetMethod(method);
  } else {
    DCHECK(cur_quick_frame_ != nullptr);
    CHECK(!IsInInlinedFrame()) << "We do not support setting inlined method's ArtMethod!";
    *cur_quick_frame_ = method;
  }
}

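// Debug check that the given pc lies within the method's quick compiled code, unless the method
// is native, runtime or proxy, or is currently executing through a stub or trampoline.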
static void AssertPcIsWithinQuickCode(ArtMethod* method, uintptr_t pc)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  if (method->IsNative() || method->IsRuntimeMethod() || method->IsProxyMethod()) {
    return;
  }

  if (pc == reinterpret_cast<uintptr_t>(GetQuickInstrumentationExitPc())) {
    return;
  }

  Runtime* runtime = Runtime::Current();
  if (runtime->UseJitCompilation() &&
      runtime->GetJit()->GetCodeCache()->ContainsPc(reinterpret_cast<const void*>(pc))) {
    return;
  }

  const void* code = method->GetEntryPointFromQuickCompiledCode();
  if (code == GetQuickInstrumentationEntryPoint() || code == GetInvokeObsoleteMethodStub()) {
    return;
  }

  ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
  if (class_linker->IsQuickToInterpreterBridge(code) ||
      class_linker->IsQuickResolutionStub(code)) {
    return;
  }

  if (runtime->UseJitCompilation() && runtime->GetJit()->GetCodeCache()->ContainsPc(code)) {
    return;
  }

  uint32_t code_size = OatQuickMethodHeader::FromEntryPoint(code)->GetCodeSize();
  uintptr_t code_start = reinterpret_cast<uintptr_t>(code);
  CHECK(code_start <= pc && pc <= (code_start + code_size))
      << method->PrettyMethod()
      << " pc=" << std::hex << pc
      << " code_start=" << code_start
      << " code_size=" << code_size;
}

void StackVisitor::SanityCheckFrame() const {
  if (kIsDebugBuild) {
    ArtMethod* method = GetMethod();
    mirror::Class* declaring_class = method->GetDeclaringClass();
    // Runtime methods have null declaring class.
    if (!method->IsRuntimeMethod()) {
      CHECK(declaring_class != nullptr);
      CHECK_EQ(declaring_class->GetClass(), declaring_class->GetClass()->GetClass())
          << declaring_class;
    } else {
      CHECK(declaring_class == nullptr);
    }
    Runtime* const runtime = Runtime::Current();
    LinearAlloc* const linear_alloc = runtime->GetLinearAlloc();
    if (!linear_alloc->Contains(method)) {
      // Check class linker linear allocs.
      // We get the canonical method as copied methods may have their declaring
      // class from another class loader.
      ArtMethod* canonical = method->GetCanonicalMethod();
      mirror::Class* klass = canonical->GetDeclaringClass();
      LinearAlloc* const class_linear_alloc = (klass != nullptr)
          ? runtime->GetClassLinker()->GetAllocatorForClassLoader(klass->GetClassLoader())
          : linear_alloc;
      if (!class_linear_alloc->Contains(canonical)) {
        // Check image space.
        bool in_image = false;
        for (auto& space : runtime->GetHeap()->GetContinuousSpaces()) {
          if (space->IsImageSpace()) {
            auto* image_space = space->AsImageSpace();
            const auto& header = image_space->GetImageHeader();
            const ImageSection& methods = header.GetMethodsSection();
            const ImageSection& runtime_methods = header.GetRuntimeMethodsSection();
            const size_t offset =
                reinterpret_cast<const uint8_t*>(canonical) - image_space->Begin();
            if (methods.Contains(offset) || runtime_methods.Contains(offset)) {
              in_image = true;
              break;
            }
          }
        }
        CHECK(in_image) << canonical->PrettyMethod() << " not in linear alloc or image";
      }
    }
    if (cur_quick_frame_ != nullptr) {
      AssertPcIsWithinQuickCode(method, cur_quick_frame_pc_);
      // Frame sanity.
      size_t frame_size = GetCurrentQuickFrameInfo().FrameSizeInBytes();
      CHECK_NE(frame_size, 0u);
      // A rough guess at an upper size we expect to see for a frame.
      // 256 registers
      // 2 words HandleScope overhead
      // 3+3 register spills
      // TODO: this seems architecture specific for the case of JNI frames.
      // TODO: 083-compiler-regressions ManyFloatArgs shows this estimate is wrong.
      // const size_t kMaxExpectedFrameSize = (256 + 2 + 3 + 3) * sizeof(word);
      const size_t kMaxExpectedFrameSize = 2 * KB;
      CHECK_LE(frame_size, kMaxExpectedFrameSize) << method->PrettyMethod();
      size_t return_pc_offset = GetCurrentQuickFrameInfo().GetReturnPcOffset();
      CHECK_LT(return_pc_offset, frame_size);
    }
  }
}

// Counts the number of references in the parameter list of the corresponding method.
// Note: This does _not_ include "this" for non-static methods.
static uint32_t GetNumberOfReferenceArgsWithoutReceiver(ArtMethod* method)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  uint32_t shorty_len;
  const char* shorty = method->GetShorty(&shorty_len);
  uint32_t refs = 0;
  for (uint32_t i = 1; i < shorty_len; ++i) {
    if (shorty[i] == 'L') {
      refs++;
    }
  }
  return refs;
}

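// Returns the frame layout of the current quick frame. Frames without an OatQuickMethodHeader
// (abstract, runtime, proxy or generic JNI frames) derive their layout from the appropriate
// callee-save frame info; generic JNI frames additionally reserve space for the handle scope
// holding the reference arguments.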
QuickMethodFrameInfo StackVisitor::GetCurrentQuickFrameInfo() const {
  if (cur_oat_quick_method_header_ != nullptr) {
    return cur_oat_quick_method_header_->GetFrameInfo();
  }

  ArtMethod* method = GetMethod();
  Runtime* runtime = Runtime::Current();

  if (method->IsAbstract()) {
    return runtime->GetCalleeSaveMethodFrameInfo(CalleeSaveType::kSaveRefsAndArgs);
  }

  // This goes before IsProxyMethod since runtime methods have a null declaring class.
  if (method->IsRuntimeMethod()) {
    return runtime->GetRuntimeMethodFrameInfo(method);
  }

  if (method->IsProxyMethod()) {
    // There is only one direct method of a proxy class: the constructor. A direct method is
    // cloned from the original java.lang.reflect.Proxy and is executed as a usual quick
    // compiled method without any stubs. Therefore the method must have an OatQuickMethodHeader.
    DCHECK(!method->IsDirect() && !method->IsConstructor())
        << "Constructors of proxy classes must have an OatQuickMethodHeader";
    return runtime->GetCalleeSaveMethodFrameInfo(CalleeSaveType::kSaveRefsAndArgs);
  }

  // The only remaining case is if the method is native and uses the generic JNI stub,
  // called either directly or through some (resolution, instrumentation) trampoline.
  DCHECK(method->IsNative());
  if (kIsDebugBuild) {
    ClassLinker* class_linker = runtime->GetClassLinker();
    const void* entry_point = runtime->GetInstrumentation()->GetQuickCodeFor(method,
                                                                             kRuntimePointerSize);
    CHECK(class_linker->IsQuickGenericJniStub(entry_point) ||
          // The current entrypoint (after filtering out trampolines) may have changed
          // from GenericJNI to JIT-compiled stub since we have entered this frame.
          (runtime->GetJit() != nullptr &&
           runtime->GetJit()->GetCodeCache()->ContainsPc(entry_point))) << method->PrettyMethod();
  }
  // Generic JNI frame.
  uint32_t handle_refs = GetNumberOfReferenceArgsWithoutReceiver(method) + 1;
  size_t scope_size = HandleScope::SizeOf(handle_refs);
  QuickMethodFrameInfo callee_info =
      runtime->GetCalleeSaveMethodFrameInfo(CalleeSaveType::kSaveRefsAndArgs);

  // Callee saves + handle scope + method ref + alignment
  // Note: -sizeof(void*) since callee-save frame stores a whole method pointer.
  size_t frame_size = RoundUp(
      callee_info.FrameSizeInBytes() - sizeof(void*) + sizeof(ArtMethod*) + scope_size,
      kStackAlignment);
  return QuickMethodFrameInfo(frame_size, callee_info.CoreSpillMask(), callee_info.FpSpillMask());
}

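// Walks the stack from the top-most managed frame towards the bottom, visiting shadow frames,
// quick frames and, when the walk kind requests it, frames inlined into optimized code. Inlined
// frames are visited from the deepest inlined callee outwards before the physical frame itself.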
template <StackVisitor::CountTransitions kCount>
void StackVisitor::WalkStack(bool include_transitions) {
  if (check_suspended_) {
    DCHECK(thread_ == Thread::Current() || thread_->IsSuspended());
  }
  CHECK_EQ(cur_depth_, 0U);
  bool exit_stubs_installed = Runtime::Current()->GetInstrumentation()->AreExitStubsInstalled();
  uint32_t instrumentation_stack_depth = 0;
  size_t inlined_frames_count = 0;

  for (const ManagedStack* current_fragment = thread_->GetManagedStack();
       current_fragment != nullptr; current_fragment = current_fragment->GetLink()) {
    cur_shadow_frame_ = current_fragment->GetTopShadowFrame();
    cur_quick_frame_ = current_fragment->GetTopQuickFrame();
    cur_quick_frame_pc_ = 0;
    cur_oat_quick_method_header_ = nullptr;

    if (cur_quick_frame_ != nullptr) {  // Handle quick stack frames.
      // Can't be both a shadow and a quick fragment.
      DCHECK(current_fragment->GetTopShadowFrame() == nullptr);
      ArtMethod* method = *cur_quick_frame_;
      DCHECK(method != nullptr);
      bool header_retrieved = false;
      if (method->IsNative()) {
        // We do not have a PC for the first frame, so we cannot simply use
        // ArtMethod::GetOatQuickMethodHeader() as we're unable to distinguish there
        // between GenericJNI frame and JIT-compiled JNI stub; the entrypoint may have
        // changed since the frame was entered. The top quick frame tag indicates
        // GenericJNI here, otherwise it's either AOT-compiled or JIT-compiled JNI stub.
        if (UNLIKELY(current_fragment->GetTopQuickFrameTag())) {
          // The generic JNI does not have any method header.
          cur_oat_quick_method_header_ = nullptr;
        } else {
          const void* existing_entry_point = method->GetEntryPointFromQuickCompiledCode();
          CHECK(existing_entry_point != nullptr);
          Runtime* runtime = Runtime::Current();
          ClassLinker* class_linker = runtime->GetClassLinker();
          // Check whether we can quickly get the header from the current entrypoint.
          if (!class_linker->IsQuickGenericJniStub(existing_entry_point) &&
              !class_linker->IsQuickResolutionStub(existing_entry_point) &&
              existing_entry_point != GetQuickInstrumentationEntryPoint()) {
            cur_oat_quick_method_header_ =
                OatQuickMethodHeader::FromEntryPoint(existing_entry_point);
          } else {
            const void* code = method->GetOatMethodQuickCode(class_linker->GetImagePointerSize());
            if (code != nullptr) {
              cur_oat_quick_method_header_ = OatQuickMethodHeader::FromEntryPoint(code);
            } else {
              // This must be a JITted JNI stub frame.
              CHECK(runtime->GetJit() != nullptr);
              code = runtime->GetJit()->GetCodeCache()->GetJniStubCode(method);
              CHECK(code != nullptr) << method->PrettyMethod();
              cur_oat_quick_method_header_ = OatQuickMethodHeader::FromCodePointer(code);
            }
          }
        }
        header_retrieved = true;
      }
      while (method != nullptr) {
        if (!header_retrieved) {
          cur_oat_quick_method_header_ = method->GetOatQuickMethodHeader(cur_quick_frame_pc_);
        }
        header_retrieved = false;  // Force header retrieval in next iteration.
        SanityCheckFrame();

        if ((walk_kind_ == StackWalkKind::kIncludeInlinedFrames)
            && (cur_oat_quick_method_header_ != nullptr)
            && cur_oat_quick_method_header_->IsOptimized()) {
          CodeInfo code_info = cur_oat_quick_method_header_->GetOptimizedCodeInfo();
          CodeInfoEncoding encoding = code_info.ExtractEncoding();
          uint32_t native_pc_offset =
              cur_oat_quick_method_header_->NativeQuickPcOffset(cur_quick_frame_pc_);
          StackMap stack_map = code_info.GetStackMapForNativePcOffset(native_pc_offset, encoding);
          if (stack_map.IsValid() && stack_map.HasInlineInfo(encoding.stack_map.encoding)) {
            InlineInfo inline_info = code_info.GetInlineInfoOf(stack_map, encoding);
            DCHECK_EQ(current_inlining_depth_, 0u);
            for (current_inlining_depth_ = inline_info.GetDepth(encoding.inline_info.encoding);
                 current_inlining_depth_ != 0;
                 --current_inlining_depth_) {
              bool should_continue = VisitFrame();
              if (UNLIKELY(!should_continue)) {
                return;
              }
              cur_depth_++;
              inlined_frames_count++;
            }
          }
        }

        bool should_continue = VisitFrame();
        if (UNLIKELY(!should_continue)) {
          return;
        }

        QuickMethodFrameInfo frame_info = GetCurrentQuickFrameInfo();
        if (context_ != nullptr) {
          context_->FillCalleeSaves(reinterpret_cast<uint8_t*>(cur_quick_frame_), frame_info);
        }
        // Compute PC for next stack frame from return PC.
        size_t frame_size = frame_info.FrameSizeInBytes();
        size_t return_pc_offset = frame_size - sizeof(void*);
        uint8_t* return_pc_addr = reinterpret_cast<uint8_t*>(cur_quick_frame_) + return_pc_offset;
        uintptr_t return_pc = *reinterpret_cast<uintptr_t*>(return_pc_addr);

        if (UNLIKELY(exit_stubs_installed)) {
          // While profiling, the return pc is restored from the side stack, except when walking
          // the stack for an exception where the side stack will be unwound in VisitFrame.
          if (reinterpret_cast<uintptr_t>(GetQuickInstrumentationExitPc()) == return_pc) {
            CHECK_LT(instrumentation_stack_depth, thread_->GetInstrumentationStack()->size());
            const instrumentation::InstrumentationStackFrame& instrumentation_frame =
                thread_->GetInstrumentationStack()->at(instrumentation_stack_depth);
            instrumentation_stack_depth++;
            if (GetMethod() ==
                Runtime::Current()->GetCalleeSaveMethod(CalleeSaveType::kSaveAllCalleeSaves)) {
              // Skip runtime save all callee frames which are used to deliver exceptions.
            } else if (instrumentation_frame.interpreter_entry_) {
              ArtMethod* callee =
                  Runtime::Current()->GetCalleeSaveMethod(CalleeSaveType::kSaveRefsAndArgs);
              CHECK_EQ(GetMethod(), callee) << "Expected: " << ArtMethod::PrettyMethod(callee)
                                            << " Found: " << ArtMethod::PrettyMethod(GetMethod());
            } else {
              // Instrumentation generally doesn't distinguish between a method's obsolete and
              // non-obsolete version.
              CHECK_EQ(instrumentation_frame.method_->GetNonObsoleteMethod(),
                       GetMethod()->GetNonObsoleteMethod())
                  << "Expected: "
                  << ArtMethod::PrettyMethod(instrumentation_frame.method_->GetNonObsoleteMethod())
                  << " Found: " << ArtMethod::PrettyMethod(GetMethod()->GetNonObsoleteMethod());
            }
            if (num_frames_ != 0) {
              // Check agreement of frame Ids only if num_frames_ is computed to avoid infinite
              // recursion.
              size_t frame_id = instrumentation::Instrumentation::ComputeFrameId(
                  thread_,
                  cur_depth_,
                  inlined_frames_count);
              CHECK_EQ(instrumentation_frame.frame_id_, frame_id);
            }
            return_pc = instrumentation_frame.return_pc_;
          }
        }

        cur_quick_frame_pc_ = return_pc;
        uint8_t* next_frame = reinterpret_cast<uint8_t*>(cur_quick_frame_) + frame_size;
        cur_quick_frame_ = reinterpret_cast<ArtMethod**>(next_frame);

        if (kDebugStackWalk) {
          LOG(INFO) << ArtMethod::PrettyMethod(method) << "@" << method << " size=" << frame_size
              << std::boolalpha
              << " optimized=" << (cur_oat_quick_method_header_ != nullptr &&
                                   cur_oat_quick_method_header_->IsOptimized())
              << " native=" << method->IsNative()
              << std::noboolalpha
              << " entrypoints=" << method->GetEntryPointFromQuickCompiledCode()
              << "," << (method->IsNative() ? method->GetEntryPointFromJni() : nullptr)
              << " next=" << *cur_quick_frame_;
        }

        if (kCount == CountTransitions::kYes || !method->IsRuntimeMethod()) {
          cur_depth_++;
        }
        method = *cur_quick_frame_;
      }
    } else if (cur_shadow_frame_ != nullptr) {
      do {
        SanityCheckFrame();
        bool should_continue = VisitFrame();
        if (UNLIKELY(!should_continue)) {
          return;
        }
        cur_depth_++;
        cur_shadow_frame_ = cur_shadow_frame_->GetLink();
      } while (cur_shadow_frame_ != nullptr);
    }
    if (include_transitions) {
      bool should_continue = VisitFrame();
      if (!should_continue) {
        return;
      }
    }
    if (kCount == CountTransitions::kYes) {
      cur_depth_++;
    }
  }
  if (num_frames_ != 0) {
    CHECK_EQ(cur_depth_, num_frames_);
  }
}

template void StackVisitor::WalkStack<StackVisitor::CountTransitions::kYes>(bool);
template void StackVisitor::WalkStack<StackVisitor::CountTransitions::kNo>(bool);

}  // namespace art
958