/*
 * Copyright (C) 2011 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "stack.h"

#include "android-base/stringprintf.h"

#include "arch/context.h"
#include "art_method-inl.h"
#include "base/callee_save_type.h"
#include "base/enums.h"
#include "base/hex_dump.h"
#include "entrypoints/entrypoint_utils-inl.h"
#include "entrypoints/runtime_asm_entrypoints.h"
#include "gc/space/image_space.h"
#include "gc/space/space-inl.h"
#include "interpreter/shadow_frame.h"
#include "jit/jit.h"
#include "jit/jit_code_cache.h"
#include "linear_alloc.h"
#include "managed_stack.h"
#include "mirror/class-inl.h"
#include "mirror/object-inl.h"
#include "mirror/object_array-inl.h"
#include "oat_quick_method_header.h"
#include "quick/quick_method_frame_info.h"
#include "runtime.h"
#include "thread.h"
#include "thread_list.h"

namespace art {

using android::base::StringPrintf;

static constexpr bool kDebugStackWalk = false;

StackVisitor::StackVisitor(Thread* thread,
                           Context* context,
                           StackWalkKind walk_kind,
                           bool check_suspended)
    : StackVisitor(thread, context, walk_kind, 0, check_suspended) {}

StackVisitor::StackVisitor(Thread* thread,
                           Context* context,
                           StackWalkKind walk_kind,
                           size_t num_frames,
                           bool check_suspended)
    : thread_(thread),
      walk_kind_(walk_kind),
      cur_shadow_frame_(nullptr),
      cur_quick_frame_(nullptr),
      cur_quick_frame_pc_(0),
      cur_oat_quick_method_header_(nullptr),
      num_frames_(num_frames),
      cur_depth_(0),
      current_inlining_depth_(0),
      context_(context),
      check_suspended_(check_suspended) {
  if (check_suspended_) {
    DCHECK(thread == Thread::Current() || thread->IsSuspended()) << *thread;
  }
}

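// Returns the inline info recorded in the stack map covering `cur_quick_frame_pc`.
// Callers must only use this for PCs in optimized code that are covered by a valid
// stack map (DCHECKed below).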
static InlineInfo GetCurrentInlineInfo(const OatQuickMethodHeader* method_header,
                                       uintptr_t cur_quick_frame_pc)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  uint32_t native_pc_offset = method_header->NativeQuickPcOffset(cur_quick_frame_pc);
  CodeInfo code_info = method_header->GetOptimizedCodeInfo();
  CodeInfoEncoding encoding = code_info.ExtractEncoding();
  StackMap stack_map = code_info.GetStackMapForNativePcOffset(native_pc_offset, encoding);
  DCHECK(stack_map.IsValid());
  return code_info.GetInlineInfoOf(stack_map, encoding);
}

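// Returns the method of the frame the visitor currently points at: the shadow frame's
// method for interpreted frames, the method resolved from the inline info for inlined
// frames, and otherwise the ArtMethod* stored at the top of the quick frame.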
ArtMethod* StackVisitor::GetMethod() const {
  if (cur_shadow_frame_ != nullptr) {
    return cur_shadow_frame_->GetMethod();
  } else if (cur_quick_frame_ != nullptr) {
    if (IsInInlinedFrame()) {
      size_t depth_in_stack_map = current_inlining_depth_ - 1;
      InlineInfo inline_info = GetCurrentInlineInfo(GetCurrentOatQuickMethodHeader(),
                                                    cur_quick_frame_pc_);
      const OatQuickMethodHeader* method_header = GetCurrentOatQuickMethodHeader();
      CodeInfoEncoding encoding = method_header->GetOptimizedCodeInfo().ExtractEncoding();
      MethodInfo method_info = method_header->GetOptimizedMethodInfo();
      DCHECK(walk_kind_ != StackWalkKind::kSkipInlinedFrames);
      return GetResolvedMethod(*GetCurrentQuickFrame(),
                               method_info,
                               inline_info,
                               encoding.inline_info.encoding,
                               depth_in_stack_map);
    } else {
      return *cur_quick_frame_;
    }
  }
  return nullptr;
}

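// Returns the dex pc of the current frame. For inlined frames the pc is read from the
// inline info; for other quick frames it is translated from the native pc via the method
// header. Returns DexFile::kDexNoIndex for quick frames without a method header (such as
// runtime method frames).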
uint32_t StackVisitor::GetDexPc(bool abort_on_failure) const {
  if (cur_shadow_frame_ != nullptr) {
    return cur_shadow_frame_->GetDexPC();
  } else if (cur_quick_frame_ != nullptr) {
    if (IsInInlinedFrame()) {
      size_t depth_in_stack_map = current_inlining_depth_ - 1;
      const OatQuickMethodHeader* method_header = GetCurrentOatQuickMethodHeader();
      CodeInfoEncoding encoding = method_header->GetOptimizedCodeInfo().ExtractEncoding();
      return GetCurrentInlineInfo(GetCurrentOatQuickMethodHeader(), cur_quick_frame_pc_).
          GetDexPcAtDepth(encoding.inline_info.encoding, depth_in_stack_map);
    } else if (cur_oat_quick_method_header_ == nullptr) {
      return DexFile::kDexNoIndex;
    } else {
      return cur_oat_quick_method_header_->ToDexPc(
          GetMethod(), cur_quick_frame_pc_, abort_on_failure);
    }
  } else {
    return 0;
  }
}

extern "C" mirror::Object* artQuickGetProxyThisObject(ArtMethod** sp)
    REQUIRES_SHARED(Locks::mutator_lock_);

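// Returns the `this` reference of the current frame, or null for static methods.
// Native methods keep `this` as the first entry of their HandleScope, proxy methods
// fetch it through a runtime entrypoint, and other methods read it from the first
// "in" vreg of the frame.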
mirror::Object* StackVisitor::GetThisObject() const {
  DCHECK_EQ(Runtime::Current()->GetClassLinker()->GetImagePointerSize(), kRuntimePointerSize);
  ArtMethod* m = GetMethod();
  if (m->IsStatic()) {
    return nullptr;
  } else if (m->IsNative()) {
    if (cur_quick_frame_ != nullptr) {
      HandleScope* hs = reinterpret_cast<HandleScope*>(
          reinterpret_cast<char*>(cur_quick_frame_) + sizeof(ArtMethod*));
      return hs->GetReference(0);
    } else {
      return cur_shadow_frame_->GetVRegReference(0);
    }
  } else if (m->IsProxyMethod()) {
    if (cur_quick_frame_ != nullptr) {
      return artQuickGetProxyThisObject(cur_quick_frame_);
    } else {
      return cur_shadow_frame_->GetVRegReference(0);
    }
  } else {
    const DexFile::CodeItem* code_item = m->GetCodeItem();
    if (code_item == nullptr) {
      UNIMPLEMENTED(ERROR) << "Failed to determine this object of abstract or proxy method: "
          << ArtMethod::PrettyMethod(m);
      return nullptr;
    } else {
      uint16_t reg = code_item->registers_size_ - code_item->ins_size_;
      uint32_t value = 0;
      bool success = GetVReg(m, reg, kReferenceVReg, &value);
      // We currently always guarantee the `this` object is live throughout the method.
      CHECK(success) << "Failed to read the this object in " << ArtMethod::PrettyMethod(m);
      return reinterpret_cast<mirror::Object*>(value);
    }
  }
}

size_t StackVisitor::GetNativePcOffset() const {
  DCHECK(!IsShadowFrame());
  return GetCurrentOatQuickMethodHeader()->NativeQuickPcOffset(cur_quick_frame_pc_);
}

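// Checks whether the debugger has overridden the value of `vreg` for this frame via a
// debugger shadow frame; if so, stores the overridden value in `*val` and returns true.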
bool StackVisitor::GetVRegFromDebuggerShadowFrame(uint16_t vreg,
                                                  VRegKind kind,
                                                  uint32_t* val) const {
  size_t frame_id = const_cast<StackVisitor*>(this)->GetFrameId();
  ShadowFrame* shadow_frame = thread_->FindDebuggerShadowFrame(frame_id);
  if (shadow_frame != nullptr) {
    bool* updated_vreg_flags = thread_->GetUpdatedVRegFlags(frame_id);
    DCHECK(updated_vreg_flags != nullptr);
    if (updated_vreg_flags[vreg]) {
      // Value is set by the debugger.
      if (kind == kReferenceVReg) {
        *val = static_cast<uint32_t>(reinterpret_cast<uintptr_t>(
            shadow_frame->GetVRegReference(vreg)));
      } else {
        *val = shadow_frame->GetVReg(vreg);
      }
      return true;
    }
  }
  // No value is set by the debugger.
  return false;
}

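// Reads the 32-bit value of dex register `vreg` in the current frame. Debugger
// overrides take priority; otherwise the value comes from the optimized code's stack
// maps (quick frames) or directly from the shadow frame (interpreted frames).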
bool StackVisitor::GetVReg(ArtMethod* m, uint16_t vreg, VRegKind kind, uint32_t* val) const {
  if (cur_quick_frame_ != nullptr) {
    DCHECK(context_ != nullptr);  // You can't reliably read registers without a context.
    DCHECK(m == GetMethod());
    // Check if there is a value set by the debugger.
    if (GetVRegFromDebuggerShadowFrame(vreg, kind, val)) {
      return true;
    }
    DCHECK(cur_oat_quick_method_header_->IsOptimized());
    return GetVRegFromOptimizedCode(m, vreg, kind, val);
  } else {
    DCHECK(cur_shadow_frame_ != nullptr);
    if (kind == kReferenceVReg) {
      *val = static_cast<uint32_t>(reinterpret_cast<uintptr_t>(
          cur_shadow_frame_->GetVRegReference(vreg)));
    } else {
      *val = cur_shadow_frame_->GetVReg(vreg);
    }
    return true;
  }
}

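// Reads a dex register from a compiled (optimized) frame by decoding the DexRegisterMap
// of the stack map at the current native pc. The register may live in a stack slot, in
// a machine register, or be a constant; kNone means the compiler recorded no location.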
bool StackVisitor::GetVRegFromOptimizedCode(ArtMethod* m, uint16_t vreg, VRegKind kind,
                                            uint32_t* val) const {
  DCHECK_EQ(m, GetMethod());
  const DexFile::CodeItem* code_item = m->GetCodeItem();
  DCHECK(code_item != nullptr) << m->PrettyMethod();  // Can't be null or how would we compile
                                                      // its instructions?
  uint16_t number_of_dex_registers = code_item->registers_size_;
  DCHECK_LT(vreg, code_item->registers_size_);
  const OatQuickMethodHeader* method_header = GetCurrentOatQuickMethodHeader();
  CodeInfo code_info = method_header->GetOptimizedCodeInfo();
  CodeInfoEncoding encoding = code_info.ExtractEncoding();

  uint32_t native_pc_offset = method_header->NativeQuickPcOffset(cur_quick_frame_pc_);
  StackMap stack_map = code_info.GetStackMapForNativePcOffset(native_pc_offset, encoding);
  DCHECK(stack_map.IsValid());
  size_t depth_in_stack_map = current_inlining_depth_ - 1;

  DexRegisterMap dex_register_map = IsInInlinedFrame()
      ? code_info.GetDexRegisterMapAtDepth(depth_in_stack_map,
                                           code_info.GetInlineInfoOf(stack_map, encoding),
                                           encoding,
                                           number_of_dex_registers)
      : code_info.GetDexRegisterMapOf(stack_map, encoding, number_of_dex_registers);

  if (!dex_register_map.IsValid()) {
    return false;
  }
  DexRegisterLocation::Kind location_kind =
      dex_register_map.GetLocationKind(vreg, number_of_dex_registers, code_info, encoding);
  switch (location_kind) {
    case DexRegisterLocation::Kind::kInStack: {
      const int32_t offset = dex_register_map.GetStackOffsetInBytes(vreg,
                                                                    number_of_dex_registers,
                                                                    code_info,
                                                                    encoding);
      const uint8_t* addr = reinterpret_cast<const uint8_t*>(cur_quick_frame_) + offset;
      *val = *reinterpret_cast<const uint32_t*>(addr);
      return true;
    }
    case DexRegisterLocation::Kind::kInRegister:
    case DexRegisterLocation::Kind::kInRegisterHigh:
    case DexRegisterLocation::Kind::kInFpuRegister:
    case DexRegisterLocation::Kind::kInFpuRegisterHigh: {
      uint32_t reg =
          dex_register_map.GetMachineRegister(vreg, number_of_dex_registers, code_info, encoding);
      return GetRegisterIfAccessible(reg, kind, val);
    }
    case DexRegisterLocation::Kind::kConstant:
      *val = dex_register_map.GetConstant(vreg, number_of_dex_registers, code_info, encoding);
      return true;
    case DexRegisterLocation::Kind::kNone:
      return false;
    default:
      LOG(FATAL)
          << "Unexpected location kind "
          << dex_register_map.GetLocationInternalKind(vreg,
                                                      number_of_dex_registers,
                                                      code_info,
                                                      encoding);
      UNREACHABLE();
  }
}

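// Reads a machine register from the context, remapping register numbers where the
// runtime ISA exposes 64-bit floating-point registers as 32-bit halves (x86 XMM,
// MIPS32 register pairs), and narrowing to the requested 32-bit half on 64-bit targets.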
bool StackVisitor::GetRegisterIfAccessible(uint32_t reg, VRegKind kind, uint32_t* val) const {
  const bool is_float = (kind == kFloatVReg) || (kind == kDoubleLoVReg) || (kind == kDoubleHiVReg);

  if (kRuntimeISA == InstructionSet::kX86 && is_float) {
    // X86 float registers are 64-bit and each XMM register is provided as two separate
    // 32-bit registers by the context.
    reg = (kind == kDoubleHiVReg) ? (2 * reg + 1) : (2 * reg);
  }

  // MIPS32 float registers are used as 64-bit (for MIPS32r2 it is pair
  // F(2n)-F(2n+1), and for MIPS32r6 it is 64-bit register F(2n)). When
  // accessing upper 32-bits from double, reg + 1 should be used.
  if ((kRuntimeISA == InstructionSet::kMips) && (kind == kDoubleHiVReg)) {
    DCHECK_ALIGNED(reg, 2);
    reg++;
  }

  if (!IsAccessibleRegister(reg, is_float)) {
    return false;
  }
  uintptr_t ptr_val = GetRegister(reg, is_float);
  const bool target64 = Is64BitInstructionSet(kRuntimeISA);
  if (target64) {
    const bool wide_lo = (kind == kLongLoVReg) || (kind == kDoubleLoVReg);
    const bool wide_hi = (kind == kLongHiVReg) || (kind == kDoubleHiVReg);
    int64_t value_long = static_cast<int64_t>(ptr_val);
    if (wide_lo) {
      ptr_val = static_cast<uintptr_t>(Low32Bits(value_long));
    } else if (wide_hi) {
      ptr_val = static_cast<uintptr_t>(High32Bits(value_long));
    }
  }
  *val = ptr_val;
  return true;
}

bool StackVisitor::GetVRegPairFromDebuggerShadowFrame(uint16_t vreg,
                                                      VRegKind kind_lo,
                                                      VRegKind kind_hi,
                                                      uint64_t* val) const {
  uint32_t low_32bits;
  uint32_t high_32bits;
  bool success = GetVRegFromDebuggerShadowFrame(vreg, kind_lo, &low_32bits);
  success &= GetVRegFromDebuggerShadowFrame(vreg + 1, kind_hi, &high_32bits);
  if (success) {
    *val = (static_cast<uint64_t>(high_32bits) << 32) | static_cast<uint64_t>(low_32bits);
  }
  return success;
}

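// Reads a 64-bit (long or double) value stored as two consecutive 32-bit vregs and
// reassembles the halves into `*val`.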
bool StackVisitor::GetVRegPair(ArtMethod* m, uint16_t vreg, VRegKind kind_lo,
                               VRegKind kind_hi, uint64_t* val) const {
  if (kind_lo == kLongLoVReg) {
    DCHECK_EQ(kind_hi, kLongHiVReg);
  } else if (kind_lo == kDoubleLoVReg) {
    DCHECK_EQ(kind_hi, kDoubleHiVReg);
  } else {
    LOG(FATAL) << "Expected long or double: kind_lo=" << kind_lo << ", kind_hi=" << kind_hi;
    UNREACHABLE();
  }
  // Check if there is a value set by the debugger.
  if (GetVRegPairFromDebuggerShadowFrame(vreg, kind_lo, kind_hi, val)) {
    return true;
  }
  if (cur_quick_frame_ != nullptr) {
    DCHECK(context_ != nullptr);  // You can't reliably read registers without a context.
    DCHECK(m == GetMethod());
    DCHECK(cur_oat_quick_method_header_->IsOptimized());
    return GetVRegPairFromOptimizedCode(m, vreg, kind_lo, kind_hi, val);
  } else {
    DCHECK(cur_shadow_frame_ != nullptr);
    *val = cur_shadow_frame_->GetVRegLong(vreg);
    return true;
  }
}

bool StackVisitor::GetVRegPairFromOptimizedCode(ArtMethod* m, uint16_t vreg,
                                                VRegKind kind_lo, VRegKind kind_hi,
                                                uint64_t* val) const {
  uint32_t low_32bits;
  uint32_t high_32bits;
  bool success = GetVRegFromOptimizedCode(m, vreg, kind_lo, &low_32bits);
  success &= GetVRegFromOptimizedCode(m, vreg + 1, kind_hi, &high_32bits);
  if (success) {
    *val = (static_cast<uint64_t>(high_32bits) << 32) | static_cast<uint64_t>(low_32bits);
  }
  return success;
}

bool StackVisitor::GetRegisterPairIfAccessible(uint32_t reg_lo, uint32_t reg_hi,
                                               VRegKind kind_lo, uint64_t* val) const {
  const bool is_float = (kind_lo == kDoubleLoVReg);
  if (!IsAccessibleRegister(reg_lo, is_float) || !IsAccessibleRegister(reg_hi, is_float)) {
    return false;
  }
  uintptr_t ptr_val_lo = GetRegister(reg_lo, is_float);
  uintptr_t ptr_val_hi = GetRegister(reg_hi, is_float);
  bool target64 = Is64BitInstructionSet(kRuntimeISA);
  if (target64) {
    int64_t value_long_lo = static_cast<int64_t>(ptr_val_lo);
    int64_t value_long_hi = static_cast<int64_t>(ptr_val_hi);
    ptr_val_lo = static_cast<uintptr_t>(Low32Bits(value_long_lo));
    ptr_val_hi = static_cast<uintptr_t>(High32Bits(value_long_hi));
  }
  *val = (static_cast<uint64_t>(ptr_val_hi) << 32) | static_cast<uint32_t>(ptr_val_lo);
  return true;
}

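// Writes `new_value` into dex register `vreg`. For compiled frames the write goes into
// a debugger shadow frame that replaces the quick frame when the stack is deoptimized,
// so the update only takes effect once the method continues in the interpreter.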
bool StackVisitor::SetVReg(ArtMethod* m,
                           uint16_t vreg,
                           uint32_t new_value,
                           VRegKind kind) {
  const DexFile::CodeItem* code_item = m->GetCodeItem();
  if (code_item == nullptr) {
    return false;
  }
  ShadowFrame* shadow_frame = GetCurrentShadowFrame();
  if (shadow_frame == nullptr) {
    // This is a compiled frame: we must prepare and update a shadow frame that will
    // be executed by the interpreter after deoptimization of the stack.
    const size_t frame_id = GetFrameId();
    const uint16_t num_regs = code_item->registers_size_;
    shadow_frame = thread_->FindOrCreateDebuggerShadowFrame(frame_id, num_regs, m, GetDexPc());
    CHECK(shadow_frame != nullptr);
    // Remember the vreg has been set for debugging and must not be overwritten by the
    // original value during deoptimization of the stack.
    thread_->GetUpdatedVRegFlags(frame_id)[vreg] = true;
  }
  if (kind == kReferenceVReg) {
    shadow_frame->SetVRegReference(vreg, reinterpret_cast<mirror::Object*>(new_value));
  } else {
    shadow_frame->SetVReg(vreg, new_value);
  }
  return true;
}

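// Pair variant of SetVReg: writes a 64-bit (long or double) value and marks both
// halves of the pair as updated by the debugger.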
bool StackVisitor::SetVRegPair(ArtMethod* m,
                               uint16_t vreg,
                               uint64_t new_value,
                               VRegKind kind_lo,
                               VRegKind kind_hi) {
  if (kind_lo == kLongLoVReg) {
    DCHECK_EQ(kind_hi, kLongHiVReg);
  } else if (kind_lo == kDoubleLoVReg) {
    DCHECK_EQ(kind_hi, kDoubleHiVReg);
  } else {
    LOG(FATAL) << "Expected long or double: kind_lo=" << kind_lo << ", kind_hi=" << kind_hi;
    UNREACHABLE();
  }
  const DexFile::CodeItem* code_item = m->GetCodeItem();
  if (code_item == nullptr) {
    return false;
  }
  ShadowFrame* shadow_frame = GetCurrentShadowFrame();
  if (shadow_frame == nullptr) {
    // This is a compiled frame: we must prepare for deoptimization (see SetVReg above).
    const size_t frame_id = GetFrameId();
    const uint16_t num_regs = code_item->registers_size_;
    shadow_frame = thread_->FindOrCreateDebuggerShadowFrame(frame_id, num_regs, m, GetDexPc());
    CHECK(shadow_frame != nullptr);
    // Remember the vreg pair has been set for debugging and must not be overwritten by the
    // original value during deoptimization of the stack.
    thread_->GetUpdatedVRegFlags(frame_id)[vreg] = true;
    thread_->GetUpdatedVRegFlags(frame_id)[vreg + 1] = true;
  }
  shadow_frame->SetVRegLong(vreg, new_value);
  return true;
}

bool StackVisitor::IsAccessibleGPR(uint32_t reg) const {
  DCHECK(context_ != nullptr);
  return context_->IsAccessibleGPR(reg);
}

uintptr_t* StackVisitor::GetGPRAddress(uint32_t reg) const {
  DCHECK(cur_quick_frame_ != nullptr) << "This is a quick frame routine";
  DCHECK(context_ != nullptr);
  return context_->GetGPRAddress(reg);
}

uintptr_t StackVisitor::GetGPR(uint32_t reg) const {
  DCHECK(cur_quick_frame_ != nullptr) << "This is a quick frame routine";
  DCHECK(context_ != nullptr);
  return context_->GetGPR(reg);
}

bool StackVisitor::IsAccessibleFPR(uint32_t reg) const {
  DCHECK(context_ != nullptr);
  return context_->IsAccessibleFPR(reg);
}

uintptr_t StackVisitor::GetFPR(uint32_t reg) const {
  DCHECK(cur_quick_frame_ != nullptr) << "This is a quick frame routine";
  DCHECK(context_ != nullptr);
  return context_->GetFPR(reg);
}

uintptr_t StackVisitor::GetReturnPc() const {
  uint8_t* sp = reinterpret_cast<uint8_t*>(GetCurrentQuickFrame());
  DCHECK(sp != nullptr);
  uint8_t* pc_addr = sp + GetCurrentQuickFrameInfo().GetReturnPcOffset();
  return *reinterpret_cast<uintptr_t*>(pc_addr);
}

void StackVisitor::SetReturnPc(uintptr_t new_ret_pc) {
  uint8_t* sp = reinterpret_cast<uint8_t*>(GetCurrentQuickFrame());
  CHECK(sp != nullptr);
  uint8_t* pc_addr = sp + GetCurrentQuickFrameInfo().GetReturnPcOffset();
  *reinterpret_cast<uintptr_t*>(pc_addr) = new_ret_pc;
}

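// Counts the frames on `thread`'s stack by running a minimal visitor over it.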
size_t StackVisitor::ComputeNumFrames(Thread* thread, StackWalkKind walk_kind) {
  struct NumFramesVisitor : public StackVisitor {
    NumFramesVisitor(Thread* thread_in, StackWalkKind walk_kind_in)
        : StackVisitor(thread_in, nullptr, walk_kind_in), frames(0) {}

    bool VisitFrame() OVERRIDE {
      frames++;
      return true;
    }

    size_t frames;
  };
  NumFramesVisitor visitor(thread, walk_kind);
  visitor.WalkStack(true);
  return visitor.frames;
}

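// Finds the first non-runtime method (and its dex pc) in the caller frames above the
// frame at the visitor's current height. Returns false if no such frame exists.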
bool StackVisitor::GetNextMethodAndDexPc(ArtMethod** next_method, uint32_t* next_dex_pc) {
  struct HasMoreFramesVisitor : public StackVisitor {
    HasMoreFramesVisitor(Thread* thread,
                         StackWalkKind walk_kind,
                         size_t num_frames,
                         size_t frame_height)
        : StackVisitor(thread, nullptr, walk_kind, num_frames),
          frame_height_(frame_height),
          found_frame_(false),
          has_more_frames_(false),
          next_method_(nullptr),
          next_dex_pc_(0) {
    }

    bool VisitFrame() OVERRIDE REQUIRES_SHARED(Locks::mutator_lock_) {
      if (found_frame_) {
        ArtMethod* method = GetMethod();
        if (method != nullptr && !method->IsRuntimeMethod()) {
          has_more_frames_ = true;
          next_method_ = method;
          next_dex_pc_ = GetDexPc();
          return false;  // End stack walk once next method is found.
        }
      } else if (GetFrameHeight() == frame_height_) {
        found_frame_ = true;
      }
      return true;
    }

    size_t frame_height_;
    bool found_frame_;
    bool has_more_frames_;
    ArtMethod* next_method_;
    uint32_t next_dex_pc_;
  };
  HasMoreFramesVisitor visitor(thread_, walk_kind_, GetNumFrames(), GetFrameHeight());
  visitor.WalkStack(true);
  *next_method = visitor.next_method_;
  *next_dex_pc = visitor.next_dex_pc_;
  return visitor.has_more_frames_;
}

void StackVisitor::DescribeStack(Thread* thread) {
  struct DescribeStackVisitor : public StackVisitor {
    explicit DescribeStackVisitor(Thread* thread_in)
        : StackVisitor(thread_in, nullptr, StackVisitor::StackWalkKind::kIncludeInlinedFrames) {}

    bool VisitFrame() OVERRIDE REQUIRES_SHARED(Locks::mutator_lock_) {
      LOG(INFO) << "Frame Id=" << GetFrameId() << " " << DescribeLocation();
      return true;
    }
  };
  DescribeStackVisitor visitor(thread);
  visitor.WalkStack(true);
}

std::string StackVisitor::DescribeLocation() const {
  std::string result("Visiting method '");
  ArtMethod* m = GetMethod();
  if (m == nullptr) {
    return "upcall";
  }
  result += m->PrettyMethod();
  result += StringPrintf("' at dex PC 0x%04x", GetDexPc());
  if (!IsShadowFrame()) {
    result += StringPrintf(" (native PC %p)", reinterpret_cast<void*>(GetCurrentQuickFramePc()));
  }
  return result;
}

void StackVisitor::SetMethod(ArtMethod* method) {
  DCHECK(GetMethod() != nullptr);
  if (cur_shadow_frame_ != nullptr) {
    cur_shadow_frame_->SetMethod(method);
  } else {
    DCHECK(cur_quick_frame_ != nullptr);
    CHECK(!IsInInlinedFrame()) << "We do not support setting inlined method's ArtMethod!";
    *cur_quick_frame_ = method;
  }
}

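// Debug check that `pc` actually falls inside the compiled code of `method`, after
// excluding the cases where it legitimately cannot: native/runtime/proxy methods,
// JIT-compiled code, and the various trampolines (instrumentation, resolution,
// interpreter bridge, obsolete-method stub).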
static void AssertPcIsWithinQuickCode(ArtMethod* method, uintptr_t pc)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  if (method->IsNative() || method->IsRuntimeMethod() || method->IsProxyMethod()) {
    return;
  }

  if (pc == reinterpret_cast<uintptr_t>(GetQuickInstrumentationExitPc())) {
    return;
  }

  Runtime* runtime = Runtime::Current();
  if (runtime->UseJitCompilation() &&
      runtime->GetJit()->GetCodeCache()->ContainsPc(reinterpret_cast<const void*>(pc))) {
    return;
  }

  const void* code = method->GetEntryPointFromQuickCompiledCode();
  if (code == GetQuickInstrumentationEntryPoint() || code == GetInvokeObsoleteMethodStub()) {
    return;
  }

  ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
  if (class_linker->IsQuickToInterpreterBridge(code) ||
      class_linker->IsQuickResolutionStub(code)) {
    return;
  }

  if (runtime->UseJitCompilation() && runtime->GetJit()->GetCodeCache()->ContainsPc(code)) {
    return;
  }

  uint32_t code_size = OatQuickMethodHeader::FromEntryPoint(code)->GetCodeSize();
  uintptr_t code_start = reinterpret_cast<uintptr_t>(code);
  CHECK(code_start <= pc && pc <= (code_start + code_size))
      << method->PrettyMethod()
      << " pc=" << std::hex << pc
      << " code_start=" << code_start
      << " code_size=" << code_size;
}

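// Debug-build consistency checks on the current frame: the method must live in a known
// allocation (linear alloc, a class loader's alloc, or an image space), the pc must lie
// in its code, and the frame size must be plausible.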
void StackVisitor::SanityCheckFrame() const {
  if (kIsDebugBuild) {
    ArtMethod* method = GetMethod();
    mirror::Class* declaring_class = method->GetDeclaringClass();
    // Runtime methods have null declaring class.
    if (!method->IsRuntimeMethod()) {
      CHECK(declaring_class != nullptr);
      CHECK_EQ(declaring_class->GetClass(), declaring_class->GetClass()->GetClass())
          << declaring_class;
    } else {
      CHECK(declaring_class == nullptr);
    }
    Runtime* const runtime = Runtime::Current();
    LinearAlloc* const linear_alloc = runtime->GetLinearAlloc();
    if (!linear_alloc->Contains(method)) {
      // Check class linker linear allocs.
      // We get the canonical method as copied methods may have their declaring
      // class from another class loader.
      ArtMethod* canonical = method->GetCanonicalMethod();
      mirror::Class* klass = canonical->GetDeclaringClass();
      LinearAlloc* const class_linear_alloc = (klass != nullptr)
          ? runtime->GetClassLinker()->GetAllocatorForClassLoader(klass->GetClassLoader())
          : linear_alloc;
      if (!class_linear_alloc->Contains(canonical)) {
        // Check image space.
        bool in_image = false;
        for (auto& space : runtime->GetHeap()->GetContinuousSpaces()) {
          if (space->IsImageSpace()) {
            auto* image_space = space->AsImageSpace();
            const auto& header = image_space->GetImageHeader();
            const ImageSection& methods = header.GetMethodsSection();
            const ImageSection& runtime_methods = header.GetRuntimeMethodsSection();
            const size_t offset =
                reinterpret_cast<const uint8_t*>(canonical) - image_space->Begin();
            if (methods.Contains(offset) || runtime_methods.Contains(offset)) {
              in_image = true;
              break;
            }
          }
        }
        CHECK(in_image) << canonical->PrettyMethod() << " not in linear alloc or image";
      }
    }
    if (cur_quick_frame_ != nullptr) {
      AssertPcIsWithinQuickCode(method, cur_quick_frame_pc_);
      // Frame sanity.
      size_t frame_size = GetCurrentQuickFrameInfo().FrameSizeInBytes();
      CHECK_NE(frame_size, 0u);
      // A rough guess at an upper size we expect to see for a frame.
      // 256 registers
      // 2 words HandleScope overhead
      // 3+3 register spills
      // TODO: this seems architecture specific for the case of JNI frames.
      // TODO: 083-compiler-regressions ManyFloatArgs shows this estimate is wrong.
      // const size_t kMaxExpectedFrameSize = (256 + 2 + 3 + 3) * sizeof(word);
      const size_t kMaxExpectedFrameSize = 2 * KB;
      CHECK_LE(frame_size, kMaxExpectedFrameSize) << method->PrettyMethod();
      size_t return_pc_offset = GetCurrentQuickFrameInfo().GetReturnPcOffset();
      CHECK_LT(return_pc_offset, frame_size);
    }
  }
}

// Counts the number of references in the parameter list of the corresponding method.
// Note: this does _not_ include the "this" reference for non-static methods.
static uint32_t GetNumberOfReferenceArgsWithoutReceiver(ArtMethod* method)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  uint32_t shorty_len;
  const char* shorty = method->GetShorty(&shorty_len);
  uint32_t refs = 0;
  // Shorty index 0 is the return type, so parameters start at index 1.
  for (uint32_t i = 1; i < shorty_len; ++i) {
    if (shorty[i] == 'L') {
      refs++;
    }
  }
  return refs;
}

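// Returns the frame layout of the current quick frame. Methods without an
// OatQuickMethodHeader (abstract methods, runtime methods, and native methods running
// through the generic JNI stub) get a layout derived from the matching callee-save
// frame instead.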
QuickMethodFrameInfo StackVisitor::GetCurrentQuickFrameInfo() const {
  if (cur_oat_quick_method_header_ != nullptr) {
    return cur_oat_quick_method_header_->GetFrameInfo();
  }

  ArtMethod* method = GetMethod();
  Runtime* runtime = Runtime::Current();

  if (method->IsAbstract()) {
    return runtime->GetCalleeSaveMethodFrameInfo(CalleeSaveType::kSaveRefsAndArgs);
  }

  // This goes before IsProxyMethod since runtime methods have a null declaring class.
  if (method->IsRuntimeMethod()) {
    return runtime->GetRuntimeMethodFrameInfo(method);
  }

  if (method->IsProxyMethod()) {
    // There is only one direct method of a proxy class: the constructor. A direct method is
    // cloned from the original java.lang.reflect.Proxy and is executed as a usual quick
    // compiled method without any stubs. Therefore the method must have an
    // OatQuickMethodHeader.
    DCHECK(!method->IsDirect() && !method->IsConstructor())
        << "Constructors of proxy classes must have an OatQuickMethodHeader";
    return runtime->GetCalleeSaveMethodFrameInfo(CalleeSaveType::kSaveRefsAndArgs);
  }

  // The only remaining case is if the method is native and uses the generic JNI stub.
  DCHECK(method->IsNative());
  ClassLinker* class_linker = runtime->GetClassLinker();
  const void* entry_point = runtime->GetInstrumentation()->GetQuickCodeFor(method,
                                                                           kRuntimePointerSize);
  DCHECK(class_linker->IsQuickGenericJniStub(entry_point)) << method->PrettyMethod();
  // Generic JNI frame.
  uint32_t handle_refs = GetNumberOfReferenceArgsWithoutReceiver(method) + 1;
  size_t scope_size = HandleScope::SizeOf(handle_refs);
  QuickMethodFrameInfo callee_info =
      runtime->GetCalleeSaveMethodFrameInfo(CalleeSaveType::kSaveRefsAndArgs);

  // Callee saves + handle scope + method ref + alignment
  // Note: -sizeof(void*) since callee-save frame stores a whole method pointer.
  size_t frame_size = RoundUp(
      callee_info.FrameSizeInBytes() - sizeof(void*) + sizeof(ArtMethod*) + scope_size,
      kStackAlignment);
  return QuickMethodFrameInfo(frame_size, callee_info.CoreSpillMask(), callee_info.FpSpillMask());
}

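// Walks the thread's stack, fragment by fragment, calling VisitFrame() for every frame.
// Quick frames are unwound via their frame info, visiting inlined frames from the
// most-nested callee outwards before the enclosing frame; shadow frames are followed
// through their links. When instrumentation exit stubs are installed, the real return
// pc is recovered from the thread's instrumentation stack.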
template <StackVisitor::CountTransitions kCount>
void StackVisitor::WalkStack(bool include_transitions) {
  if (check_suspended_) {
    DCHECK(thread_ == Thread::Current() || thread_->IsSuspended());
  }
  CHECK_EQ(cur_depth_, 0U);
  bool exit_stubs_installed = Runtime::Current()->GetInstrumentation()->AreExitStubsInstalled();
  uint32_t instrumentation_stack_depth = 0;
  size_t inlined_frames_count = 0;

  for (const ManagedStack* current_fragment = thread_->GetManagedStack();
       current_fragment != nullptr; current_fragment = current_fragment->GetLink()) {
    cur_shadow_frame_ = current_fragment->GetTopShadowFrame();
    cur_quick_frame_ = current_fragment->GetTopQuickFrame();
    cur_quick_frame_pc_ = 0;
    cur_oat_quick_method_header_ = nullptr;

    if (cur_quick_frame_ != nullptr) {  // Handle quick stack frames.
      // Can't be both a shadow and a quick fragment.
      DCHECK(current_fragment->GetTopShadowFrame() == nullptr);
      ArtMethod* method = *cur_quick_frame_;
      while (method != nullptr) {
        cur_oat_quick_method_header_ = method->GetOatQuickMethodHeader(cur_quick_frame_pc_);
        SanityCheckFrame();

        if ((walk_kind_ == StackWalkKind::kIncludeInlinedFrames)
            && (cur_oat_quick_method_header_ != nullptr)
            && cur_oat_quick_method_header_->IsOptimized()) {
          CodeInfo code_info = cur_oat_quick_method_header_->GetOptimizedCodeInfo();
          CodeInfoEncoding encoding = code_info.ExtractEncoding();
          uint32_t native_pc_offset =
              cur_oat_quick_method_header_->NativeQuickPcOffset(cur_quick_frame_pc_);
          StackMap stack_map = code_info.GetStackMapForNativePcOffset(native_pc_offset, encoding);
          if (stack_map.IsValid() && stack_map.HasInlineInfo(encoding.stack_map.encoding)) {
            InlineInfo inline_info = code_info.GetInlineInfoOf(stack_map, encoding);
            DCHECK_EQ(current_inlining_depth_, 0u);
            for (current_inlining_depth_ = inline_info.GetDepth(encoding.inline_info.encoding);
                 current_inlining_depth_ != 0;
                 --current_inlining_depth_) {
              bool should_continue = VisitFrame();
              if (UNLIKELY(!should_continue)) {
                return;
              }
              cur_depth_++;
              inlined_frames_count++;
            }
          }
        }

        bool should_continue = VisitFrame();
        if (UNLIKELY(!should_continue)) {
          return;
        }

        QuickMethodFrameInfo frame_info = GetCurrentQuickFrameInfo();
        if (context_ != nullptr) {
          context_->FillCalleeSaves(reinterpret_cast<uint8_t*>(cur_quick_frame_), frame_info);
        }
        // Compute PC for next stack frame from return PC.
        size_t frame_size = frame_info.FrameSizeInBytes();
        size_t return_pc_offset = frame_size - sizeof(void*);
        uint8_t* return_pc_addr = reinterpret_cast<uint8_t*>(cur_quick_frame_) + return_pc_offset;
        uintptr_t return_pc = *reinterpret_cast<uintptr_t*>(return_pc_addr);

        if (UNLIKELY(exit_stubs_installed)) {
          // While profiling, the return pc is restored from the side stack, except when walking
          // the stack for an exception where the side stack will be unwound in VisitFrame.
          if (reinterpret_cast<uintptr_t>(GetQuickInstrumentationExitPc()) == return_pc) {
            CHECK_LT(instrumentation_stack_depth, thread_->GetInstrumentationStack()->size());
            const instrumentation::InstrumentationStackFrame& instrumentation_frame =
                thread_->GetInstrumentationStack()->at(instrumentation_stack_depth);
            instrumentation_stack_depth++;
            if (GetMethod() ==
                Runtime::Current()->GetCalleeSaveMethod(CalleeSaveType::kSaveAllCalleeSaves)) {
              // Skip runtime save all callee frames which are used to deliver exceptions.
            } else if (instrumentation_frame.interpreter_entry_) {
              ArtMethod* callee =
                  Runtime::Current()->GetCalleeSaveMethod(CalleeSaveType::kSaveRefsAndArgs);
              CHECK_EQ(GetMethod(), callee) << "Expected: " << ArtMethod::PrettyMethod(callee)
                                            << " Found: " << ArtMethod::PrettyMethod(GetMethod());
            } else {
              // Instrumentation generally doesn't distinguish between a method's obsolete and
              // non-obsolete version.
              CHECK_EQ(instrumentation_frame.method_->GetNonObsoleteMethod(),
                       GetMethod()->GetNonObsoleteMethod())
                  << "Expected: "
                  << ArtMethod::PrettyMethod(instrumentation_frame.method_->GetNonObsoleteMethod())
                  << " Found: " << ArtMethod::PrettyMethod(GetMethod()->GetNonObsoleteMethod());
            }
            if (num_frames_ != 0) {
              // Check agreement of frame Ids only if num_frames_ is computed to avoid infinite
              // recursion.
              size_t frame_id = instrumentation::Instrumentation::ComputeFrameId(
                  thread_,
                  cur_depth_,
                  inlined_frames_count);
              CHECK_EQ(instrumentation_frame.frame_id_, frame_id);
            }
            return_pc = instrumentation_frame.return_pc_;
          }
        }

        cur_quick_frame_pc_ = return_pc;
        uint8_t* next_frame = reinterpret_cast<uint8_t*>(cur_quick_frame_) + frame_size;
        cur_quick_frame_ = reinterpret_cast<ArtMethod**>(next_frame);

        if (kDebugStackWalk) {
          LOG(INFO) << ArtMethod::PrettyMethod(method) << "@" << method << " size=" << frame_size
              << std::boolalpha
              << " optimized=" << (cur_oat_quick_method_header_ != nullptr &&
                                   cur_oat_quick_method_header_->IsOptimized())
              << " native=" << method->IsNative()
              << std::noboolalpha
              << " entrypoints=" << method->GetEntryPointFromQuickCompiledCode()
              << "," << (method->IsNative() ? method->GetEntryPointFromJni() : nullptr)
              << " next=" << *cur_quick_frame_;
        }

        if (kCount == CountTransitions::kYes || !method->IsRuntimeMethod()) {
          cur_depth_++;
        }
        method = *cur_quick_frame_;
      }
    } else if (cur_shadow_frame_ != nullptr) {
      do {
        SanityCheckFrame();
        bool should_continue = VisitFrame();
        if (UNLIKELY(!should_continue)) {
          return;
        }
        cur_depth_++;
        cur_shadow_frame_ = cur_shadow_frame_->GetLink();
      } while (cur_shadow_frame_ != nullptr);
    }
    if (include_transitions) {
      bool should_continue = VisitFrame();
      if (!should_continue) {
        return;
      }
    }
    if (kCount == CountTransitions::kYes) {
      cur_depth_++;
    }
  }
  if (num_frames_ != 0) {
    CHECK_EQ(cur_depth_, num_frames_);
  }
}

template void StackVisitor::WalkStack<StackVisitor::CountTransitions::kYes>(bool);
template void StackVisitor::WalkStack<StackVisitor::CountTransitions::kNo>(bool);

}  // namespace art