/**
 * Copyright (c) 2021-2022 Huawei Device Co., Ltd.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "compiler/code_info/code_info.h"
#include "runtime/include/stack_walker-inl.h"
#include "runtime/include/runtime.h"
#include "runtime/include/thread.h"
#include "runtime/include/panda_vm.h"
#include "libpandabase/mem/mem.h"
#include "runtime/interpreter/runtime_interface.h"

#include <iomanip>

namespace panda {

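// Creates a StackWalker positioned at the thread's top frame. In debug builds, when the
// verify-call-stack option is enabled, the whole call stack is verified first.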
StackWalker StackWalker::Create(const ManagedThread *thread, UnwindPolicy policy)
{
#ifndef NDEBUG
    ASSERT(thread->IsRuntimeCallEnabled());
    if (Runtime::GetOptions().IsVerifyCallStack()) {
        StackWalker(thread->GetCurrentFrame(), thread->IsCurrentFrameCompiled(),
                    thread->GetNativePc(), policy).Verify();
    }
#endif
    return StackWalker(thread->GetCurrentFrame(), thread->IsCurrentFrameCompiled(), thread->GetNativePc(), policy);
}

// NOLINTNEXTLINE(cppcoreguidelines-pro-type-member-init)
StackWalker::StackWalker(void *fp, bool is_frame_compiled, uintptr_t npc, UnwindPolicy policy) : policy_(policy)
{
    frame_ = GetTopFrameFromFp(fp, is_frame_compiled, npc);
    if (policy == UnwindPolicy::SKIP_INLINED) {
        inline_depth_ = -1;
    }
}

void StackWalker::Reset(const ManagedThread *thread)
{
    frame_ = GetTopFrameFromFp(thread->GetCurrentFrame(), thread->IsCurrentFrameCompiled(), thread->GetNativePc());
}

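// Determines the kind of the top frame: for compiled code it builds a CFrame (handling
// interpreter and bypass boundary frames), otherwise it returns the interpreter Frame as-is.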
/* static */
typename StackWalker::FrameVariant StackWalker::GetTopFrameFromFp(void *ptr, bool is_frame_compiled, uintptr_t npc)
{
    if (is_frame_compiled) {
        if (IsBoundaryFrame<FrameKind::INTERPRETER>(ptr)) {
            auto bp = GetPrevFromBoundary<FrameKind::INTERPRETER>(ptr);
            if (GetBoundaryFrameMethod<FrameKind::COMPILER>(bp) == BYPASS) {
                return CreateCFrame(GetPrevFromBoundary<FrameKind::COMPILER>(bp),
                                    GetReturnAddressFromBoundary<FrameKind::COMPILER>(bp),
                                    GetCalleeStackFromBoundary<FrameKind::COMPILER>(bp));
            }
            return CreateCFrame(GetPrevFromBoundary<FrameKind::INTERPRETER>(ptr),
                                GetReturnAddressFromBoundary<FrameKind::INTERPRETER>(ptr),
                                // NOLINTNEXTLINE(cppcoreguidelines-pro-bounds-pointer-arithmetic)
                                reinterpret_cast<SlotType *>(ptr) +
                                    BoundaryFrame<FrameKind::INTERPRETER>::CALLEES_OFFSET);  // NOLINT
        }
        return CreateCFrame(reinterpret_cast<SlotType *>(ptr), npc, nullptr);
    }
    return reinterpret_cast<Frame *>(ptr);
}

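// Returns the method of the current frame. For compiled frames the method is resolved through the
// stackmap; for inlined frames the stackmap may store either a panda-file id or a raw Method pointer.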
Method *StackWalker::GetMethod()
{
    ASSERT(HasFrame());
    if (!IsCFrame()) {
        return GetIFrame()->GetMethod();
    }
    auto &cframe = GetCFrame();
    if (!cframe.IsNative()) {
        ASSERT(stackmap_.IsValid());
        if (IsInlined()) {
            auto method_variant = code_info_.GetMethod(stackmap_, inline_depth_);
            if (std::holds_alternative<uint32_t>(method_variant)) {
                return Runtime::GetCurrent()->GetClassLinker()->GetMethod(
                    *cframe.GetMethod(), panda_file::File::EntityId(std::get<uint32_t>(method_variant)));
            }
            return reinterpret_cast<Method *>(std::get<void *>(method_variant));
        }
    }
    return cframe.GetMethod();
}

template <bool create>
StackWalker::CFrameType StackWalker::CreateCFrameForC2IBridge(Frame *frame)
{
    auto prev = GetPrevFromBoundary<FrameKind::INTERPRETER>(frame);
    ASSERT(GetBoundaryFrameMethod<FrameKind::COMPILER>(prev) != FrameBridgeKind::BYPASS);
    // NOLINTNEXTLINE(readability-braces-around-statements, bugprone-suspicious-semicolon)
    if constexpr (create) {
        return CreateCFrame(reinterpret_cast<SlotType *>(prev),
                            GetReturnAddressFromBoundary<FrameKind::INTERPRETER>(frame),
                            GetCalleeStackFromBoundary<FrameKind::INTERPRETER>(frame));
    }
    return CFrameType(prev);
}

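// Builds a CFrame descriptor for a compiled frame: resolves its code entry (regular, OSR or
// deoptimization backup), locates the stackmap for the given native pc and initializes the
// callee-saved register buffer.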
StackWalker::CFrameType StackWalker::CreateCFrame(SlotType *ptr, uintptr_t npc, SlotType *callee_slots,
                                                  CalleeStorage *prev_callees)
{
    CFrameType cframe(ptr);
    if (cframe.IsNativeMethod()) {
        return cframe;
    }
    const void *code_entry;
    if (cframe.IsOsr()) {
        code_entry = Thread::GetCurrent()->GetVM()->GetCompiler()->GetOsrCode(cframe.GetMethod());
    } else if (cframe.ShouldDeoptimize()) {
        // When a method is deoptimized due to a speculation failure, the regular code entry becomes invalid,
        // so we read the entry from a special backup field in the frame.
        code_entry = cframe.GetDeoptCodeEntry();
    } else {
        code_entry = cframe.GetMethod()->GetCompiledEntryPoint();
    }
    new (&code_info_) CodeInfo(CodeInfo::GetCodeOriginFromEntryPoint(code_entry));
    // StackOverflow stackmap has zero address
    if (npc == 0) {
        stackmap_ = code_info_.FindStackMapForNativePc(npc);
    } else {
        auto code = reinterpret_cast<uintptr_t>(code_info_.GetCode());
        CHECK_GT(npc, code);
        CHECK_LT(npc - code, std::numeric_limits<uint32_t>::max());
        stackmap_ = code_info_.FindStackMapForNativePc(npc - code);
    }
    // NOLINTNEXTLINE(cppcoreguidelines-pro-bounds-pointer-arithmetic)
    ASSERT_PRINT(
        stackmap_.IsValid(), "Stackmap not found "
                                 << cframe.GetMethod()->GetFullName() << ": npc=0x" << std::hex << npc << ", code=["
                                 << reinterpret_cast<const void *>(code_info_.GetCode()) << ".."
                                 << reinterpret_cast<const void *>(code_info_.GetCode() + code_info_.GetCodeSize())
                                 << "]" << std::dec);
    callee_stack_.int_regs_mask = code_info_.GetHeader().GetCalleeRegMask();
    callee_stack_.fp_regs_mask = code_info_.GetHeader().GetCalleeFpRegMask();
    inline_depth_ = code_info_.GetInlineDepth(stackmap_);

    InitCalleeBuffer(callee_slots, prev_callees);

    return cframe;
}

/**
 * If all callees are saved then callee-saved regs are placed on the stack as follows:
 *
 * ---------------------  <-- callee_slots
 *  LastCalleeReg   (x28)
 * ---------------------  <-- callee_slots - 1
 *  ...
 * ---------------------
 *  FirstCalleeReg  (x19)
 * ---------------------  <-- callee_slots - CalleeRegsCount()
 *  LastCalleeFpReg (d15)
 * ---------------------
 *  ...
 * ---------------------
 *  FirstCalleeFpReg (d8)
 * ---------------------  <-- callee_slots - CalleeRegsCount() - CalleeFpRegsCount()
 *
 * If only the used callees are saved, then the space is still reserved for all callee registers,
 * but only the registers set in the mask are actually saved, with no gaps between them.
 *
 * Suppose the regs masks are as follows:
 *
 * int_regs_mask = 0x00980000 (1001 1000 0000 0000 0000 0000, i.e. x19, x20 and x23 must be saved)
 * fp_regs_mask  = 0x0,
 *
 * then we have the following layout:
 *
 * --------------------  <-- callee_slots
 *  (x23)
 * --------------------  <-- callee_slots - 1
 *  (x20)
 * --------------------  <-- callee_slots - 2
 *  (x19)
 * --------------------  <-- callee_slots - 3
 *  ...
 * --------------------
 *  (---)
 * --------------------  <-- callee_slots - CalleeIntRegsCount()
 *  ...
 * --------------------
 *  (---)
 * --------------------  <-- callee_slots - CalleeIntRegsCount() - CalleeFpRegsCount()
 */
void StackWalker::InitCalleeBuffer(SlotType *callee_slots, CalleeStorage *prev_callees)
{
    constexpr RegMask ArchIntRegsMask(panda::GetCalleeRegsMask(RUNTIME_ARCH, false));
    constexpr RegMask ArchFpRegsMask(panda::GetCalleeRegsMask(RUNTIME_ARCH, true));

    bool prev_is_native = IsCFrame() ? GetCFrame().IsNative() : false;
    if (callee_slots != nullptr || prev_callees != nullptr) {
        // Process scalar integer callee registers
        for (size_t reg = FirstCalleeIntReg(); reg <= LastCalleeIntReg(); reg++) {
            size_t offset = reg - FirstCalleeIntReg();
            if (prev_callees == nullptr || prev_is_native) {
                size_t slot = ArchIntRegsMask.GetDistanceFromHead(reg);
                // NOLINTNEXTLINE(cppcoreguidelines-pro-bounds-pointer-arithmetic)
                callee_stack_.stack[offset] = callee_slots - slot - 1;
            } else if (prev_callees->int_regs_mask.Test(reg)) {
                size_t slot = prev_callees->int_regs_mask.GetDistanceFromHead(reg);
                // NOLINTNEXTLINE(cppcoreguidelines-pro-bounds-pointer-arithmetic)
                callee_stack_.stack[offset] = callee_slots - slot - 1;
            } else {
                ASSERT(prev_callees->stack[offset] != nullptr);
                callee_stack_.stack[offset] = prev_callees->stack[offset];
            }
        }
        // Process SIMD and Floating-Point callee registers
        for (size_t reg = FirstCalleeFpReg(); reg <= LastCalleeFpReg(); reg++) {
            size_t offset = CalleeIntRegsCount() + reg - FirstCalleeFpReg();
            if (prev_callees == nullptr || prev_is_native) {
                size_t slot = ArchFpRegsMask.GetDistanceFromHead(reg);
                // NOLINTNEXTLINE(cppcoreguidelines-pro-bounds-pointer-arithmetic)
                callee_stack_.stack[offset] = callee_slots - CalleeIntRegsCount() - slot - 1;
            } else if (prev_callees->fp_regs_mask.Test(reg)) {
                size_t slot = prev_callees->fp_regs_mask.GetDistanceFromHead(reg);
                // NOLINTNEXTLINE(cppcoreguidelines-pro-bounds-pointer-arithmetic)
                callee_stack_.stack[offset] = callee_slots - CalleeIntRegsCount() - slot - 1;
            } else {
                ASSERT(prev_callees->stack[offset] != nullptr);
                callee_stack_.stack[offset] = prev_callees->stack[offset];
            }
        }
    }
}

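// Gathers the current values of all callee-saved registers into the deoptimization buffer
// (FP registers first, then integer registers), reading each value either from the current
// cframe's save area or from the location remembered in callee_stack_.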
StackWalker::CalleeRegsBuffer &StackWalker::GetCalleeRegsForDeoptimize()
{
    // Process scalar integer callee registers
    // NOLINTNEXTLINE(cppcoreguidelines-pro-bounds-pointer-arithmetic)
    SlotType *callee_src_slots = GetCFrame().GetCalleeSaveStack() - 1;
    SlotType *callee_dst_slots = &deopt_callee_regs_[CalleeFpRegsCount()];
    for (size_t reg = FirstCalleeIntReg(); reg <= LastCalleeIntReg(); reg++) {
        size_t offset = reg - FirstCalleeIntReg();
        if (callee_stack_.int_regs_mask.Test(reg)) {
            size_t slot = callee_stack_.int_regs_mask.GetDistanceFromHead(reg);
            // NOLINTNEXTLINE(cppcoreguidelines-pro-bounds-pointer-arithmetic)
            callee_dst_slots[offset] = *(callee_src_slots - slot);
        } else {
            // NOLINTNEXTLINE(cppcoreguidelines-pro-bounds-pointer-arithmetic)
            callee_dst_slots[offset] = *callee_stack_.stack[offset];
        }
    }
    // Process SIMD and Floating-Point callee registers
    // NOLINTNEXTLINE(cppcoreguidelines-pro-bounds-pointer-arithmetic)
    callee_src_slots = GetCFrame().GetCalleeSaveStack() - CalleeIntRegsCount() - 1;
    callee_dst_slots = deopt_callee_regs_.begin();
    for (size_t reg = FirstCalleeFpReg(); reg <= LastCalleeFpReg(); reg++) {
        size_t offset = reg - FirstCalleeFpReg();
        if (callee_stack_.fp_regs_mask.Test(reg)) {
            size_t slot = callee_stack_.fp_regs_mask.GetDistanceFromHead(reg);
            // NOLINTNEXTLINE(cppcoreguidelines-pro-bounds-pointer-arithmetic)
            callee_dst_slots[offset] = *(callee_src_slots - slot);
        } else {
            // NOLINTNEXTLINE(cppcoreguidelines-pro-bounds-pointer-arithmetic)
            callee_dst_slots[offset] = *callee_stack_.stack[CalleeIntRegsCount() + offset];
        }
    }

    return deopt_callee_regs_;
}

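// Reads the value of virtual register vreg_num in the current frame. For compiled frames the
// value is recovered from the stackmap's vreg list; for interpreter frames it is read directly.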
interpreter::VRegister StackWalker::GetVRegValue(size_t vreg_num)
{
    if (IsCFrame()) {
        // TODO(msherstennikov): we need to cache vregs_list within a single cframe
        auto vregs_list = code_info_.GetVRegList(stackmap_, inline_depth_,
                                                 mem::InternalAllocator<>::GetInternalAllocatorFromRuntime());
        ASSERT(vregs_list[vreg_num].GetIndex() == vreg_num);
        interpreter::VRegister vreg0;
        [[maybe_unused]] interpreter::VRegister vreg1;
        GetCFrame().GetVRegValue(vregs_list[vreg_num], code_info_, callee_stack_.stack.data(),
                                 interpreter::StaticVRegisterRef(&vreg0, &vreg1));
        return vreg0;
    }
    ASSERT(vreg_num < GetIFrame()->GetSize());
    return GetIFrame()->GetVReg(vreg_num);
}

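// Writes a new value into the virtual register described by reg_info, dispatching on the frame
// kind (compiled vs. interpreter) and, for compiled frames, on whether the method is dynamic.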
template <bool is_dynamic, typename T>
void StackWalker::SetVRegValue(VRegInfo reg_info, T value)
{
    if (IsCFrame()) {
        auto &cframe = GetCFrame();
        if (IsDynamicMethod()) {
            if constexpr (sizeof(T) == sizeof(uint64_t)) {  // NOLINT
                cframe.SetVRegValue<true>(reg_info, bit_cast<uint64_t>(value), callee_stack_.stack.data());
            } else {  // NOLINT
                static_assert(sizeof(T) == sizeof(uint32_t));
                cframe.SetVRegValue<true>(reg_info, static_cast<uint64_t>(bit_cast<uint32_t>(value)),
                                          callee_stack_.stack.data());
            }
        } else {
            if constexpr (sizeof(T) == sizeof(uint64_t)) {  // NOLINT
                cframe.SetVRegValue(reg_info, bit_cast<uint64_t>(value), callee_stack_.stack.data());
            } else {  // NOLINT
                static_assert(sizeof(T) == sizeof(uint32_t));
                cframe.SetVRegValue(reg_info, static_cast<uint64_t>(bit_cast<uint32_t>(value)),
                                    callee_stack_.stack.data());
            }
        }
    } else {
        auto vreg = GetFrameHandler<is_dynamic>(GetIFrame()).GetVReg(reg_info.GetIndex());
        if constexpr (std::is_same_v<T, ObjectHeader *>) {  // NOLINT
            ASSERT(vreg.HasObject() && "Trying to change a scalar variable by an object value");
            vreg.SetReference(value);
        } else {  // NOLINT
            ASSERT(!vreg.HasObject() && "Trying to change an object variable by a scalar value");
            vreg.Set(value);
        }
    }
}

template void StackWalker::SetVRegValue(VRegInfo reg_info, uint32_t value);
template void StackWalker::SetVRegValue(VRegInfo reg_info, int32_t value);
template void StackWalker::SetVRegValue(VRegInfo reg_info, uint64_t value);
template void StackWalker::SetVRegValue(VRegInfo reg_info, int64_t value);
template void StackWalker::SetVRegValue(VRegInfo reg_info, float value);
template void StackWalker::SetVRegValue(VRegInfo reg_info, double value);
template void StackWalker::SetVRegValue(VRegInfo reg_info, ObjectHeader *value);
template void StackWalker::SetVRegValue<true>(VRegInfo reg_info, uint32_t value);
template void StackWalker::SetVRegValue<true>(VRegInfo reg_info, int32_t value);
template void StackWalker::SetVRegValue<true>(VRegInfo reg_info, uint64_t value);
template void StackWalker::SetVRegValue<true>(VRegInfo reg_info, int64_t value);
template void StackWalker::SetVRegValue<true>(VRegInfo reg_info, float value);
template void StackWalker::SetVRegValue<true>(VRegInfo reg_info, double value);
template void StackWalker::SetVRegValue<true>(VRegInfo reg_info, ObjectHeader *value);

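// Advances the walker to the caller frame, dispatching on the kind of the current frame.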
void StackWalker::NextFrame()
{
    if (IsCFrame()) {
        NextFromCFrame();
    } else {
        NextFromIFrame();
    }
}

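// Unwinds from a compiled frame: first walks out of inlined frames (unless the policy skips
// them), then crosses I2C or bypass boundary frames or steps to the previous compiled frame.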
void StackWalker::NextFromCFrame()
{
    if (IsInlined()) {
        if (policy_ != UnwindPolicy::SKIP_INLINED) {
            inline_depth_--;
            return;
        }
        inline_depth_ = -1;
    }
    if (policy_ == UnwindPolicy::ONLY_INLINED) {
        frame_ = nullptr;
        return;
    }
    auto prev = GetCFrame().GetPrevFrame();
    if (prev == nullptr) {
        frame_ = nullptr;
        return;
    }
    auto frame_method = GetBoundaryFrameMethod<FrameKind::COMPILER>(prev);
    switch (frame_method) {
        case FrameBridgeKind::INTERPRETER_TO_COMPILED_CODE: {
            auto prev_frame = reinterpret_cast<Frame *>(GetPrevFromBoundary<FrameKind::COMPILER>(prev));
            if (prev_frame != nullptr && IsBoundaryFrame<FrameKind::INTERPRETER>(prev_frame)) {
                frame_ = CreateCFrameForC2IBridge<true>(prev_frame);
                break;
            }

            frame_ = reinterpret_cast<Frame *>(prev_frame);
            break;
        }
        case FrameBridgeKind::BYPASS: {
            auto prev_frame = reinterpret_cast<Frame *>(GetPrevFromBoundary<FrameKind::COMPILER>(prev));
            if (prev_frame != nullptr && IsBoundaryFrame<FrameKind::INTERPRETER>(prev_frame)) {
                frame_ = CreateCFrameForC2IBridge<true>(prev_frame);
                break;
            }
            frame_ = CreateCFrame(reinterpret_cast<SlotType *>(GetPrevFromBoundary<FrameKind::COMPILER>(prev)),
                                  GetReturnAddressFromBoundary<FrameKind::COMPILER>(prev),
                                  GetCalleeStackFromBoundary<FrameKind::COMPILER>(prev));
            break;
        }
        default:
            prev_callee_stack_ = callee_stack_;
            frame_ = CreateCFrame(reinterpret_cast<SlotType *>(prev), GetCFrame().GetLr(),
                                  GetCFrame().GetCalleeSaveStack(), &prev_callee_stack_);
            break;
    }
}

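// Unwinds from an interpreter frame, converting boundary frames into CFrames where needed.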
void StackWalker::NextFromIFrame()
{
    if (policy_ == UnwindPolicy::ONLY_INLINED) {
        frame_ = nullptr;
        return;
    }
    auto prev = GetIFrame()->GetPrevFrame();
    if (prev == nullptr) {
        frame_ = nullptr;
        return;
    }
    if (IsBoundaryFrame<FrameKind::INTERPRETER>(prev)) {
        auto bp = GetPrevFromBoundary<FrameKind::INTERPRETER>(prev);
        if (GetBoundaryFrameMethod<FrameKind::COMPILER>(bp) == BYPASS) {
            frame_ = CreateCFrame(GetPrevFromBoundary<FrameKind::COMPILER>(bp),
                                  GetReturnAddressFromBoundary<FrameKind::COMPILER>(bp),
                                  GetCalleeStackFromBoundary<FrameKind::COMPILER>(bp));
        } else {
            frame_ = CreateCFrameForC2IBridge<true>(prev);
        }
    } else {
        frame_ = reinterpret_cast<Frame *>(prev);
    }
}

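// Peeks at the caller frame without moving the walker; mirrors the logic of NextFrame()
// but returns a FrameAccessor instead of mutating the walker state.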
FrameAccessor StackWalker::GetNextFrame()
{
    if (IsCFrame()) {
        if (IsInlined()) {
            return FrameAccessor(frame_);
        }
        auto prev = GetCFrame().GetPrevFrame();
        if (prev == nullptr) {
            return FrameAccessor(nullptr);
        }
        auto frame_method = GetBoundaryFrameMethod<FrameKind::COMPILER>(prev);
        switch (frame_method) {
            case FrameBridgeKind::INTERPRETER_TO_COMPILED_CODE: {
                auto prev_frame = reinterpret_cast<Frame *>(GetPrevFromBoundary<FrameKind::COMPILER>(prev));
                if (prev_frame != nullptr && IsBoundaryFrame<FrameKind::INTERPRETER>(prev_frame)) {
                    return FrameAccessor(CreateCFrameForC2IBridge<false>(prev_frame));
                }
                return FrameAccessor(prev_frame);
            }
            case FrameBridgeKind::BYPASS: {
                auto prev_frame = reinterpret_cast<Frame *>(GetPrevFromBoundary<FrameKind::COMPILER>(prev));
                if (prev_frame != nullptr && IsBoundaryFrame<FrameKind::INTERPRETER>(prev_frame)) {
                    return FrameAccessor(CreateCFrameForC2IBridge<false>(prev_frame));
                }
                return FrameAccessor(
                    CFrameType(reinterpret_cast<SlotType *>(GetPrevFromBoundary<FrameKind::COMPILER>(prev))));
            }
            default:
                return FrameAccessor(CFrameType(reinterpret_cast<SlotType *>(prev)));
        }
    } else {
        auto prev = GetIFrame()->GetPrevFrame();
        if (prev == nullptr) {
            return FrameAccessor(nullptr);
        }
        if (IsBoundaryFrame<FrameKind::INTERPRETER>(prev)) {
            auto bp = GetPrevFromBoundary<FrameKind::INTERPRETER>(prev);
            if (GetBoundaryFrameMethod<FrameKind::COMPILER>(bp) == BYPASS) {
                return FrameAccessor(CreateCFrame(GetPrevFromBoundary<FrameKind::COMPILER>(bp),
                                                  GetReturnAddressFromBoundary<FrameKind::COMPILER>(bp),
                                                  GetCalleeStackFromBoundary<FrameKind::COMPILER>(bp)));
            }
            return FrameAccessor(CreateCFrameForC2IBridge<false>(prev));
        }
        return FrameAccessor(reinterpret_cast<Frame *>(prev));
    }
}

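// Reports whether the caller of the current frame is an interpreter frame, a compiled frame,
// or absent altogether.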
FrameKind StackWalker::GetPreviousFrameKind() const
{
    if (IsCFrame()) {
        auto prev = GetCFrame().GetPrevFrame();
        if (prev == nullptr) {
            return FrameKind::NONE;
        }
        if (IsBoundaryFrame<FrameKind::COMPILER>(prev)) {
            return FrameKind::INTERPRETER;
        }
        return FrameKind::COMPILER;
    }
    auto prev = GetIFrame()->GetPrevFrame();
    if (prev == nullptr) {
        return FrameKind::NONE;
    }
    if (IsBoundaryFrame<FrameKind::INTERPRETER>(prev)) {
        return FrameKind::COMPILER;
    }
    return FrameKind::INTERPRETER;
}

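// Checks whether the given previous frame is a compiler boundary frame, including the bypass
// case that arises when compiled code calls into the runtime (e.g. for class initialization).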
bool StackWalker::IsCompilerBoundFrame(SlotType *prev)
{
    if (IsBoundaryFrame<FrameKind::COMPILER>(prev)) {
        return true;
    }
    if (GetBoundaryFrameMethod<FrameKind::COMPILER>(prev) == FrameBridgeKind::BYPASS) {
        auto prev_frame = reinterpret_cast<Frame *>(GetPrevFromBoundary<FrameKind::COMPILER>(prev));
        // Case for clinit:
        // Compiled code -> C2I -> InitializeClass -> call clinit -> I2C -> compiled code for clinit
        if (prev_frame != nullptr && IsBoundaryFrame<FrameKind::INTERPRETER>(prev_frame)) {
            return true;
        }
    }

    return false;
}

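// Reconstructs an interpreter Frame from the current compiled frame (recursively for inlined
// frames), filling its virtual registers from the stackmap so that execution can be
// deoptimized and continued in the interpreter.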
Frame *StackWalker::ConvertToIFrame(FrameKind *prev_frame_kind, uint32_t *num_inlined_methods)
{
    if (!IsCFrame()) {
        return GetIFrame();
    }
    auto &cframe = GetCFrame();

    auto inline_depth = inline_depth_;
    bool is_invoke = false;

    void *prev_frame;
    if (IsInlined()) {
        inline_depth_--;
        *num_inlined_methods = *num_inlined_methods + 1;
        prev_frame = ConvertToIFrame(prev_frame_kind, num_inlined_methods);
    } else {
        auto prev = cframe.GetPrevFrame();
        if (prev == nullptr) {
            *prev_frame_kind = FrameKind::NONE;
            prev_frame = nullptr;
        } else {
            if (IsCompilerBoundFrame(prev)) {
                is_invoke = true;
                prev_frame = reinterpret_cast<Frame *>(
                    StackWalker::GetPrevFromBoundary<FrameKind::COMPILER>(cframe.GetPrevFrame()));
                if (prev_frame_kind != nullptr) {
                    *prev_frame_kind = FrameKind::INTERPRETER;
                }
            } else {
                prev_frame = cframe.GetPrevFrame();
                if (prev_frame_kind != nullptr) {
                    *prev_frame_kind = FrameKind::COMPILER;
                }
            }
        }
    }
    inline_depth_ = inline_depth;
    auto vreg_list =
        code_info_.GetVRegList(stackmap_, inline_depth_, mem::InternalAllocator<>::GetInternalAllocatorFromRuntime());

    auto method = GetMethod();
    Frame *frame;

    if (IsDynamicMethod()) {
        /* If rest arguments are used in a dynamic function, a managed object containing the actual
         * arguments is constructed in the prologue, so there is no need to reconstruct the rest
         * arguments here.
         */
        auto num_actual_args = method->GetNumArgs();
        /* If no arguments-keeping object is constructed on the execution path, the number of actual
         * args may be retrieved from the cframe.
         */

        size_t frame_num_vregs = method->GetNumVregs() + num_actual_args;
        frame = interpreter::RuntimeInterface::CreateFrameWithActualArgs<true>(frame_num_vregs, num_actual_args, method,
                                                                               reinterpret_cast<Frame *>(prev_frame));
        frame->SetDynamic();
        DynamicFrameHandler frame_handler(frame);
        for (size_t i = 0; i < vreg_list.size(); i++) {
            auto vreg = vreg_list[i];
            if (!vreg.IsLive()) {
                continue;
            }

            bool is_acc = i == (vreg_list.size() - 1);
            auto reg_ref = is_acc ? frame->GetAccAsVReg<true>() : frame_handler.GetVReg(i);
            GetCFrame().GetPackVRegValue(vreg, code_info_, callee_stack_.stack.data(), reg_ref);
        }
    } else {
        auto frame_num_vregs = method->GetNumVregs() + method->GetNumArgs();
        ASSERT((frame_num_vregs + 1) >= vreg_list.size());
        frame =
            interpreter::RuntimeInterface::CreateFrame(frame_num_vregs, method, reinterpret_cast<Frame *>(prev_frame));
        StaticFrameHandler frame_handler(frame);
        for (size_t i = 0; i < vreg_list.size(); i++) {
            auto vreg = vreg_list[i];
            if (!vreg.IsLive()) {
                continue;
            }

            bool is_acc = i == (vreg_list.size() - 1);
            auto reg_ref = is_acc ? frame->GetAccAsVReg() : frame_handler.GetVReg(i);
            GetCFrame().GetVRegValue(vreg, code_info_, callee_stack_.stack.data(), reg_ref);
        }
    }

    frame->SetDeoptimized();
    frame->SetBytecodeOffset(GetBytecodePc());
    if (is_invoke) {
        frame->SetInvoke();
    }

    auto context = ManagedThread::GetCurrent()->GetVM()->GetLanguageContext();
    context.DeoptimizeBegin(frame, inline_depth);

    return frame;
}

bool StackWalker::IsDynamicMethod() const
{
    // Dynamic methods may have no class
    return GetMethod()->GetClass() == nullptr ||
           panda::panda_file::IsDynamicLanguage(Runtime::GetCurrent()->GetLanguageContext(*GetMethod()).GetLanguage());
}

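// Debug-only consistency check: walks every frame and touches each virtual register and each
// root object, crashing with a stack dump if an object has a corrupted class pointer.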
void StackWalker::Verify()
{
    for (; HasFrame(); NextFrame()) {
#ifndef NDEBUG
        ASSERT(GetMethod() != nullptr);
        IterateVRegsWithInfo([this]([[maybe_unused]] const auto &reg_info, const auto &vreg) {
            if (reg_info.GetType() == compiler::VRegInfo::Type::ANY) {
                ASSERT(IsDynamicMethod());
                return true;
            }

            if (vreg.HasObject()) {
                // Use Frame::VRegister::HasObject() to detect objects
                ASSERT(reg_info.IsObject());
                if (ObjectHeader *object = vreg.GetReference(); object != nullptr) {
                    auto cls = object->ClassAddr<Class>();
                    if (!IsInObjectsAddressSpace(reinterpret_cast<uintptr_t>(cls)) || cls == nullptr) {
                        StackWalker::Create(ManagedThread::GetCurrent()).Dump(std::cerr, true);
                        LOG(FATAL, INTEROP) << "Wrong class " << cls << " for object " << object << "\n";
                    } else {
                        cls->GetName();
                    }
                }
            } else {
                ASSERT(!reg_info.IsObject());
                vreg.GetLong();
            }
            return true;
        });

        if (IsCFrame()) {
            IterateObjects([this](const auto &vreg) {
                if (IsDynamicMethod()) {
                    ASSERT(vreg.HasObject());
                    return true;
                }

                ASSERT(vreg.HasObject());
                if (ObjectHeader *object = vreg.GetReference(); object != nullptr) {
                    ASSERT(IsInObjectsAddressSpace(reinterpret_cast<uintptr_t>(object)));
                    auto cls = object->ClassAddr<Class>();
                    if (!IsInObjectsAddressSpace(reinterpret_cast<uintptr_t>(cls)) || cls == nullptr) {
                        StackWalker::Create(ManagedThread::GetCurrent()).Dump(std::cerr, true);
                        LOG(FATAL, INTEROP) << "Wrong class " << cls << " for object " << object << "\n";
                    } else {
                        cls->GetName();
                    }
                }
                return true;
            });
        }
#endif  // ifndef NDEBUG
    }
}

// Dump changes the StackWalker's internal state, which is why it may be called only
// on an rvalue reference.
void StackWalker::Dump(std::ostream &os, bool print_vregs /* = false */) &&
{
    [[maybe_unused]] static constexpr size_t WIDTH_INDEX = 4;
    [[maybe_unused]] static constexpr size_t WIDTH_REG = 4;
    [[maybe_unused]] static constexpr size_t WIDTH_FRAME = 8;
    [[maybe_unused]] static constexpr size_t WIDTH_LOCATION = 12;
    [[maybe_unused]] static constexpr size_t WIDTH_TYPE = 20;

    size_t frame_index = 0;
    os << "Panda call stack:\n";
    for (; HasFrame(); NextFrame()) {
        os << std::setw(WIDTH_INDEX) << std::setfill(' ') << std::right << std::dec << frame_index << ": "
           << std::setfill('0');
        os << std::setw(WIDTH_FRAME) << std::hex;
        os << (IsCFrame() ? reinterpret_cast<Frame *>(GetCFrame().GetFrameOrigin()) : GetIFrame()) << " in ";
        DumpFrame(os);
        os << std::endl;
        if (print_vregs) {
            IterateVRegsWithInfo([this, &os](auto reg_info, const auto &vreg) {
                os << " " << std::setw(WIDTH_REG) << std::setfill(' ') << std::right
                   << (reg_info.IsAccumulator() ? "acc" : (std::string("v") + std::to_string(reg_info.GetIndex())));
                os << " = ";
                if (reg_info.GetType() == compiler::VRegInfo::Type::ANY) {
                    os << "0x";
                }
                os << std::left;
                os << std::setw(WIDTH_TYPE) << std::setfill(' ');
                switch (reg_info.GetType()) {
                    case compiler::VRegInfo::Type::INT64:
                    case compiler::VRegInfo::Type::INT32:
                        os << std::dec << vreg.GetLong();
                        break;
                    case compiler::VRegInfo::Type::FLOAT64:
                        os << vreg.GetDouble();
                        break;
                    case compiler::VRegInfo::Type::FLOAT32:
                        os << vreg.GetFloat();
                        break;
                    case compiler::VRegInfo::Type::BOOL:
                        os << (vreg.Get() ? "true" : "false");
                        break;
                    case compiler::VRegInfo::Type::OBJECT:
                        os << vreg.GetReference();
                        break;
                    case compiler::VRegInfo::Type::ANY: {
                        os << std::hex << static_cast<uint64_t>(vreg.GetValue());
                        break;
                    }
                    case compiler::VRegInfo::Type::UNDEFINED:
                        os << "undefined";
                        break;
                    default:
                        os << "unknown";
                        break;
                }
                os << std::setw(WIDTH_LOCATION) << std::setfill(' ') << reg_info.GetTypeString();  // NOLINT
                if (IsCFrame()) {
                    os << reg_info.GetLocationString() << ":" << std::dec << helpers::ToSigned(reg_info.GetValue());
                } else {
                    os << '-';
                }
                os << std::endl;
                return true;
            });
        }
        if (IsCFrame() && print_vregs) {
            os << "roots:";
            IterateObjectsWithInfo([&os](auto &reg_info, const auto &vreg) {
                ASSERT(vreg.HasObject());
                os << " " << reg_info.GetLocationString() << "[" << std::dec << reg_info.GetValue() << "]=" << std::hex
                   << vreg.GetReference();
                return true;
            });
            os << std::endl;
        }
        frame_index++;
    }
}

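// Prints a one-line description of the current frame: the method name plus its kind
// (native, compiled/osr/inlined, or managed) and, for compiled frames, the stackmap info.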
void StackWalker::DumpFrame(std::ostream &os)
{
    os << (IsDynamicMethod() ? "[dynamic function]" : GetMethod()->GetFullName());
    if (IsCFrame()) {
        if (GetCFrame().IsNative()) {
            os << " (native)";
        } else {
            os << " (compiled" << (GetCFrame().IsOsr() ? "/osr" : "") << ": npc=" << GetNativePc()
               << (IsInlined() ? ", inlined) " : ") ");
            if (IsInlined()) {
                code_info_.DumpInlineInfo(os, stackmap_, inline_depth_);
            } else {
                code_info_.Dump(os, stackmap_);
            }
        }

    } else {
        os << " (managed)";
    }
}

}  // namespace panda