/**
 * Copyright (c) 2021-2022 Huawei Device Co., Ltd.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef PANDA_CODE_INFO_H
#define PANDA_CODE_INFO_H

#include "code_info_tables.h"
#include "utils/bit_field.h"
#include "utils/bit_table.h"
#include "utils/cframe_layout.h"
#include "utils/small_vector.h"
#include "utils/span.h"

namespace panda::compiler {

/*
 * Compiled code layout:
 *
 * +-------------+
 * | CodePrefix  |
 * +-------------+ <- Method::CompiledCodeEntrypoint
 * | Code        |
 * +-------------+-----------------+
 * | CodeInfo    | CodeInfoHeader  |
 * |             +-----------------+----------------------+
 * |             |                 | StackMap             |
 * |             |                 | InlineInfo           |
 * |             |                 | Roots Reg Mask       |
 * |             |                 | Roots Stack Mask     |
 * |             |   Bit Tables    | Method indexes       |
 * |             |                 | VRegs mask           |
 * |             |                 | VRegs map            |
 * |             |                 | VRegs catalogue      |
 * |             |                 | Implicit Nullchecks  |
 * |             |                 | Constants            |
 * +-------------+-----------------+----------------------+
 */

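// CodePrefix is placed at the very beginning of the compiled code blob, in front of the
// machine code (see the layout above). The runtime uses `magic` to sanity-check a blob and
// `code_info_offset`/`code_info_size` to locate the CodeInfo section within it.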
struct CodePrefix {
    static constexpr uint32_t MAGIC = 0xaccadeca;
    uint32_t magic {MAGIC};
    uint32_t code_size {};
    uint32_t code_info_offset {};
    uint32_t code_info_size {};

    static constexpr size_t STRUCT_SIZE = 16;
};

static_assert(sizeof(CodePrefix) == CodePrefix::STRUCT_SIZE);

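// CodeInfoHeader holds per-method properties: the frame size, whether the method uses float
// registers, the callee-saved register masks, the mask of bit tables that are actually present
// and the number of virtual registers. It is serialized as a varint pack at the beginning of
// the CodeInfo section.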
class CodeInfoHeader {
public:
    enum Elements { PROPERTIES, CALLEE_REG_MASK, CALLEE_FP_REG_MASK, TABLE_MASK, VREGS_COUNT, SIZE };

    void SetFrameSize(uint32_t size)
    {
        ASSERT(MinimumBitsToStore(size) <= FRAME_SIZE_FIELD_WIDTH);
        FieldFrameSize::Set(size, &data_[PROPERTIES]);
    }
    uint32_t GetFrameSize() const
    {
        return FieldFrameSize::Get(data_[PROPERTIES]);
    }

    void SetCalleeRegMask(uint32_t value)
    {
        data_[CALLEE_REG_MASK] = value;
    }
    uint32_t GetCalleeRegMask() const
    {
        return data_[CALLEE_REG_MASK];
    }

    void SetCalleeFpRegMask(uint32_t value)
    {
        data_[CALLEE_FP_REG_MASK] = value;
    }
    uint32_t GetCalleeFpRegMask() const
    {
        return data_[CALLEE_FP_REG_MASK];
    }

    void SetTableMask(uint32_t value)
    {
        data_[TABLE_MASK] = value;
    }
    uint32_t GetTableMask() const
    {
        return data_[TABLE_MASK];
    }

    void SetVRegsCount(uint32_t value)
    {
        data_[VREGS_COUNT] = value;
    }
    uint32_t GetVRegsCount() const
    {
        return data_[VREGS_COUNT];
    }

    void SetHasFloatRegs(bool value)
    {
        HasFloatRegsFlag::Set(value, &data_[PROPERTIES]);
    }
    bool HasFloatRegs() const
    {
        return HasFloatRegsFlag::Get(data_[PROPERTIES]);
    }

    template <typename Container>
    void Encode(BitMemoryStreamOut<Container> &out)
    {
        VarintPack::Write(out, data_);
    }
    void Decode(BitMemoryStreamIn *in)
    {
        data_ = VarintPack::Read<SIZE>(in);
    }

private:
    std::array<uint32_t, SIZE> data_;

    static constexpr size_t FRAME_SIZE_FIELD_WIDTH = 16;
    using FieldFrameSize = BitField<uint32_t, 0, FRAME_SIZE_FIELD_WIDTH>;
    using HasFloatRegsFlag = FieldFrameSize::NextFlag;
};

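// CodeInfo provides read-only access to all the metadata emitted next to the compiled code:
// stack maps, inline infos, GC root masks, virtual-register tables, implicit null-check
// offsets and the constant pool. It does not own the underlying memory.
//
// A minimal usage sketch (hypothetical call site; assumes `entrypoint` is a compiled code
// entrypoint and `native_pc` is a pc offset inside that code):
//
//     CodeInfo code_info = CodeInfo::CreateFromCodeEntryPoint(entrypoint);
//     StackMap stack_map = code_info.FindStackMapForNativePc(native_pc);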
class CodeInfo final {
public:
    static constexpr size_t TABLES_COUNT = 10;
    static constexpr size_t VREG_LIST_STATIC_SIZE = 16;
    static constexpr size_t ALIGNMENT = sizeof(uint64_t);
    static constexpr size_t SIZE_ALIGNMENT = sizeof(uint64_t);

    template <typename Allocator>
    using VRegList = SmallVector<VRegInfo, VREG_LIST_STATIC_SIZE, Allocator, true>;

    NO_COPY_SEMANTIC(CodeInfo);
    NO_MOVE_SEMANTIC(CodeInfo);

    CodeInfo() = default;

    CodeInfo(const void *data, size_t size)
        : CodeInfo(Span<const uint8_t>(reinterpret_cast<const uint8_t *>(data), size))
    {
    }

    explicit CodeInfo(Span<const uint8_t> code) : CodeInfo(code.data())
    {
        ASSERT(GetDataSize() <= code.size());
    }

    explicit CodeInfo(Span<uint8_t> code) : CodeInfo(code.data())
    {
        ASSERT(GetDataSize() <= code.size());
    }

    explicit CodeInfo(const void *code_entry)
    {
        ASSERT(code_entry != nullptr);
        auto prefix = reinterpret_cast<const CodePrefix *>(code_entry);
        ASSERT(prefix->magic == CodePrefix::MAGIC);
        data_ = Span(reinterpret_cast<const uint8_t *>(code_entry), prefix->code_info_offset + prefix->code_info_size);
        auto code_info = Span<const uint8_t>(&data_[prefix->code_info_offset], prefix->code_info_size);
        Decode(code_info);
    }

    virtual ~CodeInfo() = default;

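    // The code entrypoint points at the first machine instruction, i.e. right after the
    // (alignment-padded) CodePrefix, so subtracting GetCodeOffset() yields the blob origin.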
    static const void *GetCodeOriginFromEntryPoint(const void *data)
    {
        return reinterpret_cast<const void *>(reinterpret_cast<uintptr_t>(data) -
                                              CodeInfo::GetCodeOffset(RUNTIME_ARCH));
    }

    static CodeInfo CreateFromCodeEntryPoint(const void *data)
    {
        ASSERT(data != nullptr);
        // NOLINTNEXTLINE(cppcoreguidelines-pro-bounds-pointer-arithmetic)
        return CodeInfo(reinterpret_cast<const uint8_t *>(data) - CodeInfo::GetCodeOffset(RUNTIME_ARCH));
    }

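    // Decodes the CodeInfoHeader and then each bit table whose bit is set in the table mask,
    // in the order defined by EnumerateTables().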
    void Decode(Span<const uint8_t> code_info)
    {
        BitMemoryStreamIn stream(const_cast<uint8_t *>(code_info.data()), code_info.size() * BITS_PER_BYTE);
        header_.Decode(&stream);
        EnumerateTables([this, &stream](size_t index, auto member) {
            if (HasTable(index)) {
                (this->*member).Decode(&stream);
            }
        });
    }

    const CodeInfoHeader &GetHeader() const
    {
        return header_;
    }
    CodeInfoHeader &GetHeader()
    {
        return header_;
    }

    const CodePrefix *GetPrefix() const
    {
        return reinterpret_cast<const CodePrefix *>(data_.data());
    }

    uint32_t GetFrameSize() const
    {
        return GetHeader().GetFrameSize();
    }

    const uint8_t *GetData()
    {
        return data_.data();
    }

    size_t GetDataSize()
    {
        return data_.size();
    }

    const uint8_t *GetCode() const
    {
        return &data_[CodeInfo::GetCodeOffset(RUNTIME_ARCH)];
    }

    size_t GetCodeSize() const
    {
        return GetPrefix()->code_size;
    }

    Span<const uint8_t> GetCodeSpan() const
    {
        return {&data_[CodeInfo::GetCodeOffset(RUNTIME_ARCH)], GetCodeSize()};
    }

    size_t GetInfoSize() const
    {
        return GetPrefix()->code_info_size;
    }

    bool HasTable(size_t index) const
    {
        return (GetHeader().GetTableMask() & (1U << index)) != 0;
    }

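    // Returns either the resolved method pointer (when the inline info stores the method
    // address split into low/high 32-bit halves) or the method id to be resolved lazily.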
    std::variant<void *, uint32_t> GetMethod(const StackMap &stackmap, int inline_depth)
    {
        ASSERT(inline_depth >= 0);
        auto inline_info = inline_infos_.GetRow(stackmap.GetInlineInfoIndex() + inline_depth);
        if (inline_info.HasMethodLow()) {
            if constexpr (ArchTraits<RUNTIME_ARCH>::IS_64_BITS) {
                uintptr_t val =
                    inline_info.GetMethodLow() | (static_cast<uint64_t>(inline_info.GetMethodHi()) << BITS_PER_UINT32);
                return reinterpret_cast<void *>(val);
            } else {
                return reinterpret_cast<void *>(inline_info.GetMethodLow());
            }
        }
        return method_ids_.GetRow(inline_info.GetMethodIdIndex()).GetId();
    }

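    // 64-bit constants are stored as two 32-bit rows in the constant table; the vreg holds
    // the indexes of the low and high halves.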
    uint64_t GetConstant(const VRegInfo &vreg) const
    {
        ASSERT(vreg.GetLocation() == VRegInfo::Location::CONSTANT);
        uint64_t low = constant_table_.GetRow(vreg.GetConstantLowIndex()).GetValue();
        uint64_t hi = constant_table_.GetRow(vreg.GetConstantHiIndex()).GetValue();
        return low | (hi << BITS_PER_UINT32);
    }

    static size_t GetCodeOffset(Arch arch)
    {
        return RoundUp(CodePrefix::STRUCT_SIZE, GetCodeAlignment(arch));
    }

    uint32_t GetSavedCalleeRegsMask(bool is_fp) const
    {
        return is_fp ? GetHeader().GetCalleeFpRegMask() : GetHeader().GetCalleeRegMask();
    }

    auto GetVRegMask(const StackMap &stack_map)
    {
        return stack_map.HasVRegMaskIndex() ? vreg_masks_.GetBitMemoryRegion(stack_map.GetVRegMaskIndex())
                                            : BitMemoryRegion<const uint8_t>();
    }

    auto GetVRegMask(const StackMap &stack_map) const
    {
        return const_cast<CodeInfo *>(this)->GetVRegMask(stack_map);
    }

    size_t GetVRegCount(const StackMap &stack_map) const
    {
        return GetVRegMask(stack_map).Popcount();
    }

    uint32_t GetRootsRegMask(const StackMap &stack_map) const
    {
        return stack_map.HasRootsRegMaskIndex() ? roots_reg_masks_.GetRow(stack_map.GetRootsRegMaskIndex()).GetMask()
                                                : 0;
    }

    auto GetRootsStackMask(const StackMap &stack_map) const
    {
        return stack_map.HasRootsStackMaskIndex()
                   ? roots_stack_masks_.GetBitMemoryRegion(stack_map.GetRootsStackMaskIndex())
                   : BitMemoryRegion<const uint8_t>();
    }

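    // Inline infos of a stack map are stored as a contiguous run of rows terminated by the
    // IsLast flag; the range is returned in reverse order (deepest inlined method first).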
    auto GetInlineInfos(const StackMap &stack_map)
    {
        if (!stack_map.HasInlineInfoIndex()) {
            return inline_infos_.GetRangeReversed(0, 0);
        }
        auto index = stack_map.GetInlineInfoIndex();
        uint32_t size = index;
        for (; inline_infos_.GetRow(size).GetIsLast() == 0; size++) {
        }

        return inline_infos_.GetRangeReversed(index, helpers::ToSigned(size) + 1);
    }

    auto GetInlineInfo(const StackMap &stack_map, int inline_depth) const
    {
        ASSERT(stack_map.HasInlineInfoIndex());
        CHECK_GE(GetInlineDepth(stack_map), inline_depth);
        return inline_infos_.GetRow(stack_map.GetInlineInfoIndex() + inline_depth);
    }

    int GetInlineDepth(const StackMap &stack_map) const
    {
        if (!stack_map.HasInlineInfoIndex()) {
            return -1;
        }
        int index = stack_map.GetInlineInfoIndex();
        int depth = index;
        for (; inline_infos_.GetRow(depth).GetIsLast() == 0; depth++) {
        }
        return depth - index;
    }

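    // Stack maps are ordered by native pc, so the lookup is a binary search. OSR stack maps
    // are looked up linearly by bytecode pc instead.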
    StackMap FindStackMapForNativePc(uint32_t pc, Arch arch = RUNTIME_ARCH) const
    {
        auto it =
            std::lower_bound(stack_maps_.begin(), stack_maps_.end(), pc, [arch](const auto &a, uintptr_t counter) {
                return a.GetNativePcUnpacked(arch) < counter;
            });
        return (it == stack_maps_.end() || it->GetNativePcUnpacked(arch) != pc) ? stack_maps_.GetInvalidRow() : *it;
    }

    StackMap FindOsrStackMap(uint32_t pc) const
    {
        auto it = std::find_if(stack_maps_.begin(), stack_maps_.end(),
                               [pc](const auto &a) { return a.GetBytecodePc() == pc && a.IsOsr(); });
        return it == stack_maps_.end() ? stack_maps_.GetInvalidRow() : *it;
    }

    auto GetStackMap(size_t index) const
    {
        return StackMap(&stack_maps_, index);
    }

    auto &GetStackMaps()
    {
        return stack_maps_;
    }

    auto &GetVRegCatalogue()
    {
        return vregs_catalogue_;
    }

    auto &GetVRegMapTable()
    {
        return vregs_map_;
    }

    auto &GetVRegMaskTable()
    {
        return vreg_masks_;
    }

    auto &GetInlineInfosTable()
    {
        return inline_infos_;
    }

    auto &GetConstantTable()
    {
        return constant_table_;
    }

    const auto &GetImplicitNullChecksTable() const
    {
        return implicit_nullchecks_;
    }

    bool HasFloatRegs() const
    {
        return GetHeader().HasFloatRegs();
    }

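    // Enumerates the member bit tables in serialization order; the index passed to `func`
    // is the bit position of the table in CodeInfoHeader's table mask.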
    template <typename Func>
    static void EnumerateTables(Func func)
    {
        size_t index = 0;
        func(index++, &CodeInfo::stack_maps_);
        func(index++, &CodeInfo::inline_infos_);
        func(index++, &CodeInfo::roots_reg_masks_);
        func(index++, &CodeInfo::roots_stack_masks_);
        func(index++, &CodeInfo::method_ids_);
        func(index++, &CodeInfo::vreg_masks_);
        func(index++, &CodeInfo::vregs_map_);
        func(index++, &CodeInfo::vregs_catalogue_);
        func(index++, &CodeInfo::implicit_nullchecks_);
        func(index++, &CodeInfo::constant_table_);
        ASSERT(index == TABLES_COUNT);
    }

    template <typename Callback>
    void EnumerateStaticRoots(const StackMap &stack_map, Callback callback)
    {
        return EnumerateRoots<Callback, false>(stack_map, callback);
    }

    template <typename Callback>
    void EnumerateDynamicRoots(const StackMap &stack_map, Callback callback)
    {
        return EnumerateRoots<Callback, true>(stack_map, callback);
    }

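    // Reconstructs the locations of virtual registers [first_vreg, first_vreg + vregs_count)
    // at the given stack map. A single stack map may describe only part of those vregs, so
    // the loop walks backwards over the preceding stack maps and keeps, for each vreg, the
    // most recent entry until every requested vreg has been seen.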
    template <typename Allocator>
    VRegList<Allocator> GetVRegList(StackMap stack_map, uint32_t first_vreg, uint32_t vregs_count,
                                    Allocator *allocator = nullptr) const
    {
        if (vregs_count == 0 || !stack_map.HasRegMap()) {
            return CodeInfo::VRegList<Allocator>(allocator);
        }
        VRegList<Allocator> vreg_list(allocator);
        vreg_list.resize(vregs_count, VRegInfo());
        ASSERT(!vreg_list[0].IsLive());
        std::vector<bool> reg_set(vregs_count);

        uint32_t remaining_registers = vregs_count;
        for (int sindex = stack_map.GetRow(); sindex >= 0 && remaining_registers > 0; sindex--) {
            stack_map = GetStackMap(sindex);
            if (!stack_map.HasVRegMaskIndex()) {
                continue;
            }
            // Skip stackmaps that are not in the same inline depth
            auto vreg_mask = GetVRegMask(stack_map);
            if (vreg_mask.Size() <= first_vreg) {
                continue;
            }
            ASSERT(stack_map.HasVRegMapIndex());
            uint32_t map_index = stack_map.GetVRegMapIndex();

            map_index += vreg_mask.Popcount(0, first_vreg);
            vreg_mask = vreg_mask.Subregion(first_vreg, vreg_mask.Size() - first_vreg);

            uint32_t end = std::min<uint32_t>(vreg_mask.Size(), vregs_count);
            for (size_t i = 0; i < end; i += BITS_PER_UINT32) {
                uint32_t mask = vreg_mask.Read(i, std::min<uint32_t>(end - i, BITS_PER_UINT32));
                while (mask != 0) {
                    uint32_t reg_idx = Ctz(mask);
                    if (!reg_set[i + reg_idx]) {
                        auto vreg_index = vregs_map_.GetRow(map_index);
                        if (vreg_index.GetIndex() != StackMap::NO_VALUE) {
                            ASSERT(!vreg_list[i + reg_idx].IsLive());
                            vreg_list[i + reg_idx] = vregs_catalogue_.GetRow(vreg_index.GetIndex()).GetVRegInfo();
                            vreg_list[i + reg_idx].SetIndex(i + reg_idx);
                        }
                        remaining_registers--;
                        reg_set[i + reg_idx] = true;
                    }
                    map_index++;
                    mask ^= 1U << reg_idx;
                }
            }
        }
        return vreg_list;
    }

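    // A negative inline_depth selects the whole frame; otherwise only the vreg slice that
    // belongs to the inlined method at that depth is returned, using the cumulative vreg
    // counts stored in the inline infos as slice boundaries.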
    template <typename Allocator>
    VRegList<Allocator> GetVRegList(StackMap stack_map, int inline_depth, Allocator *allocator = nullptr) const
    {
        if (inline_depth < 0) {
            return GetVRegList<Allocator>(stack_map, 0, GetHeader().GetVRegsCount(), allocator);
        }
        ASSERT(stack_map.HasInlineInfoIndex());
        auto inline_info = GetInlineInfo(stack_map, inline_depth);
        if (inline_info.GetVRegsCount() == 0) {
            return VRegList<Allocator>(allocator);
        }
        auto depth = inline_info.GetRow() - stack_map.GetInlineInfoIndex();
        uint32_t first =
            depth == 0 ? GetHeader().GetVRegsCount() : inline_infos_.GetRow(inline_info.GetRow() - 1).GetVRegsCount();
        ASSERT(inline_info.GetVRegsCount() >= first);
        return GetVRegList<Allocator>(stack_map, first, inline_info.GetVRegsCount() - first, allocator);
    }

    template <typename Allocator>
    VRegList<Allocator> GetVRegList(StackMap stack_map, Allocator *allocator = nullptr) const
    {
        return GetVRegList<Allocator>(stack_map, -1, allocator);
    }

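    // Checks that the word at the expected CodePrefix location (right before the code) holds MAGIC.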
    static bool VerifyCompiledEntry(uintptr_t compiled_entry)
    {
        auto codeheader = compiled_entry - GetCodeOffset(RUNTIME_ARCH);
        return (*reinterpret_cast<const uint32_t *>(codeheader) == CodePrefix::MAGIC);
    }

    void Dump(std::ostream &stream) const;

    void Dump(std::ostream &stream, const StackMap &stack_map, Arch arch = RUNTIME_ARCH) const;

    void DumpInlineInfo(std::ostream &stream, const StackMap &stack_map, int depth) const;

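    // Derives the number of spill slots from the frame size by subtracting the fixed CFrame
    // slots and the register save area, then undoing the padding added by
    // 'CFrameLayout::AlignSpillCount'.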
    size_t CountSpillSlots()
    {
        auto frame_slots = GetFrameSize() / PointerSize(RUNTIME_ARCH);
        auto spills_count = frame_slots - (static_cast<size_t>(CFrameSlots::Start()) + GetRegsCount(RUNTIME_ARCH) + 1U);
        // Reverse 'CFrameLayout::AlignSpillCount' counting
        if (RUNTIME_ARCH == Arch::AARCH32) {
            spills_count = spills_count / 2U - 1;
        }
        if (spills_count % 2U != 0) {
            spills_count--;
        }
        return spills_count;
    }

private:
    template <typename Callback, bool is_dynamic>
    void EnumerateRoots(const StackMap &stack_map, Callback callback);

    BitTable<StackMap> stack_maps_;
    BitTable<InlineInfo> inline_infos_;
    BitTable<RegisterMask> roots_reg_masks_;
    BitTable<StackMask> roots_stack_masks_;
    BitTable<MethodId> method_ids_;
    BitTable<VRegisterInfo> vregs_catalogue_;
    BitTable<VRegisterCatalogueIndex> vregs_map_;
    BitTable<VRegisterMask> vreg_masks_;
    BitTable<ImplicitNullChecks> implicit_nullchecks_;
    BitTable<ConstantTable> constant_table_;

    CodeInfoHeader header_ {};

    Span<const uint8_t> data_;
};

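// Reports every object root recorded for the stack map: first the roots held in registers
// (from the roots register mask), then the roots held in stack slots (from the roots stack
// mask), converting each stack-mask bit index into a CFrame slot index. Enumeration stops
// early as soon as the callback returns false.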
template <typename Callback, bool is_dynamic>
void CodeInfo::EnumerateRoots(const StackMap &stack_map, Callback callback)
{
    auto root_type = is_dynamic ? VRegInfo::Type::ANY : VRegInfo::Type::OBJECT;

    if (stack_map.HasRootsRegMaskIndex()) {
        auto reg_mask = roots_reg_masks_.GetRow(stack_map.GetRootsRegMaskIndex()).GetMask();
        ArenaBitVectorSpan vec(&reg_mask, BITS_PER_UINT32);
        for (auto reg_idx : vec.GetSetBitsIndices()) {
            if (!callback(VRegInfo(reg_idx, VRegInfo::Location::REGISTER, root_type, false))) {
                return;
            }
        }
    }
    // Simplify after renumbering stack slots
    if (stack_map.HasRootsStackMaskIndex()) {
        auto stack_slots_count = CountSpillSlots();
        auto reg_mask = roots_stack_masks_.GetBitMemoryRegion(stack_map.GetRootsStackMaskIndex());
        for (auto reg_idx : reg_mask) {
            if (reg_idx >= stack_slots_count) {
                // Parameter-slot indexes are added to the root mask with a `stack_slots_count` offset to
                // distinguish them from spill slots
                auto param_slot_idx = reg_idx - stack_slots_count;
                reg_idx = static_cast<size_t>(CFrameLayout::StackArgSlot::Start()) - param_slot_idx -
                          static_cast<size_t>(CFrameSlots::Start());
            } else {
                if constexpr (!ArchTraits<RUNTIME_ARCH>::IS_64_BITS) {  // NOLINT
                    reg_idx = (reg_idx << 1U) + 1;
                }
                // Stack roots start from the spill/fill stack origin, so adjust the index by the size of the
                // register save buffer
                reg_idx += GetRegsCount(RUNTIME_ARCH);
            }
            VRegInfo vreg(reg_idx, VRegInfo::Location::SLOT, root_type, false);
            if (!callback(vreg)) {
                return;
            }
        }
    }
}

}  // namespace panda::compiler

#endif  // PANDA_CODE_INFO_H