/*
 * Copyright (c) 2021-2024 Huawei Device Co., Ltd.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef PANDA_CODE_INFO_H
#define PANDA_CODE_INFO_H

#include "code_info_tables.h"
#include "utils/bit_field.h"
#include "utils/bit_table.h"
#include "utils/cframe_layout.h"
#include "utils/small_vector.h"
#include "utils/span.h"

namespace ark::compiler {

/*
 * Compiled code layout:
 * +-------------+
 * | CodePrefix  |
 * +-------------+ <- Method::CompiledCodeEntrypoint
 * | Code        |
 * +-------------+-----------------+
 * | CodeInfo    | CodeInfoHeader  |
 * |             |-----------------+----------------------+
 * |             |                 |  StackMap            |
 * |             |                 |  InlineInfo          |
 * |             |                 |  Roots Reg Mask      |
 * |             |                 |  Roots Stack Mask    |
 * |             |   Bit Tables    |  Method indexes      |
 * |             |                 |  VRegs mask          |
 * |             |                 |  VRegs map           |
 * |             |                 |  VRegs catalogue     |
 * |             |                 |  Implicit Nullchecks |
 * |             |                 |  Constants           |
 * +-------------+-----------------+----------------------+
 */

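// CodePrefix heads every compiled code blob (see the layout above): a magic
// value for sanity checking, the machine code size, and the offset and size
// of the CodeInfo that follows the code.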
struct CodePrefix {
    static constexpr uint32_t MAGIC = 0xaccadeca;
    uint32_t magic {MAGIC};
    uint32_t codeSize {};
    uint32_t codeInfoOffset {};
    uint32_t codeInfoSize {};

    static constexpr size_t STRUCT_SIZE = 16;
};

static_assert(sizeof(CodePrefix) == CodePrefix::STRUCT_SIZE);

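// Header of the serialized CodeInfo. Its fields are written as a varint pack:
// packed frame properties (frame size plus flags), the saved callee-saved
// register masks, a bit mask telling which of the bit tables were emitted,
// and the method's virtual register count.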
class CodeInfoHeader {
public:
    enum Elements { PROPERTIES, CALLEE_REG_MASK, CALLEE_FP_REG_MASK, TABLE_MASK, VREGS_COUNT, SIZE };

    void SetFrameSize(uint32_t size)
    {
        ASSERT(MinimumBitsToStore(size) <= FRAME_SIZE_FIELD_WIDTH);
        FieldFrameSize::Set(size, &data_[PROPERTIES]);
    }
    uint32_t GetFrameSize() const
    {
        return FieldFrameSize::Get(data_[PROPERTIES]);
    }

    void SetCalleeRegMask(uint32_t value)
    {
        data_[CALLEE_REG_MASK] = value;
    }
    uint32_t GetCalleeRegMask() const
    {
        return data_[CALLEE_REG_MASK];
    }

    void SetCalleeFpRegMask(uint32_t value)
    {
        data_[CALLEE_FP_REG_MASK] = value;
    }
    uint32_t GetCalleeFpRegMask() const
    {
        return data_[CALLEE_FP_REG_MASK];
    }

    void SetTableMask(uint32_t value)
    {
        data_[TABLE_MASK] = value;
    }
    uint32_t GetTableMask() const
    {
        return data_[TABLE_MASK];
    }

    void SetVRegsCount(uint32_t value)
    {
        data_[VREGS_COUNT] = value;
    }
    uint32_t GetVRegsCount() const
    {
        return data_[VREGS_COUNT];
    }

    void SetHasFloatRegs(bool value)
    {
        HasFloatRegsFlag::Set(value, &data_[PROPERTIES]);
    }
    bool HasFloatRegs() const
    {
        return HasFloatRegsFlag::Get(data_[PROPERTIES]);
    }

    template <typename Container>
    void Encode(BitMemoryStreamOut<Container> &out)
    {
        VarintPack::Write(out, data_);
    }
    void Decode(BitMemoryStreamIn *in)
    {
        data_ = VarintPack::Read<SIZE>(in);
    }

private:
    std::array<uint32_t, SIZE> data_;

    static constexpr size_t FRAME_SIZE_FIELD_WIDTH = 16;
    static constexpr size_t LANG_EXT_OFFSET_FIELD_WIDTH = 13;
    using FieldFrameSize = BitField<uint32_t, 0, FRAME_SIZE_FIELD_WIDTH>;
    using HasFloatRegsFlag = FieldFrameSize::NextFlag;
};

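// Read-side accessor for the metadata emitted next to compiled code: stack
// maps, inline infos, GC root masks, virtual register locations, implicit
// null checks and constants. Only tables whose bit is set in the header's
// table mask are present in the encoding and decoded.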
class CodeInfo final {
public:
    static constexpr size_t TABLES_COUNT = 10;
    static constexpr size_t VREG_LIST_STATIC_SIZE = 16;
    static constexpr size_t ALIGNMENT = sizeof(uint64_t);
    static constexpr size_t SIZE_ALIGNMENT = sizeof(uint64_t);

    template <typename Allocator>
    using VRegList = SmallVector<VRegInfo, VREG_LIST_STATIC_SIZE, Allocator, true>;

    NO_COPY_SEMANTIC(CodeInfo);
    NO_MOVE_SEMANTIC(CodeInfo);

    CodeInfo() = default;

    CodeInfo(const void *data, size_t size)
        : CodeInfo(Span<const uint8_t>(reinterpret_cast<const uint8_t *>(data), size))
    {
    }

    explicit CodeInfo(Span<const uint8_t> code) : CodeInfo(code.data())
    {
        ASSERT(GetDataSize() <= code.size());
    }

    explicit CodeInfo(Span<uint8_t> code) : CodeInfo(code.data())
    {
        ASSERT(GetDataSize() <= code.size());
    }

    explicit CodeInfo(const void *codeEntry)
    {
        ASSERT(codeEntry != nullptr);
        auto prefix = reinterpret_cast<const CodePrefix *>(codeEntry);
        ASSERT(prefix->magic == CodePrefix::MAGIC);
        data_ = Span(reinterpret_cast<const uint8_t *>(codeEntry), prefix->codeInfoOffset + prefix->codeInfoSize);
        auto codeInfo = Span<const uint8_t>(&data_[prefix->codeInfoOffset], prefix->codeInfoSize);
        Decode(codeInfo);
    }

    virtual ~CodeInfo() = default;

    static const void *GetCodeOriginFromEntryPoint(const void *data)
    {
        return reinterpret_cast<const void *>(reinterpret_cast<uintptr_t>(data) -
                                              CodeInfo::GetCodeOffset(RUNTIME_ARCH));
    }

    static CodeInfo CreateFromCodeEntryPoint(const void *data)
    {
        ASSERT(data != nullptr);
        // NOLINTNEXTLINE(cppcoreguidelines-pro-bounds-pointer-arithmetic)
        return CodeInfo(reinterpret_cast<const uint8_t *>(data) - CodeInfo::GetCodeOffset(RUNTIME_ARCH));
    }

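    // Decodes the header, then each table whose bit is set in the table mask,
    // in the fixed order established by EnumerateTables().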
    void Decode(Span<const uint8_t> codeInfo)
    {
        BitMemoryStreamIn stream(const_cast<uint8_t *>(codeInfo.data()), codeInfo.size() * BITS_PER_BYTE);
        header_.Decode(&stream);
        EnumerateTables([this, &stream](size_t index, auto member) {
            if (HasTable(index)) {
                (this->*member).Decode(&stream);
            }
        });
    }

    const CodeInfoHeader &GetHeader() const
    {
        return header_;
    }
    CodeInfoHeader &GetHeader()
    {
        return header_;
    }

    const CodePrefix *GetPrefix() const
    {
        return reinterpret_cast<const CodePrefix *>(data_.data());
    }

    uint32_t GetFrameSize() const
    {
        return GetHeader().GetFrameSize();
    }

    const uint8_t *GetData()
    {
        return data_.data();
    }

    size_t GetDataSize()
    {
        return data_.size();
    }

    const uint8_t *GetCode() const
    {
        return &data_[CodeInfo::GetCodeOffset(RUNTIME_ARCH)];
    }

    size_t GetCodeSize() const
    {
        return GetPrefix()->codeSize;
    }

    Span<const uint8_t> GetCodeSpan() const
    {
        return {&data_[CodeInfo::GetCodeOffset(RUNTIME_ARCH)], GetCodeSize()};
    }

    size_t GetInfoSize() const
    {
        return GetPrefix()->codeInfoSize;
    }

    bool HasTable(size_t index) const
    {
        return (GetHeader().GetTableMask() & (1U << index)) != 0;
    }

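    // Resolves the method inlined at the given depth: either a direct method
    // pointer (encoded as low/high 32-bit halves on 64-bit targets) or, when
    // no pointer was encoded, its id from the method-ids table.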
    std::variant<void *, uint32_t> GetMethod(const StackMap &stackmap, int inlineDepth)
    {
        ASSERT(inlineDepth >= 0);
        auto inlineInfo = inlineInfos_.GetRow(stackmap.GetInlineInfoIndex() + inlineDepth);
        if (inlineInfo.HasMethodLow()) {
            if constexpr (ArchTraits<RUNTIME_ARCH>::IS_64_BITS) {
                uintptr_t val =
                    inlineInfo.GetMethodLow() | (static_cast<uint64_t>(inlineInfo.GetMethodHi()) << BITS_PER_UINT32);
                return reinterpret_cast<void *>(val);
            } else {
                return reinterpret_cast<void *>(inlineInfo.GetMethodLow());
            }
        }
        return methodIds_.GetRow(inlineInfo.GetMethodIdIndex()).GetId();
    }

    uint64_t GetConstant(const VRegInfo &vreg) const
    {
        ASSERT(vreg.GetLocation() == VRegInfo::Location::CONSTANT);
        uint64_t low = constantTable_.GetRow(vreg.GetConstantLowIndex()).GetValue();
        uint64_t hi = constantTable_.GetRow(vreg.GetConstantHiIndex()).GetValue();
        return low | (hi << BITS_PER_UINT32);
    }

    static size_t GetCodeOffset(Arch arch)
    {
        return RoundUp(CodePrefix::STRUCT_SIZE, GetCodeAlignment(arch));
    }

    uint32_t GetSavedCalleeRegsMask(bool isFp) const
    {
        return isFp ? GetHeader().GetCalleeFpRegMask() : GetHeader().GetCalleeRegMask();
    }

    auto GetVRegMask(const StackMap &stackMap)
    {
        return stackMap.HasVRegMaskIndex() ? vregMasks_.GetBitMemoryRegion(stackMap.GetVRegMaskIndex())
                                           : BitMemoryRegion<const uint8_t>();
    }

    auto GetVRegMask(const StackMap &stackMap) const
    {
        return const_cast<CodeInfo *>(this)->GetVRegMask(stackMap);
    }

    size_t GetVRegCount(const StackMap &stackMap) const
    {
        return GetVRegMask(stackMap).Popcount();
    }

    uint32_t GetRootsRegMask(const StackMap &stackMap) const
    {
        return stackMap.HasRootsRegMaskIndex() ? rootsRegMasks_.GetRow(stackMap.GetRootsRegMaskIndex()).GetMask() : 0;
    }

    auto GetRootsStackMask(const StackMap &stackMap) const
    {
        return stackMap.HasRootsStackMaskIndex()
                   ? rootsStackMasks_.GetBitMemoryRegion(stackMap.GetRootsStackMaskIndex())
                   : BitMemoryRegion<const uint8_t>();
    }

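    // Returns the stack map's chain of inline infos in reversed order (deepest
    // inlined frame first). The chain ends at the row whose IsLast flag is set.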
    auto GetInlineInfos(const StackMap &stackMap)
    {
        if (!stackMap.HasInlineInfoIndex()) {
            return inlineInfos_.GetRangeReversed(0, 0);
        }
        auto index = stackMap.GetInlineInfoIndex();
        uint32_t size = index;
        for (; inlineInfos_.GetRow(size).GetIsLast() == 0; size++) {
        }

        return inlineInfos_.GetRangeReversed(index, helpers::ToSigned(size) + 1);
    }

    auto GetInlineInfo(const StackMap &stackMap, int inlineDepth) const
    {
        ASSERT(stackMap.HasInlineInfoIndex());
        CHECK_GE(GetInlineDepth(stackMap), inlineDepth);
        return inlineInfos_.GetRow(stackMap.GetInlineInfoIndex() + inlineDepth);
    }

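    // Depth of the deepest inlined frame at this stack map (0 means a single
    // inlined frame), or -1 when the stack map has no inline info.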
    int GetInlineDepth(const StackMap &stackMap) const
    {
        if (!stackMap.HasInlineInfoIndex()) {
            return -1;
        }
        int index = stackMap.GetInlineInfoIndex();
        int depth = index;
        for (; inlineInfos_.GetRow(depth).GetIsLast() == 0; depth++) {
        }
        return depth - index;
    }

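    // Binary search over the stack maps, which are ordered by native PC.
    // Returns an invalid row unless a stack map matches `pc` exactly.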
    StackMap FindStackMapForNativePc(uint32_t pc, Arch arch = RUNTIME_ARCH) const
    {
        auto it = std::lower_bound(stackMaps_.begin(), stackMaps_.end(), pc, [arch](const auto &a, uintptr_t counter) {
            return a.GetNativePcUnpacked(arch) < counter;
        });
        return (it == stackMaps_.end() || it->GetNativePcUnpacked(arch) != pc) ? stackMaps_.GetInvalidRow() : *it;
    }

    StackMap FindOsrStackMap(uint32_t pc) const
    {
        auto it = std::find_if(stackMaps_.begin(), stackMaps_.end(),
                               [pc](const auto &a) { return a.GetBytecodePc() == pc && a.IsOsr(); });
        return it == stackMaps_.end() ? stackMaps_.GetInvalidRow() : *it;
    }

    auto GetStackMap(size_t index) const
    {
        return StackMap(&stackMaps_, index);
    }

    auto &GetStackMaps()
    {
        return stackMaps_;
    }

    auto &GetVRegCatalogue()
    {
        return vregsCatalogue_;
    }

    auto &GetVRegMapTable()
    {
        return vregsMap_;
    }

    auto &GetVRegMaskTable()
    {
        return vregMasks_;
    }

    auto &GetInlineInfosTable()
    {
        return inlineInfos_;
    }

    auto &GetConstantTable()
    {
        return constantTable_;
    }

    const auto &GetImplicitNullChecksTable() const
    {
        return implicitNullchecks_;
    }

    bool HasFloatRegs() const
    {
        return GetHeader().HasFloatRegs();
    }

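    // Visits every bit-table member in the fixed encoding order; Decode()
    // relies on this order matching the one used at encoding time.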
    template <typename Func>
    static void EnumerateTables(Func func)
    {
        size_t index = 0;
        func(index++, &CodeInfo::stackMaps_);
        func(index++, &CodeInfo::inlineInfos_);
        func(index++, &CodeInfo::rootsRegMasks_);
        func(index++, &CodeInfo::rootsStackMasks_);
        func(index++, &CodeInfo::methodIds_);
        func(index++, &CodeInfo::vregMasks_);
        func(index++, &CodeInfo::vregsMap_);
        func(index++, &CodeInfo::vregsCatalogue_);
        func(index++, &CodeInfo::implicitNullchecks_);
        func(index++, &CodeInfo::constantTable_);
        ASSERT(index == TABLES_COUNT);
    }

    template <typename Callback>
    void EnumerateStaticRoots(const StackMap &stackMap, Callback callback)
    {
        return EnumerateRoots<Callback, false>(stackMap, callback);
    }

    template <typename Callback>
    void EnumerateDynamicRoots(const StackMap &stackMap, Callback callback)
    {
        return EnumerateRoots<Callback, true>(stackMap, callback);
    }

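    // Collects the locations of `vregsCount` virtual registers starting at
    // `firstVreg`. Stack maps are walked backwards from the given one; each
    // map's vreg mask marks the registers it updated, and the most recent
    // update of a register wins. The walk stops early once every requested
    // register has been seen.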
    template <typename Allocator>
    VRegList<Allocator> GetVRegList(StackMap stackMap, uint32_t firstVreg, uint32_t vregsCount,
                                    Allocator *allocator = nullptr) const
    {
        if (vregsCount == 0 || !stackMap.HasRegMap()) {
            return CodeInfo::VRegList<Allocator>(allocator);
        }
        VRegList<Allocator> vregList(allocator);
        vregList.resize(vregsCount, VRegInfo());
        ASSERT(!vregList[0].IsLive());
        std::vector<bool> regSet(vregsCount);

        uint32_t remainingRegisters = vregsCount;
        for (int sindex = static_cast<int>(stackMap.GetRow()); sindex >= 0 && remainingRegisters > 0; sindex--) {
            stackMap = GetStackMap(sindex);
            if (!stackMap.HasVRegMaskIndex()) {
                continue;
            }
            // Skip stack maps that are not at the same inline depth
            auto vregMask = GetVRegMask(stackMap);
            if (vregMask.Size() <= firstVreg) {
                continue;
            }
            ASSERT(stackMap.HasVRegMapIndex());
            uint32_t mapIndex = stackMap.GetVRegMapIndex();

            mapIndex += vregMask.Popcount(0, firstVreg);
            vregMask = vregMask.Subregion(firstVreg, vregMask.Size() - firstVreg);

            uint32_t end = std::min<uint32_t>(vregMask.Size(), vregsCount);
            for (size_t i = 0; i < end; i += BITS_PER_UINT32) {
                uint32_t mask = vregMask.Read(i, std::min<uint32_t>(end - i, BITS_PER_UINT32));
                while (mask != 0) {
                    auto regIdx = static_cast<size_t>(Ctz(mask));
                    if (!regSet[i + regIdx]) {
                        auto vregIndex = vregsMap_.GetRow(mapIndex);
                        if (vregIndex.GetIndex() != StackMap::NO_VALUE) {
                            ASSERT(!vregList[i + regIdx].IsLive());
                            vregList[i + regIdx] = vregsCatalogue_.GetRow(vregIndex.GetIndex()).GetVRegInfo();
                            vregList[i + regIdx].SetIndex(i + regIdx);
                        }
                        remainingRegisters--;
                        regSet[i + regIdx] = true;
                    }
                    mapIndex++;
                    mask ^= 1U << regIdx;
                }
            }
        }
        return vregList;
    }

    template <typename Allocator>
    VRegList<Allocator> GetVRegList(StackMap stackMap, int inlineDepth, Allocator *allocator = nullptr) const
    {
        if (inlineDepth < 0) {
            return GetVRegList<Allocator>(stackMap, 0, GetHeader().GetVRegsCount(), allocator);
        }
        ASSERT(stackMap.HasInlineInfoIndex());
        auto inlineInfo = GetInlineInfo(stackMap, inlineDepth);
        if (inlineInfo.GetVRegsCount() == 0) {
            return VRegList<Allocator>(allocator);
        }
        auto depth = inlineInfo.GetRow() - stackMap.GetInlineInfoIndex();
        uint32_t first =
            depth == 0 ? GetHeader().GetVRegsCount() : inlineInfos_.GetRow(inlineInfo.GetRow() - 1).GetVRegsCount();
        ASSERT(inlineInfo.GetVRegsCount() >= first);
        return GetVRegList<Allocator>(stackMap, first, inlineInfo.GetVRegsCount() - first, allocator);
    }

    template <typename Allocator>
    VRegList<Allocator> GetVRegList(StackMap stackMap, Allocator *allocator = nullptr) const
    {
        return GetVRegList<Allocator>(stackMap, -1, allocator);
    }

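    // Checks that the CodePrefix in front of the entrypoint starts with the
    // expected magic value.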
    static bool VerifyCompiledEntry(uintptr_t compiledEntry)
    {
        auto codeheader = compiledEntry - GetCodeOffset(RUNTIME_ARCH);
        return (*reinterpret_cast<const uint32_t *>(codeheader) == CodePrefix::MAGIC);
    }

    void Dump(std::ostream &stream) const;

    void Dump(std::ostream &stream, const StackMap &stackMap, Arch arch = RUNTIME_ARCH) const;

    void DumpInlineInfo(std::ostream &stream, const StackMap &stackMap, int depth) const;

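    // Recovers the number of spill slots from the frame size by subtracting
    // the fixed CFrame slots and the register buffer, then undoing the
    // alignment applied by 'CFrameLayout::AlignSpillCount'.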
    size_t CountSpillSlots()
    {
        size_t frameSlots = GetFrameSize() / PointerSize(RUNTIME_ARCH);
        static_assert(CFrameSlots::Start() >= 0);
        size_t spillsCount = frameSlots - (static_cast<size_t>(CFrameSlots::Start()) + GetRegsCount(RUNTIME_ARCH) + 1U);
        // Reverse 'CFrameLayout::AlignSpillCount' counting
        if (RUNTIME_ARCH == Arch::AARCH32) {
            spillsCount = spillsCount / 2U - 1;
        }
        if (spillsCount % 2U != 0) {
            spillsCount--;
        }
        return spillsCount;
    }

private:
    template <typename Callback, bool IS_DYNAMIC>
    void EnumerateRoots(const StackMap &stackMap, Callback callback);

    BitTable<StackMap> stackMaps_;
    BitTable<InlineInfo> inlineInfos_;
    BitTable<RegisterMask> rootsRegMasks_;
    BitTable<StackMask> rootsStackMasks_;
    BitTable<MethodId> methodIds_;
    BitTable<VRegisterInfo> vregsCatalogue_;
    BitTable<VRegisterCatalogueIndex> vregsMap_;
    BitTable<VRegisterMask> vregMasks_;
    BitTable<ImplicitNullChecks> implicitNullchecks_;
    BitTable<ConstantTable> constantTable_;

    CodeInfoHeader header_ {};

    Span<const uint8_t> data_;
};

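// Reports the GC roots recorded for a stack map: first object-holding
// registers from the roots register mask, then stack slots from the roots
// stack mask. Slot indexes at or above the spill-slot count denote stack
// arguments and are remapped to CFrame stack-argument slots. Enumeration
// stops when the callback returns false.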
template <typename Callback, bool IS_DYNAMIC>
void CodeInfo::EnumerateRoots(const StackMap &stackMap, Callback callback)
{
    auto rootType = IS_DYNAMIC ? VRegInfo::Type::ANY : VRegInfo::Type::OBJECT;

    if (stackMap.HasRootsRegMaskIndex()) {
        auto regMask = rootsRegMasks_.GetRow(stackMap.GetRootsRegMaskIndex()).GetMask();
        ArenaBitVectorSpan vec(&regMask, BITS_PER_UINT32);
        for (auto regIdx : vec.GetSetBitsIndices()) {
            if (!callback(VRegInfo(regIdx, VRegInfo::Location::REGISTER, rootType, VRegInfo::VRegType::VREG))) {
                return;
            }
        }
    }
    if (!stackMap.HasRootsStackMaskIndex()) {
        return;
    }
    // Simplify after renumbering stack slots
    auto stackSlotsCount = CountSpillSlots();
    auto regMask = rootsStackMasks_.GetBitMemoryRegion(stackMap.GetRootsStackMaskIndex());
    for (auto regIdx : regMask) {
        if (regIdx >= stackSlotsCount) {
            // Parameter-slot indexes are stored in the root mask with a `stackSlotsCount` offset to distinguish
            // them from spill slots
            auto paramSlotIdx = regIdx - stackSlotsCount;
            regIdx = static_cast<size_t>(CFrameLayout::StackArgSlot::Start()) - paramSlotIdx -
                     static_cast<size_t>(CFrameSlots::Start());
        } else {
            if constexpr (!ArchTraits<RUNTIME_ARCH>::IS_64_BITS) {  // NOLINT
                regIdx = (regIdx << 1U) + 1;
            }
            // Stack roots start at the spill/fill stack origin, so adjust the index by the size of the
            // register buffer
            regIdx += GetRegsCount(RUNTIME_ARCH);
        }
        VRegInfo vreg(regIdx, VRegInfo::Location::SLOT, rootType, VRegInfo::VRegType::VREG);
        if (!callback(vreg)) {
            return;
        }
    }
}

}  // namespace ark::compiler

#endif  // PANDA_CODE_INFO_H