/*
 * Copyright (c) 2021-2024 Huawei Device Co., Ltd.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef PANDA_CODE_INFO_H
#define PANDA_CODE_INFO_H

#include "code_info_tables.h"
#include "utils/bit_field.h"
#include "utils/bit_table.h"
#include "utils/cframe_layout.h"
#include "utils/small_vector.h"
#include "utils/span.h"

namespace ark::compiler {

/*
 *
 * Compiled code layout:
 * +-------------+
 * | CodePrefix  |
 * +-------------+ <- Method::CompiledCodeEntrypoint
 * | Code        |
 * +-------------+-----------------+
 * |  CodeInfo   | CodeInfoHeader  |
 * |             |-----------------+----------------------+
 * |             |                 |  StackMap            |
 * |             |                 |  InlineInfo          |
 * |             |                 |  Roots Reg Mask      |
 * |             |                 |  Roots Stack Mask    |
 * |             |   Bit Tables    |  Method indexes      |
 * |             |                 |  VRegs mask          |
 * |             |                 |  VRegs map           |
 * |             |                 |  VRegs catalogue     |
 * |             |                 |  Implicit Nullchecks |
 * |             |                 |  Constants           |
 * |-------------+-----------------+----------------------+
 */

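/*
 * Prefix placed before the compiled code. It stores a magic value (used to verify that a pointer
 * actually refers to compiled code), the machine code size, and the offset and size of the
 * serialized CodeInfo that follows the code.
 */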
struct CodePrefix {
    static constexpr uint32_t MAGIC = 0xaccadeca;
    uint32_t magic {MAGIC};
    uint32_t codeSize {};
    uint32_t codeInfoOffset {};
    uint32_t codeInfoSize {};

    static constexpr size_t STRUCT_SIZE = 16;
};

static_assert(sizeof(CodePrefix) == CodePrefix::STRUCT_SIZE);

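/*
 * CodeInfoHeader is a varint-packed array of scalar properties: frame size and flags, callee-saved
 * register masks (integer and floating-point), a bit mask of the tables that are actually encoded,
 * and the number of virtual registers in the method.
 */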
class CodeInfoHeader {
public:
    enum Elements { PROPERTIES, CALLEE_REG_MASK, CALLEE_FP_REG_MASK, TABLE_MASK, VREGS_COUNT, SIZE };

    void SetFrameSize(uint32_t size)
    {
        ASSERT(MinimumBitsToStore(size) <= FRAME_SIZE_FIELD_WIDTH);
        FieldFrameSize::Set(size, &data_[PROPERTIES]);
    }
    uint32_t GetFrameSize() const
    {
        return FieldFrameSize::Get(data_[PROPERTIES]);
    }

    void SetCalleeRegMask(uint32_t value)
    {
        data_[CALLEE_REG_MASK] = value;
    }
    uint32_t GetCalleeRegMask() const
    {
        return data_[CALLEE_REG_MASK];
    }

    void SetCalleeFpRegMask(uint32_t value)
    {
        data_[CALLEE_FP_REG_MASK] = value;
    }
    uint32_t GetCalleeFpRegMask() const
    {
        return data_[CALLEE_FP_REG_MASK];
    }

    void SetTableMask(uint32_t value)
    {
        data_[TABLE_MASK] = value;
    }
    uint32_t GetTableMask() const
    {
        return data_[TABLE_MASK];
    }

    void SetVRegsCount(uint32_t value)
    {
        data_[VREGS_COUNT] = value;
    }
    uint32_t GetVRegsCount() const
    {
        return data_[VREGS_COUNT];
    }

    void SetHasFloatRegs(bool value)
    {
        HasFloatRegsFlag::Set(value, &data_[PROPERTIES]);
    }
    bool HasFloatRegs() const
    {
        return HasFloatRegsFlag::Get(data_[PROPERTIES]);
    }

    template <typename Container>
    void Encode(BitMemoryStreamOut<Container> &out)
    {
        VarintPack::Write(out, data_);
    }
    void Decode(BitMemoryStreamIn *in)
    {
        data_ = VarintPack::Read<SIZE>(in);
    }

private:
    std::array<uint32_t, SIZE> data_;

    static constexpr size_t FRAME_SIZE_FIELD_WIDTH = 16;
    static constexpr size_t LANG_EXT_OFFSET_FIELD_WIDTH = 13;
    using FieldFrameSize = BitField<uint32_t, 0, FRAME_SIZE_FIELD_WIDTH>;
    using HasFloatRegsFlag = FieldFrameSize::NextFlag;
};

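/*
 * CodeInfo provides read access to the metadata emitted alongside compiled code: it locates the
 * CodePrefix, decodes the CodeInfoHeader and the bit tables listed in the table mask, and exposes
 * the queries used for stack walking, deoptimization and GC root enumeration.
 */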
class CodeInfo final {
public:
    static constexpr size_t TABLES_COUNT = 10;
    static constexpr size_t VREG_LIST_STATIC_SIZE = 16;
    static constexpr size_t ALIGNMENT = sizeof(uint64_t);
    static constexpr size_t SIZE_ALIGNMENT = sizeof(uint64_t);

    template <typename Allocator>
    using VRegList = SmallVector<VRegInfo, VREG_LIST_STATIC_SIZE, Allocator, true>;
    using VRegNumberPair = std::pair<uint32_t *, uint32_t *>;
    using RegionType = BitMemoryRegion<const uint8_t>;

    NO_COPY_SEMANTIC(CodeInfo);
    NO_MOVE_SEMANTIC(CodeInfo);

    CodeInfo() = default;

    CodeInfo(const void *data, size_t size)
        : CodeInfo(Span<const uint8_t>(reinterpret_cast<const uint8_t *>(data), size))
    {
    }

    explicit CodeInfo(Span<const uint8_t> code) : CodeInfo(code.data())
    {
        ASSERT(GetDataSize() <= code.size());
    }

    explicit CodeInfo(Span<uint8_t> code) : CodeInfo(code.data())
    {
        ASSERT(GetDataSize() <= code.size());
    }

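    // Constructs CodeInfo from a pointer to the beginning of a compiled code blob: validates the
    // CodePrefix magic, then decodes the CodeInfo region located at codeInfoOffset.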
    explicit CodeInfo(const void *codeEntry)
    {
        ASSERT(codeEntry != nullptr);
        auto prefix = reinterpret_cast<const CodePrefix *>(codeEntry);
        ASSERT(prefix->magic == CodePrefix::MAGIC);
        data_ = Span(reinterpret_cast<const uint8_t *>(codeEntry), prefix->codeInfoOffset + prefix->codeInfoSize);
        auto codeInfo = Span<const uint8_t>(&data_[prefix->codeInfoOffset], prefix->codeInfoSize);
        Decode(codeInfo);
    }

    virtual ~CodeInfo() = default;

    static const void *GetCodeOriginFromEntryPoint(const void *data)
    {
        return reinterpret_cast<const void *>(reinterpret_cast<uintptr_t>(data) -
                                              CodeInfo::GetCodeOffset(RUNTIME_ARCH));
    }

    static CodeInfo CreateFromCodeEntryPoint(const void *data)
    {
        ASSERT(data != nullptr);
        // NOLINTNEXTLINE(cppcoreguidelines-pro-bounds-pointer-arithmetic)
        return CodeInfo(reinterpret_cast<const uint8_t *>(data) - CodeInfo::GetCodeOffset(RUNTIME_ARCH));
    }

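    // Decodes the header, then every bit table whose bit is set in the table mask, in the fixed
    // order defined by EnumerateTables().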
    void Decode(Span<const uint8_t> codeInfo)
    {
        BitMemoryStreamIn stream(const_cast<uint8_t *>(codeInfo.data()), codeInfo.size() * BITS_PER_BYTE);
        header_.Decode(&stream);
        EnumerateTables([this, &stream](size_t index, auto member) {
            if (HasTable(index)) {
                (this->*member).Decode(&stream);
            }
        });
    }

    const CodeInfoHeader &GetHeader() const
    {
        return header_;
    }
    CodeInfoHeader &GetHeader()
    {
        return header_;
    }

    const CodePrefix *GetPrefix() const
    {
        return reinterpret_cast<const CodePrefix *>(data_.data());
    }

    uint32_t GetFrameSize() const
    {
        return GetHeader().GetFrameSize();
    }

    const uint8_t *GetData()
    {
        return data_.data();
    }

    size_t GetDataSize()
    {
        return data_.size();
    }

    const uint8_t *GetCode() const
    {
        return &data_[CodeInfo::GetCodeOffset(RUNTIME_ARCH)];
    }

    size_t GetCodeSize() const
    {
        return GetPrefix()->codeSize;
    }

    Span<const uint8_t> GetCodeSpan() const
    {
        return {&data_[CodeInfo::GetCodeOffset(RUNTIME_ARCH)], GetCodeSize()};
    }

    size_t GetInfoSize() const
    {
        return GetPrefix()->codeInfoSize;
    }

    bool HasTable(size_t index) const
    {
        return (GetHeader().GetTableMask() & (1U << index)) != 0;
    }

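    // Resolves the method at the given inline depth: returns a Method pointer when it was encoded
    // directly (split into low/high 32-bit halves on 64-bit targets), otherwise the method id taken
    // from the method-id table.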
    std::variant<void *, uint32_t> GetMethod(const StackMap &stackmap, int inlineDepth)
    {
        ASSERT(inlineDepth >= 0);
        auto inlineInfo = inlineInfos_.GetRow(stackmap.GetInlineInfoIndex() + inlineDepth);
        if (inlineInfo.HasMethodLow()) {
            if constexpr (ArchTraits<RUNTIME_ARCH>::IS_64_BITS) {
                uintptr_t val =
                    inlineInfo.GetMethodLow() | (static_cast<uint64_t>(inlineInfo.GetMethodHi()) << BITS_PER_UINT32);
                return reinterpret_cast<void *>(val);
            } else {
                return reinterpret_cast<void *>(inlineInfo.GetMethodLow());
            }
        }
        return methodIds_.GetRow(inlineInfo.GetMethodIdIndex()).GetId();
    }

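    // Reassembles a 64-bit constant from its two 32-bit halves stored in the constant table.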
    uint64_t GetConstant(const VRegInfo &vreg) const
    {
        ASSERT(vreg.GetLocation() == VRegInfo::Location::CONSTANT);
        uint64_t low = constantTable_.GetRow(vreg.GetConstantLowIndex()).GetValue();
        uint64_t hi = constantTable_.GetRow(vreg.GetConstantHiIndex()).GetValue();
        return low | (hi << BITS_PER_UINT32);
    }

    static size_t GetCodeOffset(Arch arch)
    {
        return RoundUp(CodePrefix::STRUCT_SIZE, GetCodeAlignment(arch));
    }

    uint32_t GetSavedCalleeRegsMask(bool isFp) const
    {
        return isFp ? GetHeader().GetCalleeFpRegMask() : GetHeader().GetCalleeRegMask();
    }

    auto GetVRegMask(const StackMap &stackMap)
    {
        return stackMap.HasVRegMaskIndex() ? vregMasks_.GetBitMemoryRegion(stackMap.GetVRegMaskIndex())
                                           : BitMemoryRegion<const uint8_t>();
    }

    auto GetVRegMask(const StackMap &stackMap) const
    {
        return const_cast<CodeInfo *>(this)->GetVRegMask(stackMap);
    }

    size_t GetVRegCount(const StackMap &stackMap) const
    {
        return GetVRegMask(stackMap).Popcount();
    }

    uint32_t GetRootsRegMask(const StackMap &stackMap) const
    {
        return stackMap.HasRootsRegMaskIndex() ? rootsRegMasks_.GetRow(stackMap.GetRootsRegMaskIndex()).GetMask() : 0;
    }

    auto GetRootsStackMask(const StackMap &stackMap) const
    {
        return stackMap.HasRootsStackMaskIndex()
                   ? rootsStackMasks_.GetBitMemoryRegion(stackMap.GetRootsStackMaskIndex())
                   : BitMemoryRegion<const uint8_t>();
    }

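    // Returns the chain of inline infos belonging to the stack map (all rows up to the one marked
    // IsLast) as a reversed range; an empty range is returned when the stack map has no inline info.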
    auto GetInlineInfos(const StackMap &stackMap)
    {
        if (!stackMap.HasInlineInfoIndex()) {
            return inlineInfos_.GetRangeReversed(0, 0);
        }
        auto index = stackMap.GetInlineInfoIndex();
        uint32_t size = index;
        for (; inlineInfos_.GetRow(size).GetIsLast() == 0; size++) {
        }

        return inlineInfos_.GetRangeReversed(index, helpers::ToSigned(size) + 1);
    }

    auto GetInlineInfo(const StackMap &stackMap, int inlineDepth) const
    {
        ASSERT(stackMap.HasInlineInfoIndex());
        CHECK_GE(GetInlineDepth(stackMap), inlineDepth);
        return inlineInfos_.GetRow(stackMap.GetInlineInfoIndex() + inlineDepth);
    }

    int GetInlineDepth(const StackMap &stackMap) const
    {
        if (!stackMap.HasInlineInfoIndex()) {
            return -1;
        }
        int index = stackMap.GetInlineInfoIndex();
        int depth = index;
        for (; inlineInfos_.GetRow(depth).GetIsLast() == 0; depth++) {
        }
        return depth - index;
    }

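    // Binary search for a stack map with exactly the given native pc (the table is ordered by native
    // pc); returns an invalid row when there is no exact match.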
    StackMap FindStackMapForNativePc(uint32_t pc, Arch arch = RUNTIME_ARCH) const
    {
        auto it = std::lower_bound(stackMaps_.begin(), stackMaps_.end(), pc, [arch](const auto &a, uintptr_t counter) {
            return a.GetNativePcUnpacked(arch) < counter;
        });
        return (it == stackMaps_.end() || it->GetNativePcUnpacked(arch) != pc) ? stackMaps_.GetInvalidRow() : *it;
    }

    StackMap FindOsrStackMap(uint32_t pc) const
    {
        auto it = std::find_if(stackMaps_.begin(), stackMaps_.end(),
                               [pc](const auto &a) { return a.GetBytecodePc() == pc && a.IsOsr(); });
        return it == stackMaps_.end() ? stackMaps_.GetInvalidRow() : *it;
    }

    auto GetStackMap(size_t index) const
    {
        return StackMap(&stackMaps_, index);
    }

    auto &GetStackMaps()
    {
        return stackMaps_;
    }

    auto &GetVRegCatalogue()
    {
        return vregsCatalogue_;
    }

    auto &GetVRegMapTable()
    {
        return vregsMap_;
    }

    auto &GetVRegMaskTable()
    {
        return vregMasks_;
    }

    auto &GetInlineInfosTable()
    {
        return inlineInfos_;
    }

    auto &GetConstantTable()
    {
        return constantTable_;
    }

    const auto &GetImplicitNullChecksTable() const
    {
        return implicitNullchecks_;
    }

    bool HasFloatRegs() const
    {
        return GetHeader().HasFloatRegs();
    }

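    // Calls func(index, pointer-to-member) for every bit table in the canonical encode/decode order;
    // the index corresponds to the bit position in the header table mask.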
    template <typename Func>
    static void EnumerateTables(Func func)
    {
        size_t index = 0;
        func(index++, &CodeInfo::stackMaps_);
        func(index++, &CodeInfo::inlineInfos_);
        func(index++, &CodeInfo::rootsRegMasks_);
        func(index++, &CodeInfo::rootsStackMasks_);
        func(index++, &CodeInfo::methodIds_);
        func(index++, &CodeInfo::vregMasks_);
        func(index++, &CodeInfo::vregsMap_);
        func(index++, &CodeInfo::vregsCatalogue_);
        func(index++, &CodeInfo::implicitNullchecks_);
        func(index++, &CodeInfo::constantTable_);
        ASSERT(index == TABLES_COUNT);
    }

    template <typename Callback>
    void EnumerateStaticRoots(const StackMap &stackMap, Callback callback)
    {
        return EnumerateRoots<Callback, false>(stackMap, callback);
    }

    template <typename Callback>
    void EnumerateDynamicRoots(const StackMap &stackMap, Callback callback)
    {
        return EnumerateRoots<Callback, true>(stackMap, callback);
    }

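    // Reconstructs the values of virtual registers [firstVreg, firstVreg + vregsCount) that are live
    // at the given stack map: stack maps are walked backwards from the given one, and each map's vreg
    // mask contributes the registers it recorded, until every requested register has been resolved.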
    template <typename Allocator>
    VRegList<Allocator> GetVRegList(StackMap stackMap, uint32_t firstVreg, uint32_t vregsCount,
                                    Allocator *allocator = nullptr) const
    {
        if (vregsCount == 0 || !stackMap.HasRegMap()) {
            return CodeInfo::VRegList<Allocator>(allocator);
        }
        VRegList<Allocator> vregList(allocator);
        vregList.resize(vregsCount, VRegInfo());
        ASSERT(!vregList[0].IsLive());
        std::vector<bool> regSet(vregsCount);

        uint32_t remainingRegisters = vregsCount;
        for (int sindex = static_cast<int64_t>(stackMap.GetRow()); sindex >= 0 && remainingRegisters > 0; sindex--) {
            stackMap = GetStackMap(sindex);
            if (!stackMap.HasVRegMaskIndex()) {
                continue;
            }
            // Skip stackmaps that are not in the same inline depth
            auto vregMask = GetVRegMask(stackMap);
            if (vregMask.Size() <= firstVreg) {
                continue;
            }
            ASSERT(stackMap.HasVRegMapIndex());
            uint32_t mapIndex = stackMap.GetVRegMapIndex();

            mapIndex += vregMask.Popcount(0, firstVreg);
            vregMask = vregMask.Subregion(firstVreg, vregMask.Size() - firstVreg);

            FillVRegList<Allocator>(vregMask, vregList, regSet, {&vregsCount, &remainingRegisters}, &mapIndex);
        }
        return vregList;
    }

    template <typename Allocator>
    void FillVRegList(RegionType &vregMask, VRegList<Allocator> &vregList, std::vector<bool> &regSet,
                      VRegNumberPair vregPair, uint32_t *mapIndex) const
    {
        auto [vregsCount, remainingRegisters] = vregPair;
        uint32_t end = std::min<uint32_t>(vregMask.Size(), *vregsCount);
        for (size_t i = 0; i < end; i += BITS_PER_UINT32) {
            uint32_t mask = vregMask.Read(i, std::min<uint32_t>(end - i, BITS_PER_UINT32));
            while (mask != 0) {
                auto regIdx = static_cast<size_t>(Ctz(mask));
                if (regSet[i + regIdx]) {
                    (*mapIndex)++;
                    mask ^= 1U << regIdx;
                    continue;
                }
                auto vregIndex = vregsMap_.GetRow(*mapIndex);
                if (vregIndex.GetIndex() != StackMap::NO_VALUE) {
                    ASSERT(!vregList[i + regIdx].IsLive());
                    vregList[i + regIdx] = vregsCatalogue_.GetRow(vregIndex.GetIndex()).GetVRegInfo();
                    vregList[i + regIdx].SetIndex(i + regIdx);
                }
                (*remainingRegisters)--;
                regSet[i + regIdx] = true;
                (*mapIndex)++;
                mask ^= 1U << regIdx;
            }
        }
    }

    template <typename Allocator>
    VRegList<Allocator> GetVRegList(StackMap stackMap, int inlineDepth, Allocator *allocator = nullptr) const
    {
        if (inlineDepth < 0) {
            return GetVRegList<Allocator>(stackMap, 0, GetHeader().GetVRegsCount(), allocator);
        }
        ASSERT(stackMap.HasInlineInfoIndex());
        auto inlineInfo = GetInlineInfo(stackMap, inlineDepth);
        if (inlineInfo.GetVRegsCount() == 0) {
            return VRegList<Allocator>(allocator);
        }
        auto depth = inlineInfo.GetRow() - stackMap.GetInlineInfoIndex();
        uint32_t first =
            depth == 0 ? GetHeader().GetVRegsCount() : inlineInfos_.GetRow(inlineInfo.GetRow() - 1).GetVRegsCount();
        ASSERT(inlineInfo.GetVRegsCount() >= first);
        return GetVRegList<Allocator>(stackMap, first, inlineInfo.GetVRegsCount() - first, allocator);
    }

    template <typename Allocator>
    VRegList<Allocator> GetVRegList(StackMap stackMap, Allocator *allocator = nullptr) const
    {
        return GetVRegList<Allocator>(stackMap, -1, allocator);
    }

    static bool VerifyCompiledEntry(uintptr_t compiledEntry)
    {
        auto codeheader = compiledEntry - GetCodeOffset(RUNTIME_ARCH);
        return (*reinterpret_cast<const uint32_t *>(codeheader) == CodePrefix::MAGIC);
    }

    void Dump(std::ostream &stream) const;

    void Dump(std::ostream &stream, const StackMap &stackMap, Arch arch = RUNTIME_ARCH) const;

    void DumpInlineInfo(std::ostream &stream, const StackMap &stackMap, int depth) const;

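    // Derives the number of spill slots from the frame size by subtracting the fixed CFrame slots and
    // the register buffer, then reversing the alignment applied by CFrameLayout::AlignSpillCount.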
    size_t CountSpillSlots()
    {
        size_t frameSlots = GetFrameSize() / PointerSize(RUNTIME_ARCH);
        static_assert(CFrameSlots::Start() >= 0);
        size_t spillsCount = frameSlots - (static_cast<size_t>(CFrameSlots::Start()) + GetRegsCount(RUNTIME_ARCH) + 1U);
        // Reverse 'CFrameLayout::AlignSpillCount' counting
        if (RUNTIME_ARCH == Arch::AARCH32) {
            spillsCount = spillsCount / 2U - 1;
        }
        if (spillsCount % 2U != 0) {
            spillsCount--;
        }
        return spillsCount;
    }

private:
    template <typename Callback, bool IS_DYNAMIC>
    void EnumerateRoots(const StackMap &stackMap, Callback callback);

    BitTable<StackMap> stackMaps_;
    BitTable<InlineInfo> inlineInfos_;
    BitTable<RegisterMask> rootsRegMasks_;
    BitTable<StackMask> rootsStackMasks_;
    BitTable<MethodId> methodIds_;
    BitTable<VRegisterInfo> vregsCatalogue_;
    BitTable<VRegisterCatalogueIndex> vregsMap_;
    BitTable<VRegisterMask> vregMasks_;
    BitTable<ImplicitNullChecks> implicitNullchecks_;
    BitTable<ConstantTable> constantTable_;

    CodeInfoHeader header_ {};

    Span<const uint8_t> data_;
};

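/*
 * Enumerates GC roots recorded for the stack map: first object roots held in registers (roots
 * register mask), then roots located in stack slots (roots stack mask). Slot indexes are rebased
 * onto the CFrame layout before being reported. Enumeration stops early if the callback returns
 * false.
 */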
template <typename Callback, bool IS_DYNAMIC>
void CodeInfo::EnumerateRoots(const StackMap &stackMap, Callback callback)
{
    auto rootType = IS_DYNAMIC ? VRegInfo::Type::ANY : VRegInfo::Type::OBJECT;

    if (stackMap.HasRootsRegMaskIndex()) {
        auto regMask = rootsRegMasks_.GetRow(stackMap.GetRootsRegMaskIndex()).GetMask();
        ArenaBitVectorSpan vec(&regMask, BITS_PER_UINT32);
        for (auto regIdx : vec.GetSetBitsIndices()) {
            if (!callback(VRegInfo(regIdx, VRegInfo::Location::REGISTER, rootType, VRegInfo::VRegType::VREG))) {
                return;
            }
        }
    }
    if (!stackMap.HasRootsStackMaskIndex()) {
        return;
    }
    // Simplify after renumbering stack slots
    auto stackSlotsCount = CountSpillSlots();
    auto regMask = rootsStackMasks_.GetBitMemoryRegion(stackMap.GetRootsStackMaskIndex());
    for (auto regIdx : regMask) {
        if (regIdx >= stackSlotsCount) {
            // Parameter slots' indexes are added to the root mask with a `stackSlotsCount` offset to
            // distinguish them from spill slots
            auto paramSlotIdx = regIdx - stackSlotsCount;
            regIdx = static_cast<size_t>(CFrameLayout::StackArgSlot::Start()) - paramSlotIdx -
                     static_cast<size_t>(CFrameSlots::Start());
        } else {
            if constexpr (!ArchTraits<RUNTIME_ARCH>::IS_64_BITS) {  // NOLINT
                regIdx = (regIdx << 1U) + 1;
            }
            // Stack roots begin at the spill/fill stack origin, so adjust the index to account for the
            // registers buffer
            regIdx += GetRegsCount(RUNTIME_ARCH);
        }
        VRegInfo vreg(regIdx, VRegInfo::Location::SLOT, rootType, VRegInfo::VRegType::VREG);
        if (!callback(vreg)) {
            return;
        }
    }
}

}  // namespace ark::compiler

#endif  // PANDA_CODE_INFO_H