/*
 * Copyright (c) 2021-2023 Huawei Device Co., Ltd.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "code_info_builder.h"
#include "utils/bit_memory_region-inl.h"

namespace panda::compiler {

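// The builder is driven in a begin/end protocol, as suggested by the debug-only
// was*Begin_ flags below: BeginMethod -> { BeginStackMap -> [BeginInlineInfo ->
// EndInlineInfo]* -> EndStackMap }* -> EndMethod -> Encode.

// Starts building code info for one method: records the frame size and the number of
// virtual registers, and seeds the constant table with a zero entry.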
void CodeInfoBuilder::BeginMethod(uint32_t frameSize, uint32_t vregsCount)
{
#ifndef NDEBUG
    ASSERT(!wasMethodBegin_);
    ASSERT(!wasStackMapBegin_);
    ASSERT(!wasInlineInfoBegin_);
    wasMethodBegin_ = true;
#endif

    SetFrameSize(frameSize);
    vregsCount_ = vregsCount;
    constantTable_.Add({0});
}

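// Finishes the method: in debug builds, checks that every stack map and inline info
// record opened since BeginMethod() has been closed.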
void CodeInfoBuilder::EndMethod()
{
#ifndef NDEBUG
    ASSERT(wasMethodBegin_);
    ASSERT(!wasStackMapBegin_);
    ASSERT(!wasInlineInfoBegin_);
    wasMethodBegin_ = false;
#endif
}

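// Opens a new stack map for the instruction at bytecode pc `bpc` / native pc `npc`.
// Root locations are recorded as a register mask and an optional stack mask; the
// method's vregs are tracked only when `requireVregMap` is set. Stack maps must be
// added in non-decreasing native-pc order.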
void CodeInfoBuilder::BeginStackMap(uint32_t bpc, uint32_t npc, ArenaBitVector *stackRoots, uint32_t regsRoots,
                                    bool requireVregMap, bool isOsr)
{
#ifndef NDEBUG
    ASSERT(wasMethodBegin_);
    ASSERT(!wasStackMapBegin_);
    ASSERT(!wasInlineInfoBegin_);
    wasStackMapBegin_ = true;
#endif
    inlineInfoStack_.clear();
    currentVregs_.clear();

    ASSERT(stackMaps_.GetSize() == 0 || npc >= stackMaps_.GetLast()[StackMap::COLUMN_NATIVE_PC]);

    currentVregsCount_ = requireVregMap ? vregsCount_ : 0;

    currentStackMap_ = BitTableBuilder<StackMap>::Entry();
    currentStackMap_[StackMap::COLUMN_PROPERTIES] = StackMap::CreateProperties(isOsr, requireVregMap);
    currentStackMap_[StackMap::COLUMN_BYTECODE_PC] = bpc;
    currentStackMap_[StackMap::COLUMN_NATIVE_PC] = StackMap::PackAddress(npc, arch_);
    if (regsRoots != 0) {
        currentStackMap_[StackMap::COLUMN_ROOTS_REG_MASK_INDEX] = rootsRegMasks_.Add({regsRoots});
    }
    if (stackRoots != nullptr && !stackRoots->empty()) {
        currentStackMap_[StackMap::COLUMN_ROOTS_STACK_MASK_INDEX] = rootsStackMasks_.Add(stackRoots->GetFixed());
    }
    // Ensure that stackmaps are inserted in sorted order
    if (stackMaps_.GetRowsCount() != 0) {
        ASSERT(currentStackMap_[StackMap::COLUMN_NATIVE_PC] >= stackMaps_.GetLast()[StackMap::COLUMN_NATIVE_PC]);
    }
}

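// Closes the current stack map: links the collected inline-info chain (marking its last
// entry), emits the vreg map, and appends the finished row to the stack map table.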
void CodeInfoBuilder::EndStackMap()
{
#ifndef NDEBUG
    ASSERT(wasMethodBegin_);
    ASSERT(wasStackMapBegin_);
    ASSERT(!wasInlineInfoBegin_);
    wasStackMapBegin_ = false;
#endif
    if (!inlineInfoStack_.empty()) {
        inlineInfoStack_.back()[InlineInfo::COLUMN_IS_LAST] = static_cast<uint32_t>(true);
        currentStackMap_[StackMap::COLUMN_INLINE_INFO_INDEX] = inlineInfos_.AddArray(Span(inlineInfoStack_));
    }

    EmitVRegs();

    stackMaps_.Add(currentStackMap_);
}

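// Prints a one-line, human-readable summary of the most recently added stack map:
// native/bytecode pc, inline depth, root masks and the vreg mask, when present.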
void CodeInfoBuilder::DumpCurrentStackMap(std::ostream &stream) const
{
    stream << "Stackmap #" << (stackMaps_.GetRowsCount() - 1) << ": npc=0x" << std::hex
           << StackMap::UnpackAddress(currentStackMap_[StackMap::COLUMN_NATIVE_PC], arch_) << ", bpc=0x" << std::hex
           << currentStackMap_[StackMap::COLUMN_BYTECODE_PC];
    if (currentStackMap_[StackMap::COLUMN_INLINE_INFO_INDEX] != StackMap::NO_VALUE) {
        stream << ", inline_depth=" << inlineInfoStack_.size();
    }
    if (currentStackMap_[StackMap::COLUMN_ROOTS_REG_MASK_INDEX] != StackMap::NO_VALUE ||
        currentStackMap_[StackMap::COLUMN_ROOTS_STACK_MASK_INDEX] != StackMap::NO_VALUE) {
        stream << ", roots=[";
        const char *sep = "";
        if (currentStackMap_[StackMap::COLUMN_ROOTS_REG_MASK_INDEX] != StackMap::NO_VALUE) {
            auto &entry = rootsRegMasks_.GetEntry(currentStackMap_[StackMap::COLUMN_ROOTS_REG_MASK_INDEX]);
            stream << "r:0x" << std::hex << entry[RegisterMask::COLUMN_MASK];
            sep = ",";
        }
        if (currentStackMap_[StackMap::COLUMN_ROOTS_STACK_MASK_INDEX] != StackMap::NO_VALUE) {
            auto region = rootsStackMasks_.GetEntry(currentStackMap_[StackMap::COLUMN_ROOTS_STACK_MASK_INDEX]);
            stream << sep << "s:" << region;
        }
        stream << "]";
    }
    if (currentStackMap_[StackMap::COLUMN_VREG_MASK_INDEX] != StackMap::NO_VALUE) {
        stream << ", vregs=" << vregMasks_.GetEntry(currentStackMap_[StackMap::COLUMN_VREG_MASK_INDEX]);
    }
}

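// Opens an inline-info record for a callee inlined at bytecode pc `bpc`. The callee is
// identified either by a runtime method pointer (stored as high/low 32-bit halves) or,
// when the pointer is not available, by `methodId`. The callee's vregs extend the
// cumulative vreg count of the enclosing stack map.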
void CodeInfoBuilder::BeginInlineInfo(void *method, uint32_t methodId, uint32_t bpc, uint32_t vregsCount)
{
#ifndef NDEBUG
    ASSERT(wasMethodBegin_);
    ASSERT(wasStackMapBegin_);
    wasInlineInfoBegin_ = true;
#endif
    BitTableBuilder<InlineInfo>::Entry inlineInfo;
    currentVregsCount_ += vregsCount;

    inlineInfo[InlineInfo::COLUMN_IS_LAST] = static_cast<uint32_t>(false);
    inlineInfo[InlineInfo::COLUMN_BYTECODE_PC] = bpc;
    inlineInfo[InlineInfo::COLUMN_VREGS_COUNT] = currentVregsCount_;
    if (method != nullptr) {
        inlineInfo[InlineInfo::COLUMN_METHOD_HI] = High32Bits(method);
        inlineInfo[InlineInfo::COLUMN_METHOD_LOW] = Low32Bits(method);
    } else {
        ASSERT(methodId != 0);
        inlineInfo[InlineInfo::COLUMN_METHOD_ID_INDEX] = methodIds_.Add({methodId});
    }

    inlineInfoStack_.push_back(inlineInfo);
}

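// Closes the innermost inline-info record; the vregs for this frame must already have
// been added, so the collected vregs match the cumulative count.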
void CodeInfoBuilder::EndInlineInfo()
{
#ifndef NDEBUG
    ASSERT(wasMethodBegin_);
    ASSERT(wasStackMapBegin_);
    ASSERT(wasInlineInfoBegin_);
    wasInlineInfoBegin_ = false;
#endif
    ASSERT(currentVregs_.size() == currentVregsCount_);
}

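// Records a constant-valued vreg: the 64-bit value is split into two 32-bit halves,
// each stored in the constant table, and the vreg keeps the indices of both entries.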
void CodeInfoBuilder::AddConstant(uint64_t value, VRegInfo::Type type, VRegInfo::VRegType vregType)
{
    VRegInfo vreg(0, VRegInfo::Location::CONSTANT, type, vregType);
    uint32_t low = value & ((1LLU << BITS_PER_UINT32) - 1);
    uint32_t hi = (value >> BITS_PER_UINT32) & ((1LLU << BITS_PER_UINT32) - 1);
    vreg.SetConstantIndices(constantTable_.Add({low}), constantTable_.Add({hi}));
    currentVregs_.push_back(vreg);
}

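// Emits the vreg map for the current stack map. Vregs are delta-encoded: only registers
// that changed since the last stack map (or whose last change is farther away than
// MAX_VREG_LIVE_DISTANCE rows) are written out, together with a change mask over all
// tracked vregs.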
void CodeInfoBuilder::EmitVRegs()
{
    ASSERT(currentVregs_.size() == currentVregsCount_);
    if (currentVregs_.empty()) {
        return;
    }

    if (currentVregs_.size() > lastVregs_.size()) {
        lastVregs_.resize(currentVregs_.size(), VRegInfo::Invalid());
        vregsLastChange_.resize(currentVregs_.size());
    }

    ArenaVector<BitTableBuilder<VRegisterCatalogueIndex>::Entry> &vregsMap = vregsMapStorage_;
    ArenaBitVector &vregsMask = vregsMaskStorage_;
    vregsMap.clear();
    vregsMask.clear();

    for (size_t i = 0; i < currentVregs_.size(); i++) {
        auto &vreg = currentVregs_[i];
        uint32_t distance = stackMaps_.GetRowsCount() - vregsLastChange_[i];
        if (lastVregs_[i] != vreg || distance > MAX_VREG_LIVE_DISTANCE) {
            BitTableBuilder<VRegisterInfo>::Entry vregEntry;
            vregEntry[VRegisterInfo::COLUMN_INFO] = vreg.GetInfo();
            vregEntry[VRegisterInfo::COLUMN_VALUE] = vreg.GetValue();
            uint32_t index = vreg.IsLive() ? vregsCatalogue_.Add(vregEntry) : decltype(vregsCatalogue_)::NO_VALUE;
            vregsMap.push_back({index});
            vregsMask.SetBit(i);
            lastVregs_[i] = vreg;
            vregsLastChange_[i] = stackMaps_.GetRowsCount();
        }
    }

    BitMemoryRegion rgn(vregsMask.data(), vregsMask.size());
    ASSERT(vregsMask.PopCount() == vregsMap.size());
    if (vregsMask.PopCount() != 0) {
        currentStackMap_[StackMap::COLUMN_VREG_MASK_INDEX] = vregMasks_.Add(vregsMask.GetFixed());
    }
    if (!currentVregs_.empty()) {
        currentStackMap_[StackMap::COLUMN_VREG_MAP_INDEX] = vregsMap_.AddArray(Span(vregsMap));
    }
}

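// Serializes the collected tables into `stream` starting at `offset`: writes the header
// (with a bit mask of the non-empty tables and the vreg count), then each non-empty
// table, and pads the stream to CodeInfo::SIZE_ALIGNMENT.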
void CodeInfoBuilder::Encode(ArenaVector<uint8_t> *stream, size_t offset)
{
    BitMemoryStreamOut out(stream, offset);

    uint32_t tablesMask = 0;
    EnumerateTables([&tablesMask](size_t index, const auto &table) {
        if (table->GetRowsCount() != 0) {
            tablesMask |= (1U << index);
        }
    });

    header_.SetTableMask(tablesMask);
    header_.SetVRegsCount(vregsCount_);
    header_.Encode(out);

    EnumerateTables([&out]([[maybe_unused]] size_t index, const auto &table) {
        if (table->GetRowsCount() != 0) {
            table->Encode(out);
        }
    });
    stream->resize(RoundUp(stream->size(), CodeInfo::SIZE_ALIGNMENT));
}

}  // namespace panda::compiler
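// A minimal usage sketch of the builder protocol above. This is an illustrative
// assumption, not code from this repository: the constructor arguments and the helper
// variables (arch, allocator, byteStream, bpc, npc, ...) are hypothetical.
//
//   CodeInfoBuilder builder(arch, allocator);
//   builder.BeginMethod(frameSize, vregsCount);
//   builder.BeginStackMap(bpc, npc, stackRoots, regsRoots, /* requireVregMap */ true, /* isOsr */ false);
//   builder.AddConstant(0x1234U, VRegInfo::Type::INT64, VRegInfo::VRegType::VREG);
//   builder.EndStackMap();
//   builder.EndMethod();
//   builder.Encode(&byteStream, 0U);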