• Home
  • Line#
  • Scopes#
  • Navigate#
  • Raw
  • Download
1 /*
2  * Copyright (c) 2021-2024 Huawei Device Co., Ltd.
3  * Licensed under the Apache License, Version 2.0 (the "License");
4  * you may not use this file except in compliance with the License.
5  * You may obtain a copy of the License at
6  *
7  * http://www.apache.org/licenses/LICENSE-2.0
8  *
9  * Unless required by applicable law or agreed to in writing, software
10  * distributed under the License is distributed on an "AS IS" BASIS,
11  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12  * See the License for the specific language governing permissions and
13  * limitations under the License.
14  */
15 
16 #include "code_info_builder.h"
17 #include "utils/bit_memory_region-inl.h"
18 #include "optimizer/ir/inst.h"
19 
20 namespace ark::compiler {
21 
/// Begins encoding of a new method's code info.
/// @param frameSize   size of the method's stack frame, forwarded to SetFrameSize()
/// @param vregsCount  number of virtual registers the method declares; cached for
///                    later per-stackmap vreg emission
void CodeInfoBuilder::BeginMethod(uint32_t frameSize, uint32_t vregsCount)
{
#ifndef NDEBUG
    // Builder must be idle: no method, stackmap or inline-info section open.
    ASSERT(!wasMethodBegin_);
    ASSERT(!wasStackMapBegin_);
    ASSERT(!wasInlineInfoBegin_);
    wasMethodBegin_ = true;
#endif

    SetFrameSize(frameSize);
    vregsCount_ = vregsCount;
    // Reserve slot 0 of the constant table with a zero entry.
    constantTable_.Add({0});
}
35 
/// Finishes the current method. In debug builds this only validates that every
/// stackmap and inline-info section opened since BeginMethod() was closed.
void CodeInfoBuilder::EndMethod()
{
#ifndef NDEBUG
    ASSERT(wasMethodBegin_);
    ASSERT(!wasStackMapBegin_);
    ASSERT(!wasInlineInfoBegin_);
    wasMethodBegin_ = false;
#endif
}
45 
/// Starts a new stackmap record.
/// @param bpc            bytecode pc of the save state
/// @param npc            native (machine-code) pc of the stackmap
/// @param ss             save-state instruction providing GC root masks
/// @param requireVregMap whether virtual registers must be recorded for this map
void CodeInfoBuilder::BeginStackMap(uint32_t bpc, uint32_t npc, SaveStateInst *ss, bool requireVregMap)
{
#ifndef NDEBUG
    ASSERT(wasMethodBegin_);
    ASSERT(!wasStackMapBegin_);
    ASSERT(!wasInlineInfoBegin_);
    wasStackMapBegin_ = true;
#endif
    // GC roots located on the stack and in registers at this save state.
    ArenaBitVector *stackRoots = ss->GetRootsStackMask();
    uint32_t regsRoots = ss->GetRootsRegsMask().to_ulong();
    bool isOsr = ss->GetOpcode() == Opcode::SaveStateOsr;

    // Reset per-stackmap scratch state.
    inlineInfoStack_.clear();
    currentVregs_.clear();

    // NOTE(review): this compares the raw `npc` against the stored (packed)
    // COLUMN_NATIVE_PC value, while the assert at the end of this function
    // compares packed-vs-packed — verify PackAddress is monotonic so both hold.
    ASSERT(stackMaps_.GetSize() == 0 || npc >= stackMaps_.GetLast()[StackMap::COLUMN_NATIVE_PC]);

    // Vregs are only tracked when the caller requires a vreg map.
    currentVregsCount_ = requireVregMap ? vregsCount_ : 0;

    currentStackMap_ = BitTableBuilder<StackMap>::Entry();
    currentStackMap_[StackMap::COLUMN_PROPERTIES] = StackMap::CreateProperties(isOsr, requireVregMap);
    currentStackMap_[StackMap::COLUMN_BYTECODE_PC] = bpc;
    currentStackMap_[StackMap::COLUMN_NATIVE_PC] = StackMap::PackAddress(npc, arch_);
    // Root masks are only recorded when non-empty; absent columns keep NO_VALUE.
    if (regsRoots != 0) {
        currentStackMap_[StackMap::COLUMN_ROOTS_REG_MASK_INDEX] = rootsRegMasks_.Add({regsRoots});
    }
    if (stackRoots != nullptr && !stackRoots->empty()) {
        currentStackMap_[StackMap::COLUMN_ROOTS_STACK_MASK_INDEX] = rootsStackMasks_.Add(stackRoots->GetFixed());
    }
    // Ensure that stackmaps are inserted in sorted order
    if (stackMaps_.GetRowsCount() != 0) {
        ASSERT(currentStackMap_[StackMap::COLUMN_NATIVE_PC] >= stackMaps_.GetLast()[StackMap::COLUMN_NATIVE_PC]);
    }
}
80 
EndStackMap()81 void CodeInfoBuilder::EndStackMap()
82 {
83 #ifndef NDEBUG
84     ASSERT(wasMethodBegin_);
85     ASSERT(wasStackMapBegin_);
86     ASSERT(!wasInlineInfoBegin_);
87     wasStackMapBegin_ = false;
88 #endif
89     if (!inlineInfoStack_.empty()) {
90         inlineInfoStack_.back()[InlineInfo::COLUMN_IS_LAST] = static_cast<uint32_t>(true);
91         currentStackMap_[StackMap::COLUMN_INLINE_INFO_INDEX] = inlineInfos_.AddArray(Span(inlineInfoStack_));
92     }
93 
94     EmitVRegs();
95 
96     stackMaps_.Add(currentStackMap_);
97 }
98 
DumpCurrentStackMap(std::ostream & stream) const99 void CodeInfoBuilder::DumpCurrentStackMap(std::ostream &stream) const
100 {
101     stream << "Stackmap #" << (stackMaps_.GetRowsCount() - 1) << ": npc=0x" << std::hex
102            << StackMap::UnpackAddress(currentStackMap_[StackMap::COLUMN_NATIVE_PC], arch_) << ", bpc=0x" << std::hex
103            << currentStackMap_[StackMap::COLUMN_BYTECODE_PC];
104     if (currentStackMap_[StackMap::COLUMN_INLINE_INFO_INDEX] != StackMap::NO_VALUE) {
105         stream << ", inline_depth=" << inlineInfoStack_.size();
106     }
107     if (currentStackMap_[StackMap::COLUMN_ROOTS_REG_MASK_INDEX] != StackMap::NO_VALUE ||
108         currentStackMap_[StackMap::COLUMN_ROOTS_STACK_MASK_INDEX] != StackMap::NO_VALUE) {
109         stream << ", roots=[";
110         const char *sep = "";
111         if (currentStackMap_[StackMap::COLUMN_ROOTS_REG_MASK_INDEX] != StackMap::NO_VALUE) {
112             auto &entry = rootsRegMasks_.GetEntry(currentStackMap_[StackMap::COLUMN_ROOTS_REG_MASK_INDEX]);
113             stream << "r:0x" << std::hex << entry[RegisterMask::COLUMN_MASK];
114             sep = ",";
115         }
116         if (currentStackMap_[StackMap::COLUMN_ROOTS_STACK_MASK_INDEX] != StackMap::NO_VALUE) {
117             auto region = rootsStackMasks_.GetEntry(currentStackMap_[StackMap::COLUMN_ROOTS_STACK_MASK_INDEX]);
118             stream << sep << "s:" << region;
119         }
120         stream << "]";
121     }
122     if (currentStackMap_[StackMap::COLUMN_VREG_MASK_INDEX] != StackMap::NO_VALUE) {
123         stream << ", vregs=" << vregMasks_.GetEntry(currentStackMap_[StackMap::COLUMN_VREG_MASK_INDEX]);
124     }
125 }
126 
BeginInlineInfo(void * method,uint32_t methodId,uint32_t bpc,uint32_t vregsCount)127 void CodeInfoBuilder::BeginInlineInfo(void *method, uint32_t methodId, uint32_t bpc, uint32_t vregsCount)
128 {
129 #ifndef NDEBUG
130     ASSERT(wasMethodBegin_);
131     ASSERT(wasStackMapBegin_);
132     wasInlineInfoBegin_ = true;
133 #endif
134     BitTableBuilder<InlineInfo>::Entry inlineInfo;
135     currentVregsCount_ += vregsCount;
136 
137     inlineInfo[InlineInfo::COLUMN_IS_LAST] = static_cast<uint32_t>(false);
138     inlineInfo[InlineInfo::COLUMN_BYTECODE_PC] = bpc;
139     inlineInfo[InlineInfo::COLUMN_VREGS_COUNT] = currentVregsCount_;
140     if (method != nullptr) {
141         inlineInfo[InlineInfo::COLUMN_METHOD_HI] = High32Bits(method);
142         inlineInfo[InlineInfo::COLUMN_METHOD_LOW] = Low32Bits(method);
143     } else {
144         ASSERT(methodId != 0);
145         inlineInfo[InlineInfo::COLUMN_METHOD_ID_INDEX] = methodIds_.Add({methodId});
146     }
147 
148     inlineInfoStack_.push_back(inlineInfo);
149 }
150 
/// Closes the innermost inline-info frame. In debug builds, also verifies that
/// exactly as many vregs were recorded as the opened frames declared.
void CodeInfoBuilder::EndInlineInfo()
{
#ifndef NDEBUG
    ASSERT(wasMethodBegin_);
    ASSERT(wasStackMapBegin_);
    ASSERT(wasInlineInfoBegin_);
    wasInlineInfoBegin_ = false;
#endif
    ASSERT(currentVregs_.size() == currentVregsCount_);
}
161 
AddConstant(uint64_t value,VRegInfo::Type type,VRegInfo::VRegType vregType)162 void CodeInfoBuilder::AddConstant(uint64_t value, VRegInfo::Type type, VRegInfo::VRegType vregType)
163 {
164     VRegInfo vreg(0, VRegInfo::Location::CONSTANT, type, vregType);
165     uint32_t low = value & ((1LLU << BITS_PER_UINT32) - 1);
166     uint32_t hi = (value >> BITS_PER_UINT32) & ((1LLU << BITS_PER_UINT32) - 1);
167     vreg.SetConstantIndices(constantTable_.Add({low}), constantTable_.Add({hi}));
168     currentVregs_.push_back(vreg);
169 }
170 
EmitVRegs()171 void CodeInfoBuilder::EmitVRegs()
172 {
173     ASSERT(currentVregs_.size() == currentVregsCount_);
174     if (currentVregs_.empty()) {
175         return;
176     }
177 
178     if (currentVregs_.size() > lastVregs_.size()) {
179         lastVregs_.resize(currentVregs_.size(), VRegInfo::Invalid());
180         vregsLastChange_.resize(currentVregs_.size());
181     }
182 
183     ArenaVector<BitTableBuilder<VRegisterCatalogueIndex>::Entry> &vregsMap = vregsMapStorage_;
184     ArenaBitVector &vregsMask = vregsMaskStorage_;
185     vregsMap.clear();
186     vregsMask.clear();
187 
188     for (size_t i = 0; i < currentVregs_.size(); i++) {
189         auto &vreg = currentVregs_[i];
190         uint32_t distatnce = stackMaps_.GetRowsCount() - vregsLastChange_[i];
191         if (lastVregs_[i] != vreg || distatnce > MAX_VREG_LIVE_DISTANCE) {
192             BitTableBuilder<VRegisterInfo>::Entry vregEntry;
193             vregEntry[VRegisterInfo::COLUMN_INFO] = vreg.GetInfo();
194             vregEntry[VRegisterInfo::COLUMN_VALUE] = vreg.GetValue();
195             uint32_t index = vreg.IsLive() ? vregsCatalogue_.Add(vregEntry) : decltype(vregsCatalogue_)::NO_VALUE;
196             vregsMap.push_back({index});
197             vregsMask.SetBit(i);
198             lastVregs_[i] = vreg;
199             vregsLastChange_[i] = stackMaps_.GetRowsCount();
200         }
201     }
202 
203     BitMemoryRegion rgn(vregsMask.data(), vregsMask.size());
204     ASSERT(vregsMask.PopCount() == vregsMap.size());
205     if (vregsMask.PopCount() != 0) {
206         currentStackMap_[StackMap::COLUMN_VREG_MASK_INDEX] = vregMasks_.Add(vregsMask.GetFixed());
207     }
208     if (!currentVregs_.empty()) {
209         currentStackMap_[StackMap::COLUMN_VREG_MAP_INDEX] = vregsMap_.AddArray(Span(vregsMap));
210     }
211 }
212 
Encode(ArenaVector<uint8_t> * stream,size_t offset)213 void CodeInfoBuilder::Encode(ArenaVector<uint8_t> *stream, size_t offset)
214 {
215     BitMemoryStreamOut out(stream, offset);
216 
217     uint32_t tablesMask = 0;
218     EnumerateTables([&tablesMask](size_t index, const auto &table) {
219         if (table->GetRowsCount() != 0) {
220             tablesMask |= (1U << index);
221         }
222     });
223 
224     header_.SetTableMask(tablesMask);
225     header_.SetVRegsCount(vregsCount_);
226     header_.Encode(out);
227 
228     EnumerateTables([&out]([[maybe_unused]] size_t index, const auto &table) {
229         if (table->GetRowsCount() != 0) {
230             table->Encode(out);
231         }
232     });
233     stream->resize(RoundUp(stream->size(), CodeInfo::SIZE_ALIGNMENT));
234 }
235 
236 }  // namespace ark::compiler
237