/**
 * Copyright (c) 2021-2022 Huawei Device Co., Ltd.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "code_info_builder.h"
#include "utils/bit_memory_region-inl.h"

namespace panda::compiler {

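// Starts code info construction for a method: records the frame size in the header,
// stores the virtual register count and seeds the constant table with a zero entry.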
void CodeInfoBuilder::BeginMethod(uint32_t frame_size, uint32_t vregs_count)
{
#ifndef NDEBUG
    ASSERT(!was_method_begin_);
    ASSERT(!was_stack_map_begin_);
    ASSERT(!was_inline_info_begin_);
    was_method_begin_ = true;
#endif

    header_.SetFrameSize(frame_size);
    vregs_count_ = vregs_count;
    constant_table_.Add({0});
}

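// Finishes code info construction for the method. In debug builds this only verifies
// that no stack map or inline info is still open; otherwise it is a no-op.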
void CodeInfoBuilder::EndMethod()
{
#ifndef NDEBUG
    ASSERT(was_method_begin_);
    ASSERT(!was_stack_map_begin_);
    ASSERT(!was_inline_info_begin_);
    was_method_begin_ = false;
#endif
}

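// Opens a new stack map for bytecode pc `bpc` and native pc `npc`. GC root masks for
// registers and stack slots are attached when present, the vreg map is emitted only if
// `require_vreg_map` is set, and stack maps are expected to arrive sorted by native pc.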
void CodeInfoBuilder::BeginStackMap(uint32_t bpc, uint32_t npc, ArenaBitVector *stack_roots, uint32_t regs_roots,
                                    bool require_vreg_map, bool is_osr)
{
#ifndef NDEBUG
    ASSERT(was_method_begin_);
    ASSERT(!was_stack_map_begin_);
    ASSERT(!was_inline_info_begin_);
    was_stack_map_begin_ = true;
#endif
    inline_info_stack_.clear();
    current_vregs_.clear();

    ASSERT(stack_maps_.GetSize() == 0 || npc >= stack_maps_.GetLast()[StackMap::COLUMN_NATIVE_PC]);

    current_vregs_count_ = require_vreg_map ? vregs_count_ : 0;

    current_stack_map_ = BitTableBuilder<StackMap>::Entry();
    current_stack_map_[StackMap::COLUMN_PROPERTIES] = StackMap::CreateProperties(is_osr, require_vreg_map);
    current_stack_map_[StackMap::COLUMN_BYTECODE_PC] = bpc;
    current_stack_map_[StackMap::COLUMN_NATIVE_PC] = StackMap::PackAddress(npc, arch_);
    if (regs_roots != 0) {
        current_stack_map_[StackMap::COLUMN_ROOTS_REG_MASK_INDEX] = roots_reg_masks_.Add({regs_roots});
    }
    if (stack_roots != nullptr && !stack_roots->empty()) {
        current_stack_map_[StackMap::COLUMN_ROOTS_STACK_MASK_INDEX] = roots_stack_masks_.Add(stack_roots->GetFixed());
    }
    // Ensure that stackmaps are inserted in sorted order
    if (stack_maps_.GetRowsCount() != 0) {
        ASSERT(current_stack_map_[StackMap::COLUMN_NATIVE_PC] >= stack_maps_.GetLast()[StackMap::COLUMN_NATIVE_PC]);
    }
}

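// Closes the current stack map: marks the innermost inline info as the last one,
// attaches the collected inline infos and the virtual register map, then stores the row.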
void CodeInfoBuilder::EndStackMap()
{
#ifndef NDEBUG
    ASSERT(was_method_begin_);
    ASSERT(was_stack_map_begin_);
    ASSERT(!was_inline_info_begin_);
    was_stack_map_begin_ = false;
#endif
    if (!inline_info_stack_.empty()) {
        inline_info_stack_.back()[InlineInfo::COLUMN_IS_LAST] = static_cast<uint32_t>(true);
        current_stack_map_[StackMap::COLUMN_INLINE_INFO_INDEX] = inline_infos_.AddArray(Span(inline_info_stack_));
    }

    EmitVRegs();

    stack_maps_.Add(current_stack_map_);
}

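// Prints a human-readable summary of the most recently added stack map:
// native/bytecode pc, inline depth, GC root masks and the vreg mask, if present.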
void CodeInfoBuilder::DumpCurrentStackMap(std::ostream &stream) const
{
    stream << "Stackmap #" << stack_maps_.GetRowsCount() - 1 << ": npc=0x" << std::hex
           << StackMap::UnpackAddress(current_stack_map_[StackMap::COLUMN_NATIVE_PC], arch_) << ", bpc=0x" << std::hex
           << current_stack_map_[StackMap::COLUMN_BYTECODE_PC];
    if (current_stack_map_[StackMap::COLUMN_INLINE_INFO_INDEX] != StackMap::NO_VALUE) {
        stream << ", inline_depth=" << inline_info_stack_.size();
    }
    if (current_stack_map_[StackMap::COLUMN_ROOTS_REG_MASK_INDEX] != StackMap::NO_VALUE ||
        current_stack_map_[StackMap::COLUMN_ROOTS_STACK_MASK_INDEX] != StackMap::NO_VALUE) {
        stream << ", roots=[";
        const char *sep = "";
        if (current_stack_map_[StackMap::COLUMN_ROOTS_REG_MASK_INDEX] != StackMap::NO_VALUE) {
            auto &entry = roots_reg_masks_.GetEntry(current_stack_map_[StackMap::COLUMN_ROOTS_REG_MASK_INDEX]);
            stream << "r:0x" << std::hex << entry[RegisterMask::COLUMN_MASK];
            sep = ",";
        }
        if (current_stack_map_[StackMap::COLUMN_ROOTS_STACK_MASK_INDEX] != StackMap::NO_VALUE) {
            auto region = roots_stack_masks_.GetEntry(current_stack_map_[StackMap::COLUMN_ROOTS_STACK_MASK_INDEX]);
            stream << sep << "s:" << region;
        }
        stream << "]";
    }
    if (current_stack_map_[StackMap::COLUMN_VREG_MASK_INDEX] != StackMap::NO_VALUE) {
        stream << ", vregs=" << vreg_masks_.GetEntry(current_stack_map_[StackMap::COLUMN_VREG_MASK_INDEX]);
    }
}

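// Pushes an inline info entry for an inlined callee. The callee is identified either by
// its runtime pointer (split into high/low 32-bit halves) or, if the pointer is null,
// by a non-zero `method_id`. Vreg counts accumulate across the inline chain.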
void CodeInfoBuilder::BeginInlineInfo(void *method, uint32_t method_id, uint32_t bpc, uint32_t vregs_count)
{
#ifndef NDEBUG
    ASSERT(was_method_begin_);
    ASSERT(was_stack_map_begin_);
    was_inline_info_begin_ = true;
#endif
    BitTableBuilder<InlineInfo>::Entry inline_info;
    current_vregs_count_ += vregs_count;

    inline_info[InlineInfo::COLUMN_IS_LAST] = static_cast<uint32_t>(false);
    inline_info[InlineInfo::COLUMN_BYTECODE_PC] = bpc;
    inline_info[InlineInfo::COLUMN_VREGS_COUNT] = current_vregs_count_;
    if (method != nullptr) {
        inline_info[InlineInfo::COLUMN_METHOD_HI] = High32Bits(method);
        inline_info[InlineInfo::COLUMN_METHOD_LOW] = Low32Bits(method);
    } else {
        ASSERT(method_id != 0);
        inline_info[InlineInfo::COLUMN_METHOD_ID_INDEX] = method_ids_.Add({method_id});
    }

    inline_info_stack_.push_back(inline_info);
}

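// Closes the innermost inline info and checks that the expected number of
// virtual registers has been recorded for the inline chain so far.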
void CodeInfoBuilder::EndInlineInfo()
{
#ifndef NDEBUG
    ASSERT(was_method_begin_);
    ASSERT(was_stack_map_begin_);
    ASSERT(was_inline_info_begin_);
    was_inline_info_begin_ = false;
#endif
    ASSERT(current_vregs_.size() == current_vregs_count_);
}

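// Records a constant-valued virtual register. The 64-bit value is split into two
// 32-bit halves, each stored in the constant table and referenced by index.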
void CodeInfoBuilder::AddConstant(uint64_t value, VRegInfo::Type type, bool is_acc)
{
    VRegInfo vreg(0, VRegInfo::Location::CONSTANT, type, is_acc);
    uint32_t low = value & ((1LLU << BITS_PER_UINT32) - 1);
    uint32_t hi = (value >> BITS_PER_UINT32) & ((1LLU << BITS_PER_UINT32) - 1);
    vreg.SetConstantIndices(constant_table_.Add({low}), constant_table_.Add({hi}));
    current_vregs_.push_back(vreg);
}

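// Emits the virtual register map for the current stack map. Only registers that changed
// since the last emission, or whose last record is more than MAX_VREG_LIVE_DISTANCE
// stack maps old, are re-emitted, so consecutive stack maps can share catalogue entries.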
void CodeInfoBuilder::EmitVRegs()
{
    ASSERT(current_vregs_.size() == current_vregs_count_);
    if (current_vregs_.empty()) {
        return;
    }

    if (current_vregs_.size() > last_vregs_.size()) {
        last_vregs_.resize(current_vregs_.size(), VRegInfo::Invalid());
        vregs_last_change_.resize(current_vregs_.size());
    }

    ArenaVector<BitTableBuilder<VRegisterCatalogueIndex>::Entry> &vregs_map = vregs_map_storage_;
    ArenaBitVector &vregs_mask = vregs_mask_storage_;
    vregs_map.clear();
    vregs_mask.clear();

    for (size_t i = 0; i < current_vregs_.size(); i++) {
        auto &vreg = current_vregs_[i];
        uint32_t distance = stack_maps_.GetRowsCount() - vregs_last_change_[i];
        if (last_vregs_[i] != vreg || distance > MAX_VREG_LIVE_DISTANCE) {
            BitTableBuilder<VRegisterInfo>::Entry vreg_entry;
            vreg_entry[VRegisterInfo::COLUMN_INFO] = vreg.GetInfo();
            vreg_entry[VRegisterInfo::COLUMN_VALUE] = vreg.GetValue();
            uint32_t index = vreg.IsLive() ? vregs_catalogue_.Add(vreg_entry) : decltype(vregs_catalogue_)::NO_VALUE;
            vregs_map.push_back({index});
            vregs_mask.SetBit(i);
            last_vregs_[i] = vreg;
            vregs_last_change_[i] = stack_maps_.GetRowsCount();
        }
    }

    BitMemoryRegion rgn(vregs_mask.data(), vregs_mask.size());
    ASSERT(vregs_mask.PopCount() == vregs_map.size());
    if (vregs_mask.PopCount() != 0) {
        current_stack_map_[StackMap::COLUMN_VREG_MASK_INDEX] = vreg_masks_.Add(vregs_mask.GetFixed());
    }
    if (!current_vregs_.empty()) {
        current_stack_map_[StackMap::COLUMN_VREG_MAP_INDEX] = vregs_map_.AddArray(Span(vregs_map));
    }
}

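// Serializes the header and all non-empty bit tables into `stream` starting at `offset`,
// records which tables are present in the header's table mask, and pads the result up to
// CodeInfo::SIZE_ALIGNMENT.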
void CodeInfoBuilder::Encode(ArenaVector<uint8_t> *stream, size_t offset)
{
    BitMemoryStreamOut out(stream, offset);

    uint32_t tables_mask = 0;
    EnumerateTables([&tables_mask](size_t index, const auto &table) {
        if (table->GetRowsCount() != 0) {
            tables_mask |= (1U << index);
        }
    });

    header_.SetTableMask(tables_mask);
    header_.SetVRegsCount(vregs_count_);
    header_.Encode(out);

    EnumerateTables([&out]([[maybe_unused]] size_t index, const auto &table) {
        if (table->GetRowsCount() != 0) {
            table->Encode(out);
        }
    });
    stream->resize(RoundUp(stream->size(), CodeInfo::SIZE_ALIGNMENT));
}

} // namespace panda::compiler