/*
 * Copyright (C) 2015 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "stack_map_stream.h"

#include <memory>

#include "art_method-inl.h"
#include "base/stl_util.h"
#include "dex/dex_file_types.h"
#include "optimizing/optimizing_compiler.h"
#include "runtime.h"
#include "scoped_thread_state_change-inl.h"
#include "stack_map.h"

namespace art {

constexpr static bool kVerifyStackMaps = kIsDebugBuild;
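// When verification is enabled, each Begin*/End* method below appends a
// lambda to dchecks_ that captures its arguments; Encode() replays all of
// these lambdas against the decoded CodeInfo to check that the written data
// round-trips.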

uint32_t StackMapStream::GetStackMapNativePcOffset(size_t i) {
  return StackMap::UnpackNativePc(stack_maps_[i][StackMap::kPackedNativePc], instruction_set_);
}

void StackMapStream::SetStackMapNativePcOffset(size_t i, uint32_t native_pc_offset) {
  stack_maps_[i][StackMap::kPackedNativePc] =
      StackMap::PackNativePc(native_pc_offset, instruction_set_);
}

void StackMapStream::BeginMethod(size_t frame_size_in_bytes,
                                 size_t core_spill_mask,
                                 size_t fp_spill_mask,
                                 uint32_t num_dex_registers,
                                 bool baseline) {
  DCHECK(!in_method_) << "Mismatched Begin/End calls";
  in_method_ = true;
  DCHECK_EQ(packed_frame_size_, 0u) << "BeginMethod was already called";

  DCHECK_ALIGNED(frame_size_in_bytes, kStackAlignment);
  packed_frame_size_ = frame_size_in_bytes / kStackAlignment;
  core_spill_mask_ = core_spill_mask;
  fp_spill_mask_ = fp_spill_mask;
  num_dex_registers_ = num_dex_registers;
  baseline_ = baseline;

  if (kVerifyStackMaps) {
    dchecks_.emplace_back([=](const CodeInfo& code_info) {
      DCHECK_EQ(code_info.packed_frame_size_, frame_size_in_bytes / kStackAlignment);
      DCHECK_EQ(code_info.core_spill_mask_, core_spill_mask);
      DCHECK_EQ(code_info.fp_spill_mask_, fp_spill_mask);
      DCHECK_EQ(code_info.number_of_dex_registers_, num_dex_registers);
    });
  }
}

void StackMapStream::EndMethod(size_t code_size) {
  DCHECK(in_method_) << "Mismatched Begin/End calls";
  in_method_ = false;
  code_size_ = code_size;

  // Read the stack masks now. The compiler might have updated them.
  for (size_t i = 0; i < lazy_stack_masks_.size(); i++) {
    BitVector* stack_mask = lazy_stack_masks_[i];
    if (stack_mask != nullptr && stack_mask->GetNumberOfBits() != 0) {
      stack_maps_[i][StackMap::kStackMaskIndex] =
          stack_masks_.Dedup(stack_mask->GetRawStorage(), stack_mask->GetNumberOfBits());
    }
  }

  if (kIsDebugBuild) {
    uint32_t packed_code_size = StackMap::PackNativePc(code_size, instruction_set_);
    for (size_t i = 0; i < stack_maps_.size(); i++) {
      DCHECK_LE(stack_maps_[i][StackMap::kPackedNativePc], packed_code_size);
    }
  }

  if (kVerifyStackMaps) {
    dchecks_.emplace_back([=](const CodeInfo& code_info) {
      CHECK_EQ(code_info.code_size_, code_size);
    });
  }
}
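
// Typical call protocol, as enforced by the Begin/End DCHECKs in this file.
// A sketch only: recording of vreg locations and inline frames per entry is
// optional and depends on the caller.
//
//   stream.BeginMethod(frame_size, core_spills, fp_spills, num_vregs,
//                      /*baseline=*/ false);
//   for each safepoint:
//     stream.BeginStackMapEntry(dex_pc, native_pc_offset, ...);
//     ... record the location of each vreg; wrap each inlined frame in
//     BeginInlineInfoEntry() / EndInlineInfoEntry() ...
//     stream.EndStackMapEntry();
//   stream.EndMethod(code_size);
//   ScopedArenaVector<uint8_t> encoded = stream.Encode();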

void StackMapStream::BeginStackMapEntry(uint32_t dex_pc,
                                        uint32_t native_pc_offset,
                                        uint32_t register_mask,
                                        BitVector* stack_mask,
                                        StackMap::Kind kind,
                                        bool needs_vreg_info) {
  DCHECK(in_method_) << "Call BeginMethod first";
  DCHECK(!in_stack_map_) << "Mismatched Begin/End calls";
  in_stack_map_ = true;

  current_stack_map_ = BitTableBuilder<StackMap>::Entry();
  current_stack_map_[StackMap::kKind] = static_cast<uint32_t>(kind);
  current_stack_map_[StackMap::kPackedNativePc] =
      StackMap::PackNativePc(native_pc_offset, instruction_set_);
  current_stack_map_[StackMap::kDexPc] = dex_pc;
  if (stack_maps_.size() > 0) {
    // Check that non-catch stack maps are sorted by pc.
    // Catch stack maps are at the end and may be unordered.
    if (stack_maps_.back()[StackMap::kKind] == StackMap::Kind::Catch) {
      DCHECK(current_stack_map_[StackMap::kKind] == StackMap::Kind::Catch);
    } else if (current_stack_map_[StackMap::kKind] != StackMap::Kind::Catch) {
      DCHECK_LE(stack_maps_.back()[StackMap::kPackedNativePc],
                current_stack_map_[StackMap::kPackedNativePc]);
    }
  }
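  // Compress the register mask by storing it as (mask >> shift, shift), so
  // masks covering only high registers stay small, and deduplicate the entry,
  // since consecutive stack maps often share the same mask.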
  if (register_mask != 0) {
    uint32_t shift = LeastSignificantBit(register_mask);
    BitTableBuilder<RegisterMask>::Entry entry;
    entry[RegisterMask::kValue] = register_mask >> shift;
    entry[RegisterMask::kShift] = shift;
    current_stack_map_[StackMap::kRegisterMaskIndex] = register_masks_.Dedup(&entry);
  }
  // The compiler assumes the bit vector will be read later (in EndMethod())
  // and may still modify the data before then. Therefore, just store the pointer.
  // See ClearSpillSlotsFromLoopPhisInStackMap in code_generator.h.
  lazy_stack_masks_.push_back(stack_mask);
  current_inline_infos_.clear();
  current_dex_registers_.clear();
  expected_num_dex_registers_ = needs_vreg_info ? num_dex_registers_ : 0u;

  if (kVerifyStackMaps) {
    size_t stack_map_index = stack_maps_.size();
    // Create a lambda that will be executed at the very end to verify the
    // written data. Parameters and local variables are captured (stored) by
    // value via "[=]".
    dchecks_.emplace_back([=](const CodeInfo& code_info) {
      if (kind == StackMap::Kind::Default || kind == StackMap::Kind::OSR) {
        StackMap stack_map = code_info.GetStackMapForNativePcOffset(native_pc_offset,
                                                                    instruction_set_);
        CHECK_EQ(stack_map.Row(), stack_map_index);
      } else if (kind == StackMap::Kind::Catch) {
        StackMap stack_map = code_info.GetCatchStackMapForDexPc(dex_pc);
        CHECK_EQ(stack_map.Row(), stack_map_index);
      }
      StackMap stack_map = code_info.GetStackMapAt(stack_map_index);
      CHECK_EQ(stack_map.GetNativePcOffset(instruction_set_), native_pc_offset);
      CHECK_EQ(stack_map.GetKind(), static_cast<uint32_t>(kind));
      CHECK_EQ(stack_map.GetDexPc(), dex_pc);
      CHECK_EQ(code_info.GetRegisterMaskOf(stack_map), register_mask);
      BitMemoryRegion seen_stack_mask = code_info.GetStackMaskOf(stack_map);
      CHECK_GE(seen_stack_mask.size_in_bits(), stack_mask ? stack_mask->GetNumberOfBits() : 0);
      for (size_t b = 0; b < seen_stack_mask.size_in_bits(); b++) {
        CHECK_EQ(seen_stack_mask.LoadBit(b), stack_mask != nullptr && stack_mask->IsBitSet(b));
      }
    });
  }
}

void StackMapStream::EndStackMapEntry() {
  DCHECK(in_stack_map_) << "Mismatched Begin/End calls";
  in_stack_map_ = false;

  // Generate index into the InlineInfo table.
  size_t inlining_depth = current_inline_infos_.size();
  if (!current_inline_infos_.empty()) {
    current_inline_infos_.back()[InlineInfo::kIsLast] = InlineInfo::kLast;
    current_stack_map_[StackMap::kInlineInfoIndex] =
        inline_infos_.Dedup(current_inline_infos_.data(), current_inline_infos_.size());
  }

  // Generate delta-compressed dex register map.
  size_t num_dex_registers = current_dex_registers_.size();
  if (!current_dex_registers_.empty()) {
    DCHECK_EQ(expected_num_dex_registers_, current_dex_registers_.size());
    CreateDexRegisterMap();
  }

  stack_maps_.Add(current_stack_map_);

  if (kVerifyStackMaps) {
    size_t stack_map_index = stack_maps_.size() - 1;
    dchecks_.emplace_back([=](const CodeInfo& code_info) {
      StackMap stack_map = code_info.GetStackMapAt(stack_map_index);
      CHECK_EQ(stack_map.HasDexRegisterMap(), (num_dex_registers != 0));
      CHECK_EQ(stack_map.HasInlineInfo(), (inlining_depth != 0));
      CHECK_EQ(code_info.GetInlineInfosOf(stack_map).size(), inlining_depth);
    });
  }
}

void StackMapStream::BeginInlineInfoEntry(ArtMethod* method,
                                          uint32_t dex_pc,
                                          uint32_t num_dex_registers,
                                          const DexFile* outer_dex_file) {
  DCHECK(in_stack_map_) << "Call BeginStackMapEntry first";
  DCHECK(!in_inline_info_) << "Mismatched Begin/End calls";
  in_inline_info_ = true;
  DCHECK_EQ(expected_num_dex_registers_, current_dex_registers_.size());

  expected_num_dex_registers_ += num_dex_registers;
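  // Inlined frames' dex registers are appended after the caller's within the
  // same stack map, so the expected total grows by the callee's count above.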

  BitTableBuilder<InlineInfo>::Entry entry;
  entry[InlineInfo::kIsLast] = InlineInfo::kMore;
  entry[InlineInfo::kDexPc] = dex_pc;
  entry[InlineInfo::kNumberOfDexRegisters] = static_cast<uint32_t>(expected_num_dex_registers_);
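  // JIT-compiled code can store the inlined ArtMethod* directly (split into
  // two 32-bit halves to fit the bit table), since the pointer remains valid
  // for the lifetime of the code; AOT code stores an index into the
  // method-info table instead. See EncodeArtMethodInInlineInfo().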
  if (EncodeArtMethodInInlineInfo(method)) {
    entry[InlineInfo::kArtMethodHi] = High32Bits(reinterpret_cast<uintptr_t>(method));
    entry[InlineInfo::kArtMethodLo] = Low32Bits(reinterpret_cast<uintptr_t>(method));
  } else {
    if (dex_pc != static_cast<uint32_t>(-1) && kIsDebugBuild) {
      ScopedObjectAccess soa(Thread::Current());
      DCHECK(IsSameDexFile(*outer_dex_file, *method->GetDexFile()));
    }
    uint32_t dex_method_index = method->GetDexMethodIndex();
    entry[InlineInfo::kMethodInfoIndex] = method_infos_.Dedup({dex_method_index});
  }
  current_inline_infos_.push_back(entry);

  if (kVerifyStackMaps) {
    size_t stack_map_index = stack_maps_.size();
    size_t depth = current_inline_infos_.size() - 1;
    dchecks_.emplace_back([=](const CodeInfo& code_info) {
      StackMap stack_map = code_info.GetStackMapAt(stack_map_index);
      InlineInfo inline_info = code_info.GetInlineInfosOf(stack_map)[depth];
      CHECK_EQ(inline_info.GetDexPc(), dex_pc);
      bool encode_art_method = EncodeArtMethodInInlineInfo(method);
      CHECK_EQ(inline_info.EncodesArtMethod(), encode_art_method);
      if (encode_art_method) {
        CHECK_EQ(inline_info.GetArtMethod(), method);
      } else {
        CHECK_EQ(code_info.GetMethodIndexOf(inline_info), method->GetDexMethodIndex());
      }
    });
  }
}

void StackMapStream::EndInlineInfoEntry() {
  DCHECK(in_inline_info_) << "Mismatched Begin/End calls";
  in_inline_info_ = false;
  DCHECK_EQ(expected_num_dex_registers_, current_dex_registers_.size());
}

// Create a delta-compressed dex register map based on the current list of
// DexRegisterLocations. All dex registers for a stack map are concatenated:
// inlined registers are simply appended after the caller's.
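// For example, if the previous stack map also covered v0..v2 and only v1 has
// changed location since then, the mask gets only bit 1 set and the map holds
// a single catalogue index (the one for v1's new location).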
void StackMapStream::CreateDexRegisterMap() {
  // These are fields rather than local variables so that we can reuse the reserved memory.
  temp_dex_register_mask_.ClearAllBits();
  temp_dex_register_map_.clear();

  // Ensure that the arrays that hold previous state are big enough to be safely indexed below.
  if (previous_dex_registers_.size() < current_dex_registers_.size()) {
    previous_dex_registers_.resize(current_dex_registers_.size(), DexRegisterLocation::None());
    dex_register_timestamp_.resize(current_dex_registers_.size(), 0u);
  }

  // Set a bit in the mask for each register that has changed since the
  // previous stack map. Modified registers are stored in the catalogue and
  // their catalogue index is added to the list.
  for (size_t i = 0; i < current_dex_registers_.size(); i++) {
    DexRegisterLocation reg = current_dex_registers_[i];
    // Distance is the difference between this index and the index of the last modification.
    uint32_t distance = stack_maps_.size() - dex_register_timestamp_[i];
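    // Re-encode the register even if unchanged whenever the last encoding is
    // too far back, so that a decoder never has to search more than
    // kMaxDexRegisterMapSearchDistance stack maps backwards.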
    if (previous_dex_registers_[i] != reg || distance > kMaxDexRegisterMapSearchDistance) {
      BitTableBuilder<DexRegisterInfo>::Entry entry;
      entry[DexRegisterInfo::kKind] = static_cast<uint32_t>(reg.GetKind());
      entry[DexRegisterInfo::kPackedValue] =
          DexRegisterInfo::PackValue(reg.GetKind(), reg.GetValue());
      uint32_t index = reg.IsLive() ? dex_register_catalog_.Dedup(&entry) : kNoValue;
      temp_dex_register_mask_.SetBit(i);
      temp_dex_register_map_.push_back({index});
      previous_dex_registers_[i] = reg;
      dex_register_timestamp_[i] = stack_maps_.size();
    }
  }

  // Set the mask and map for the current StackMap (which includes inlined registers).
  if (temp_dex_register_mask_.GetNumberOfBits() != 0) {
    current_stack_map_[StackMap::kDexRegisterMaskIndex] =
        dex_register_masks_.Dedup(temp_dex_register_mask_.GetRawStorage(),
                                  temp_dex_register_mask_.GetNumberOfBits());
  }
  if (!current_dex_registers_.empty()) {
    current_stack_map_[StackMap::kDexRegisterMapIndex] =
        dex_register_maps_.Dedup(temp_dex_register_map_.data(),
                                 temp_dex_register_map_.size());
  }

  if (kVerifyStackMaps) {
    size_t stack_map_index = stack_maps_.size();
    // We need to make a copy of the current registers for later (when the check is run).
    auto expected_dex_registers = std::make_shared<dchecked_vector<DexRegisterLocation>>(
        current_dex_registers_.begin(), current_dex_registers_.end());
    dchecks_.emplace_back([=](const CodeInfo& code_info) {
      StackMap stack_map = code_info.GetStackMapAt(stack_map_index);
      uint32_t expected_reg = 0;
      for (DexRegisterLocation reg : code_info.GetDexRegisterMapOf(stack_map)) {
        CHECK_EQ((*expected_dex_registers)[expected_reg++], reg);
      }
      for (InlineInfo inline_info : code_info.GetInlineInfosOf(stack_map)) {
        DexRegisterMap map = code_info.GetInlineDexRegisterMapOf(stack_map, inline_info);
        for (DexRegisterLocation reg : map) {
          CHECK_EQ((*expected_dex_registers)[expected_reg++], reg);
        }
      }
      CHECK_EQ(expected_reg, expected_dex_registers->size());
    });
  }
}

ScopedArenaVector<uint8_t> StackMapStream::Encode() {
  DCHECK(!in_stack_map_) << "Mismatched Begin/End calls";
  DCHECK(!in_inline_info_) << "Mismatched Begin/End calls";

  uint32_t flags = (inline_infos_.size() > 0) ? CodeInfo::kHasInlineInfo : 0;
  flags |= baseline_ ? CodeInfo::kIsBaseline : 0;
  DCHECK_LE(flags, kVarintMax);  // Ensure the flags can be read directly as a byte.
  uint32_t bit_table_flags = 0;
  ForEachBitTable([&bit_table_flags](size_t i, auto bit_table) {
    if (bit_table->size() != 0) {  // Record which bit-tables are stored.
      bit_table_flags |= 1 << i;
    }
  });
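  // bit_table_flags is a presence bitmask: bit i is set iff the i-th bit
  // table is non-empty. It lets the writer below skip empty tables and tells
  // the decoder which tables follow the header.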

  ScopedArenaVector<uint8_t> buffer(allocator_->Adapter(kArenaAllocStackMapStream));
  BitMemoryWriter<ScopedArenaVector<uint8_t>> out(&buffer);
  out.WriteInterleavedVarints(std::array<uint32_t, CodeInfo::kNumHeaders>{
      flags,
      code_size_,
      packed_frame_size_,
      core_spill_mask_,
      fp_spill_mask_,
      num_dex_registers_,
      bit_table_flags,
  });
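  // The header fields above are written as interleaved varints so that the
  // decoder can read them back as one group (see BitMemoryReader).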
  ForEachBitTable([&out](size_t, auto bit_table) {
    if (bit_table->size() != 0) {  // Skip empty bit-tables.
      bit_table->Encode(out);
    }
  });

  // Verify that we can load the CodeInfo and check some essentials.
  size_t number_of_read_bits;
  CodeInfo code_info(buffer.data(), &number_of_read_bits);
  CHECK_EQ(number_of_read_bits, out.NumberOfWrittenBits());
  CHECK_EQ(code_info.GetNumberOfStackMaps(), stack_maps_.size());
  CHECK_EQ(CodeInfo::HasInlineInfo(buffer.data()), inline_infos_.size() > 0);
  CHECK_EQ(CodeInfo::IsBaseline(buffer.data()), baseline_);

  // Verify all written data (usually only in debug builds).
  if (kVerifyStackMaps) {
    for (const auto& dcheck : dchecks_) {
      dcheck(code_info);
    }
  }

  return buffer;
}

}  // namespace art