/*
 * Copyright (C) 2015 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "stack_map_stream.h"

#include <memory>

#include "art_method-inl.h"
#include "base/stl_util.h"
#include "dex/dex_file_types.h"
#include "optimizing/optimizing_compiler.h"
#include "runtime.h"
#include "scoped_thread_state_change-inl.h"
#include "stack_map.h"

namespace art {

constexpr static bool kVerifyStackMaps = kIsDebugBuild;

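// Returns the native PC offset of the stack map at index `i`, unpacking the
// instruction-set-specific encoding stored in the bit table entry.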
uint32_t StackMapStream::GetStackMapNativePcOffset(size_t i) {
  return StackMap::UnpackNativePc(stack_maps_[i][StackMap::kPackedNativePc], instruction_set_);
}

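// Overwrites the native PC offset of the stack map at index `i`, re-packing
// the value for the current instruction set.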
void StackMapStream::SetStackMapNativePcOffset(size_t i, uint32_t native_pc_offset) {
  stack_maps_[i][StackMap::kPackedNativePc] =
      StackMap::PackNativePc(native_pc_offset, instruction_set_);
}

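// Records per-method frame information. The frame size must be stack-aligned
// and is stored divided by kStackAlignment so it packs into fewer bits.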
void StackMapStream::BeginMethod(size_t frame_size_in_bytes,
                                 size_t core_spill_mask,
                                 size_t fp_spill_mask,
                                 uint32_t num_dex_registers) {
  DCHECK(!in_method_) << "Mismatched Begin/End calls";
  in_method_ = true;
  DCHECK_EQ(packed_frame_size_, 0u) << "BeginMethod was already called";

  DCHECK_ALIGNED(frame_size_in_bytes, kStackAlignment);
  packed_frame_size_ = frame_size_in_bytes / kStackAlignment;
  core_spill_mask_ = core_spill_mask;
  fp_spill_mask_ = fp_spill_mask;
  num_dex_registers_ = num_dex_registers;
}

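// Finalizes the method. The lazily stored stack masks are read only now,
// since the compiler may have modified them after BeginStackMapEntry, and
// are deduplicated into the stack mask table.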
void StackMapStream::EndMethod() {
  DCHECK(in_method_) << "Mismatched Begin/End calls";
  in_method_ = false;

  // Read the stack masks now. The compiler might have updated them.
  for (size_t i = 0; i < lazy_stack_masks_.size(); i++) {
    BitVector* stack_mask = lazy_stack_masks_[i];
    if (stack_mask != nullptr && stack_mask->GetNumberOfBits() != 0) {
      stack_maps_[i][StackMap::kStackMaskIndex] =
          stack_masks_.Dedup(stack_mask->GetRawStorage(), stack_mask->GetNumberOfBits());
    }
  }
}

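// Opens a new stack map entry for the given dex PC / native PC pair. The
// register mask is stored deduplicated as a (value, shift) pair, where the
// shift strips trailing zero bits so the value encodes compactly; the stack
// mask is only stored as a pointer here and read later in EndMethod().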
void StackMapStream::BeginStackMapEntry(uint32_t dex_pc,
                                        uint32_t native_pc_offset,
                                        uint32_t register_mask,
                                        BitVector* stack_mask,
                                        StackMap::Kind kind) {
  DCHECK(in_method_) << "Call BeginMethod first";
  DCHECK(!in_stack_map_) << "Mismatched Begin/End calls";
  in_stack_map_ = true;

  current_stack_map_ = BitTableBuilder<StackMap>::Entry();
  current_stack_map_[StackMap::kKind] = static_cast<uint32_t>(kind);
  current_stack_map_[StackMap::kPackedNativePc] =
      StackMap::PackNativePc(native_pc_offset, instruction_set_);
  current_stack_map_[StackMap::kDexPc] = dex_pc;
  if (stack_maps_.size() > 0) {
    // Check that non-catch stack maps are sorted by pc.
    // Catch stack maps are at the end and may be unordered.
    if (stack_maps_.back()[StackMap::kKind] == StackMap::Kind::Catch) {
      DCHECK(current_stack_map_[StackMap::kKind] == StackMap::Kind::Catch);
    } else if (current_stack_map_[StackMap::kKind] != StackMap::Kind::Catch) {
      DCHECK_LE(stack_maps_.back()[StackMap::kPackedNativePc],
                current_stack_map_[StackMap::kPackedNativePc]);
    }
  }
  if (register_mask != 0) {
    uint32_t shift = LeastSignificantBit(register_mask);
    BitTableBuilder<RegisterMask>::Entry entry;
    entry[RegisterMask::kValue] = register_mask >> shift;
    entry[RegisterMask::kShift] = shift;
    current_stack_map_[StackMap::kRegisterMaskIndex] = register_masks_.Dedup(&entry);
  }
  // The compiler assumes the bit vector will be read during EndMethod(),
  // and it might modify the data before that. Therefore, just store the pointer.
  // See ClearSpillSlotsFromLoopPhisInStackMap in code_generator.h.
  lazy_stack_masks_.push_back(stack_mask);
  current_inline_infos_.clear();
  current_dex_registers_.clear();
  expected_num_dex_registers_ = num_dex_registers_;

  if (kVerifyStackMaps) {
    size_t stack_map_index = stack_maps_.size();
    // Create a lambda which will be executed at the very end to verify the data.
    // Parameters and local variables are captured by value via "[=]".
    dchecks_.emplace_back([=](const CodeInfo& code_info) {
      if (kind == StackMap::Kind::Default || kind == StackMap::Kind::OSR) {
        StackMap stack_map = code_info.GetStackMapForNativePcOffset(native_pc_offset,
                                                                    instruction_set_);
        CHECK_EQ(stack_map.Row(), stack_map_index);
      } else if (kind == StackMap::Kind::Catch) {
        StackMap stack_map = code_info.GetCatchStackMapForDexPc(dex_pc);
        CHECK_EQ(stack_map.Row(), stack_map_index);
      }
      StackMap stack_map = code_info.GetStackMapAt(stack_map_index);
      CHECK_EQ(stack_map.GetNativePcOffset(instruction_set_), native_pc_offset);
      CHECK_EQ(stack_map.GetKind(), static_cast<uint32_t>(kind));
      CHECK_EQ(stack_map.GetDexPc(), dex_pc);
      CHECK_EQ(code_info.GetRegisterMaskOf(stack_map), register_mask);
      BitMemoryRegion seen_stack_mask = code_info.GetStackMaskOf(stack_map);
      CHECK_GE(seen_stack_mask.size_in_bits(), stack_mask ? stack_mask->GetNumberOfBits() : 0);
      for (size_t b = 0; b < seen_stack_mask.size_in_bits(); b++) {
        CHECK_EQ(seen_stack_mask.LoadBit(b), stack_mask != nullptr && stack_mask->IsBitSet(b));
      }
    });
  }
}

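// Closes the current stack map entry: marks the last inline info, dedups the
// accumulated inline infos and dex register map, and appends the finished
// entry to the stack map table.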
void StackMapStream::EndStackMapEntry() {
  DCHECK(in_stack_map_) << "Mismatched Begin/End calls";
  in_stack_map_ = false;

  // Generate index into the InlineInfo table.
  size_t inlining_depth = current_inline_infos_.size();
  if (!current_inline_infos_.empty()) {
    current_inline_infos_.back()[InlineInfo::kIsLast] = InlineInfo::kLast;
    current_stack_map_[StackMap::kInlineInfoIndex] =
        inline_infos_.Dedup(current_inline_infos_.data(), current_inline_infos_.size());
  }

  // Generate delta-compressed dex register map.
  size_t num_dex_registers = current_dex_registers_.size();
  if (!current_dex_registers_.empty()) {
    DCHECK_EQ(expected_num_dex_registers_, current_dex_registers_.size());
    CreateDexRegisterMap();
  }

  stack_maps_.Add(current_stack_map_);

  if (kVerifyStackMaps) {
    size_t stack_map_index = stack_maps_.size() - 1;
    dchecks_.emplace_back([=](const CodeInfo& code_info) {
      StackMap stack_map = code_info.GetStackMapAt(stack_map_index);
      CHECK_EQ(stack_map.HasDexRegisterMap(), (num_dex_registers != 0));
      CHECK_EQ(stack_map.HasInlineInfo(), (inlining_depth != 0));
      CHECK_EQ(code_info.GetInlineInfosOf(stack_map).size(), inlining_depth);
    });
  }
}

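// Opens an inline info entry nested within the current stack map. The inlined
// method is referenced either directly by its ArtMethod pointer (split into
// high/low 32-bit halves) or indirectly by its dex method index.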
void StackMapStream::BeginInlineInfoEntry(ArtMethod* method,
                                          uint32_t dex_pc,
                                          uint32_t num_dex_registers,
                                          const DexFile* outer_dex_file) {
  DCHECK(in_stack_map_) << "Call BeginStackMapEntry first";
  DCHECK(!in_inline_info_) << "Mismatched Begin/End calls";
  in_inline_info_ = true;
  DCHECK_EQ(expected_num_dex_registers_, current_dex_registers_.size());

  expected_num_dex_registers_ += num_dex_registers;

  BitTableBuilder<InlineInfo>::Entry entry;
  entry[InlineInfo::kIsLast] = InlineInfo::kMore;
  entry[InlineInfo::kDexPc] = dex_pc;
  entry[InlineInfo::kNumberOfDexRegisters] = static_cast<uint32_t>(expected_num_dex_registers_);
  if (EncodeArtMethodInInlineInfo(method)) {
    entry[InlineInfo::kArtMethodHi] = High32Bits(reinterpret_cast<uintptr_t>(method));
    entry[InlineInfo::kArtMethodLo] = Low32Bits(reinterpret_cast<uintptr_t>(method));
  } else {
    if (dex_pc != static_cast<uint32_t>(-1) && kIsDebugBuild) {
      ScopedObjectAccess soa(Thread::Current());
      DCHECK(IsSameDexFile(*outer_dex_file, *method->GetDexFile()));
    }
    uint32_t dex_method_index = method->GetDexMethodIndex();
    entry[InlineInfo::kMethodInfoIndex] = method_infos_.Dedup({dex_method_index});
  }
  current_inline_infos_.push_back(entry);

  if (kVerifyStackMaps) {
    size_t stack_map_index = stack_maps_.size();
    size_t depth = current_inline_infos_.size() - 1;
    dchecks_.emplace_back([=](const CodeInfo& code_info) {
      StackMap stack_map = code_info.GetStackMapAt(stack_map_index);
      InlineInfo inline_info = code_info.GetInlineInfosOf(stack_map)[depth];
      CHECK_EQ(inline_info.GetDexPc(), dex_pc);
      bool encode_art_method = EncodeArtMethodInInlineInfo(method);
      CHECK_EQ(inline_info.EncodesArtMethod(), encode_art_method);
      if (encode_art_method) {
        CHECK_EQ(inline_info.GetArtMethod(), method);
      } else {
        CHECK_EQ(code_info.GetMethodIndexOf(inline_info), method->GetDexMethodIndex());
      }
    });
  }
}

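// Closes the current inline info entry, checking that the declared number of
// dex registers for the inlined frame has been recorded.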
void StackMapStream::EndInlineInfoEntry() {
  DCHECK(in_inline_info_) << "Mismatched Begin/End calls";
  in_inline_info_ = false;
  DCHECK_EQ(expected_num_dex_registers_, current_dex_registers_.size());
}

// Create delta-compressed dex register map based on the current list of DexRegisterLocations.
// All dex registers for a stack map are concatenated - inlined registers are just appended.
void StackMapStream::CreateDexRegisterMap() {
  // These are fields rather than local variables so that we can reuse the reserved memory.
  temp_dex_register_mask_.ClearAllBits();
  temp_dex_register_map_.clear();

  // Ensure that the arrays that hold previous state are big enough to be safely indexed below.
  if (previous_dex_registers_.size() < current_dex_registers_.size()) {
    previous_dex_registers_.resize(current_dex_registers_.size(), DexRegisterLocation::None());
    dex_register_timestamp_.resize(current_dex_registers_.size(), 0u);
  }

  // Set bit in the mask for each register that has been changed since the previous stack map.
  // Modified registers are stored in the catalogue and the catalogue index is added to the list.
  for (size_t i = 0; i < current_dex_registers_.size(); i++) {
    DexRegisterLocation reg = current_dex_registers_[i];
    // Distance is the difference between this stack map's index and the index
    // of the stack map where this register was last modified.
    uint32_t distance = stack_maps_.size() - dex_register_timestamp_[i];
    if (previous_dex_registers_[i] != reg || distance > kMaxDexRegisterMapSearchDistance) {
      BitTableBuilder<DexRegisterInfo>::Entry entry;
      entry[DexRegisterInfo::kKind] = static_cast<uint32_t>(reg.GetKind());
      entry[DexRegisterInfo::kPackedValue] =
          DexRegisterInfo::PackValue(reg.GetKind(), reg.GetValue());
      uint32_t index = reg.IsLive() ? dex_register_catalog_.Dedup(&entry) : kNoValue;
      temp_dex_register_mask_.SetBit(i);
      temp_dex_register_map_.push_back({index});
      previous_dex_registers_[i] = reg;
      dex_register_timestamp_[i] = stack_maps_.size();
    }
  }

  // Set the mask and map for the current StackMap (which includes inlined registers).
  if (temp_dex_register_mask_.GetNumberOfBits() != 0) {
    current_stack_map_[StackMap::kDexRegisterMaskIndex] =
        dex_register_masks_.Dedup(temp_dex_register_mask_.GetRawStorage(),
                                  temp_dex_register_mask_.GetNumberOfBits());
  }
  if (!current_dex_registers_.empty()) {
    current_stack_map_[StackMap::kDexRegisterMapIndex] =
        dex_register_maps_.Dedup(temp_dex_register_map_.data(),
                                 temp_dex_register_map_.size());
  }

  if (kVerifyStackMaps) {
    size_t stack_map_index = stack_maps_.size();
    // We need to make a copy of the current registers for later (when the check is run).
    auto expected_dex_registers = std::make_shared<dchecked_vector<DexRegisterLocation>>(
        current_dex_registers_.begin(), current_dex_registers_.end());
    dchecks_.emplace_back([=](const CodeInfo& code_info) {
      StackMap stack_map = code_info.GetStackMapAt(stack_map_index);
      uint32_t expected_reg = 0;
      for (DexRegisterLocation reg : code_info.GetDexRegisterMapOf(stack_map)) {
        CHECK_EQ((*expected_dex_registers)[expected_reg++], reg);
      }
      for (InlineInfo inline_info : code_info.GetInlineInfosOf(stack_map)) {
        DexRegisterMap map = code_info.GetInlineDexRegisterMapOf(stack_map, inline_info);
        for (DexRegisterLocation reg : map) {
          CHECK_EQ((*expected_dex_registers)[expected_reg++], reg);
        }
      }
      CHECK_EQ(expected_reg, expected_dex_registers->size());
    });
  }
}

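// Writes a single bit table to the output stream, preceded by its "is deduped"
// flag bit, which this writer always clears (the table is emitted inline).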
template<typename Writer, typename Builder>
ALWAYS_INLINE static void EncodeTable(Writer& out, const Builder& bit_table) {
  out.WriteBit(false);  // Is not deduped.
  bit_table.Encode(out);
}

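// Serializes all collected data: a varint header (packed frame size, spill
// masks, number of dex registers) followed by the eight bit tables, in the
// order that CodeInfo expects. The result is re-parsed as a CodeInfo to
// verify that the encoding round-trips.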
ScopedArenaVector<uint8_t> StackMapStream::Encode() {
  DCHECK(in_stack_map_ == false) << "Mismatched Begin/End calls";
  DCHECK(in_inline_info_ == false) << "Mismatched Begin/End calls";

  ScopedArenaVector<uint8_t> buffer(allocator_->Adapter(kArenaAllocStackMapStream));
  BitMemoryWriter<ScopedArenaVector<uint8_t>> out(&buffer);
  out.WriteVarint(packed_frame_size_);
  out.WriteVarint(core_spill_mask_);
  out.WriteVarint(fp_spill_mask_);
  out.WriteVarint(num_dex_registers_);
  EncodeTable(out, stack_maps_);
  EncodeTable(out, register_masks_);
  EncodeTable(out, stack_masks_);
  EncodeTable(out, inline_infos_);
  EncodeTable(out, method_infos_);
  EncodeTable(out, dex_register_masks_);
  EncodeTable(out, dex_register_maps_);
  EncodeTable(out, dex_register_catalog_);

  // Verify that we can load the CodeInfo and check some essentials.
  CodeInfo code_info(buffer.data());
  CHECK_EQ(code_info.Size(), buffer.size());
  CHECK_EQ(code_info.GetNumberOfStackMaps(), stack_maps_.size());

  // Verify all written data (usually only in debug builds).
  if (kVerifyStackMaps) {
    for (const auto& dcheck : dchecks_) {
      dcheck(code_info);
    }
  }

  return buffer;
}

}  // namespace art