/*
 * Copyright (C) 2015 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "stack_map_stream.h"

#include <memory>

#include "art_method-inl.h"
#include "base/stl_util.h"
#include "dex/dex_file_types.h"
#include "optimizing/optimizing_compiler.h"
#include "runtime.h"
#include "scoped_thread_state_change-inl.h"
#include "stack_map.h"

namespace art {

constexpr static bool kVerifyStackMaps = kIsDebugBuild;

uint32_t StackMapStream::GetStackMapNativePcOffset(size_t i) {
  return StackMap::UnpackNativePc(stack_maps_[i][StackMap::kPackedNativePc], instruction_set_);
}

void StackMapStream::SetStackMapNativePcOffset(size_t i, uint32_t native_pc_offset) {
  stack_maps_[i][StackMap::kPackedNativePc] =
      StackMap::PackNativePc(native_pc_offset, instruction_set_);
}

void StackMapStream::BeginMethod(size_t frame_size_in_bytes,
                                 size_t core_spill_mask,
                                 size_t fp_spill_mask,
                                 uint32_t num_dex_registers,
                                 bool baseline) {
  DCHECK(!in_method_) << "Mismatched Begin/End calls";
  in_method_ = true;
  DCHECK_EQ(packed_frame_size_, 0u) << "BeginMethod was already called";

  DCHECK_ALIGNED(frame_size_in_bytes, kStackAlignment);
  packed_frame_size_ = frame_size_in_bytes / kStackAlignment;
  core_spill_mask_ = core_spill_mask;
  fp_spill_mask_ = fp_spill_mask;
  num_dex_registers_ = num_dex_registers;
  baseline_ = baseline;

  if (kVerifyStackMaps) {
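    // Queue a verification check; all of these dchecks_ run against the decoded CodeInfo
    // at the end of Encode().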
    dchecks_.emplace_back([=](const CodeInfo& code_info) {
      DCHECK_EQ(code_info.packed_frame_size_, frame_size_in_bytes / kStackAlignment);
      DCHECK_EQ(code_info.core_spill_mask_, core_spill_mask);
      DCHECK_EQ(code_info.fp_spill_mask_, fp_spill_mask);
      DCHECK_EQ(code_info.number_of_dex_registers_, num_dex_registers);
    });
  }
}

void StackMapStream::EndMethod() {
  DCHECK(in_method_) << "Mismatched Begin/End calls";
  in_method_ = false;

  // Read the stack masks now. The compiler might have updated them.
  for (size_t i = 0; i < lazy_stack_masks_.size(); i++) {
    BitVector* stack_mask = lazy_stack_masks_[i];
    if (stack_mask != nullptr && stack_mask->GetNumberOfBits() != 0) {
      stack_maps_[i][StackMap::kStackMaskIndex] =
          stack_masks_.Dedup(stack_mask->GetRawStorage(), stack_mask->GetNumberOfBits());
    }
  }
}

void StackMapStream::BeginStackMapEntry(uint32_t dex_pc,
                                        uint32_t native_pc_offset,
                                        uint32_t register_mask,
                                        BitVector* stack_mask,
                                        StackMap::Kind kind,
                                        bool needs_vreg_info) {
  DCHECK(in_method_) << "Call BeginMethod first";
  DCHECK(!in_stack_map_) << "Mismatched Begin/End calls";
  in_stack_map_ = true;

  current_stack_map_ = BitTableBuilder<StackMap>::Entry();
  current_stack_map_[StackMap::kKind] = static_cast<uint32_t>(kind);
  current_stack_map_[StackMap::kPackedNativePc] =
      StackMap::PackNativePc(native_pc_offset, instruction_set_);
  current_stack_map_[StackMap::kDexPc] = dex_pc;
  if (stack_maps_.size() > 0) {
    // Check that non-catch stack maps are sorted by pc.
    // Catch stack maps are at the end and may be unordered.
    if (stack_maps_.back()[StackMap::kKind] == StackMap::Kind::Catch) {
      DCHECK(current_stack_map_[StackMap::kKind] == StackMap::Kind::Catch);
    } else if (current_stack_map_[StackMap::kKind] != StackMap::Kind::Catch) {
      DCHECK_LE(stack_maps_.back()[StackMap::kPackedNativePc],
                current_stack_map_[StackMap::kPackedNativePc]);
    }
  }
  if (register_mask != 0) {
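    // The mask is stored with trailing zero bits stripped and the shift recorded separately,
    // which keeps the deduplicated value small.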
    uint32_t shift = LeastSignificantBit(register_mask);
    BitTableBuilder<RegisterMask>::Entry entry;
    entry[RegisterMask::kValue] = register_mask >> shift;
    entry[RegisterMask::kShift] = shift;
    current_stack_map_[StackMap::kRegisterMaskIndex] = register_masks_.Dedup(&entry);
  }
  // The compiler assumes the bit vector will be read during PrepareForFillIn(),
  // and it might modify the data before that. Therefore, just store the pointer.
  // See ClearSpillSlotsFromLoopPhisInStackMap in code_generator.h.
  lazy_stack_masks_.push_back(stack_mask);
  current_inline_infos_.clear();
  current_dex_registers_.clear();
  expected_num_dex_registers_ = needs_vreg_info ? num_dex_registers_ : 0u;

  if (kVerifyStackMaps) {
    size_t stack_map_index = stack_maps_.size();
    // Create a lambda that will be executed at the very end to verify the data.
    // Parameters and local variables are captured (stored) by value via "[=]".
    dchecks_.emplace_back([=](const CodeInfo& code_info) {
      if (kind == StackMap::Kind::Default || kind == StackMap::Kind::OSR) {
        StackMap stack_map = code_info.GetStackMapForNativePcOffset(native_pc_offset,
                                                                    instruction_set_);
        CHECK_EQ(stack_map.Row(), stack_map_index);
      } else if (kind == StackMap::Kind::Catch) {
        StackMap stack_map = code_info.GetCatchStackMapForDexPc(dex_pc);
        CHECK_EQ(stack_map.Row(), stack_map_index);
      }
      StackMap stack_map = code_info.GetStackMapAt(stack_map_index);
      CHECK_EQ(stack_map.GetNativePcOffset(instruction_set_), native_pc_offset);
      CHECK_EQ(stack_map.GetKind(), static_cast<uint32_t>(kind));
      CHECK_EQ(stack_map.GetDexPc(), dex_pc);
      CHECK_EQ(code_info.GetRegisterMaskOf(stack_map), register_mask);
      BitMemoryRegion seen_stack_mask = code_info.GetStackMaskOf(stack_map);
      CHECK_GE(seen_stack_mask.size_in_bits(), stack_mask ? stack_mask->GetNumberOfBits() : 0);
      for (size_t b = 0; b < seen_stack_mask.size_in_bits(); b++) {
        CHECK_EQ(seen_stack_mask.LoadBit(b), stack_mask != nullptr && stack_mask->IsBitSet(b));
      }
    });
  }
}

void StackMapStream::EndStackMapEntry() {
  DCHECK(in_stack_map_) << "Mismatched Begin/End calls";
  in_stack_map_ = false;

  // Generate index into the InlineInfo table.
  size_t inlining_depth = current_inline_infos_.size();
  if (!current_inline_infos_.empty()) {
    current_inline_infos_.back()[InlineInfo::kIsLast] = InlineInfo::kLast;
    current_stack_map_[StackMap::kInlineInfoIndex] =
        inline_infos_.Dedup(current_inline_infos_.data(), current_inline_infos_.size());
  }

  // Generate delta-compressed dex register map.
  size_t num_dex_registers = current_dex_registers_.size();
  if (!current_dex_registers_.empty()) {
    DCHECK_EQ(expected_num_dex_registers_, current_dex_registers_.size());
    CreateDexRegisterMap();
  }

  stack_maps_.Add(current_stack_map_);

  if (kVerifyStackMaps) {
    size_t stack_map_index = stack_maps_.size() - 1;
    dchecks_.emplace_back([=](const CodeInfo& code_info) {
      StackMap stack_map = code_info.GetStackMapAt(stack_map_index);
      CHECK_EQ(stack_map.HasDexRegisterMap(), (num_dex_registers != 0));
      CHECK_EQ(stack_map.HasInlineInfo(), (inlining_depth != 0));
      CHECK_EQ(code_info.GetInlineInfosOf(stack_map).size(), inlining_depth);
    });
  }
}

void StackMapStream::BeginInlineInfoEntry(ArtMethod* method,
                                          uint32_t dex_pc,
                                          uint32_t num_dex_registers,
                                          const DexFile* outer_dex_file) {
  DCHECK(in_stack_map_) << "Call BeginStackMapEntry first";
  DCHECK(!in_inline_info_) << "Mismatched Begin/End calls";
  in_inline_info_ = true;
  DCHECK_EQ(expected_num_dex_registers_, current_dex_registers_.size());

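  // Registers of the inlined frame are appended after those of the outer frame(s), so the
  // expected total grows with each nested inline level (see CreateDexRegisterMap).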
  expected_num_dex_registers_ += num_dex_registers;

  BitTableBuilder<InlineInfo>::Entry entry;
  entry[InlineInfo::kIsLast] = InlineInfo::kMore;
  entry[InlineInfo::kDexPc] = dex_pc;
  entry[InlineInfo::kNumberOfDexRegisters] = static_cast<uint32_t>(expected_num_dex_registers_);
  if (EncodeArtMethodInInlineInfo(method)) {
    entry[InlineInfo::kArtMethodHi] = High32Bits(reinterpret_cast<uintptr_t>(method));
    entry[InlineInfo::kArtMethodLo] = Low32Bits(reinterpret_cast<uintptr_t>(method));
  } else {
    if (dex_pc != static_cast<uint32_t>(-1) && kIsDebugBuild) {
      ScopedObjectAccess soa(Thread::Current());
      DCHECK(IsSameDexFile(*outer_dex_file, *method->GetDexFile()));
    }
    uint32_t dex_method_index = method->GetDexMethodIndex();
    entry[InlineInfo::kMethodInfoIndex] = method_infos_.Dedup({dex_method_index});
  }
  current_inline_infos_.push_back(entry);

  if (kVerifyStackMaps) {
    size_t stack_map_index = stack_maps_.size();
    size_t depth = current_inline_infos_.size() - 1;
    dchecks_.emplace_back([=](const CodeInfo& code_info) {
      StackMap stack_map = code_info.GetStackMapAt(stack_map_index);
      InlineInfo inline_info = code_info.GetInlineInfosOf(stack_map)[depth];
      CHECK_EQ(inline_info.GetDexPc(), dex_pc);
      bool encode_art_method = EncodeArtMethodInInlineInfo(method);
      CHECK_EQ(inline_info.EncodesArtMethod(), encode_art_method);
      if (encode_art_method) {
        CHECK_EQ(inline_info.GetArtMethod(), method);
      } else {
        CHECK_EQ(code_info.GetMethodIndexOf(inline_info), method->GetDexMethodIndex());
      }
    });
  }
}

void StackMapStream::EndInlineInfoEntry() {
  DCHECK(in_inline_info_) << "Mismatched Begin/End calls";
  in_inline_info_ = false;
  DCHECK_EQ(expected_num_dex_registers_, current_dex_registers_.size());
}

// Create a delta-compressed dex register map based on the current list of DexRegisterLocations.
// All dex registers for a stack map are concatenated - inlined registers are just appended.
void StackMapStream::CreateDexRegisterMap() {
  // These are fields rather than local variables so that we can reuse the reserved memory.
  temp_dex_register_mask_.ClearAllBits();
  temp_dex_register_map_.clear();

  // Ensure that the arrays that hold previous state are big enough to be safely indexed below.
  if (previous_dex_registers_.size() < current_dex_registers_.size()) {
    previous_dex_registers_.resize(current_dex_registers_.size(), DexRegisterLocation::None());
    dex_register_timestamp_.resize(current_dex_registers_.size(), 0u);
  }

  // Set a bit in the mask for each register that has changed since the previous stack map.
  // Modified registers are stored in the catalogue and the catalogue index is added to the list.
  for (size_t i = 0; i < current_dex_registers_.size(); i++) {
    DexRegisterLocation reg = current_dex_registers_[i];
    // Distance is the number of stack maps emitted since this register was last encoded.
    uint32_t distance = stack_maps_.size() - dex_register_timestamp_[i];
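    // Re-encode the location if it changed, or periodically even when it has not, so that a
    // reader searching backwards for the last encoding never needs to look further than
    // kMaxDexRegisterMapSearchDistance stack maps.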
    if (previous_dex_registers_[i] != reg || distance > kMaxDexRegisterMapSearchDistance) {
      BitTableBuilder<DexRegisterInfo>::Entry entry;
      entry[DexRegisterInfo::kKind] = static_cast<uint32_t>(reg.GetKind());
      entry[DexRegisterInfo::kPackedValue] =
          DexRegisterInfo::PackValue(reg.GetKind(), reg.GetValue());
      uint32_t index = reg.IsLive() ? dex_register_catalog_.Dedup(&entry) : kNoValue;
      temp_dex_register_mask_.SetBit(i);
      temp_dex_register_map_.push_back({index});
      previous_dex_registers_[i] = reg;
      dex_register_timestamp_[i] = stack_maps_.size();
    }
  }

  // Set the mask and map for the current StackMap (which includes inlined registers).
  if (temp_dex_register_mask_.GetNumberOfBits() != 0) {
    current_stack_map_[StackMap::kDexRegisterMaskIndex] =
        dex_register_masks_.Dedup(temp_dex_register_mask_.GetRawStorage(),
                                  temp_dex_register_mask_.GetNumberOfBits());
  }
  if (!current_dex_registers_.empty()) {
    current_stack_map_[StackMap::kDexRegisterMapIndex] =
        dex_register_maps_.Dedup(temp_dex_register_map_.data(),
                                 temp_dex_register_map_.size());
  }

  if (kVerifyStackMaps) {
    size_t stack_map_index = stack_maps_.size();
    // We need to make a copy of the current registers for later (when the check is run).
    auto expected_dex_registers = std::make_shared<dchecked_vector<DexRegisterLocation>>(
        current_dex_registers_.begin(), current_dex_registers_.end());
    dchecks_.emplace_back([=](const CodeInfo& code_info) {
      StackMap stack_map = code_info.GetStackMapAt(stack_map_index);
      uint32_t expected_reg = 0;
      for (DexRegisterLocation reg : code_info.GetDexRegisterMapOf(stack_map)) {
        CHECK_EQ((*expected_dex_registers)[expected_reg++], reg);
      }
      for (InlineInfo inline_info : code_info.GetInlineInfosOf(stack_map)) {
        DexRegisterMap map = code_info.GetInlineDexRegisterMapOf(stack_map, inline_info);
        for (DexRegisterLocation reg : map) {
          CHECK_EQ((*expected_dex_registers)[expected_reg++], reg);
        }
      }
      CHECK_EQ(expected_reg, expected_dex_registers->size());
    });
  }
}

ScopedArenaVector<uint8_t> StackMapStream::Encode() {
  DCHECK(in_stack_map_ == false) << "Mismatched Begin/End calls";
  DCHECK(in_inline_info_ == false) << "Mismatched Begin/End calls";

  uint32_t flags = (inline_infos_.size() > 0) ? CodeInfo::kHasInlineInfo : 0;
  flags |= baseline_ ? CodeInfo::kIsBaseline : 0;
  uint32_t bit_table_flags = 0;
  ForEachBitTable([&bit_table_flags](size_t i, auto bit_table) {
    if (bit_table->size() != 0) {  // Record which bit-tables are stored.
      bit_table_flags |= 1 << i;
    }
  });

  ScopedArenaVector<uint8_t> buffer(allocator_->Adapter(kArenaAllocStackMapStream));
  BitMemoryWriter<ScopedArenaVector<uint8_t>> out(&buffer);
  out.WriteInterleavedVarints(std::array<uint32_t, CodeInfo::kNumHeaders>{
    flags,
    packed_frame_size_,
    core_spill_mask_,
    fp_spill_mask_,
    num_dex_registers_,
    bit_table_flags,
  });
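  // Only the non-empty bit-tables follow the header; the bit_table_flags written above tell
  // the reader which tables are present.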
  ForEachBitTable([&out](size_t, auto bit_table) {
    if (bit_table->size() != 0) {  // Skip empty bit-tables.
      bit_table->Encode(out);
    }
  });

  // Verify that we can load the CodeInfo and check some essentials.
  size_t number_of_read_bits;
  CodeInfo code_info(buffer.data(), &number_of_read_bits);
  CHECK_EQ(number_of_read_bits, out.NumberOfWrittenBits());
  CHECK_EQ(code_info.GetNumberOfStackMaps(), stack_maps_.size());

  // Verify all written data (usually only in debug builds).
  if (kVerifyStackMaps) {
    for (const auto& dcheck : dchecks_) {
      dcheck(code_info);
    }
  }

  return buffer;
}

}  // namespace art