/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ART_RUNTIME_STACK_MAP_H_
#define ART_RUNTIME_STACK_MAP_H_

#include <limits>

#include "arch/instruction_set.h"
#include "base/array_ref.h"
#include "base/bit_memory_region.h"
#include "base/bit_table.h"
#include "base/bit_utils.h"
#include "base/globals.h"
#include "base/logging.h"
#include "base/memory_region.h"
#include "dex/dex_file_types.h"
#include "dex_register_location.h"
#include "quick/quick_method_frame_info.h"

namespace art {

namespace linker {
class CodeInfoTableDeduper;
}  // namespace linker

class OatQuickMethodHeader;
class VariableIndentationOutputStream;

// Size of a frame slot, in bytes.  This constant is a signed value,
// to please the compiler in arithmetic operations involving int32_t
// (signed) values.
static constexpr ssize_t kFrameSlotSize = 4;

// The delta compression of dex register maps means we need to scan the stackmaps backwards.
// We compress the data in such a way so that there is an upper bound on the search distance.
// Max distance 0 means each stack map must be fully defined and no scanning back is allowed.
// If this value is changed, the oat file version should be incremented (for DCHECK to pass).
static constexpr size_t kMaxDexRegisterMapSearchDistance = 32;

class ArtMethod;
class CodeInfo;
class Stats;

std::ostream& operator<<(std::ostream& stream, const DexRegisterLocation& reg);

// Information on Dex register locations for a specific PC.
// Effectively just a convenience wrapper for DexRegisterLocation vector.
// If the size is small enough, it keeps the data on the stack.
// TODO: Replace this with generic purpose "small-vector" implementation.
class DexRegisterMap {
 public:
  using iterator = DexRegisterLocation*;
  using const_iterator = const DexRegisterLocation*;

  // Create map for given number of registers and initialize them to the given value.
  DexRegisterMap(size_t count, DexRegisterLocation value) : count_(count), regs_small_{} {
    if (count_ <= kSmallCount) {
      std::fill_n(regs_small_.begin(), count, value);
    } else {
      regs_large_.resize(count, value);
    }
  }

  DexRegisterLocation* data() {
    return count_ <= kSmallCount ? regs_small_.data() : regs_large_.data();
  }
  const DexRegisterLocation* data() const {
    return count_ <= kSmallCount ? regs_small_.data() : regs_large_.data();
  }

  iterator begin() { return data(); }
  iterator end() { return data() + count_; }
  const_iterator begin() const { return data(); }
  const_iterator end() const { return data() + count_; }
  size_t size() const { return count_; }
  bool empty() const { return count_ == 0; }

  DexRegisterLocation& operator[](size_t index) {
    DCHECK_LT(index, count_);
    return data()[index];
  }
  const DexRegisterLocation& operator[](size_t index) const {
    DCHECK_LT(index, count_);
    return data()[index];
  }

  size_t GetNumberOfLiveDexRegisters() const {
    return std::count_if(begin(), end(), [](auto& loc) { return loc.IsLive(); });
  }

  bool HasAnyLiveDexRegisters() const {
    return std::any_of(begin(), end(), [](auto& loc) { return loc.IsLive(); });
  }

  void Dump(VariableIndentationOutputStream* vios) const;

 private:
  // Store the data inline if the number of registers is small to avoid memory allocations.
  // If count_ <= kSmallCount, we use the regs_small_ array, and regs_large_ otherwise.
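  // For example, a map of 8 registers fits entirely in regs_small_, whereas a map of
  // 40 registers (count_ > kSmallCount) spills into the heap-allocated regs_large_.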
  static constexpr size_t kSmallCount = 16;
  size_t count_;
  std::array<DexRegisterLocation, kSmallCount> regs_small_;
  dchecked_vector<DexRegisterLocation> regs_large_;
};

/**
 * A Stack Map holds compilation information for a specific PC necessary for:
 * - Mapping it to a dex PC,
 * - Knowing which stack entries are objects,
 * - Knowing which registers hold objects,
 * - Knowing the inlining information,
 * - Knowing the values of dex registers.
 */
class StackMap : public BitTableAccessor<8> {
 public:
  enum Kind {
    Default = -1,
    Catch = 0,
    OSR = 1,
    Debug = 2,
  };
  BIT_TABLE_HEADER(StackMap)
  BIT_TABLE_COLUMN(0, Kind)
  BIT_TABLE_COLUMN(1, PackedNativePc)
  BIT_TABLE_COLUMN(2, DexPc)
  BIT_TABLE_COLUMN(3, RegisterMaskIndex)
  BIT_TABLE_COLUMN(4, StackMaskIndex)
  BIT_TABLE_COLUMN(5, InlineInfoIndex)
  BIT_TABLE_COLUMN(6, DexRegisterMaskIndex)
  BIT_TABLE_COLUMN(7, DexRegisterMapIndex)

  ALWAYS_INLINE uint32_t GetNativePcOffset(InstructionSet instruction_set) const {
    return UnpackNativePc(GetPackedNativePc(), instruction_set);
  }

  ALWAYS_INLINE bool HasInlineInfo() const {
    return HasInlineInfoIndex();
  }

  ALWAYS_INLINE bool HasDexRegisterMap() const {
    return HasDexRegisterMapIndex();
  }

  static uint32_t PackNativePc(uint32_t native_pc, InstructionSet isa) {
    DCHECK_ALIGNED_PARAM(native_pc, GetInstructionSetInstructionAlignment(isa));
    return native_pc / GetInstructionSetInstructionAlignment(isa);
  }

  static uint32_t UnpackNativePc(uint32_t packed_native_pc, InstructionSet isa) {
    uint32_t native_pc = packed_native_pc * GetInstructionSetInstructionAlignment(isa);
    DCHECK_EQ(native_pc / GetInstructionSetInstructionAlignment(isa), packed_native_pc);
    return native_pc;
  }
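  // Illustrative example, assuming an ISA with 2-byte instruction alignment (e.g. Thumb-2):
  // PackNativePc(0x40, isa) returns 0x20 and UnpackNativePc(0x20, isa) returns 0x40, i.e.
  // native PCs are stored divided by the instruction alignment to save bits in the table.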

  void Dump(VariableIndentationOutputStream* vios,
            const CodeInfo& code_info,
            uint32_t code_offset,
            InstructionSet instruction_set) const;
};

/**
 * Inline information for a specific PC.
 * The row referenced from the StackMap holds information at depth 0.
 * Following rows hold information for further depths.
 */
class InlineInfo : public BitTableAccessor<6> {
 public:
  BIT_TABLE_HEADER(InlineInfo)
  BIT_TABLE_COLUMN(0, IsLast)  // Determines if there are further rows for further depths.
  BIT_TABLE_COLUMN(1, DexPc)
  BIT_TABLE_COLUMN(2, MethodInfoIndex)
  BIT_TABLE_COLUMN(3, ArtMethodHi)  // High bits of ArtMethod*.
  BIT_TABLE_COLUMN(4, ArtMethodLo)  // Low bits of ArtMethod*.
  BIT_TABLE_COLUMN(5, NumberOfDexRegisters)  // Includes outer levels and the main method.

  static constexpr uint32_t kLast = -1;
  static constexpr uint32_t kMore = 0;

  bool EncodesArtMethod() const {
    return HasArtMethodLo();
  }

  ArtMethod* GetArtMethod() const {
    uint64_t lo = GetArtMethodLo();
    uint64_t hi = GetArtMethodHi();
    return reinterpret_cast<ArtMethod*>((hi << 32) | lo);
  }

  void Dump(VariableIndentationOutputStream* vios,
            const CodeInfo& info,
            const StackMap& stack_map) const;
};

class StackMask : public BitTableAccessor<1> {
 public:
  BIT_TABLE_HEADER(StackMask)
  BIT_TABLE_COLUMN(0, Mask)
};

class DexRegisterMask : public BitTableAccessor<1> {
 public:
  BIT_TABLE_HEADER(DexRegisterMask)
  BIT_TABLE_COLUMN(0, Mask)
};

class DexRegisterMapInfo : public BitTableAccessor<1> {
 public:
  BIT_TABLE_HEADER(DexRegisterMapInfo)
  BIT_TABLE_COLUMN(0, CatalogueIndex)
};

class DexRegisterInfo : public BitTableAccessor<2> {
 public:
  BIT_TABLE_HEADER(DexRegisterInfo)
  BIT_TABLE_COLUMN(0, Kind)
  BIT_TABLE_COLUMN(1, PackedValue)

  ALWAYS_INLINE DexRegisterLocation GetLocation() const {
    DexRegisterLocation::Kind kind = static_cast<DexRegisterLocation::Kind>(GetKind());
    return DexRegisterLocation(kind, UnpackValue(kind, GetPackedValue()));
  }

  static uint32_t PackValue(DexRegisterLocation::Kind kind, uint32_t value) {
    uint32_t packed_value = value;
    if (kind == DexRegisterLocation::Kind::kInStack) {
      DCHECK(IsAligned<kFrameSlotSize>(packed_value));
      packed_value /= kFrameSlotSize;
    }
    return packed_value;
  }
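  // Illustrative example: a kInStack location at byte offset 16 packs to slot index 4
  // (16 / kFrameSlotSize); UnpackValue below reverses this back to the byte offset.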

  static uint32_t UnpackValue(DexRegisterLocation::Kind kind, uint32_t packed_value) {
    uint32_t value = packed_value;
    if (kind == DexRegisterLocation::Kind::kInStack) {
      value *= kFrameSlotSize;
    }
    return value;
  }
};

// Register masks tend to have many trailing zero bits (caller-saves are usually not encoded),
// therefore it is worth encoding the mask as value+shift.
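// For example, a mask of 0x000000F0 can be stored as Value = 0xF with Shift = 4;
// GetMask() below reconstructs the full mask as Value << Shift.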
class RegisterMask : public BitTableAccessor<2> {
 public:
  BIT_TABLE_HEADER(RegisterMask)
  BIT_TABLE_COLUMN(0, Value)
  BIT_TABLE_COLUMN(1, Shift)

  ALWAYS_INLINE uint32_t GetMask() const {
    return GetValue() << GetShift();
  }
};

// Method indices are not very dedup friendly.
// Separating them greatly improves dedup efficiency of the other tables.
class MethodInfo : public BitTableAccessor<3> {
 public:
  BIT_TABLE_HEADER(MethodInfo)
  BIT_TABLE_COLUMN(0, MethodIndex)
  BIT_TABLE_COLUMN(1, DexFileIndexKind)
  BIT_TABLE_COLUMN(2, DexFileIndex)

  static constexpr uint32_t kKindNonBCP = -1;
  static constexpr uint32_t kKindBCP = 0;

  static constexpr uint32_t kSameDexFile = -1;
};

/**
 * Wrapper around all compiler information collected for a method.
 * See the Decode method at the end for the precise binary format.
 */
class CodeInfo {
 public:
  ALWAYS_INLINE CodeInfo() {}
  ALWAYS_INLINE explicit CodeInfo(const uint8_t* data, size_t* num_read_bits = nullptr);
  ALWAYS_INLINE explicit CodeInfo(const OatQuickMethodHeader* header);

  // The following methods decode only part of the data.
  static CodeInfo DecodeGcMasksOnly(const OatQuickMethodHeader* header);
  static CodeInfo DecodeInlineInfoOnly(const OatQuickMethodHeader* header);

  ALWAYS_INLINE static uint32_t DecodeCodeSize(const uint8_t* code_info_data) {
    return DecodeHeaderOnly(code_info_data).code_size_;
  }

  ALWAYS_INLINE static QuickMethodFrameInfo DecodeFrameInfo(const uint8_t* code_info_data) {
    CodeInfo code_info = DecodeHeaderOnly(code_info_data);
    return QuickMethodFrameInfo(code_info.packed_frame_size_ * kStackAlignment,
                                code_info.core_spill_mask_,
                                code_info.fp_spill_mask_);
  }
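  // Note: packed_frame_size_ is stored in kStackAlignment units, so (illustratively, with
  // a 16-byte stack alignment) a packed value of 4 corresponds to a 64-byte frame.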

  ALWAYS_INLINE static CodeInfo DecodeHeaderOnly(const uint8_t* code_info_data) {
    CodeInfo code_info;
    BitMemoryReader reader(code_info_data);
    std::array<uint32_t, kNumHeaders> header = reader.ReadInterleavedVarints<kNumHeaders>();
    ForEachHeaderField([&code_info, &header](size_t i, auto member_pointer) {
      code_info.*member_pointer = header[i];
    });
    return code_info;
  }

  ALWAYS_INLINE const BitTable<StackMap>& GetStackMaps() const {
    return stack_maps_;
  }

  ALWAYS_INLINE StackMap GetStackMapAt(size_t index) const {
    return stack_maps_.GetRow(index);
  }

  BitMemoryRegion GetStackMask(size_t index) const {
    return stack_masks_.GetBitMemoryRegion(index);
  }

  BitMemoryRegion GetStackMaskOf(const StackMap& stack_map) const {
    uint32_t index = stack_map.GetStackMaskIndex();
    return (index == StackMap::kNoValue) ? BitMemoryRegion() : GetStackMask(index);
  }

  uint32_t GetRegisterMaskOf(const StackMap& stack_map) const {
    uint32_t index = stack_map.GetRegisterMaskIndex();
    return (index == StackMap::kNoValue) ? 0 : register_masks_.GetRow(index).GetMask();
  }

  uint32_t GetNumberOfLocationCatalogEntries() const {
    return dex_register_catalog_.NumRows();
  }

  ALWAYS_INLINE DexRegisterLocation GetDexRegisterCatalogEntry(size_t index) const {
    return (index == StackMap::kNoValue)
      ? DexRegisterLocation::None()
      : dex_register_catalog_.GetRow(index).GetLocation();
  }

  bool HasInlineInfo() const {
    return inline_infos_.NumRows() > 0;
  }

  uint32_t GetNumberOfStackMaps() const {
    return stack_maps_.NumRows();
  }

  MethodInfo GetMethodInfoOf(InlineInfo inline_info) const {
    return method_infos_.GetRow(inline_info.GetMethodInfoIndex());
  }

  uint32_t GetMethodIndexOf(InlineInfo inline_info) const {
    return GetMethodInfoOf(inline_info).GetMethodIndex();
  }

  // Returns the dex registers for `stack_map`, ignoring any inlined dex registers.
  ALWAYS_INLINE DexRegisterMap GetDexRegisterMapOf(StackMap stack_map) const {
    return GetDexRegisterMapOf(stack_map, /* first= */ 0, number_of_dex_registers_);
  }

  // Returns the dex register map of `inline_info`, and just those registers.
  ALWAYS_INLINE DexRegisterMap GetInlineDexRegisterMapOf(StackMap stack_map,
                                                         InlineInfo inline_info) const {
    if (stack_map.HasDexRegisterMap()) {
      DCHECK(stack_map.HasInlineInfoIndex());
      uint32_t depth = inline_info.Row() - stack_map.GetInlineInfoIndex();
      // The register counts are cumulative and include all outer levels.
      // This allows us to determine the range [first, last) in just two lookups.
      // If we are at depth 0 (the first inlinee), the count from the main method is used.
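      // Illustrative example: if the outer method has 3 dex registers and the first inlinee
      // adds 2 (cumulative NumberOfDexRegisters == 5), the range for depth 0 is [3, 5).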
      uint32_t first = (depth == 0)
          ? number_of_dex_registers_
          : inline_infos_.GetRow(inline_info.Row() - 1).GetNumberOfDexRegisters();
      uint32_t last = inline_info.GetNumberOfDexRegisters();
      return GetDexRegisterMapOf(stack_map, first, last);
    }
    return DexRegisterMap(0, DexRegisterLocation::None());
  }

  // Returns the dex register map of `stack_map` in the range [first, last).
  ALWAYS_INLINE DexRegisterMap GetDexRegisterMapOf(StackMap stack_map,
                                                   uint32_t first,
                                                   uint32_t last) const {
    if (stack_map.HasDexRegisterMap()) {
      DCHECK_LE(first, last);
      DexRegisterMap map(last - first, DexRegisterLocation::Invalid());
      DecodeDexRegisterMap(stack_map.Row(), first, &map);
      return map;
    }
    return DexRegisterMap(0, DexRegisterLocation::None());
  }

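  // Returns the rows of the inline info table that belong to `stack_map`: consecutive rows
  // starting at its InlineInfoIndex, up to and including the first row marked IsLast (kLast).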
  BitTableRange<InlineInfo> GetInlineInfosOf(StackMap stack_map) const {
    uint32_t index = stack_map.GetInlineInfoIndex();
    if (index != StackMap::kNoValue) {
      auto begin = inline_infos_.begin() + index;
      auto end = begin;
      while ((*end++).GetIsLast() == InlineInfo::kMore) { }
      return BitTableRange<InlineInfo>(begin, end);
    } else {
      return BitTableRange<InlineInfo>();
    }
  }

  StackMap GetStackMapForDexPc(uint32_t dex_pc) const {
    for (StackMap stack_map : stack_maps_) {
      if (stack_map.GetDexPc() == dex_pc && stack_map.GetKind() != StackMap::Kind::Debug) {
        return stack_map;
      }
    }
    return stack_maps_.GetInvalidRow();
  }

  StackMap GetCatchStackMapForDexPc(ArrayRef<const uint32_t> dex_pcs) const {
    // Searches the stack map list backwards because catch stack maps are stored at the end.
    for (size_t i = GetNumberOfStackMaps(); i > 0; --i) {
      StackMap stack_map = GetStackMapAt(i - 1);
      if (UNLIKELY(stack_map.GetKind() != StackMap::Kind::Catch)) {
        // Early break since we should have catch stack maps only at the end.
        if (kIsDebugBuild) {
          for (size_t j = i - 1; j > 0; --j) {
            DCHECK(GetStackMapAt(j - 1).GetKind() != StackMap::Kind::Catch);
          }
        }
        break;
      }

      // Both the handler dex_pc and all of the inline dex_pcs have to match, i.e. we want
      // dex_pcs to be [stack_map_dex_pc, inline_dex_pc_1, ..., inline_dex_pc_n].
      if (stack_map.GetDexPc() != dex_pcs.front()) {
        continue;
      }

      const BitTableRange<InlineInfo>& inline_infos = GetInlineInfosOf(stack_map);
      if (inline_infos.size() == dex_pcs.size() - 1) {
        bool matching_dex_pcs = true;
        for (size_t inline_info_index = 0; inline_info_index < inline_infos.size();
             ++inline_info_index) {
          if (inline_infos[inline_info_index].GetDexPc() != dex_pcs[inline_info_index + 1]) {
            matching_dex_pcs = false;
            break;
          }
        }
        if (matching_dex_pcs) {
          return stack_map;
        }
      }
    }
    return stack_maps_.GetInvalidRow();
  }

  StackMap GetOsrStackMapForDexPc(uint32_t dex_pc) const {
    for (StackMap stack_map : stack_maps_) {
      if (stack_map.GetDexPc() == dex_pc && stack_map.GetKind() == StackMap::Kind::OSR) {
        return stack_map;
      }
    }
    return stack_maps_.GetInvalidRow();
  }

  StackMap GetStackMapForNativePcOffset(uintptr_t pc, InstructionSet isa = kRuntimeISA) const;

  // Dump this CodeInfo object on `vios`.
  // `code_offset` is the (absolute) native PC of the compiled method.
  void Dump(VariableIndentationOutputStream* vios,
            uint32_t code_offset,
            bool verbose,
            InstructionSet instruction_set) const;

  // Accumulate code info size statistics into the given Stats tree.
  static void CollectSizeStats(const uint8_t* code_info, /*out*/ Stats& parent);

  ALWAYS_INLINE static bool HasInlineInfo(const uint8_t* code_info_data) {
    return (*code_info_data & kHasInlineInfo) != 0;
  }

  ALWAYS_INLINE static bool IsBaseline(const uint8_t* code_info_data) {
    return (*code_info_data & kIsBaseline) != 0;
  }

  ALWAYS_INLINE static bool IsDebuggable(const uint8_t* code_info_data) {
    return (*code_info_data & kIsDebuggable) != 0;
  }

  uint32_t GetNumberOfDexRegisters() {
    return number_of_dex_registers_;
  }

 private:
  // Scan backward to determine dex register locations at given stack map.
  void DecodeDexRegisterMap(uint32_t stack_map_index,
                            uint32_t first_dex_register,
                            /*out*/ DexRegisterMap* map) const;

  template<typename DecodeCallback>  // (size_t index, BitTable<...>*, BitMemoryRegion).
  ALWAYS_INLINE CodeInfo(const uint8_t* data, size_t* num_read_bits, DecodeCallback callback);

  // Invokes the callback with index and member pointer of each header field.
  template<typename Callback>
  ALWAYS_INLINE static void ForEachHeaderField(Callback callback) {
    size_t index = 0;
    callback(index++, &CodeInfo::flags_);
    callback(index++, &CodeInfo::code_size_);
    callback(index++, &CodeInfo::packed_frame_size_);
    callback(index++, &CodeInfo::core_spill_mask_);
    callback(index++, &CodeInfo::fp_spill_mask_);
    callback(index++, &CodeInfo::number_of_dex_registers_);
    callback(index++, &CodeInfo::bit_table_flags_);
    DCHECK_EQ(index, kNumHeaders);
  }

  // Invokes the callback with index and member pointer of each BitTable field.
  template<typename Callback>
  ALWAYS_INLINE static void ForEachBitTableField(Callback callback) {
    size_t index = 0;
    callback(index++, &CodeInfo::stack_maps_);
    callback(index++, &CodeInfo::register_masks_);
    callback(index++, &CodeInfo::stack_masks_);
    callback(index++, &CodeInfo::inline_infos_);
    callback(index++, &CodeInfo::method_infos_);
    callback(index++, &CodeInfo::dex_register_masks_);
    callback(index++, &CodeInfo::dex_register_maps_);
    callback(index++, &CodeInfo::dex_register_catalog_);
    DCHECK_EQ(index, kNumBitTables);
  }

  bool HasBitTable(size_t i) { return ((bit_table_flags_ >> i) & 1) != 0; }
  bool IsBitTableDeduped(size_t i) { return ((bit_table_flags_ >> (kNumBitTables + i)) & 1) != 0; }
  void SetBitTableDeduped(size_t i) { bit_table_flags_ |= 1 << (kNumBitTables + i); }
  bool HasDedupedBitTables() { return (bit_table_flags_ >> kNumBitTables) != 0u; }

  enum Flags {
    kHasInlineInfo = 1 << 0,
    kIsBaseline = 1 << 1,
    kIsDebuggable = 1 << 2,
  };

  // The CodeInfo starts with a sequence of variable-length bit-encoded integers.
  // (Please see kVarintMax for more details about encoding).
  static constexpr size_t kNumHeaders = 7;
  // Note that the space for flags is limited to three bits. We use a custom encoding where we
  // encode the value inline if it is less than kVarintMax. We want to access flags without
  // decoding the entire CodeInfo header so the value of flags cannot be more than kVarintMax.
  // See IsDebuggable / IsBaseline / HasInlineInfo on how we access flags_ without decoding the
  // header.
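  // With the three flags defined above, flags_ is at most 7 (kHasInlineInfo | kIsBaseline |
  // kIsDebuggable), which stays below kVarintMax; this is why the static HasInlineInfo /
  // IsBaseline / IsDebuggable helpers can simply test bits of the first encoded byte.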
  uint32_t flags_ = 0;
  uint32_t code_size_ = 0;  // The size of native PC range in bytes.
  uint32_t packed_frame_size_ = 0;  // Frame size in kStackAlignment units.
  uint32_t core_spill_mask_ = 0;
  uint32_t fp_spill_mask_ = 0;
  uint32_t number_of_dex_registers_ = 0;
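  // Bit i marks the presence of bit-table i; bit (kNumBitTables + i) marks that table i is
  // deduplicated and stored as a relative bit offset (see HasBitTable / IsBitTableDeduped).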
  uint32_t bit_table_flags_ = 0;

  // The encoded bit-tables follow the header.  Based on the above flags field,
  // bit-tables might be omitted or replaced by relative bit-offset if deduped.
  static constexpr size_t kNumBitTables = 8;
  BitTable<StackMap> stack_maps_;
  BitTable<RegisterMask> register_masks_;
  BitTable<StackMask> stack_masks_;
  BitTable<InlineInfo> inline_infos_;
  BitTable<MethodInfo> method_infos_;
  BitTable<DexRegisterMask> dex_register_masks_;
  BitTable<DexRegisterMapInfo> dex_register_maps_;
  BitTable<DexRegisterInfo> dex_register_catalog_;

  friend class linker::CodeInfoTableDeduper;
  friend class StackMapStream;
};

#undef ELEMENT_BYTE_OFFSET_AFTER
#undef ELEMENT_BIT_OFFSET_AFTER

}  // namespace art

#endif  // ART_RUNTIME_STACK_MAP_H_