/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ART_RUNTIME_STACK_MAP_H_
#define ART_RUNTIME_STACK_MAP_H_

#include <algorithm>
#include <array>
#include <cstdint>
#include <limits>
#include <map>
#include <ostream>
#include <vector>

#include "arch/instruction_set.h"
#include "base/bit_memory_region.h"
#include "base/bit_table.h"
#include "base/bit_utils.h"
#include "base/bit_vector.h"
#include "base/leb128.h"
#include "base/memory_region.h"
#include "dex/dex_file_types.h"
#include "dex_register_location.h"
#include "quick/quick_method_frame_info.h"

namespace art {

class OatQuickMethodHeader;
class VariableIndentationOutputStream;

// Size of a frame slot, in bytes. This constant is a signed value,
// to please the compiler in arithmetic operations involving int32_t
// (signed) values.
static constexpr ssize_t kFrameSlotSize = 4;

// The delta compression of dex register maps means we need to scan the stack maps backwards.
// We compress the data in such a way that there is an upper bound on the search distance.
// Max distance 0 means each stack map must be fully defined and no scanning back is allowed.
// If this value is changed, the oat file version should be incremented (for DCHECK to pass).
static constexpr size_t kMaxDexRegisterMapSearchDistance = 32;

class ArtMethod;
class CodeInfo;
class Stats;

std::ostream& operator<<(std::ostream& stream, const DexRegisterLocation& reg);

// Information on Dex register locations for a specific PC.
// Effectively just a convenience wrapper for a vector of DexRegisterLocation.
// If the size is small enough, it keeps the data on the stack.
// TODO: Replace this with a general-purpose "small-vector" implementation.
class DexRegisterMap {
 public:
  using iterator = DexRegisterLocation*;
  using const_iterator = const DexRegisterLocation*;

  // Create a map for the given number of registers and initialize them to the given value.
  DexRegisterMap(size_t count, DexRegisterLocation value) : count_(count), regs_small_{} {
    if (count_ <= kSmallCount) {
      std::fill_n(regs_small_.begin(), count, value);
    } else {
      regs_large_.resize(count, value);
    }
  }

  DexRegisterLocation* data() {
    return count_ <= kSmallCount ? regs_small_.data() : regs_large_.data();
  }
  const DexRegisterLocation* data() const {
    return count_ <= kSmallCount ? regs_small_.data() : regs_large_.data();
  }

  iterator begin() { return data(); }
  iterator end() { return data() + count_; }
  const_iterator begin() const { return data(); }
  const_iterator end() const { return data() + count_; }
  size_t size() const { return count_; }
  bool empty() const { return count_ == 0; }

  DexRegisterLocation& operator[](size_t index) {
    DCHECK_LT(index, count_);
    return data()[index];
  }
  const DexRegisterLocation& operator[](size_t index) const {
    DCHECK_LT(index, count_);
    return data()[index];
  }

  size_t GetNumberOfLiveDexRegisters() const {
    return std::count_if(begin(), end(), [](auto& loc) { return loc.IsLive(); });
  }

  bool HasAnyLiveDexRegisters() const {
    return std::any_of(begin(), end(), [](auto& loc) { return loc.IsLive(); });
  }

  void Dump(VariableIndentationOutputStream* vios) const;

 private:
  // Store the data inline if the number of registers is small to avoid memory allocations.
  // If count_ <= kSmallCount, we use the regs_small_ array, and regs_large_ otherwise.
  static constexpr size_t kSmallCount = 16;
  size_t count_;
  std::array<DexRegisterLocation, kSmallCount> regs_small_;
  dchecked_vector<DexRegisterLocation> regs_large_;
};
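
// A minimal usage sketch (illustrative values only; the DexRegisterLocation
// constructor and Kind enum are assumed from dex_register_location.h). With
// count <= kSmallCount, the storage stays inline on the stack:
//
//   DexRegisterMap map(/* count= */ 3, DexRegisterLocation::None());
//   map[0] = DexRegisterLocation(DexRegisterLocation::Kind::kInRegister, /* value= */ 4);
//   DCHECK(map.HasAnyLiveDexRegisters());  // kNone entries are not live; map[0] is.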

/**
 * A Stack Map holds compilation information for a specific PC necessary for:
 * - Mapping it to a dex PC,
 * - Knowing which stack entries are objects,
 * - Knowing which registers hold objects,
 * - Knowing the inlining information,
 * - Knowing the values of dex registers.
 */
class StackMap : public BitTableAccessor<8> {
 public:
  enum Kind {
    Default = -1,
    Catch = 0,
    OSR = 1,
    Debug = 2,
  };
  BIT_TABLE_HEADER(StackMap)
  BIT_TABLE_COLUMN(0, Kind)
  BIT_TABLE_COLUMN(1, PackedNativePc)
  BIT_TABLE_COLUMN(2, DexPc)
  BIT_TABLE_COLUMN(3, RegisterMaskIndex)
  BIT_TABLE_COLUMN(4, StackMaskIndex)
  BIT_TABLE_COLUMN(5, InlineInfoIndex)
  BIT_TABLE_COLUMN(6, DexRegisterMaskIndex)
  BIT_TABLE_COLUMN(7, DexRegisterMapIndex)

  ALWAYS_INLINE uint32_t GetNativePcOffset(InstructionSet instruction_set) const {
    return UnpackNativePc(GetPackedNativePc(), instruction_set);
  }

  ALWAYS_INLINE bool HasInlineInfo() const {
    return HasInlineInfoIndex();
  }

  ALWAYS_INLINE bool HasDexRegisterMap() const {
    return HasDexRegisterMapIndex();
  }

  static uint32_t PackNativePc(uint32_t native_pc, InstructionSet isa) {
    DCHECK_ALIGNED_PARAM(native_pc, GetInstructionSetInstructionAlignment(isa));
    return native_pc / GetInstructionSetInstructionAlignment(isa);
  }

  static uint32_t UnpackNativePc(uint32_t packed_native_pc, InstructionSet isa) {
    uint32_t native_pc = packed_native_pc * GetInstructionSetInstructionAlignment(isa);
    DCHECK_EQ(native_pc / GetInstructionSetInstructionAlignment(isa), packed_native_pc);
    return native_pc;
  }

  void Dump(VariableIndentationOutputStream* vios,
            const CodeInfo& code_info,
            uint32_t code_offset,
            InstructionSet instruction_set) const;
};
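
// A worked example of the packing above (illustrative numbers): with a 4-byte
// instruction alignment, PackNativePc(0x40, isa) yields 0x10 and
// UnpackNativePc(0x10, isa) restores 0x40. Dividing by the alignment strips
// low bits that are always zero, so packed PCs need fewer bits in the table.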

/**
 * Inline information for a specific PC.
 * The row referenced from the StackMap holds information at depth 0.
 * Following rows hold information for further depths.
 */
class InlineInfo : public BitTableAccessor<6> {
 public:
  BIT_TABLE_HEADER(InlineInfo)
  BIT_TABLE_COLUMN(0, IsLast)                // Determines if there are further rows for further depths.
  BIT_TABLE_COLUMN(1, DexPc)
  BIT_TABLE_COLUMN(2, MethodInfoIndex)
  BIT_TABLE_COLUMN(3, ArtMethodHi)           // High bits of ArtMethod*.
  BIT_TABLE_COLUMN(4, ArtMethodLo)           // Low bits of ArtMethod*.
  BIT_TABLE_COLUMN(5, NumberOfDexRegisters)  // Includes outer levels and the main method.

  static constexpr uint32_t kLast = -1;
  static constexpr uint32_t kMore = 0;

  bool EncodesArtMethod() const {
    return HasArtMethodLo();
  }

  ArtMethod* GetArtMethod() const {
    uint64_t lo = GetArtMethodLo();
    uint64_t hi = GetArtMethodHi();
    return reinterpret_cast<ArtMethod*>((hi << 32) | lo);
  }

  void Dump(VariableIndentationOutputStream* vios,
            const CodeInfo& info,
            const StackMap& stack_map) const;
};

class StackMask : public BitTableAccessor<1> {
 public:
  BIT_TABLE_HEADER(StackMask)
  BIT_TABLE_COLUMN(0, Mask)
};

class DexRegisterMask : public BitTableAccessor<1> {
 public:
  BIT_TABLE_HEADER(DexRegisterMask)
  BIT_TABLE_COLUMN(0, Mask)
};

class DexRegisterMapInfo : public BitTableAccessor<1> {
 public:
  BIT_TABLE_HEADER(DexRegisterMapInfo)
  BIT_TABLE_COLUMN(0, CatalogueIndex)
};

class DexRegisterInfo : public BitTableAccessor<2> {
 public:
  BIT_TABLE_HEADER(DexRegisterInfo)
  BIT_TABLE_COLUMN(0, Kind)
  BIT_TABLE_COLUMN(1, PackedValue)

  ALWAYS_INLINE DexRegisterLocation GetLocation() const {
    DexRegisterLocation::Kind kind = static_cast<DexRegisterLocation::Kind>(GetKind());
    return DexRegisterLocation(kind, UnpackValue(kind, GetPackedValue()));
  }

  static uint32_t PackValue(DexRegisterLocation::Kind kind, uint32_t value) {
    uint32_t packed_value = value;
    if (kind == DexRegisterLocation::Kind::kInStack) {
      DCHECK(IsAligned<kFrameSlotSize>(packed_value));
      packed_value /= kFrameSlotSize;
    }
    return packed_value;
  }

  static uint32_t UnpackValue(DexRegisterLocation::Kind kind, uint32_t packed_value) {
    uint32_t value = packed_value;
    if (kind == DexRegisterLocation::Kind::kInStack) {
      value *= kFrameSlotSize;
    }
    return value;
  }
};

// Register masks tend to have many trailing zero bits (caller-saves are usually not encoded),
// therefore it is worth encoding the mask as value+shift.
class RegisterMask : public BitTableAccessor<2> {
 public:
  BIT_TABLE_HEADER(RegisterMask)
  BIT_TABLE_COLUMN(0, Value)
  BIT_TABLE_COLUMN(1, Shift)

  ALWAYS_INLINE uint32_t GetMask() const {
    return GetValue() << GetShift();
  }
};
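
// A worked example of the value+shift encoding above (illustrative numbers):
// a register mask of 0b1100000000 is stored as Value = 0b11 with Shift = 8,
// and GetMask() reconstructs 0b11 << 8. Encoding the trailing zeros as a
// shift keeps the Value column narrow in the bit table.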

// Method indices are not very dedup friendly.
// Separating them greatly improves dedup efficiency of the other tables.
class MethodInfo : public BitTableAccessor<1> {
 public:
  BIT_TABLE_HEADER(MethodInfo)
  BIT_TABLE_COLUMN(0, MethodIndex)
};

/**
 * Wrapper around all compiler information collected for a method.
 * See the Decode method at the end for the precise binary format.
 */
class CodeInfo {
 public:
  class Deduper {
   public:
    explicit Deduper(std::vector<uint8_t>* output) : writer_(output) {
      DCHECK_EQ(output->size(), 0u);
    }

    // Copy CodeInfo into output while de-duplicating the internal bit tables.
    // It returns the byte offset of the copied CodeInfo within the output.
    size_t Dedupe(const uint8_t* code_info);

   private:
    BitMemoryWriter<std::vector<uint8_t>> writer_;

    // Deduplicate at BitTable level. The value is the bit offset within the output.
    std::map<BitMemoryRegion, uint32_t, BitMemoryRegion::Less> dedupe_map_;
  };

  ALWAYS_INLINE CodeInfo() {}
  ALWAYS_INLINE explicit CodeInfo(const uint8_t* data, size_t* num_read_bits = nullptr);
  ALWAYS_INLINE explicit CodeInfo(const OatQuickMethodHeader* header);

  // The following methods decode only part of the data.
  static CodeInfo DecodeGcMasksOnly(const OatQuickMethodHeader* header);
  static CodeInfo DecodeInlineInfoOnly(const OatQuickMethodHeader* header);

  ALWAYS_INLINE static uint32_t DecodeCodeSize(const uint8_t* code_info_data) {
    return DecodeHeaderOnly(code_info_data).code_size_;
  }

  ALWAYS_INLINE static QuickMethodFrameInfo DecodeFrameInfo(const uint8_t* code_info_data) {
    CodeInfo code_info = DecodeHeaderOnly(code_info_data);
    return QuickMethodFrameInfo(code_info.packed_frame_size_ * kStackAlignment,
                                code_info.core_spill_mask_,
                                code_info.fp_spill_mask_);
  }
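
  // A worked example of the frame decoding above (illustrative numbers,
  // assuming a kStackAlignment of 16 bytes): a packed_frame_size_ of 6
  // decodes to a 96-byte frame. Storing the size in alignment units drops
  // low bits that are always zero, mirroring the native PC packing in
  // StackMap.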

  ALWAYS_INLINE static CodeInfo DecodeHeaderOnly(const uint8_t* code_info_data) {
    CodeInfo code_info;
    BitMemoryReader reader(code_info_data);
    std::array<uint32_t, kNumHeaders> header = reader.ReadInterleavedVarints<kNumHeaders>();
    ForEachHeaderField([&code_info, &header](size_t i, auto member_pointer) {
      code_info.*member_pointer = header[i];
    });
    return code_info;
  }

  ALWAYS_INLINE const BitTable<StackMap>& GetStackMaps() const {
    return stack_maps_;
  }

  ALWAYS_INLINE StackMap GetStackMapAt(size_t index) const {
    return stack_maps_.GetRow(index);
  }

  BitMemoryRegion GetStackMask(size_t index) const {
    return stack_masks_.GetBitMemoryRegion(index);
  }

  BitMemoryRegion GetStackMaskOf(const StackMap& stack_map) const {
    uint32_t index = stack_map.GetStackMaskIndex();
    return (index == StackMap::kNoValue) ? BitMemoryRegion() : GetStackMask(index);
  }

  uint32_t GetRegisterMaskOf(const StackMap& stack_map) const {
    uint32_t index = stack_map.GetRegisterMaskIndex();
    return (index == StackMap::kNoValue) ? 0 : register_masks_.GetRow(index).GetMask();
  }

  uint32_t GetNumberOfLocationCatalogEntries() const {
    return dex_register_catalog_.NumRows();
  }

  ALWAYS_INLINE DexRegisterLocation GetDexRegisterCatalogEntry(size_t index) const {
    return (index == StackMap::kNoValue)
        ? DexRegisterLocation::None()
        : dex_register_catalog_.GetRow(index).GetLocation();
  }

  bool HasInlineInfo() const {
    return inline_infos_.NumRows() > 0;
  }

  uint32_t GetNumberOfStackMaps() const {
    return stack_maps_.NumRows();
  }

  uint32_t GetMethodIndexOf(InlineInfo inline_info) const {
    return method_infos_.GetRow(inline_info.GetMethodInfoIndex()).GetMethodIndex();
  }

  ALWAYS_INLINE DexRegisterMap GetDexRegisterMapOf(StackMap stack_map) const {
    if (stack_map.HasDexRegisterMap()) {
      DexRegisterMap map(number_of_dex_registers_, DexRegisterLocation::Invalid());
      DecodeDexRegisterMap(stack_map.Row(), /* first_dex_register= */ 0, &map);
      return map;
    }
    return DexRegisterMap(0, DexRegisterLocation::None());
  }

  ALWAYS_INLINE DexRegisterMap GetInlineDexRegisterMapOf(StackMap stack_map,
                                                         InlineInfo inline_info) const {
    if (stack_map.HasDexRegisterMap()) {
      DCHECK(stack_map.HasInlineInfoIndex());
      uint32_t depth = inline_info.Row() - stack_map.GetInlineInfoIndex();
      // The register counts are cumulative and include all outer levels.
      // This allows us to determine the range [first, last) in just two lookups.
      // If we are at depth 0 (the first inlinee), the count from the main method is used.
      uint32_t first = (depth == 0)
          ? number_of_dex_registers_
          : inline_infos_.GetRow(inline_info.Row() - 1).GetNumberOfDexRegisters();
      uint32_t last = inline_info.GetNumberOfDexRegisters();
      DexRegisterMap map(last - first, DexRegisterLocation::Invalid());
      DecodeDexRegisterMap(stack_map.Row(), first, &map);
      return map;
    }
    return DexRegisterMap(0, DexRegisterLocation::None());
  }
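
  // A worked example of the range computation above (illustrative numbers):
  // if the outer method has number_of_dex_registers_ == 4 and the first
  // inlinee adds two registers, the depth-0 InlineInfo row stores
  // NumberOfDexRegisters == 6, so that inlinee's registers decode into the
  // range [4, 6).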

  BitTableRange<InlineInfo> GetInlineInfosOf(StackMap stack_map) const {
    uint32_t index = stack_map.GetInlineInfoIndex();
    if (index != StackMap::kNoValue) {
      auto begin = inline_infos_.begin() + index;
      auto end = begin;
      while ((*end++).GetIsLast() == InlineInfo::kMore) { }
      return BitTableRange<InlineInfo>(begin, end);
    } else {
      return BitTableRange<InlineInfo>();
    }
  }

  StackMap GetStackMapForDexPc(uint32_t dex_pc) const {
    for (StackMap stack_map : stack_maps_) {
      if (stack_map.GetDexPc() == dex_pc && stack_map.GetKind() != StackMap::Kind::Debug) {
        return stack_map;
      }
    }
    return stack_maps_.GetInvalidRow();
  }

  // Searches the stack map list backwards because catch stack maps are stored at the end.
  StackMap GetCatchStackMapForDexPc(uint32_t dex_pc) const {
    for (size_t i = GetNumberOfStackMaps(); i > 0; --i) {
      StackMap stack_map = GetStackMapAt(i - 1);
      if (stack_map.GetDexPc() == dex_pc && stack_map.GetKind() == StackMap::Kind::Catch) {
        return stack_map;
      }
    }
    return stack_maps_.GetInvalidRow();
  }

  StackMap GetOsrStackMapForDexPc(uint32_t dex_pc) const {
    for (StackMap stack_map : stack_maps_) {
      if (stack_map.GetDexPc() == dex_pc && stack_map.GetKind() == StackMap::Kind::OSR) {
        return stack_map;
      }
    }
    return stack_maps_.GetInvalidRow();
  }

  StackMap GetStackMapForNativePcOffset(uintptr_t pc, InstructionSet isa = kRuntimeISA) const;

  // Dump this CodeInfo object on `vios`.
  // `code_offset` is the (absolute) native PC of the compiled method.
  void Dump(VariableIndentationOutputStream* vios,
            uint32_t code_offset,
            bool verbose,
            InstructionSet instruction_set) const;

  // Accumulate code info size statistics into the given Stats tree.
  static void CollectSizeStats(const uint8_t* code_info, /*out*/ Stats& parent);

  ALWAYS_INLINE static bool HasInlineInfo(const uint8_t* code_info_data) {
    return (*code_info_data & kHasInlineInfo) != 0;
  }

  ALWAYS_INLINE static bool IsBaseline(const uint8_t* code_info_data) {
    return (*code_info_data & kIsBaseline) != 0;
  }
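
  // Note on the two predicates above: flags_ is the first header field and is
  // limited to a few bits, so (assuming the interleaved-varint encoding read
  // by DecodeHeaderOnly stores such small values inline in the leading bits)
  // kHasInlineInfo and kIsBaseline can be tested directly against the first
  // byte without decoding the whole header.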

 private:
  // Scan backward to determine dex register locations at the given stack map.
  void DecodeDexRegisterMap(uint32_t stack_map_index,
                            uint32_t first_dex_register,
                            /*out*/ DexRegisterMap* map) const;

  template<typename DecodeCallback>  // (size_t index, BitTable<...>*, BitMemoryRegion).
  ALWAYS_INLINE CodeInfo(const uint8_t* data, size_t* num_read_bits, DecodeCallback callback);

  // Invokes the callback with the index and member pointer of each header field.
  template<typename Callback>
  ALWAYS_INLINE static void ForEachHeaderField(Callback callback) {
    size_t index = 0;
    callback(index++, &CodeInfo::flags_);
    callback(index++, &CodeInfo::code_size_);
    callback(index++, &CodeInfo::packed_frame_size_);
    callback(index++, &CodeInfo::core_spill_mask_);
    callback(index++, &CodeInfo::fp_spill_mask_);
    callback(index++, &CodeInfo::number_of_dex_registers_);
    callback(index++, &CodeInfo::bit_table_flags_);
    DCHECK_EQ(index, kNumHeaders);
  }

  // Invokes the callback with the index and member pointer of each BitTable field.
  template<typename Callback>
  ALWAYS_INLINE static void ForEachBitTableField(Callback callback) {
    size_t index = 0;
    callback(index++, &CodeInfo::stack_maps_);
    callback(index++, &CodeInfo::register_masks_);
    callback(index++, &CodeInfo::stack_masks_);
    callback(index++, &CodeInfo::inline_infos_);
    callback(index++, &CodeInfo::method_infos_);
    callback(index++, &CodeInfo::dex_register_masks_);
    callback(index++, &CodeInfo::dex_register_maps_);
    callback(index++, &CodeInfo::dex_register_catalog_);
    DCHECK_EQ(index, kNumBitTables);
  }

  bool HasBitTable(size_t i) { return ((bit_table_flags_ >> i) & 1) != 0; }
  bool IsBitTableDeduped(size_t i) { return ((bit_table_flags_ >> (kNumBitTables + i)) & 1) != 0; }
  void SetBitTableDeduped(size_t i) { bit_table_flags_ |= 1 << (kNumBitTables + i); }

  enum Flags {
    kHasInlineInfo = 1 << 0,
    kIsBaseline = 1 << 1,
  };

  // The CodeInfo starts with a sequence of variable-length bit-encoded integers.
  static constexpr size_t kNumHeaders = 7;
  uint32_t flags_ = 0;              // Note that the space is limited to three bits.
  uint32_t code_size_ = 0;          // The size of the native PC range in bytes.
  uint32_t packed_frame_size_ = 0;  // Frame size in kStackAlignment units.
  uint32_t core_spill_mask_ = 0;
  uint32_t fp_spill_mask_ = 0;
  uint32_t number_of_dex_registers_ = 0;
  uint32_t bit_table_flags_ = 0;

  // The encoded bit-tables follow the header. Based on the above flags field,
  // bit-tables might be omitted or replaced by a relative bit offset if deduped.
  static constexpr size_t kNumBitTables = 8;
  BitTable<StackMap> stack_maps_;
  BitTable<RegisterMask> register_masks_;
  BitTable<StackMask> stack_masks_;
  BitTable<InlineInfo> inline_infos_;
  BitTable<MethodInfo> method_infos_;
  BitTable<DexRegisterMask> dex_register_masks_;
  BitTable<DexRegisterMapInfo> dex_register_maps_;
  BitTable<DexRegisterInfo> dex_register_catalog_;

  friend class StackMapStream;
};

#undef ELEMENT_BYTE_OFFSET_AFTER
#undef ELEMENT_BIT_OFFSET_AFTER

}  // namespace art

#endif  // ART_RUNTIME_STACK_MAP_H_