1 /* 2 * Copyright (C) 2014 The Android Open Source Project 3 * 4 * Licensed under the Apache License, Version 2.0 (the "License"); 5 * you may not use this file except in compliance with the License. 6 * You may obtain a copy of the License at 7 * 8 * http://www.apache.org/licenses/LICENSE-2.0 9 * 10 * Unless required by applicable law or agreed to in writing, software 11 * distributed under the License is distributed on an "AS IS" BASIS, 12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 * See the License for the specific language governing permissions and 14 * limitations under the License. 15 */ 16 17 #ifndef ART_RUNTIME_STACK_MAP_H_ 18 #define ART_RUNTIME_STACK_MAP_H_ 19 20 #include <limits> 21 22 #include "arch/instruction_set.h" 23 #include "base/bit_memory_region.h" 24 #include "base/bit_table.h" 25 #include "base/bit_utils.h" 26 #include "base/memory_region.h" 27 #include "dex/dex_file_types.h" 28 #include "dex_register_location.h" 29 #include "quick/quick_method_frame_info.h" 30 31 namespace art { 32 33 namespace linker { 34 class CodeInfoTableDeduper; 35 } // namespace linker 36 37 class OatQuickMethodHeader; 38 class VariableIndentationOutputStream; 39 40 // Size of a frame slot, in bytes. This constant is a signed value, 41 // to please the compiler in arithmetic operations involving int32_t 42 // (signed) values. 43 static constexpr ssize_t kFrameSlotSize = 4; 44 45 // The delta compression of dex register maps means we need to scan the stackmaps backwards. 46 // We compress the data in such a way so that there is an upper bound on the search distance. 47 // Max distance 0 means each stack map must be fully defined and no scanning back is allowed. 48 // If this value is changed, the oat file version should be incremented (for DCHECK to pass). 
static constexpr size_t kMaxDexRegisterMapSearchDistance = 32;

class ArtMethod;
class CodeInfo;
class Stats;

std::ostream& operator<<(std::ostream& stream, const DexRegisterLocation& reg);

// Information on Dex register locations for a specific PC.
// Effectively just a convenience wrapper for DexRegisterLocation vector.
// If the size is small enough, it keeps the data on the stack.
// TODO: Replace this with generic purpose "small-vector" implementation.
class DexRegisterMap {
 public:
  using iterator = DexRegisterLocation*;
  using const_iterator = const DexRegisterLocation*;

  // Create map for given number of registers and initialize them to the given value.
  DexRegisterMap(size_t count, DexRegisterLocation value) : count_(count), regs_small_{} {
    if (count_ <= kSmallCount) {
      std::fill_n(regs_small_.begin(), count, value);
    } else {
      regs_large_.resize(count, value);
    }
  }

  // Pointer to the backing storage: the inline array for small maps,
  // the heap-allocated vector otherwise.
  DexRegisterLocation* data() {
    return count_ <= kSmallCount ? regs_small_.data() : regs_large_.data();
  }
  const DexRegisterLocation* data() const {
    return count_ <= kSmallCount ? regs_small_.data() : regs_large_.data();
  }

  iterator begin() { return data(); }
  iterator end() { return data() + count_; }
  const_iterator begin() const { return data(); }
  const_iterator end() const { return data() + count_; }
  size_t size() const { return count_; }
  bool empty() const { return count_ == 0; }

  // Bounds-checked (debug-only) element access.
  DexRegisterLocation& operator[](size_t index) {
    DCHECK_LT(index, count_);
    return data()[index];
  }
  const DexRegisterLocation& operator[](size_t index) const {
    DCHECK_LT(index, count_);
    return data()[index];
  }

  // Number of entries whose location is live (see DexRegisterLocation::IsLive).
  size_t GetNumberOfLiveDexRegisters() const {
    return std::count_if(begin(), end(), [](auto& loc) { return loc.IsLive(); });
  }

  bool HasAnyLiveDexRegisters() const {
    return std::any_of(begin(), end(), [](auto& loc) { return loc.IsLive(); });
  }

  void Dump(VariableIndentationOutputStream* vios) const;

 private:
  // Store the data inline if the number of registers is small to avoid memory allocations.
  // If count_ <= kSmallCount, we use the regs_small_ array, and regs_large_ otherwise.
  static constexpr size_t kSmallCount = 16;
  size_t count_;
  std::array<DexRegisterLocation, kSmallCount> regs_small_;
  dchecked_vector<DexRegisterLocation> regs_large_;
};

/**
 * A Stack Map holds compilation information for a specific PC necessary for:
 * - Mapping it to a dex PC,
 * - Knowing which stack entries are objects,
 * - Knowing which registers hold objects,
 * - Knowing the inlining information,
 * - Knowing the values of dex registers.
 */
class StackMap : public BitTableAccessor<8> {
 public:
  // Kind of the stack map; Default (-1) is encoded as "no value" in the Kind column.
  enum Kind {
    Default = -1,
    Catch = 0,
    OSR = 1,
    Debug = 2,
  };
  BIT_TABLE_HEADER(StackMap)
  BIT_TABLE_COLUMN(0, Kind)
  BIT_TABLE_COLUMN(1, PackedNativePc)  // Native PC scaled down by instruction alignment.
  BIT_TABLE_COLUMN(2, DexPc)
  BIT_TABLE_COLUMN(3, RegisterMaskIndex)
  BIT_TABLE_COLUMN(4, StackMaskIndex)
  BIT_TABLE_COLUMN(5, InlineInfoIndex)
  BIT_TABLE_COLUMN(6, DexRegisterMaskIndex)
  BIT_TABLE_COLUMN(7, DexRegisterMapIndex)

  // Unscale the stored PackedNativePc back to a byte offset for the given ISA.
  ALWAYS_INLINE uint32_t GetNativePcOffset(InstructionSet instruction_set) const {
    return UnpackNativePc(GetPackedNativePc(), instruction_set);
  }

  ALWAYS_INLINE bool HasInlineInfo() const {
    return HasInlineInfoIndex();
  }

  ALWAYS_INLINE bool HasDexRegisterMap() const {
    return HasDexRegisterMapIndex();
  }

  // Divide the native PC by the ISA's instruction alignment so that the
  // bit-table column needs fewer bits. The PC must be aligned (DCHECKed).
  static uint32_t PackNativePc(uint32_t native_pc, InstructionSet isa) {
    DCHECK_ALIGNED_PARAM(native_pc, GetInstructionSetInstructionAlignment(isa));
    return native_pc / GetInstructionSetInstructionAlignment(isa);
  }

  // Inverse of PackNativePc; the DCHECK guards against multiplication overflow.
  static uint32_t UnpackNativePc(uint32_t packed_native_pc, InstructionSet isa) {
    uint32_t native_pc = packed_native_pc * GetInstructionSetInstructionAlignment(isa);
    DCHECK_EQ(native_pc / GetInstructionSetInstructionAlignment(isa), packed_native_pc);
    return native_pc;
  }

  void Dump(VariableIndentationOutputStream* vios,
            const CodeInfo& code_info,
            uint32_t code_offset,
            InstructionSet instruction_set) const;
};

/**
 * Inline information for a specific PC.
 * The row referenced from the StackMap holds information at depth 0.
 * Following rows hold information for further depths.
 */
class InlineInfo : public BitTableAccessor<6> {
 public:
  BIT_TABLE_HEADER(InlineInfo)
  BIT_TABLE_COLUMN(0, IsLast)  // Determines if there are further rows for further depths.
  BIT_TABLE_COLUMN(1, DexPc)
  BIT_TABLE_COLUMN(2, MethodInfoIndex)
  BIT_TABLE_COLUMN(3, ArtMethodHi)  // High bits of ArtMethod*.
  BIT_TABLE_COLUMN(4, ArtMethodLo)  // Low bits of ArtMethod*.
  BIT_TABLE_COLUMN(5, NumberOfDexRegisters)  // Includes outer levels and the main method.

  // Values of the IsLast column: kLast marks the final row of an inline chain,
  // kMore means another row (deeper inlining level) follows.
  static constexpr uint32_t kLast = -1;
  static constexpr uint32_t kMore = 0;

  // Whether the inlined method is encoded as a direct ArtMethod* rather than
  // through a MethodInfo row.
  bool EncodesArtMethod() const {
    return HasArtMethodLo();
  }

  // Reassemble the ArtMethod* from its two 32-bit column halves.
  ArtMethod* GetArtMethod() const {
    uint64_t lo = GetArtMethodLo();
    uint64_t hi = GetArtMethodHi();
    return reinterpret_cast<ArtMethod*>((hi << 32) | lo);
  }

  void Dump(VariableIndentationOutputStream* vios,
            const CodeInfo& info,
            const StackMap& stack_map) const;
};

// Deduplicated bit masks referenced from StackMap::StackMaskIndex
// (accessed via CodeInfo::GetStackMask / GetStackMaskOf).
class StackMask : public BitTableAccessor<1> {
 public:
  BIT_TABLE_HEADER(StackMask)
  BIT_TABLE_COLUMN(0, Mask)
};

// Deduplicated bit masks referenced from StackMap::DexRegisterMaskIndex.
class DexRegisterMask : public BitTableAccessor<1> {
 public:
  BIT_TABLE_HEADER(DexRegisterMask)
  BIT_TABLE_COLUMN(0, Mask)
};

// Indirection table mapping dex register map entries to rows of the
// DexRegisterInfo catalogue below.
class DexRegisterMapInfo : public BitTableAccessor<1> {
 public:
  BIT_TABLE_HEADER(DexRegisterMapInfo)
  BIT_TABLE_COLUMN(0, CatalogueIndex)
};

// Catalogue of unique dex register locations (kind + packed value).
class DexRegisterInfo : public BitTableAccessor<2> {
 public:
  BIT_TABLE_HEADER(DexRegisterInfo)
  BIT_TABLE_COLUMN(0, Kind)
  BIT_TABLE_COLUMN(1, PackedValue)

  ALWAYS_INLINE DexRegisterLocation GetLocation() const {
    DexRegisterLocation::Kind kind = static_cast<DexRegisterLocation::Kind>(GetKind());
    return DexRegisterLocation(kind, UnpackValue(kind, GetPackedValue()));
  }

  // In-stack locations are stored in frame-slot units (value / kFrameSlotSize)
  // so that the PackedValue column needs fewer bits; other kinds are stored as-is.
  static uint32_t PackValue(DexRegisterLocation::Kind kind, uint32_t value) {
    uint32_t packed_value = value;
    if (kind == DexRegisterLocation::Kind::kInStack) {
      DCHECK(IsAligned<kFrameSlotSize>(packed_value));
      packed_value /= kFrameSlotSize;
    }
    return packed_value;
  }

  // Inverse of PackValue.
  static uint32_t UnpackValue(DexRegisterLocation::Kind kind, uint32_t packed_value) {
    uint32_t value = packed_value;
    if (kind == DexRegisterLocation::Kind::kInStack) {
      value *= kFrameSlotSize;
    }
    return value;
  }
};

// Register masks tend to have many trailing zero bits (caller-saves are usually not encoded),
// therefore it is worth encoding the mask as value+shift.
class RegisterMask : public BitTableAccessor<2> {
 public:
  BIT_TABLE_HEADER(RegisterMask)
  BIT_TABLE_COLUMN(0, Value)
  BIT_TABLE_COLUMN(1, Shift)

  ALWAYS_INLINE uint32_t GetMask() const {
    return GetValue() << GetShift();
  }
};

// Method indices are not very dedup friendly.
// Separating them greatly improves dedup efficiency of the other tables.
class MethodInfo : public BitTableAccessor<3> {
 public:
  BIT_TABLE_HEADER(MethodInfo)
  BIT_TABLE_COLUMN(0, MethodIndex)
  BIT_TABLE_COLUMN(1, DexFileIndexKind)
  BIT_TABLE_COLUMN(2, DexFileIndex)

  // Values of the DexFileIndexKind column: boot-class-path vs. non-BCP dex file.
  static constexpr uint32_t kKindNonBCP = -1;
  static constexpr uint32_t kKindBCP = 0;

  // DexFileIndex value meaning the method lives in the same dex file.
  static constexpr uint32_t kSameDexFile = -1;
};

/**
 * Wrapper around all compiler information collected for a method.
 * See the Decode method at the end for the precise binary format.
 */
class CodeInfo {
 public:
  ALWAYS_INLINE CodeInfo() {}
  ALWAYS_INLINE explicit CodeInfo(const uint8_t* data, size_t* num_read_bits = nullptr);
  ALWAYS_INLINE explicit CodeInfo(const OatQuickMethodHeader* header);

  // The following methods decode only part of the data.
  static CodeInfo DecodeGcMasksOnly(const OatQuickMethodHeader* header);
  static CodeInfo DecodeInlineInfoOnly(const OatQuickMethodHeader* header);

  // Decode just the header and return the native code size in bytes.
  ALWAYS_INLINE static uint32_t DecodeCodeSize(const uint8_t* code_info_data) {
    return DecodeHeaderOnly(code_info_data).code_size_;
  }

  // Decode just the header and reconstruct the frame layout
  // (frame size in bytes plus core/FP spill masks).
  ALWAYS_INLINE static QuickMethodFrameInfo DecodeFrameInfo(const uint8_t* code_info_data) {
    CodeInfo code_info = DecodeHeaderOnly(code_info_data);
    return QuickMethodFrameInfo(code_info.packed_frame_size_ * kStackAlignment,
                                code_info.core_spill_mask_,
                                code_info.fp_spill_mask_);
  }

  // Read the leading interleaved varints into the header fields,
  // leaving all bit-tables undecoded.
  ALWAYS_INLINE static CodeInfo DecodeHeaderOnly(const uint8_t* code_info_data) {
    CodeInfo code_info;
    BitMemoryReader reader(code_info_data);
    std::array<uint32_t, kNumHeaders> header = reader.ReadInterleavedVarints<kNumHeaders>();
    ForEachHeaderField([&code_info, &header](size_t i, auto member_pointer) {
      code_info.*member_pointer = header[i];
    });
    return code_info;
  }

  ALWAYS_INLINE const BitTable<StackMap>& GetStackMaps() const {
    return stack_maps_;
  }

  ALWAYS_INLINE StackMap GetStackMapAt(size_t index) const {
    return stack_maps_.GetRow(index);
  }

  BitMemoryRegion GetStackMask(size_t index) const {
    return stack_masks_.GetBitMemoryRegion(index);
  }

  // Returns an empty region if the stack map has no stack mask.
  BitMemoryRegion GetStackMaskOf(const StackMap& stack_map) const {
    uint32_t index = stack_map.GetStackMaskIndex();
    return (index == StackMap::kNoValue) ? BitMemoryRegion() : GetStackMask(index);
  }

  // Returns 0 if the stack map has no register mask.
  uint32_t GetRegisterMaskOf(const StackMap& stack_map) const {
    uint32_t index = stack_map.GetRegisterMaskIndex();
    return (index == StackMap::kNoValue) ? 0 : register_masks_.GetRow(index).GetMask();
  }

  uint32_t GetNumberOfLocationCatalogEntries() const {
    return dex_register_catalog_.NumRows();
  }

  ALWAYS_INLINE DexRegisterLocation GetDexRegisterCatalogEntry(size_t index) const {
    return (index == StackMap::kNoValue)
        ? DexRegisterLocation::None()
        : dex_register_catalog_.GetRow(index).GetLocation();
  }

  bool HasInlineInfo() const {
    return inline_infos_.NumRows() > 0;
  }

  uint32_t GetNumberOfStackMaps() const {
    return stack_maps_.NumRows();
  }

  MethodInfo GetMethodInfoOf(InlineInfo inline_info) const {
    return method_infos_.GetRow(inline_info.GetMethodInfoIndex());
  }

  uint32_t GetMethodIndexOf(InlineInfo inline_info) const {
    return GetMethodInfoOf(inline_info).GetMethodIndex();
  }

  // Decode the dex register locations of the outermost (not inlined) method
  // at the given stack map. Returns an empty map if none are encoded.
  ALWAYS_INLINE DexRegisterMap GetDexRegisterMapOf(StackMap stack_map) const {
    if (stack_map.HasDexRegisterMap()) {
      DexRegisterMap map(number_of_dex_registers_, DexRegisterLocation::Invalid());
      DecodeDexRegisterMap(stack_map.Row(), /* first_dex_register= */ 0, &map);
      return map;
    }
    return DexRegisterMap(0, DexRegisterLocation::None());
  }

  // Decode the dex register locations belonging to one particular inlinee
  // (identified by its InlineInfo row) at the given stack map.
  ALWAYS_INLINE DexRegisterMap GetInlineDexRegisterMapOf(StackMap stack_map,
                                                         InlineInfo inline_info) const {
    if (stack_map.HasDexRegisterMap()) {
      DCHECK(stack_map.HasInlineInfoIndex());
      uint32_t depth = inline_info.Row() - stack_map.GetInlineInfoIndex();
      // The register counts are commutative and include all outer levels.
      // This allows us to determine the range [first, last) in just two lookups.
      // If we are at depth 0 (the first inlinee), the count from the main method is used.
      uint32_t first = (depth == 0)
          ? number_of_dex_registers_
          : inline_infos_.GetRow(inline_info.Row() - 1).GetNumberOfDexRegisters();
      uint32_t last = inline_info.GetNumberOfDexRegisters();
      DexRegisterMap map(last - first, DexRegisterLocation::Invalid());
      DecodeDexRegisterMap(stack_map.Row(), first, &map);
      return map;
    }
    return DexRegisterMap(0, DexRegisterLocation::None());
  }

  // Returns the chain of InlineInfo rows for the stack map: starting at its
  // InlineInfoIndex and extending until (and including) the row marked IsLast.
  // Empty range if the stack map has no inline info.
  BitTableRange<InlineInfo> GetInlineInfosOf(StackMap stack_map) const {
    uint32_t index = stack_map.GetInlineInfoIndex();
    if (index != StackMap::kNoValue) {
      auto begin = inline_infos_.begin() + index;
      auto end = begin;
      while ((*end++).GetIsLast() == InlineInfo::kMore) { }
      return BitTableRange<InlineInfo>(begin, end);
    } else {
      return BitTableRange<InlineInfo>();
    }
  }

  // Linear search for the first non-debug stack map with the given dex pc;
  // returns an invalid row if there is none.
  StackMap GetStackMapForDexPc(uint32_t dex_pc) const {
    for (StackMap stack_map : stack_maps_) {
      if (stack_map.GetDexPc() == dex_pc && stack_map.GetKind() != StackMap::Kind::Debug) {
        return stack_map;
      }
    }
    return stack_maps_.GetInvalidRow();
  }

  // Searches the stack map list backwards because catch stack maps are stored at the end.
  StackMap GetCatchStackMapForDexPc(uint32_t dex_pc) const {
    for (size_t i = GetNumberOfStackMaps(); i > 0; --i) {
      StackMap stack_map = GetStackMapAt(i - 1);
      if (stack_map.GetDexPc() == dex_pc && stack_map.GetKind() == StackMap::Kind::Catch) {
        return stack_map;
      }
    }
    return stack_maps_.GetInvalidRow();
  }

  // Linear search for an OSR-kind stack map with the given dex pc.
  StackMap GetOsrStackMapForDexPc(uint32_t dex_pc) const {
    for (StackMap stack_map : stack_maps_) {
      if (stack_map.GetDexPc() == dex_pc && stack_map.GetKind() == StackMap::Kind::OSR) {
        return stack_map;
      }
    }
    return stack_maps_.GetInvalidRow();
  }

  StackMap GetStackMapForNativePcOffset(uintptr_t pc, InstructionSet isa = kRuntimeISA) const;

  // Dump this CodeInfo object on `vios`.
  // `code_offset` is the (absolute) native PC of the compiled method.
  void Dump(VariableIndentationOutputStream* vios,
            uint32_t code_offset,
            bool verbose,
            InstructionSet instruction_set) const;

  // Accumulate code info size statistics into the given Stats tree.
  static void CollectSizeStats(const uint8_t* code_info, /*out*/ Stats& parent);

  // Fast checks that read only the first byte of the encoded data
  // (the flags_ field is encoded first; see ForEachHeaderField).
  ALWAYS_INLINE static bool HasInlineInfo(const uint8_t* code_info_data) {
    return (*code_info_data & kHasInlineInfo) != 0;
  }

  ALWAYS_INLINE static bool IsBaseline(const uint8_t* code_info_data) {
    return (*code_info_data & kIsBaseline) != 0;
  }

 private:
  // Scan backward to determine dex register locations at given stack map.
  void DecodeDexRegisterMap(uint32_t stack_map_index,
                            uint32_t first_dex_register,
                            /*out*/ DexRegisterMap* map) const;

  template<typename DecodeCallback>  // (size_t index, BitTable<...>*, BitMemoryRegion).
  ALWAYS_INLINE CodeInfo(const uint8_t* data, size_t* num_read_bits, DecodeCallback callback);

  // Invokes the callback with index and member pointer of each header field.
  // The order here defines the on-disk encoding order of the header varints.
  template<typename Callback>
  ALWAYS_INLINE static void ForEachHeaderField(Callback callback) {
    size_t index = 0;
    callback(index++, &CodeInfo::flags_);
    callback(index++, &CodeInfo::code_size_);
    callback(index++, &CodeInfo::packed_frame_size_);
    callback(index++, &CodeInfo::core_spill_mask_);
    callback(index++, &CodeInfo::fp_spill_mask_);
    callback(index++, &CodeInfo::number_of_dex_registers_);
    callback(index++, &CodeInfo::bit_table_flags_);
    DCHECK_EQ(index, kNumHeaders);
  }

  // Invokes the callback with index and member pointer of each BitTable field.
  // The order here defines the on-disk encoding order of the bit-tables.
  template<typename Callback>
  ALWAYS_INLINE static void ForEachBitTableField(Callback callback) {
    size_t index = 0;
    callback(index++, &CodeInfo::stack_maps_);
    callback(index++, &CodeInfo::register_masks_);
    callback(index++, &CodeInfo::stack_masks_);
    callback(index++, &CodeInfo::inline_infos_);
    callback(index++, &CodeInfo::method_infos_);
    callback(index++, &CodeInfo::dex_register_masks_);
    callback(index++, &CodeInfo::dex_register_maps_);
    callback(index++, &CodeInfo::dex_register_catalog_);
    DCHECK_EQ(index, kNumBitTables);
  }

  // bit_table_flags_ layout: low kNumBitTables bits say which tables are
  // present; the next kNumBitTables bits say which of those are deduped.
  bool HasBitTable(size_t i) { return ((bit_table_flags_ >> i) & 1) != 0; }
  bool IsBitTableDeduped(size_t i) { return ((bit_table_flags_ >> (kNumBitTables + i)) & 1) != 0; }
  void SetBitTableDeduped(size_t i) { bit_table_flags_ |= 1 << (kNumBitTables + i); }
  bool HasDedupedBitTables() { return (bit_table_flags_ >> kNumBitTables) != 0u; }

  // Values stored in flags_ (and thus in the first encoded byte).
  enum Flags {
    kHasInlineInfo = 1 << 0,
    kIsBaseline = 1 << 1,
  };

  // The CodeInfo starts with sequence of variable-length bit-encoded integers.
  static constexpr size_t kNumHeaders = 7;
  uint32_t flags_ = 0;  // Note that the space is limited to three bits.
  uint32_t code_size_ = 0;  // The size of native PC range in bytes.
  uint32_t packed_frame_size_ = 0;  // Frame size in kStackAlignment units.
  uint32_t core_spill_mask_ = 0;
  uint32_t fp_spill_mask_ = 0;
  uint32_t number_of_dex_registers_ = 0;
  uint32_t bit_table_flags_ = 0;

  // The encoded bit-tables follow the header. Based on the above flags field,
  // bit-tables might be omitted or replaced by relative bit-offset if deduped.
  static constexpr size_t kNumBitTables = 8;
  BitTable<StackMap> stack_maps_;
  BitTable<RegisterMask> register_masks_;
  BitTable<StackMask> stack_masks_;
  BitTable<InlineInfo> inline_infos_;
  BitTable<MethodInfo> method_infos_;
  BitTable<DexRegisterMask> dex_register_masks_;
  BitTable<DexRegisterMapInfo> dex_register_maps_;
  BitTable<DexRegisterInfo> dex_register_catalog_;

  friend class linker::CodeInfoTableDeduper;
  friend class StackMapStream;
};

#undef ELEMENT_BYTE_OFFSET_AFTER
#undef ELEMENT_BIT_OFFSET_AFTER

}  // namespace art

#endif  // ART_RUNTIME_STACK_MAP_H_