1 /* 2 * Copyright (C) 2014 The Android Open Source Project 3 * 4 * Licensed under the Apache License, Version 2.0 (the "License"); 5 * you may not use this file except in compliance with the License. 6 * You may obtain a copy of the License at 7 * 8 * http://www.apache.org/licenses/LICENSE-2.0 9 * 10 * Unless required by applicable law or agreed to in writing, software 11 * distributed under the License is distributed on an "AS IS" BASIS, 12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 * See the License for the specific language governing permissions and 14 * limitations under the License. 15 */ 16 17 #ifndef ART_RUNTIME_OAT_STACK_MAP_H_ 18 #define ART_RUNTIME_OAT_STACK_MAP_H_ 19 20 #include <limits> 21 22 #include "arch/instruction_set.h" 23 #include "base/array_ref.h" 24 #include "base/bit_memory_region.h" 25 #include "base/bit_table.h" 26 #include "base/bit_utils.h" 27 #include "base/globals.h" 28 #include "base/logging.h" 29 #include "base/macros.h" 30 #include "base/memory_region.h" 31 #include "dex/dex_file_types.h" 32 #include "dex_register_location.h" 33 #include "quick/quick_method_frame_info.h" 34 35 namespace art HIDDEN { 36 37 namespace linker { 38 class CodeInfoTableDeduper; 39 } // namespace linker 40 41 class OatQuickMethodHeader; 42 class VariableIndentationOutputStream; 43 44 // Size of a frame slot, in bytes. This constant is a signed value, 45 // to please the compiler in arithmetic operations involving int32_t 46 // (signed) values. 47 static constexpr ssize_t kFrameSlotSize = 4; 48 49 // The delta compression of dex register maps means we need to scan the stackmaps backwards. 50 // We compress the data in such a way so that there is an upper bound on the search distance. 51 // Max distance 0 means each stack map must be fully defined and no scanning back is allowed. 52 // If this value is changed, the oat file version should be incremented (for DCHECK to pass). 
static constexpr size_t kMaxDexRegisterMapSearchDistance = 32;

class ArtMethod;
class CodeInfo;
class Stats;

std::ostream& operator<<(std::ostream& stream, const DexRegisterLocation& reg);

// Information on Dex register locations for a specific PC.
// Effectively just a convenience wrapper for DexRegisterLocation vector.
// If the size is small enough, it keeps the data on the stack.
// TODO: Replace this with generic purpose "small-vector" implementation.
class DexRegisterMap {
 public:
  using iterator = DexRegisterLocation*;
  using const_iterator = const DexRegisterLocation*;

  // Create map for given number of registers and initialize them to the given value.
  // Maps with count <= kSmallCount live entirely in the inline regs_small_ array;
  // larger maps fall back to the heap-allocated regs_large_ vector.
  DexRegisterMap(size_t count, DexRegisterLocation value) : count_(count), regs_small_{} {
    if (count_ <= kSmallCount) {
      std::fill_n(regs_small_.begin(), count, value);
    } else {
      regs_large_.resize(count, value);
    }
  }

  // Pointer to the active storage (inline array or heap vector, per count_).
  DexRegisterLocation* data() {
    return count_ <= kSmallCount ? regs_small_.data() : regs_large_.data();
  }
  const DexRegisterLocation* data() const {
    return count_ <= kSmallCount ? regs_small_.data() : regs_large_.data();
  }

  // Standard container-style accessors over the `count_` locations.
  iterator begin() { return data(); }
  iterator end() { return data() + count_; }
  const_iterator begin() const { return data(); }
  const_iterator end() const { return data() + count_; }
  size_t size() const { return count_; }
  bool empty() const { return count_ == 0; }

  DexRegisterLocation& operator[](size_t index) {
    DCHECK_LT(index, count_);
    return data()[index];
  }
  const DexRegisterLocation& operator[](size_t index) const {
    DCHECK_LT(index, count_);
    return data()[index];
  }

  // Number of entries reported live by DexRegisterLocation::IsLive().
  size_t GetNumberOfLiveDexRegisters() const {
    return std::count_if(begin(), end(), [](auto& loc) { return loc.IsLive(); });
  }

  bool HasAnyLiveDexRegisters() const {
    return std::any_of(begin(), end(), [](auto& loc) { return loc.IsLive(); });
  }

  void Dump(VariableIndentationOutputStream* vios) const;

 private:
  // Store the data inline if the number of registers is small to avoid memory allocations.
  // If count_ <= kSmallCount, we use the regs_small_ array, and regs_large_ otherwise.
  static constexpr size_t kSmallCount = 16;
  size_t count_;
  std::array<DexRegisterLocation, kSmallCount> regs_small_;
  dchecked_vector<DexRegisterLocation> regs_large_;
};

/**
 * A Stack Map holds compilation information for a specific PC necessary for:
 * - Mapping it to a dex PC,
 * - Knowing which stack entries are objects,
 * - Knowing which registers hold objects,
 * - Knowing the inlining information,
 * - Knowing the values of dex registers.
 */
class StackMap : public BitTableAccessor<8> {
 public:
  // Distinguishes special-purpose stack maps. Default is used for ordinary maps;
  // Catch/OSR/Debug maps are matched explicitly by the CodeInfo lookup methods below.
  enum Kind {
    Default = -1,
    Catch = 0,
    OSR = 1,
    Debug = 2,
  };
  BIT_TABLE_HEADER(StackMap)
  BIT_TABLE_COLUMN(0, Kind)
  BIT_TABLE_COLUMN(1, PackedNativePc)
  BIT_TABLE_COLUMN(2, DexPc)
  BIT_TABLE_COLUMN(3, RegisterMaskIndex)
  BIT_TABLE_COLUMN(4, StackMaskIndex)
  BIT_TABLE_COLUMN(5, InlineInfoIndex)
  BIT_TABLE_COLUMN(6, DexRegisterMaskIndex)
  BIT_TABLE_COLUMN(7, DexRegisterMapIndex)

  ALWAYS_INLINE uint32_t GetNativePcOffset(InstructionSet instruction_set) const {
    return UnpackNativePc(GetPackedNativePc(), instruction_set);
  }

  ALWAYS_INLINE bool HasInlineInfo() const {
    return HasInlineInfoIndex();
  }

  ALWAYS_INLINE bool HasDexRegisterMap() const {
    return HasDexRegisterMapIndex();
  }

  // Native PCs are stored divided by the ISA instruction alignment to save bits;
  // the DCHECK guarantees the division is lossless.
  static uint32_t PackNativePc(uint32_t native_pc, InstructionSet isa) {
    DCHECK_ALIGNED_PARAM(native_pc, GetInstructionSetInstructionAlignment(isa));
    return native_pc / GetInstructionSetInstructionAlignment(isa);
  }

  // Inverse of PackNativePc; the DCHECK verifies no bits were lost to overflow.
  static uint32_t UnpackNativePc(uint32_t packed_native_pc, InstructionSet isa) {
    uint32_t native_pc = packed_native_pc * GetInstructionSetInstructionAlignment(isa);
    DCHECK_EQ(native_pc / GetInstructionSetInstructionAlignment(isa), packed_native_pc);
    return native_pc;
  }

  EXPORT void Dump(VariableIndentationOutputStream* vios,
                   const CodeInfo& code_info,
                   uint32_t code_offset,
                   InstructionSet instruction_set) const;
};

/**
 * Inline information for a specific PC.
 * The row referenced from the StackMap holds information at depth 0.
 * Following rows hold information for further depths.
 */
class InlineInfo : public BitTableAccessor<6> {
 public:
  BIT_TABLE_HEADER(InlineInfo)
  BIT_TABLE_COLUMN(0, IsLast)  // Determines if there are further rows for further depths.
  BIT_TABLE_COLUMN(1, DexPc)
  BIT_TABLE_COLUMN(2, MethodInfoIndex)
  BIT_TABLE_COLUMN(3, ArtMethodHi)  // High bits of ArtMethod*.
  BIT_TABLE_COLUMN(4, ArtMethodLo)  // Low bits of ArtMethod*.
  BIT_TABLE_COLUMN(5, NumberOfDexRegisters)  // Includes outer levels and the main method.

  // Values for the IsLast column.
  static constexpr uint32_t kLast = -1;
  static constexpr uint32_t kMore = 0;

  // True if the inlined method is identified by a direct ArtMethod* rather than
  // by a MethodInfo index.
  bool EncodesArtMethod() const {
    return HasArtMethodLo();
  }

  // Reassemble the ArtMethod* from its two 32-bit column halves.
  ArtMethod* GetArtMethod() const {
    uint64_t lo = GetArtMethodLo();
    uint64_t hi = GetArtMethodHi();
    return reinterpret_cast<ArtMethod*>((hi << 32) | lo);
  }

  void Dump(VariableIndentationOutputStream* vios,
            const CodeInfo& info,
            const StackMap& stack_map) const;
};

class StackMask : public BitTableAccessor<1> {
 public:
  BIT_TABLE_HEADER(StackMask)
  BIT_TABLE_COLUMN(0, Mask)
};

class DexRegisterMask : public BitTableAccessor<1> {
 public:
  BIT_TABLE_HEADER(DexRegisterMask)
  BIT_TABLE_COLUMN(0, Mask)
};

class DexRegisterMapInfo : public BitTableAccessor<1> {
 public:
  BIT_TABLE_HEADER(DexRegisterMapInfo)
  BIT_TABLE_COLUMN(0, CatalogueIndex)
};

// A single catalogued dex register location, stored as (kind, packed value).
class DexRegisterInfo : public BitTableAccessor<2> {
 public:
  BIT_TABLE_HEADER(DexRegisterInfo)
  BIT_TABLE_COLUMN(0, Kind)
  BIT_TABLE_COLUMN(1, PackedValue)

  ALWAYS_INLINE DexRegisterLocation GetLocation() const {
    DexRegisterLocation::Kind kind = static_cast<DexRegisterLocation::Kind>(GetKind());
    return DexRegisterLocation(kind, UnpackValue(kind, GetPackedValue()));
  }

  // Stack locations are stored in frame-slot units to save bits.
  static uint32_t PackValue(DexRegisterLocation::Kind
                            kind, uint32_t value) {
    uint32_t packed_value = value;
    if (kind == DexRegisterLocation::Kind::kInStack) {
      DCHECK(IsAligned<kFrameSlotSize>(packed_value));
      packed_value /= kFrameSlotSize;
    }
    return packed_value;
  }

  // Inverse of PackValue: re-scale stack locations back to byte offsets.
  static uint32_t UnpackValue(DexRegisterLocation::Kind kind, uint32_t packed_value) {
    uint32_t value = packed_value;
    if (kind == DexRegisterLocation::Kind::kInStack) {
      value *= kFrameSlotSize;
    }
    return value;
  }
};

// Register masks tend to have many trailing zero bits (caller-saves are usually not encoded),
// therefore it is worth encoding the mask as value+shift.
class RegisterMask : public BitTableAccessor<2> {
 public:
  BIT_TABLE_HEADER(RegisterMask)
  BIT_TABLE_COLUMN(0, Value)
  BIT_TABLE_COLUMN(1, Shift)

  ALWAYS_INLINE uint32_t GetMask() const {
    return GetValue() << GetShift();
  }
};

// Method indices are not very dedup friendly.
// Separating them greatly improves dedup efficiency of the other tables.
class MethodInfo : public BitTableAccessor<3> {
 public:
  BIT_TABLE_HEADER(MethodInfo)
  BIT_TABLE_COLUMN(0, MethodIndex)
  BIT_TABLE_COLUMN(1, DexFileIndexKind)
  BIT_TABLE_COLUMN(2, DexFileIndex)

  // Values for the DexFileIndexKind column.
  static constexpr uint32_t kKindNonBCP = -1;
  static constexpr uint32_t kKindBCP = 0;

  static constexpr uint32_t kSameDexFile = -1;
};

/**
 * Wrapper around all compiler information collected for a method.
 * See the Decode method at the end for the precise binary format.
 */
class CodeInfo {
 public:
  ALWAYS_INLINE CodeInfo() {}
  EXPORT ALWAYS_INLINE explicit CodeInfo(const uint8_t* data, size_t* num_read_bits = nullptr);
  EXPORT ALWAYS_INLINE explicit CodeInfo(const OatQuickMethodHeader* header);

  // The following methods decode only part of the data.
  static CodeInfo DecodeGcMasksOnly(const OatQuickMethodHeader* header);
  static CodeInfo DecodeInlineInfoOnly(const OatQuickMethodHeader* header);

  ALWAYS_INLINE static uint32_t DecodeCodeSize(const uint8_t* code_info_data) {
    return DecodeHeaderOnly(code_info_data).code_size_;
  }

  ALWAYS_INLINE static QuickMethodFrameInfo DecodeFrameInfo(const uint8_t* code_info_data) {
    CodeInfo code_info = DecodeHeaderOnly(code_info_data);
    return QuickMethodFrameInfo(code_info.packed_frame_size_ * kStackAlignment,
                                code_info.core_spill_mask_,
                                code_info.fp_spill_mask_);
  }

  // Decode just the interleaved-varint header fields (no bit tables).
  ALWAYS_INLINE static CodeInfo DecodeHeaderOnly(const uint8_t* code_info_data) {
    CodeInfo code_info;
    BitMemoryReader reader(code_info_data);
    std::array<uint32_t, kNumHeaders> header = reader.ReadInterleavedVarints<kNumHeaders>();
    ForEachHeaderField([&code_info, &header](size_t i, auto member_pointer) {
      code_info.*member_pointer = header[i];
    });
    return code_info;
  }

  ALWAYS_INLINE const BitTable<StackMap>& GetStackMaps() const {
    return stack_maps_;
  }

  ALWAYS_INLINE StackMap GetStackMapAt(size_t index) const {
    return stack_maps_.GetRow(index);
  }

  BitMemoryRegion GetStackMask(size_t index) const {
    return stack_masks_.GetBitMemoryRegion(index);
  }

  // Returns an empty region when the stack map has no stack mask.
  BitMemoryRegion GetStackMaskOf(const StackMap& stack_map) const {
    uint32_t index = stack_map.GetStackMaskIndex();
    return (index == StackMap::kNoValue) ?
        BitMemoryRegion() : GetStackMask(index);
  }

  // Returns 0 when the stack map has no register mask.
  uint32_t GetRegisterMaskOf(const StackMap& stack_map) const {
    uint32_t index = stack_map.GetRegisterMaskIndex();
    return (index == StackMap::kNoValue) ? 0 : register_masks_.GetRow(index).GetMask();
  }

  uint32_t GetNumberOfLocationCatalogEntries() const {
    return dex_register_catalog_.NumRows();
  }

  // Returns DexRegisterLocation::None() for the kNoValue sentinel index.
  ALWAYS_INLINE DexRegisterLocation GetDexRegisterCatalogEntry(size_t index) const {
    return (index == StackMap::kNoValue)
        ? DexRegisterLocation::None()
        : dex_register_catalog_.GetRow(index).GetLocation();
  }

  bool HasInlineInfo() const {
    return inline_infos_.NumRows() > 0;
  }

  uint32_t GetNumberOfStackMaps() const {
    return stack_maps_.NumRows();
  }

  MethodInfo GetMethodInfoOf(InlineInfo inline_info) const {
    return method_infos_.GetRow(inline_info.GetMethodInfoIndex());
  }

  uint32_t GetMethodIndexOf(InlineInfo inline_info) const {
    return GetMethodInfoOf(inline_info).GetMethodIndex();
  }

  // Returns the dex registers for `stack_map`, ignoring any inlined dex registers.
  ALWAYS_INLINE DexRegisterMap GetDexRegisterMapOf(StackMap stack_map) const {
    return GetDexRegisterMapOf(stack_map, /* first= */ 0, number_of_dex_registers_);
  }

  // Returns the dex register map of `inline_info`, and just those registers.
GetInlineDexRegisterMapOf(StackMap stack_map,InlineInfo inline_info)373 ALWAYS_INLINE DexRegisterMap GetInlineDexRegisterMapOf(StackMap stack_map, 374 InlineInfo inline_info) const { 375 if (stack_map.HasDexRegisterMap()) { 376 DCHECK(stack_map.HasInlineInfoIndex()); 377 uint32_t depth = inline_info.Row() - stack_map.GetInlineInfoIndex(); 378 // The register counts are commutative and include all outer levels. 379 // This allows us to determine the range [first, last) in just two lookups. 380 // If we are at depth 0 (the first inlinee), the count from the main method is used. 381 uint32_t first = (depth == 0) 382 ? number_of_dex_registers_ 383 : inline_infos_.GetRow(inline_info.Row() - 1).GetNumberOfDexRegisters(); 384 uint32_t last = inline_info.GetNumberOfDexRegisters(); 385 return GetDexRegisterMapOf(stack_map, first, last); 386 } 387 return DexRegisterMap(0, DexRegisterLocation::None()); 388 } 389 390 // Returns the dex register map of `stack_map` in the range the range [first, last). 
  ALWAYS_INLINE DexRegisterMap GetDexRegisterMapOf(StackMap stack_map,
                                                   uint32_t first,
                                                   uint32_t last) const {
    if (stack_map.HasDexRegisterMap()) {
      DCHECK_LE(first, last);
      // Entries start out Invalid; DecodeDexRegisterMap fills them in by scanning
      // backwards through earlier stack maps (delta compression).
      DexRegisterMap map(last - first, DexRegisterLocation::Invalid());
      DecodeDexRegisterMap(stack_map.Row(), first, &map);
      return map;
    }
    return DexRegisterMap(0, DexRegisterLocation::None());
  }

  // Returns the run of InlineInfo rows for `stack_map`: from its InlineInfoIndex
  // up to and including the row marked IsLast. Empty range if there is no inlining.
  BitTableRange<InlineInfo> GetInlineInfosOf(StackMap stack_map) const {
    uint32_t index = stack_map.GetInlineInfoIndex();
    if (index != StackMap::kNoValue) {
      auto begin = inline_infos_.begin() + index;
      auto end = begin;
      while ((*end++).GetIsLast() == InlineInfo::kMore) { }
      return BitTableRange<InlineInfo>(begin, end);
    } else {
      return BitTableRange<InlineInfo>();
    }
  }

  // First non-Debug stack map matching `dex_pc`, or the invalid row if none.
  StackMap GetStackMapForDexPc(uint32_t dex_pc) const {
    for (StackMap stack_map : stack_maps_) {
      if (stack_map.GetDexPc() == dex_pc && stack_map.GetKind() != StackMap::Kind::Debug) {
        return stack_map;
      }
    }
    return stack_maps_.GetInvalidRow();
  }

  StackMap GetCatchStackMapForDexPc(ArrayRef<const uint32_t> dex_pcs) const {
    // Searches the stack map list backwards because catch stack maps are stored at the end.
    for (size_t i = GetNumberOfStackMaps(); i > 0; --i) {
      StackMap stack_map = GetStackMapAt(i - 1);
      if (UNLIKELY(stack_map.GetKind() != StackMap::Kind::Catch)) {
        // Early break since we should have catch stack maps only at the end.
        if (kIsDebugBuild) {
          for (size_t j = i - 1; j > 0; --j) {
            DCHECK(GetStackMapAt(j - 1).GetKind() != StackMap::Kind::Catch);
          }
        }
        break;
      }

      // Both the handler dex_pc and all of the inline dex_pcs have to match i.e. we want dex_pcs to
      // be [stack_map_dex_pc, inline_dex_pc_1, ..., inline_dex_pc_n].
      if (stack_map.GetDexPc() != dex_pcs.front()) {
        continue;
      }

      const BitTableRange<InlineInfo>& inline_infos = GetInlineInfosOf(stack_map);
      if (inline_infos.size() == dex_pcs.size() - 1) {
        bool matching_dex_pcs = true;
        for (size_t inline_info_index = 0; inline_info_index < inline_infos.size();
             ++inline_info_index) {
          if (inline_infos[inline_info_index].GetDexPc() != dex_pcs[inline_info_index + 1]) {
            matching_dex_pcs = false;
            break;
          }
        }
        if (matching_dex_pcs) {
          return stack_map;
        }
      }
    }
    return stack_maps_.GetInvalidRow();
  }

  // First OSR stack map matching `dex_pc`, or the invalid row if none.
  StackMap GetOsrStackMapForDexPc(uint32_t dex_pc) const {
    for (StackMap stack_map : stack_maps_) {
      if (stack_map.GetDexPc() == dex_pc && stack_map.GetKind() == StackMap::Kind::OSR) {
        return stack_map;
      }
    }
    return stack_maps_.GetInvalidRow();
  }

  EXPORT StackMap GetStackMapForNativePcOffset(uintptr_t pc,
                                               InstructionSet isa = kRuntimeISA) const;

  // Dump this CodeInfo object on `vios`.
  // `code_offset` is the (absolute) native PC of the compiled method.
  EXPORT void Dump(VariableIndentationOutputStream* vios,
                   uint32_t code_offset,
                   bool verbose,
                   InstructionSet instruction_set) const;

  // Accumulate code info size statistics into the given Stats tree.
  EXPORT static void CollectSizeStats(const uint8_t* code_info, /*out*/ Stats& parent);

  // Tests a single flag bit of the header without decoding the full CodeInfo.
  template <uint32_t kFlag>
  ALWAYS_INLINE static bool HasFlag(const uint8_t* code_info_data) {
    // Fast path - read just the one specific bit from the header.
    bool result;
    // If the flags field fits in a single inline varint, the flag bits are right there
    // in the first byte.
    uint8_t varint = (*code_info_data) & MaxInt<uint8_t>(kVarintBits);
    if (LIKELY(varint <= kVarintMax)) {
      result = (varint & kFlag) != 0;
    } else {
      DCHECK_EQ(varint, kVarintMax + 1);  // Only up to 8 flags are supported for now.
      // Otherwise the flags value is stored out-of-line after the interleaved varint
      // prefixes; compute the bit position of this one flag and read it directly.
      constexpr uint32_t bit_offset = kNumHeaders * kVarintBits + WhichPowerOf2(kFlag);
      result = (code_info_data[bit_offset / kBitsPerByte] & (1 << bit_offset % kBitsPerByte)) != 0;
    }
    // Slow path - dcheck that we got the correct result against the naive implementation.
    BitMemoryReader reader(code_info_data);
    DCHECK_EQ(result, (reader.ReadInterleavedVarints<kNumHeaders>()[0] & kFlag) != 0);
    return result;
  }

  ALWAYS_INLINE static bool HasInlineInfo(const uint8_t* code_info_data) {
    return HasFlag<kHasInlineInfo>(code_info_data);
  }

  ALWAYS_INLINE static bool HasShouldDeoptimizeFlag(const uint8_t* code_info_data) {
    return HasFlag<kHasShouldDeoptimizeFlag>(code_info_data);
  }

  ALWAYS_INLINE static bool IsBaseline(const uint8_t* code_info_data) {
    return HasFlag<kIsBaseline>(code_info_data);
  }

  ALWAYS_INLINE static bool IsDebuggable(const uint8_t* code_info_data) {
    return HasFlag<kIsDebuggable>(code_info_data);
  }

  uint32_t GetNumberOfDexRegisters() {
    return number_of_dex_registers_;
  }

 private:
  // Scan backward to determine dex register locations at given stack map.
  EXPORT void DecodeDexRegisterMap(uint32_t stack_map_index,
                                   uint32_t first_dex_register,
                                   /*out*/ DexRegisterMap* map) const;

  template<typename DecodeCallback>  // (size_t index, BitTable<...>*, BitMemoryRegion).
  ALWAYS_INLINE CodeInfo(const uint8_t* data, size_t* num_read_bits, DecodeCallback callback);

  // Invokes the callback with index and member pointer of each header field.
  // NOTE: the callback order here defines the serialized header layout; it must
  // stay in sync with the field declarations and kNumHeaders below.
  template<typename Callback>
  ALWAYS_INLINE static void ForEachHeaderField(Callback callback) {
    size_t index = 0;
    callback(index++, &CodeInfo::flags_);
    callback(index++, &CodeInfo::code_size_);
    callback(index++, &CodeInfo::packed_frame_size_);
    callback(index++, &CodeInfo::core_spill_mask_);
    callback(index++, &CodeInfo::fp_spill_mask_);
    callback(index++, &CodeInfo::number_of_dex_registers_);
    callback(index++, &CodeInfo::bit_table_flags_);
    DCHECK_EQ(index, kNumHeaders);
  }

  // Invokes the callback with index and member pointer of each BitTable field.
  // NOTE: the callback order defines the serialized bit-table order; it must
  // stay in sync with the BitTable member declarations and kNumBitTables below.
  template<typename Callback>
  ALWAYS_INLINE static void ForEachBitTableField(Callback callback) {
    size_t index = 0;
    callback(index++, &CodeInfo::stack_maps_);
    callback(index++, &CodeInfo::register_masks_);
    callback(index++, &CodeInfo::stack_masks_);
    callback(index++, &CodeInfo::inline_infos_);
    callback(index++, &CodeInfo::method_infos_);
    callback(index++, &CodeInfo::dex_register_masks_);
    callback(index++, &CodeInfo::dex_register_maps_);
    callback(index++, &CodeInfo::dex_register_catalog_);
    DCHECK_EQ(index, kNumBitTables);
  }

  // bit_table_flags_ packs two bitsets: bit i = table i is present,
  // bit (kNumBitTables + i) = table i is deduped (stored as a bit offset).
  bool HasBitTable(size_t i) { return ((bit_table_flags_ >> i) & 1) != 0; }
  bool IsBitTableDeduped(size_t i) { return ((bit_table_flags_ >> (kNumBitTables + i)) & 1) != 0; }
  void SetBitTableDeduped(size_t i) { bit_table_flags_ |= 1 << (kNumBitTables + i); }
  bool HasDedupedBitTables() { return (bit_table_flags_ >> kNumBitTables) != 0u; }

  // NB: The first three flags should be the most common ones.
  // Maximum of 8 flags is supported right now (see the HasFlag method).
  enum Flags {
    kHasInlineInfo = 1 << 0,
    kHasShouldDeoptimizeFlag = 1 << 1,
    kIsBaseline = 1 << 2,
    kIsDebuggable = 1 << 3,
  };

  // The CodeInfo starts with sequence of variable-length bit-encoded integers.
  // (Please see kVarintMax for more details about encoding).
  // NOTE: field order must match ForEachHeaderField above.
  static constexpr size_t kNumHeaders = 7;
  uint32_t flags_ = 0;
  uint32_t code_size_ = 0;  // The size of native PC range in bytes.
  uint32_t packed_frame_size_ = 0;  // Frame size in kStackAlignment units.
  uint32_t core_spill_mask_ = 0;
  uint32_t fp_spill_mask_ = 0;
  uint32_t number_of_dex_registers_ = 0;
  uint32_t bit_table_flags_ = 0;

  // The encoded bit-tables follow the header. Based on the above flags field,
  // bit-tables might be omitted or replaced by relative bit-offset if deduped.
  // NOTE: table order must match ForEachBitTableField above.
  static constexpr size_t kNumBitTables = 8;
  BitTable<StackMap> stack_maps_;
  BitTable<RegisterMask> register_masks_;
  BitTable<StackMask> stack_masks_;
  BitTable<InlineInfo> inline_infos_;
  BitTable<MethodInfo> method_infos_;
  BitTable<DexRegisterMask> dex_register_masks_;
  BitTable<DexRegisterMapInfo> dex_register_maps_;
  BitTable<DexRegisterInfo> dex_register_catalog_;

  friend class linker::CodeInfoTableDeduper;
  friend class StackMapStream;
};

#undef ELEMENT_BYTE_OFFSET_AFTER
#undef ELEMENT_BIT_OFFSET_AFTER

}  // namespace art

#endif  // ART_RUNTIME_OAT_STACK_MAP_H_