// Copyright 2014 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef V8_REGISTER_ALLOCATOR_H_
#define V8_REGISTER_ALLOCATOR_H_

#include "src/compiler/instruction.h"
#include "src/ostreams.h"
#include "src/register-configuration.h"
#include "src/zone-containers.h"

namespace v8 {
namespace internal {
namespace compiler {

enum RegisterKind { GENERAL_REGISTERS, FP_REGISTERS };

// This class represents a single point of an InstructionOperand's lifetime.
// For each instruction there are four lifetime positions:
//
//   [[START, END], [START, END]]
//
// where the first half position corresponds to
//
//   [GapPosition::START, GapPosition::END]
//
// and the second half position corresponds to
//
//   [Lifetime::USED_AT_START, Lifetime::USED_AT_END]
//
class LifetimePosition final {
 public:
  // Return the lifetime position that corresponds to the beginning of
  // the gap with the given index.
  static LifetimePosition GapFromInstructionIndex(int index) {
    return LifetimePosition(index * kStep);
  }
  // Return the lifetime position that corresponds to the beginning of
  // the instruction with the given index.
  static LifetimePosition InstructionFromInstructionIndex(int index) {
    return LifetimePosition(index * kStep + kHalfStep);
  }

  static bool ExistsGapPositionBetween(LifetimePosition pos1,
                                       LifetimePosition pos2) {
    if (pos1 > pos2) std::swap(pos1, pos2);
    LifetimePosition next(pos1.value_ + 1);
    if (next.IsGapPosition()) return next < pos2;
    return next.NextFullStart() < pos2;
  }

  // Returns a numeric representation of this lifetime position.
  int value() const { return value_; }

  // Returns the index of the instruction to which this lifetime position
  // corresponds.
  int ToInstructionIndex() const {
    DCHECK(IsValid());
    return value_ / kStep;
  }

  // Returns true if this lifetime position corresponds to a START value.
  bool IsStart() const { return (value_ & (kHalfStep - 1)) == 0; }
  // Returns true if this lifetime position corresponds to an END value.
  bool IsEnd() const { return (value_ & (kHalfStep - 1)) == 1; }
  // Returns true if this lifetime position corresponds to a gap START value.
  bool IsFullStart() const { return (value_ & (kStep - 1)) == 0; }

  bool IsGapPosition() const { return (value_ & 0x2) == 0; }
  bool IsInstructionPosition() const { return !IsGapPosition(); }

  // Returns the lifetime position for the current START.
  LifetimePosition Start() const {
    DCHECK(IsValid());
    return LifetimePosition(value_ & ~(kHalfStep - 1));
  }

  // Returns the lifetime position for the current gap START.
  LifetimePosition FullStart() const {
    DCHECK(IsValid());
    return LifetimePosition(value_ & ~(kStep - 1));
  }

  // Returns the lifetime position for the current END.
  LifetimePosition End() const {
    DCHECK(IsValid());
    return LifetimePosition(Start().value_ + kHalfStep / 2);
  }
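  // Illustrative example (not part of the original header; assumes the
  // kStep == 4, kHalfStep == 2 encoding defined below): instruction index 3
  // yields four consecutive positions:
  //
  //   GapFromInstructionIndex(3).value()                == 12  // gap START
  //   GapFromInstructionIndex(3).End().value()          == 13  // gap END
  //   InstructionFromInstructionIndex(3).value()        == 14  // instr START
  //   InstructionFromInstructionIndex(3).End().value()  == 15  // instr END
  //
  // ToInstructionIndex() maps all four values back to 3.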
  // Returns the lifetime position for the beginning of the next START.
  LifetimePosition NextStart() const {
    DCHECK(IsValid());
    return LifetimePosition(Start().value_ + kHalfStep);
  }

  // Returns the lifetime position for the beginning of the next gap START.
  LifetimePosition NextFullStart() const {
    DCHECK(IsValid());
    return LifetimePosition(FullStart().value_ + kStep);
  }

  // Returns the lifetime position for the beginning of the previous START.
  LifetimePosition PrevStart() const {
    DCHECK(IsValid());
    DCHECK(value_ >= kHalfStep);
    return LifetimePosition(Start().value_ - kHalfStep);
  }

  // Constructs the lifetime position which does not correspond to any
  // instruction.
  LifetimePosition() : value_(-1) {}

  // Returns true if this lifetime position corresponds to some instruction.
  bool IsValid() const { return value_ != -1; }

  bool operator<(const LifetimePosition& that) const {
    return this->value_ < that.value_;
  }

  bool operator<=(const LifetimePosition& that) const {
    return this->value_ <= that.value_;
  }

  bool operator==(const LifetimePosition& that) const {
    return this->value_ == that.value_;
  }

  bool operator!=(const LifetimePosition& that) const {
    return this->value_ != that.value_;
  }

  bool operator>(const LifetimePosition& that) const {
    return this->value_ > that.value_;
  }

  bool operator>=(const LifetimePosition& that) const {
    return this->value_ >= that.value_;
  }

  void Print() const;

  static inline LifetimePosition Invalid() { return LifetimePosition(); }

  static inline LifetimePosition MaxPosition() {
    // We have to use this kind of getter instead of a static member due to a
    // crash bug in GDB.
    return LifetimePosition(kMaxInt);
  }

  static inline LifetimePosition FromInt(int value) {
    return LifetimePosition(value);
  }

 private:
  static const int kHalfStep = 2;
  static const int kStep = 2 * kHalfStep;

  // Code relies on kStep and kHalfStep being a power of two.
  STATIC_ASSERT(IS_POWER_OF_TWO(kHalfStep));

  explicit LifetimePosition(int value) : value_(value) {}

  int value_;
};


std::ostream& operator<<(std::ostream& os, const LifetimePosition pos);


// Representation of the non-empty half-open interval [start, end).
class UseInterval final : public ZoneObject {
 public:
  UseInterval(LifetimePosition start, LifetimePosition end)
      : start_(start), end_(end), next_(nullptr) {
    DCHECK(start < end);
  }

  LifetimePosition start() const { return start_; }
  void set_start(LifetimePosition start) { start_ = start; }
  LifetimePosition end() const { return end_; }
  void set_end(LifetimePosition end) { end_ = end; }
  UseInterval* next() const { return next_; }
  void set_next(UseInterval* next) { next_ = next; }

  // Split this interval at the given position without affecting the
  // live range that owns it. The interval must contain the position.
  UseInterval* SplitAt(LifetimePosition pos, Zone* zone);
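  // Illustrative example (not part of the original header): intervals are
  // half-open, so [4, 10) and [8, 16) overlap and their intersection starts
  // at 8, while the end position itself is excluded:
  //
  //   UseInterval a(LifetimePosition::FromInt(4), LifetimePosition::FromInt(10));
  //   UseInterval b(LifetimePosition::FromInt(8), LifetimePosition::FromInt(16));
  //   a.Intersect(&b).value();                     // == 8
  //   a.Contains(LifetimePosition::FromInt(10));   // false: end is exclusive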
  // If this interval intersects with other, return the smallest position
  // that belongs to both of them.
  LifetimePosition Intersect(const UseInterval* other) const {
    if (other->start() < start_) return other->Intersect(this);
    if (other->start() < end_) return other->start();
    return LifetimePosition::Invalid();
  }

  bool Contains(LifetimePosition point) const {
    return start_ <= point && point < end_;
  }

  // Returns the index of the first gap covered by this interval.
  int FirstGapIndex() const {
    int ret = start_.ToInstructionIndex();
    if (start_.IsInstructionPosition()) {
      ++ret;
    }
    return ret;
  }

  // Returns the index of the last gap covered by this interval.
  int LastGapIndex() const {
    int ret = end_.ToInstructionIndex();
    if (end_.IsGapPosition() && end_.IsStart()) {
      --ret;
    }
    return ret;
  }

 private:
  LifetimePosition start_;
  LifetimePosition end_;
  UseInterval* next_;

  DISALLOW_COPY_AND_ASSIGN(UseInterval);
};


enum class UsePositionType : uint8_t { kAny, kRequiresRegister, kRequiresSlot };


enum class UsePositionHintType : uint8_t {
  kNone,
  kOperand,
  kUsePos,
  kPhi,
  kUnresolved
};


static const int32_t kUnassignedRegister =
    RegisterConfiguration::kMaxGeneralRegisters;

static_assert(kUnassignedRegister <= RegisterConfiguration::kMaxFPRegisters,
              "kUnassignedRegister too small");

// Representation of a use position.
class UsePosition final : public ZoneObject {
 public:
  UsePosition(LifetimePosition pos, InstructionOperand* operand, void* hint,
              UsePositionHintType hint_type);

  InstructionOperand* operand() const { return operand_; }
  bool HasOperand() const { return operand_ != nullptr; }

  bool RegisterIsBeneficial() const {
    return RegisterBeneficialField::decode(flags_);
  }
  UsePositionType type() const { return TypeField::decode(flags_); }
  void set_type(UsePositionType type, bool register_beneficial);

  LifetimePosition pos() const { return pos_; }

  UsePosition* next() const { return next_; }
  void set_next(UsePosition* next) { next_ = next; }
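  // Illustrative note (not part of the original header): a use position may
  // carry a register hint. Hints of type kUnresolved (e.g. phi inputs whose
  // operands are not yet allocated) are resolved later via ResolveHint(),
  // after which HintRegister() can report a concrete register:
  //
  //   int reg;
  //   if (use->HasHint() && use->HintRegister(&reg)) { /* prefer reg */ }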
  // For hinting only.
  void set_assigned_register(int register_code) {
    flags_ = AssignedRegisterField::update(flags_, register_code);
  }

  UsePositionHintType hint_type() const {
    return HintTypeField::decode(flags_);
  }
  bool HasHint() const;
  bool HintRegister(int* register_code) const;
  void ResolveHint(UsePosition* use_pos);
  bool IsResolved() const {
    return hint_type() != UsePositionHintType::kUnresolved;
  }
  static UsePositionHintType HintTypeForOperand(const InstructionOperand& op);

 private:
  typedef BitField<UsePositionType, 0, 2> TypeField;
  typedef BitField<UsePositionHintType, 2, 3> HintTypeField;
  typedef BitField<bool, 5, 1> RegisterBeneficialField;
  typedef BitField<int32_t, 6, 6> AssignedRegisterField;

  InstructionOperand* const operand_;
  void* hint_;
  UsePosition* next_;
  LifetimePosition const pos_;
  uint32_t flags_;

  DISALLOW_COPY_AND_ASSIGN(UsePosition);
};


class SpillRange;
class RegisterAllocationData;
class TopLevelLiveRange;
class LiveRangeGroup;

// Representation of SSA values' live ranges as a collection of (continuous)
// intervals over the instruction ordering.
class LiveRange : public ZoneObject {
 public:
  UseInterval* first_interval() const { return first_interval_; }
  UsePosition* first_pos() const { return first_pos_; }
  TopLevelLiveRange* TopLevel() { return top_level_; }
  const TopLevelLiveRange* TopLevel() const { return top_level_; }

  bool IsTopLevel() const;

  LiveRange* next() const { return next_; }

  int relative_id() const { return relative_id_; }

  bool IsEmpty() const { return first_interval() == nullptr; }

  InstructionOperand GetAssignedOperand() const;

  MachineRepresentation representation() const {
    return RepresentationField::decode(bits_);
  }

  int assigned_register() const {
    return AssignedRegisterField::decode(bits_);
  }
  bool HasRegisterAssigned() const {
    return assigned_register() != kUnassignedRegister;
  }
  void set_assigned_register(int reg);
  void UnsetAssignedRegister();

  bool spilled() const { return SpilledField::decode(bits_); }
  void Spill();

  RegisterKind kind() const;

  // Returns the use position in this live range that follows both start
  // and the last processed use position.
  UsePosition* NextUsePosition(LifetimePosition start) const;

  // Returns the use position for which a register is required in this live
  // range and which follows both start and the last processed use position.
  UsePosition* NextRegisterPosition(LifetimePosition start) const;

  // Returns the first use position requiring a stack slot, or nullptr.
  UsePosition* NextSlotPosition(LifetimePosition start) const;

  // Returns the use position for which a register is beneficial in this live
  // range and which follows both start and the last processed use position.
  UsePosition* NextUsePositionRegisterIsBeneficial(
      LifetimePosition start) const;

  // Returns the use position for which a register is beneficial in this live
  // range and which precedes start.
  UsePosition* PreviousUsePositionRegisterIsBeneficial(
      LifetimePosition start) const;
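  // Illustrative sketch (not part of the original header): clients can also
  // walk the full use-position chain directly, e.g. to find register uses:
  //
  //   for (UsePosition* use = range->first_pos(); use != nullptr;
  //        use = use->next()) {
  //     if (use->type() == UsePositionType::kRequiresRegister) { /* ... */ }
  //   }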
  // Can this live range be spilled at this position?
  bool CanBeSpilled(LifetimePosition pos) const;

  // Splitting primitive used by both splitting and splintering members.
  // Performs the split, but does not link the resulting ranges.
  // The given position must follow the start of the range.
  // All uses following the given position will be moved from this
  // live range to the result live range.
  // The current range will terminate at position, while result will start
  // from position.
  UsePosition* DetachAt(LifetimePosition position, LiveRange* result,
                        Zone* zone);

  // Detaches at position, and then links the resulting ranges. Returns the
  // child, which starts at position.
  LiveRange* SplitAt(LifetimePosition position, Zone* zone);

  // Returns nullptr when no register is hinted, otherwise sets register_index.
  UsePosition* FirstHintPosition(int* register_index) const;
  UsePosition* FirstHintPosition() const {
    int register_index;
    return FirstHintPosition(&register_index);
  }

  UsePosition* current_hint_position() const {
    DCHECK(current_hint_position_ == FirstHintPosition());
    return current_hint_position_;
  }

  LifetimePosition Start() const {
    DCHECK(!IsEmpty());
    return first_interval()->start();
  }

  LifetimePosition End() const {
    DCHECK(!IsEmpty());
    return last_interval_->end();
  }

  bool ShouldBeAllocatedBefore(const LiveRange* other) const;
  bool CanCover(LifetimePosition position) const;
  bool Covers(LifetimePosition position) const;
  LifetimePosition FirstIntersection(LiveRange* other) const;

  void VerifyChildStructure() const {
    VerifyIntervals();
    VerifyPositions();
  }

  void ConvertUsesToOperand(const InstructionOperand& op,
                            const InstructionOperand& spill_op);
  void SetUseHints(int register_index);
  void UnsetUseHints() { SetUseHints(kUnassignedRegister); }

  void Print(const RegisterConfiguration* config, bool with_children) const;
  void Print(bool with_children) const;

 private:
  friend class TopLevelLiveRange;
  explicit LiveRange(int relative_id, MachineRepresentation rep,
                     TopLevelLiveRange* top_level);

  void UpdateParentForAllChildren(TopLevelLiveRange* new_top_level);

  void set_spilled(bool value) { bits_ = SpilledField::update(bits_, value); }

  UseInterval* FirstSearchIntervalForPosition(LifetimePosition position) const;
  void AdvanceLastProcessedMarker(UseInterval* to_start_of,
                                  LifetimePosition but_not_past) const;

  void VerifyPositions() const;
  void VerifyIntervals() const;

  typedef BitField<bool, 0, 1> SpilledField;
  typedef BitField<int32_t, 6, 6> AssignedRegisterField;
  typedef BitField<MachineRepresentation, 12, 8> RepresentationField;

  // Unique among children and splinters of the same virtual register.
  int relative_id_;
  uint32_t bits_;
  UseInterval* last_interval_;
  UseInterval* first_interval_;
  UsePosition* first_pos_;
  TopLevelLiveRange* top_level_;
  LiveRange* next_;
  // This is used as a cache, it doesn't affect correctness.
  mutable UseInterval* current_interval_;
  // This is used as a cache, it doesn't affect correctness.
  mutable UsePosition* last_processed_use_;
  // This is used as a cache, it's invalid outside of BuildLiveRanges.
  mutable UsePosition* current_hint_position_;
  // Caches the last position splintering stopped at.
  mutable UsePosition* splitting_pointer_;

  DISALLOW_COPY_AND_ASSIGN(LiveRange);
};
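// Illustrative example (not part of the original header): for a range
// covering [10, 50),
//
//   LiveRange* child = range->SplitAt(LifetimePosition::FromInt(30), zone);
//
// leaves the original range covering [10, 30) and returns a linked child
// covering [30, 50); uses following the split position move to the child.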

class LiveRangeGroup final : public ZoneObject {
 public:
  explicit LiveRangeGroup(Zone* zone) : ranges_(zone) {}
  ZoneVector<LiveRange*>& ranges() { return ranges_; }
  const ZoneVector<LiveRange*>& ranges() const { return ranges_; }

  int assigned_register() const { return assigned_register_; }
  void set_assigned_register(int reg) { assigned_register_ = reg; }

 private:
  ZoneVector<LiveRange*> ranges_;
  int assigned_register_;
  DISALLOW_COPY_AND_ASSIGN(LiveRangeGroup);
};


class TopLevelLiveRange final : public LiveRange {
 public:
  explicit TopLevelLiveRange(int vreg, MachineRepresentation rep);
  int spill_start_index() const { return spill_start_index_; }

  bool IsFixed() const { return vreg_ < 0; }

  bool is_phi() const { return IsPhiField::decode(bits_); }
  void set_is_phi(bool value) { bits_ = IsPhiField::update(bits_, value); }

  bool is_non_loop_phi() const { return IsNonLoopPhiField::decode(bits_); }
  void set_is_non_loop_phi(bool value) {
    bits_ = IsNonLoopPhiField::update(bits_, value);
  }

  bool has_slot_use() const { return HasSlotUseField::decode(bits_); }
  void set_has_slot_use(bool value) {
    bits_ = HasSlotUseField::update(bits_, value);
  }

  // Add a new interval or a new use position to this live range.
  void EnsureInterval(LifetimePosition start, LifetimePosition end, Zone* zone);
  void AddUseInterval(LifetimePosition start, LifetimePosition end, Zone* zone);
  void AddUsePosition(UsePosition* pos);

  // Shorten the most recently added interval by setting a new start.
  void ShortenTo(LifetimePosition start);

  // Detaches between start and end, and attributes the resulting range to
  // the splinter (see splinter()). The current range is pointed to as
  // "splintered_from". No parent/child relationship is established between
  // this range and the splinter.
  void Splinter(LifetimePosition start, LifetimePosition end, Zone* zone);

  // Assuming other was splintered from this range, embeds other and its
  // children as part of the children sequence of this range.
  void Merge(TopLevelLiveRange* other, Zone* zone);
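  // Illustrative sketch (not part of the original header): splintering and
  // merging are used as a pair, roughly:
  //
  //   range->Splinter(start, end, zone);            // carve out [start, end)
  //   TopLevelLiveRange* part = range->splinter();  // allocate separately...
  //   range->Merge(part, zone);                     // ...then re-embed it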

  // Spill range management.
  void SetSpillRange(SpillRange* spill_range);
  enum class SpillType { kNoSpillType, kSpillOperand, kSpillRange };
  void set_spill_type(SpillType value) {
    bits_ = SpillTypeField::update(bits_, value);
  }
  SpillType spill_type() const { return SpillTypeField::decode(bits_); }
  InstructionOperand* GetSpillOperand() const {
    DCHECK(spill_type() == SpillType::kSpillOperand);
    return spill_operand_;
  }

  SpillRange* GetAllocatedSpillRange() const {
    DCHECK(spill_type() != SpillType::kSpillOperand);
    return spill_range_;
  }

  SpillRange* GetSpillRange() const {
    DCHECK(spill_type() == SpillType::kSpillRange);
    return spill_range_;
  }
  bool HasNoSpillType() const {
    return spill_type() == SpillType::kNoSpillType;
  }
  bool HasSpillOperand() const {
    return spill_type() == SpillType::kSpillOperand;
  }
  bool HasSpillRange() const { return spill_type() == SpillType::kSpillRange; }

  AllocatedOperand GetSpillRangeOperand() const;

  void RecordSpillLocation(Zone* zone, int gap_index,
                           InstructionOperand* operand);
  void SetSpillOperand(InstructionOperand* operand);
  void SetSpillStartIndex(int start) {
    spill_start_index_ = Min(start, spill_start_index_);
  }

  void CommitSpillMoves(InstructionSequence* sequence,
                        const InstructionOperand& operand,
                        bool might_be_duplicated);

  // If all the children of this range are spilled in deferred blocks, and if
  // for any non-spilled child with a use position requiring a slot, that range
  // is contained in a deferred block, mark the range as
  // IsSpilledOnlyInDeferredBlocks, so that we avoid spilling at definition,
  // and instead let the LiveRangeConnector perform the spills within the
  // deferred blocks. If so, we insert here spills for non-spilled ranges
  // with slot use positions.
  void TreatAsSpilledInDeferredBlock(Zone* zone, int total_block_count) {
    spill_start_index_ = -1;
    spilled_in_deferred_blocks_ = true;
    spill_move_insertion_locations_ = nullptr;
    list_of_blocks_requiring_spill_operands_ =
        new (zone) BitVector(total_block_count, zone);
  }

  void CommitSpillInDeferredBlocks(RegisterAllocationData* data,
                                   const InstructionOperand& spill_operand,
                                   BitVector* necessary_spill_points);

  TopLevelLiveRange* splintered_from() const { return splintered_from_; }
  bool IsSplinter() const { return splintered_from_ != nullptr; }
  bool MayRequireSpillRange() const {
    DCHECK(!IsSplinter());
    return !HasSpillOperand() && spill_range_ == nullptr;
  }
  void UpdateSpillRangePostMerge(TopLevelLiveRange* merged);
  int vreg() const { return vreg_; }

#if DEBUG
  int debug_virt_reg() const;
#endif

  void Verify() const;
  void VerifyChildrenInOrder() const;

  int GetNextChildId() {
    return IsSplinter() ? splintered_from()->GetNextChildId()
                        : ++last_child_id_;
  }

  int GetChildCount() const { return last_child_id_ + 1; }

  bool IsSpilledOnlyInDeferredBlocks() const {
    return spilled_in_deferred_blocks_;
  }

  struct SpillMoveInsertionList;

  SpillMoveInsertionList* GetSpillMoveInsertionLocations() const {
    DCHECK(!IsSpilledOnlyInDeferredBlocks());
    return spill_move_insertion_locations_;
  }
  TopLevelLiveRange* splinter() const { return splinter_; }
  void SetSplinter(TopLevelLiveRange* splinter) {
    DCHECK_NULL(splinter_);
    DCHECK_NOT_NULL(splinter);

    splinter_ = splinter;
    splinter->relative_id_ = GetNextChildId();
    splinter->set_spill_type(spill_type());
    splinter->SetSplinteredFrom(this);
  }

  void MarkHasPreassignedSlot() { has_preassigned_slot_ = true; }
  bool has_preassigned_slot() const { return has_preassigned_slot_; }

  void AddBlockRequiringSpillOperand(RpoNumber block_id) {
    DCHECK(IsSpilledOnlyInDeferredBlocks());
    GetListOfBlocksRequiringSpillOperands()->Add(block_id.ToInt());
  }

  BitVector* GetListOfBlocksRequiringSpillOperands() const {
    DCHECK(IsSpilledOnlyInDeferredBlocks());
    return list_of_blocks_requiring_spill_operands_;
  }

 private:
  void SetSplinteredFrom(TopLevelLiveRange* splinter_parent);

  typedef BitField<bool, 1, 1> HasSlotUseField;
  typedef BitField<bool, 2, 1> IsPhiField;
  typedef BitField<bool, 3, 1> IsNonLoopPhiField;
  typedef BitField<SpillType, 4, 2> SpillTypeField;

  int vreg_;
  int last_child_id_;
  TopLevelLiveRange* splintered_from_;
  union {
    // Correct value determined by spill_type().
    InstructionOperand* spill_operand_;
    SpillRange* spill_range_;
  };

  union {
    SpillMoveInsertionList* spill_move_insertion_locations_;
    BitVector* list_of_blocks_requiring_spill_operands_;
  };

  // TODO(mtrofin): generalize spilling after definition, currently specialized
  // just for spill in a single deferred block.
  bool spilled_in_deferred_blocks_;
  int spill_start_index_;
  UsePosition* last_pos_;
  TopLevelLiveRange* splinter_;
  bool has_preassigned_slot_;

  DISALLOW_COPY_AND_ASSIGN(TopLevelLiveRange);
};
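// Illustrative sketch (not part of the original header; the exact driver
// order is an assumption): the deferred-block spill protocol above is used
// roughly as follows once a range qualifies:
//
//   top->TreatAsSpilledInDeferredBlock(zone, block_count);
//   top->AddBlockRequiringSpillOperand(block_id);  // per deferred use block
//   ...
//   top->CommitSpillInDeferredBlocks(data, spill_operand, spill_points);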

struct PrintableLiveRange {
  const RegisterConfiguration* register_configuration_;
  const LiveRange* range_;
};


std::ostream& operator<<(std::ostream& os,
                         const PrintableLiveRange& printable_range);


class SpillRange final : public ZoneObject {
 public:
  static const int kUnassignedSlot = -1;
  SpillRange(TopLevelLiveRange* range, Zone* zone);

  UseInterval* interval() const { return use_interval_; }

  bool IsEmpty() const { return live_ranges_.empty(); }
  bool TryMerge(SpillRange* other);
  bool HasSlot() const { return assigned_slot_ != kUnassignedSlot; }

  void set_assigned_slot(int index) {
    DCHECK_EQ(kUnassignedSlot, assigned_slot_);
    assigned_slot_ = index;
  }
  int assigned_slot() {
    DCHECK_NE(kUnassignedSlot, assigned_slot_);
    return assigned_slot_;
  }
  const ZoneVector<TopLevelLiveRange*>& live_ranges() const {
    return live_ranges_;
  }
  ZoneVector<TopLevelLiveRange*>& live_ranges() { return live_ranges_; }
  int byte_width() const { return byte_width_; }
  RegisterKind kind() const { return kind_; }
  void Print() const;

 private:
  LifetimePosition End() const { return end_position_; }
  bool IsIntersectingWith(SpillRange* other) const;
  // Merge intervals, making sure the use intervals are sorted.
  void MergeDisjointIntervals(UseInterval* other);

  ZoneVector<TopLevelLiveRange*> live_ranges_;
  UseInterval* use_interval_;
  LifetimePosition end_position_;
  int assigned_slot_;
  int byte_width_;
  RegisterKind kind_;

  DISALLOW_COPY_AND_ASSIGN(SpillRange);
};


class RegisterAllocationData final : public ZoneObject {
 public:
  class PhiMapValue : public ZoneObject {
   public:
    PhiMapValue(PhiInstruction* phi, const InstructionBlock* block, Zone* zone);

    const PhiInstruction* phi() const { return phi_; }
    const InstructionBlock* block() const { return block_; }

    // For hinting.
    int assigned_register() const { return assigned_register_; }
    void set_assigned_register(int register_code) {
      DCHECK_EQ(assigned_register_, kUnassignedRegister);
      assigned_register_ = register_code;
    }
    void UnsetAssignedRegister() { assigned_register_ = kUnassignedRegister; }

    void AddOperand(InstructionOperand* operand);
    void CommitAssignment(const InstructionOperand& operand);

   private:
    PhiInstruction* const phi_;
    const InstructionBlock* const block_;
    ZoneVector<InstructionOperand*> incoming_operands_;
    int assigned_register_;
  };
  typedef ZoneMap<int, PhiMapValue*> PhiMap;

  struct DelayedReference {
    ReferenceMap* map;
    InstructionOperand* operand;
  };
  typedef ZoneVector<DelayedReference> DelayedReferences;
  typedef ZoneVector<std::pair<TopLevelLiveRange*, int>>
      RangesWithPreassignedSlots;

  RegisterAllocationData(const RegisterConfiguration* config,
                         Zone* allocation_zone, Frame* frame,
                         InstructionSequence* code,
                         const char* debug_name = nullptr);

  const ZoneVector<TopLevelLiveRange*>& live_ranges() const {
    return live_ranges_;
  }
  ZoneVector<TopLevelLiveRange*>& live_ranges() { return live_ranges_; }
  const ZoneVector<TopLevelLiveRange*>& fixed_live_ranges() const {
    return fixed_live_ranges_;
  }
  ZoneVector<TopLevelLiveRange*>& fixed_live_ranges() {
    return fixed_live_ranges_;
  }
  ZoneVector<TopLevelLiveRange*>& fixed_float_live_ranges() {
    return fixed_float_live_ranges_;
  }
  const ZoneVector<TopLevelLiveRange*>& fixed_float_live_ranges() const {
    return fixed_float_live_ranges_;
  }
  ZoneVector<TopLevelLiveRange*>& fixed_double_live_ranges() {
    return fixed_double_live_ranges_;
  }
  const ZoneVector<TopLevelLiveRange*>& fixed_double_live_ranges() const {
    return fixed_double_live_ranges_;
  }
  ZoneVector<BitVector*>& live_in_sets() { return live_in_sets_; }
  ZoneVector<BitVector*>& live_out_sets() { return live_out_sets_; }
  ZoneVector<SpillRange*>& spill_ranges() { return spill_ranges_; }
  DelayedReferences& delayed_references() { return delayed_references_; }
  InstructionSequence* code() const { return code_; }
  // This zone is for data structures only needed during register allocation
  // phases.
  Zone* allocation_zone() const { return allocation_zone_; }
  // This zone is for InstructionOperands and moves that live beyond register
  // allocation.
  Zone* code_zone() const { return code()->zone(); }
  Frame* frame() const { return frame_; }
  const char* debug_name() const { return debug_name_; }
  const RegisterConfiguration* config() const { return config_; }

  MachineRepresentation RepresentationFor(int virtual_register);

  TopLevelLiveRange* GetOrCreateLiveRangeFor(int index);
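  // Illustrative note (not part of the original header): allocation-phase
  // temporaries live in allocation_zone(), while anything reachable from the
  // final InstructionSequence must come from code_zone(). For example, a
  // scratch interval would be zone-allocated as
  //
  //   new (data->allocation_zone()) UseInterval(start, end);
  //
  // whereas committed operands and gap moves use code_zone().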
  // Creates a new live range.
  TopLevelLiveRange* NewLiveRange(int index, MachineRepresentation rep);
  TopLevelLiveRange* NextLiveRange(MachineRepresentation rep);

  SpillRange* AssignSpillRangeToLiveRange(TopLevelLiveRange* range);
  SpillRange* CreateSpillRangeForLiveRange(TopLevelLiveRange* range);

  MoveOperands* AddGapMove(int index, Instruction::GapPosition position,
                           const InstructionOperand& from,
                           const InstructionOperand& to);

  bool IsReference(TopLevelLiveRange* top_range) const {
    return code()->IsReference(top_range->vreg());
  }

  bool ExistsUseWithoutDefinition();
  bool RangesDefinedInDeferredStayInDeferred();

  void MarkAllocated(MachineRepresentation rep, int index);

  PhiMapValue* InitializePhiMap(const InstructionBlock* block,
                                PhiInstruction* phi);
  PhiMapValue* GetPhiMapValueFor(TopLevelLiveRange* top_range);
  PhiMapValue* GetPhiMapValueFor(int virtual_register);
  bool IsBlockBoundary(LifetimePosition pos) const;

  RangesWithPreassignedSlots& preassigned_slot_ranges() {
    return preassigned_slot_ranges_;
  }

 private:
  int GetNextLiveRangeId();

  Zone* const allocation_zone_;
  Frame* const frame_;
  InstructionSequence* const code_;
  const char* const debug_name_;
  const RegisterConfiguration* const config_;
  PhiMap phi_map_;
  ZoneVector<BitVector*> live_in_sets_;
  ZoneVector<BitVector*> live_out_sets_;
  ZoneVector<TopLevelLiveRange*> live_ranges_;
  ZoneVector<TopLevelLiveRange*> fixed_live_ranges_;
  ZoneVector<TopLevelLiveRange*> fixed_float_live_ranges_;
  ZoneVector<TopLevelLiveRange*> fixed_double_live_ranges_;
  ZoneVector<SpillRange*> spill_ranges_;
  DelayedReferences delayed_references_;
  BitVector* assigned_registers_;
  BitVector* assigned_double_registers_;
  int virtual_register_count_;
  RangesWithPreassignedSlots preassigned_slot_ranges_;

  DISALLOW_COPY_AND_ASSIGN(RegisterAllocationData);
};


class ConstraintBuilder final : public ZoneObject {
 public:
  explicit ConstraintBuilder(RegisterAllocationData* data);

  // Phase 1: insert moves to account for fixed register operands.
  void MeetRegisterConstraints();

  // Phase 2: deconstruct SSA by inserting moves in successors and the headers
  // of blocks containing phis.
  void ResolvePhis();

 private:
  RegisterAllocationData* data() const { return data_; }
  InstructionSequence* code() const { return data()->code(); }
  Zone* allocation_zone() const { return data()->allocation_zone(); }

  InstructionOperand* AllocateFixed(UnallocatedOperand* operand, int pos,
                                    bool is_tagged);
  void MeetRegisterConstraints(const InstructionBlock* block);
  void MeetConstraintsBefore(int index);
  void MeetConstraintsAfter(int index);
  void MeetRegisterConstraintsForLastInstructionInBlock(
      const InstructionBlock* block);
  void ResolvePhis(const InstructionBlock* block);

  RegisterAllocationData* const data_;

  DISALLOW_COPY_AND_ASSIGN(ConstraintBuilder);
};
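
// Illustrative sketch (not part of the original header; the actual driver
// lives elsewhere in the compiler pipeline): the phase classes in this file
// are typically run in their numbered order, roughly:
//
//   ConstraintBuilder(data).MeetRegisterConstraints();          // phase 1
//   ConstraintBuilder(data).ResolvePhis();                      // phase 2
//   LiveRangeBuilder(data, zone).BuildLiveRanges();             // phase 3
//   LinearScanAllocator(data, GENERAL_REGISTERS, zone)
//       .AllocateRegisters();                                   // phase 4
//   OperandAssigner(data).AssignSpillSlots();                   // phase 5
//   OperandAssigner(data).CommitAssignment();                   // phase 6
//   ReferenceMapPopulator(data).PopulateReferenceMaps();        // phase 7
//   LiveRangeConnector(data).ConnectRanges(zone);               // phase 8
//   LiveRangeConnector(data).ResolveControlFlow(zone);          // phase 9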

class LiveRangeBuilder final : public ZoneObject {
 public:
  explicit LiveRangeBuilder(RegisterAllocationData* data, Zone* local_zone);

  // Phase 3: compute liveness of all virtual registers.
  void BuildLiveRanges();
  static BitVector* ComputeLiveOut(const InstructionBlock* block,
                                   RegisterAllocationData* data);

 private:
  RegisterAllocationData* data() const { return data_; }
  InstructionSequence* code() const { return data()->code(); }
  Zone* allocation_zone() const { return data()->allocation_zone(); }
  Zone* code_zone() const { return code()->zone(); }
  const RegisterConfiguration* config() const { return data()->config(); }
  ZoneVector<BitVector*>& live_in_sets() const {
    return data()->live_in_sets();
  }

  // Verification.
  void Verify() const;
  bool IntervalStartsAtBlockBoundary(const UseInterval* interval) const;
  bool IntervalPredecessorsCoveredByRange(
      const UseInterval* interval, const TopLevelLiveRange* range) const;
  bool NextIntervalStartsInDifferentBlocks(const UseInterval* interval) const;

  // Liveness analysis support.
  void AddInitialIntervals(const InstructionBlock* block, BitVector* live_out);
  void ProcessInstructions(const InstructionBlock* block, BitVector* live);
  void ProcessPhis(const InstructionBlock* block, BitVector* live);
  void ProcessLoopHeader(const InstructionBlock* block, BitVector* live);

  static int FixedLiveRangeID(int index) { return -index - 1; }
  int FixedFPLiveRangeID(int index, MachineRepresentation rep);
  TopLevelLiveRange* FixedLiveRangeFor(int index);
  TopLevelLiveRange* FixedFPLiveRangeFor(int index, MachineRepresentation rep);

  void MapPhiHint(InstructionOperand* operand, UsePosition* use_pos);
  void ResolvePhiHint(InstructionOperand* operand, UsePosition* use_pos);

  UsePosition* NewUsePosition(LifetimePosition pos,
                              InstructionOperand* operand, void* hint,
                              UsePositionHintType hint_type);
  UsePosition* NewUsePosition(LifetimePosition pos) {
    return NewUsePosition(pos, nullptr, nullptr, UsePositionHintType::kNone);
  }
  TopLevelLiveRange* LiveRangeFor(InstructionOperand* operand);
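  // Illustrative example (not part of the original header): fixed registers
  // get negative "virtual register" numbers so they cannot collide with real
  // vregs, e.g. FixedLiveRangeID(0) == -1 and FixedLiveRangeID(1) == -2;
  // this is what makes TopLevelLiveRange::IsFixed() a simple vreg_ < 0 test.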
  // Helper methods for building intervals.
  UsePosition* Define(LifetimePosition position, InstructionOperand* operand,
                      void* hint, UsePositionHintType hint_type);
  void Define(LifetimePosition position, InstructionOperand* operand) {
    Define(position, operand, nullptr, UsePositionHintType::kNone);
  }
  UsePosition* Use(LifetimePosition block_start, LifetimePosition position,
                   InstructionOperand* operand, void* hint,
                   UsePositionHintType hint_type);
  void Use(LifetimePosition block_start, LifetimePosition position,
           InstructionOperand* operand) {
    Use(block_start, position, operand, nullptr, UsePositionHintType::kNone);
  }

  RegisterAllocationData* const data_;
  ZoneMap<InstructionOperand*, UsePosition*> phi_hints_;

  DISALLOW_COPY_AND_ASSIGN(LiveRangeBuilder);
};


class RegisterAllocator : public ZoneObject {
 public:
  RegisterAllocator(RegisterAllocationData* data, RegisterKind kind);

 protected:
  RegisterAllocationData* data() const { return data_; }
  InstructionSequence* code() const { return data()->code(); }
  RegisterKind mode() const { return mode_; }
  int num_registers() const { return num_registers_; }
  int num_allocatable_registers() const { return num_allocatable_registers_; }
  const int* allocatable_register_codes() const {
    return allocatable_register_codes_;
  }

  // TODO(mtrofin): explain why splitting at a gap START is always OK.
  LifetimePosition GetSplitPositionForInstruction(const LiveRange* range,
                                                  int instruction_index);

  Zone* allocation_zone() const { return data()->allocation_zone(); }

  // Find the optimal split for ranges defined by a memory operand, e.g.
  // constants or function parameters passed on the stack.
  void SplitAndSpillRangesDefinedByMemoryOperand(bool operands_only);

  // Split the given range at the given position.
  // If the range starts at or after the given position then the
  // original range is returned.
  // Otherwise returns the live range that starts at pos and contains
  // all uses from the original range that follow pos. Uses at pos will
  // still be owned by the original range after splitting.
  LiveRange* SplitRangeAt(LiveRange* range, LifetimePosition pos);

  bool CanProcessRange(LiveRange* range) const {
    return range != nullptr && !range->IsEmpty() && range->kind() == mode();
  }

  // Split the given range at a position from the interval [start, end].
  LiveRange* SplitBetween(LiveRange* range, LifetimePosition start,
                          LifetimePosition end);

  // Find a lifetime position in the interval [start, end] which
  // is optimal for splitting: it is either the header of the outermost
  // loop covered by this interval or the latest possible position.
  LifetimePosition FindOptimalSplitPos(LifetimePosition start,
                                       LifetimePosition end);

  void Spill(LiveRange* range);
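  // Illustrative sketch (not part of the original header; the pattern is an
  // assumption): subclasses typically combine these helpers by splitting off
  // the tail of a range and spilling it:
  //
  //   LiveRange* tail = SplitBetween(range, start, end);
  //   Spill(tail);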

  // If we are trying to spill a range inside a loop, try to hoist the spill
  // position out to the point just before the loop.
  LifetimePosition FindOptimalSpillingPos(LiveRange* range,
                                          LifetimePosition pos);

  const ZoneVector<TopLevelLiveRange*>& GetFixedRegisters() const;
  const char* RegisterName(int allocation_index) const;

 private:
  RegisterAllocationData* const data_;
  const RegisterKind mode_;
  const int num_registers_;
  int num_allocatable_registers_;
  const int* allocatable_register_codes_;

  bool no_combining_;

  DISALLOW_COPY_AND_ASSIGN(RegisterAllocator);
};


class LinearScanAllocator final : public RegisterAllocator {
 public:
  LinearScanAllocator(RegisterAllocationData* data, RegisterKind kind,
                      Zone* local_zone);

  // Phase 4: compute register assignments.
  void AllocateRegisters();

 private:
  ZoneVector<LiveRange*>& unhandled_live_ranges() {
    return unhandled_live_ranges_;
  }
  ZoneVector<LiveRange*>& active_live_ranges() { return active_live_ranges_; }
  ZoneVector<LiveRange*>& inactive_live_ranges() {
    return inactive_live_ranges_;
  }

  void SetLiveRangeAssignedRegister(LiveRange* range, int reg);

  // Helper methods for updating the live range lists.
  void AddToActive(LiveRange* range);
  void AddToInactive(LiveRange* range);
  void AddToUnhandledSorted(LiveRange* range);
  void AddToUnhandledUnsorted(LiveRange* range);
  void SortUnhandled();
  bool UnhandledIsSorted();
  void ActiveToHandled(LiveRange* range);
  void ActiveToInactive(LiveRange* range);
  void InactiveToHandled(LiveRange* range);
  void InactiveToActive(LiveRange* range);

  // Helper methods for allocating registers.
  bool TryReuseSpillForPhi(TopLevelLiveRange* range);
  bool TryAllocateFreeReg(LiveRange* range);
  void AllocateBlockedReg(LiveRange* range);

  // Spill the given live range after position pos.
  void SpillAfter(LiveRange* range, LifetimePosition pos);

  // Spill the given live range after position [start] and up to position
  // [end].
  void SpillBetween(LiveRange* range, LifetimePosition start,
                    LifetimePosition end);

  // Spill the given live range after position [start] and up to position
  // [end]. The range is guaranteed to be spilled at least until position
  // [until].
  void SpillBetweenUntil(LiveRange* range, LifetimePosition start,
                         LifetimePosition until, LifetimePosition end);

  void SplitAndSpillIntersecting(LiveRange* range);

  ZoneVector<LiveRange*> unhandled_live_ranges_;
  ZoneVector<LiveRange*> active_live_ranges_;
  ZoneVector<LiveRange*> inactive_live_ranges_;

#ifdef DEBUG
  LifetimePosition allocation_finger_;
#endif

  DISALLOW_COPY_AND_ASSIGN(LinearScanAllocator);
};


class SpillSlotLocator final : public ZoneObject {
 public:
  explicit SpillSlotLocator(RegisterAllocationData* data);

  void LocateSpillSlots();

 private:
  RegisterAllocationData* data() const { return data_; }

  RegisterAllocationData* const data_;

  DISALLOW_COPY_AND_ASSIGN(SpillSlotLocator);
};


class OperandAssigner final : public ZoneObject {
 public:
  explicit OperandAssigner(RegisterAllocationData* data);

  // Phase 5: assign spill slots.
  void AssignSpillSlots();

  // Phase 6: commit assignment.
  void CommitAssignment();

 private:
  RegisterAllocationData* data() const { return data_; }

  RegisterAllocationData* const data_;

  DISALLOW_COPY_AND_ASSIGN(OperandAssigner);
};


class ReferenceMapPopulator final : public ZoneObject {
 public:
  explicit ReferenceMapPopulator(RegisterAllocationData* data);

  // Phase 7: compute values for pointer maps.
  void PopulateReferenceMaps();

 private:
  RegisterAllocationData* data() const { return data_; }

  bool SafePointsAreInOrder() const;

  RegisterAllocationData* const data_;

  DISALLOW_COPY_AND_ASSIGN(ReferenceMapPopulator);
};


class LiveRangeBoundArray;
// Insert moves of the form
//
//   Operand(child_(k+1)) = Operand(child_k)
//
// where child_k and child_(k+1) are consecutive children of a range (so
// child_k->next() == child_(k+1)), and Operand(...) refers to the
// assigned operand, be it a register or a slot.
class LiveRangeConnector final : public ZoneObject {
 public:
  explicit LiveRangeConnector(RegisterAllocationData* data);

  // Phase 8: reconnect split ranges with moves, when the control flow
  // between the ranges is trivial (no branches).
  void ConnectRanges(Zone* local_zone);

  // Phase 9: insert moves to connect ranges across basic blocks, when the
  // control flow between them cannot be trivially resolved, such as joining
  // branches.
  void ResolveControlFlow(Zone* local_zone);

 private:
  RegisterAllocationData* data() const { return data_; }
  InstructionSequence* code() const { return data()->code(); }
  Zone* code_zone() const { return code()->zone(); }

  bool CanEagerlyResolveControlFlow(const InstructionBlock* block) const;

  int ResolveControlFlow(const InstructionBlock* block,
                         const InstructionOperand& cur_op,
                         const InstructionBlock* pred,
                         const InstructionOperand& pred_op);

  void CommitSpillsInDeferredBlocks(TopLevelLiveRange* range,
                                    LiveRangeBoundArray* array,
                                    Zone* temp_zone);

  RegisterAllocationData* const data_;

  DISALLOW_COPY_AND_ASSIGN(LiveRangeConnector);
};

}  // namespace compiler
}  // namespace internal
}  // namespace v8

#endif  // V8_REGISTER_ALLOCATOR_H_