1 // Copyright 2012 the V8 project authors. All rights reserved. 2 // Redistribution and use in source and binary forms, with or without 3 // modification, are permitted provided that the following conditions are 4 // met: 5 // 6 // * Redistributions of source code must retain the above copyright 7 // notice, this list of conditions and the following disclaimer. 8 // * Redistributions in binary form must reproduce the above 9 // copyright notice, this list of conditions and the following 10 // disclaimer in the documentation and/or other materials provided 11 // with the distribution. 12 // * Neither the name of Google Inc. nor the names of its 13 // contributors may be used to endorse or promote products derived 14 // from this software without specific prior written permission. 15 // 16 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS 17 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT 18 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR 19 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT 20 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, 21 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT 22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, 23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY 24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT 25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE 26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 27 28 #ifndef V8_LITHIUM_ALLOCATOR_H_ 29 #define V8_LITHIUM_ALLOCATOR_H_ 30 31 #include "v8.h" 32 33 #include "allocation.h" 34 #include "lithium.h" 35 #include "zone.h" 36 37 namespace v8 { 38 namespace internal { 39 40 // Forward declarations. 
class HBasicBlock;
class HGraph;
class HInstruction;
class HPhi;
class HTracer;
class HValue;
class BitVector;
class StringStream;

class LArgument;
class LChunk;
class LOperand;
class LUnallocated;
class LConstantOperand;
class LGap;
class LParallelMove;
class LPointerMap;
class LStackSlot;
class LRegister;


// This class represents a single point of a LOperand's lifetime.
// For each lithium instruction there are exactly two lifetime positions:
// the beginning and the end of the instruction. Lifetime positions for
// different lithium instructions are disjoint.
class LifetimePosition {
 public:
  // Returns the lifetime position that corresponds to the beginning of
  // the instruction with the given index.
  static LifetimePosition FromInstructionIndex(int index) {
    return LifetimePosition(index * kStep);
  }

  // Returns a numeric representation of this lifetime position.
  int Value() const {
    return value_;
  }

  // Returns the index of the instruction to which this lifetime position
  // corresponds.
  int InstructionIndex() const {
    ASSERT(IsValid());
    return value_ / kStep;
  }

  // Returns true if this lifetime position corresponds to the instruction
  // start (i.e. its value is a multiple of kStep).
  bool IsInstructionStart() const {
    return (value_ & (kStep - 1)) == 0;
  }

  // Returns the lifetime position for the start of the instruction which
  // corresponds to this lifetime position.
  LifetimePosition InstructionStart() const {
    ASSERT(IsValid());
    return LifetimePosition(value_ & ~(kStep - 1));
  }

  // Returns the lifetime position for the end of the instruction which
  // corresponds to this lifetime position (start + kStep/2).
  LifetimePosition InstructionEnd() const {
    ASSERT(IsValid());
    return LifetimePosition(InstructionStart().Value() + kStep/2);
  }

  // Returns the lifetime position for the beginning of the next instruction.
  LifetimePosition NextInstruction() const {
    ASSERT(IsValid());
    return LifetimePosition(InstructionStart().Value() + kStep);
  }

  // Returns the lifetime position for the beginning of the previous
  // instruction.
  LifetimePosition PrevInstruction() const {
    ASSERT(IsValid());
    ASSERT(value_ > 1);
    return LifetimePosition(InstructionStart().Value() - kStep);
  }

  // Constructs the lifetime position which does not correspond to any
  // instruction.
  LifetimePosition() : value_(-1) {}

  // Returns true if this lifetime position corresponds to some
  // instruction.
  bool IsValid() const { return value_ != -1; }

  static inline LifetimePosition Invalid() { return LifetimePosition(); }

  static inline LifetimePosition MaxPosition() {
    // We have to use this kind of getter instead of static member due to
    // crash bug in GDB.
    return LifetimePosition(kMaxInt);
  }

 private:
  // Two positions per instruction: the beginning and the end.
  static const int kStep = 2;

  // Code relies on kStep being a power of two (see the bit masking in
  // IsInstructionStart() and InstructionStart()).
  STATIC_ASSERT(IS_POWER_OF_TWO(kStep));

  explicit LifetimePosition(int value) : value_(value) { }

  int value_;
};


// The two register files the allocator distinguishes between.
enum RegisterKind {
  GENERAL_REGISTERS,
  DOUBLE_REGISTERS
};


// A register-allocator view of a Lithium instruction. It contains the id of
// the output operand and a list of input operand uses.

class LInstruction;
class LEnvironment;

// Iterator for non-null temp operands.
class TempIterator BASE_EMBEDDED {
 public:
  inline explicit TempIterator(LInstruction* instr);
  inline bool Done();
  inline LOperand* Current();
  inline void Advance();

 private:
  // Moves current_ forward past operands the iterator should not yield
  // (per the class comment: null temps are skipped).
  inline void SkipUninteresting();
  LInstruction* instr_;
  int limit_;
  int current_;
};


// Iterator for non-constant input operands.
class InputIterator BASE_EMBEDDED {
 public:
  inline explicit InputIterator(LInstruction* instr);
  inline bool Done();
  inline LOperand* Current();
  inline void Advance();

 private:
  // Moves current_ forward past operands the iterator should not yield
  // (per the class comment: constant inputs are skipped).
  inline void SkipUninteresting();
  LInstruction* instr_;
  int limit_;
  int current_;
};


// Iterator over all operand uses of an instruction: its inputs followed by
// the uses reached through env_iterator_.
class UseIterator BASE_EMBEDDED {
 public:
  inline explicit UseIterator(LInstruction* instr);
  inline bool Done();
  inline LOperand* Current();
  inline void Advance();

 private:
  InputIterator input_iterator_;
  // NOTE(review): DeepIterator is declared elsewhere (not visible in this
  // header) — presumably it walks the instruction's environment operands.
  DeepIterator env_iterator_;
};


// Representation of the non-empty half-open interval [start, end[.
class UseInterval: public ZoneObject {
 public:
  UseInterval(LifetimePosition start, LifetimePosition end)
      : start_(start), end_(end), next_(NULL) {
    ASSERT(start.Value() < end.Value());
  }

  LifetimePosition start() const { return start_; }
  LifetimePosition end() const { return end_; }
  UseInterval* next() const { return next_; }

  // Split this interval at the given position without affecting the
  // live range that owns it. The interval must contain the position.
  void SplitAt(LifetimePosition pos, Zone* zone);

  // If this interval intersects with other return smallest position
  // that belongs to both of them.
  LifetimePosition Intersect(const UseInterval* other) const {
    // Normalize so that *this starts no later than *other.
    if (other->start().Value() < start_.Value()) return other->Intersect(this);
    if (other->start().Value() < end_.Value()) return other->start();
    return LifetimePosition::Invalid();
  }

  bool Contains(LifetimePosition point) const {
    return start_.Value() <= point.Value() && point.Value() < end_.Value();
  }

 private:
  void set_start(LifetimePosition start) { start_ = start; }
  void set_next(UseInterval* next) { next_ = next; }

  LifetimePosition start_;
  LifetimePosition end_;
  UseInterval* next_;

  friend class LiveRange;  // Assigns to start_.
};

// Representation of a use position.
class UsePosition: public ZoneObject {
 public:
  UsePosition(LifetimePosition pos, LOperand* operand);

  LOperand* operand() const { return operand_; }
  bool HasOperand() const { return operand_ != NULL; }

  // Register hint used to bias allocation for this use.
  LOperand* hint() const { return hint_; }
  void set_hint(LOperand* hint) { hint_ = hint; }
  bool HasHint() const;
  bool RequiresRegister() const;
  bool RegisterIsBeneficial() const;

  LifetimePosition pos() const { return pos_; }
  UsePosition* next() const { return next_; }

 private:
  void set_next(UsePosition* next) { next_ = next; }

  LOperand* operand_;
  LOperand* hint_;
  LifetimePosition pos_;
  UsePosition* next_;
  // NOTE(review): presumably cached answers for RequiresRegister() and
  // RegisterIsBeneficial(), computed in the constructor — confirm in the .cc.
  bool requires_reg_;
  bool register_beneficial_;

  friend class LiveRange;
};

// Representation of SSA values' live ranges as a collection of (continuous)
// intervals over the instruction ordering.
class LiveRange: public ZoneObject {
 public:
  static const int kInvalidAssignment = 0x7fffffff;

  LiveRange(int id, Zone* zone);

  UseInterval* first_interval() const { return first_interval_; }
  UsePosition* first_pos() const { return first_pos_; }
  LiveRange* parent() const { return parent_; }
  LiveRange* TopLevel() { return (parent_ == NULL) ? this : parent_; }
  LiveRange* next() const { return next_; }
  bool IsChild() const { return parent() != NULL; }
  int id() const { return id_; }
  // Fixed ranges (for physical registers) are given negative ids; see
  // LAllocator::FixedLiveRangeID below.
  bool IsFixed() const { return id_ < 0; }
  bool IsEmpty() const { return first_interval() == NULL; }
  LOperand* CreateAssignedOperand(Zone* zone);
  int assigned_register() const { return assigned_register_; }
  int spill_start_index() const { return spill_start_index_; }
  void set_assigned_register(int reg,
                             RegisterKind register_kind,
                             Zone* zone);
  void MakeSpilled(Zone* zone);

  // Returns use position in this live range that follows both start
  // and last processed use position.
  // Modifies internal state of live range!
  UsePosition* NextUsePosition(LifetimePosition start);

  // Returns use position for which register is required in this live
  // range and which follows both start and last processed use position.
  // Modifies internal state of live range!
  UsePosition* NextRegisterPosition(LifetimePosition start);

  // Returns use position for which register is beneficial in this live
  // range and which follows both start and last processed use position.
  // Modifies internal state of live range!
  UsePosition* NextUsePositionRegisterIsBeneficial(LifetimePosition start);

  // Can this live range be spilled at this position.
  bool CanBeSpilled(LifetimePosition pos);

  // Split this live range at the given position which must follow the start of
  // the range.
  // All uses following the given position will be moved from this
  // live range to the result live range. Uses at pos will
  // still be owned by the original range after splitting.
  void SplitAt(LifetimePosition position, LiveRange* result, Zone* zone);

  bool IsDouble() const { return is_double_; }
  bool HasRegisterAssigned() const {
    return assigned_register_ != kInvalidAssignment;
  }
  bool IsSpilled() const { return spilled_; }
  UsePosition* FirstPosWithHint() const;

  // Returns the hint of the first hinted use position, or NULL if there
  // is none.
  LOperand* FirstHint() const {
    UsePosition* pos = FirstPosWithHint();
    if (pos != NULL) return pos->hint();
    return NULL;
  }

  LifetimePosition Start() const {
    ASSERT(!IsEmpty());
    return first_interval()->start();
  }

  LifetimePosition End() const {
    ASSERT(!IsEmpty());
    return last_interval_->end();
  }

  bool HasAllocatedSpillOperand() const;
  LOperand* GetSpillOperand() const { return spill_operand_; }
  void SetSpillOperand(LOperand* operand);

  // Records the earliest index at which this range is spilled; only ever
  // moves the index down.
  void SetSpillStartIndex(int start) {
    spill_start_index_ = Min(start, spill_start_index_);
  }

  bool ShouldBeAllocatedBefore(const LiveRange* other) const;
  bool CanCover(LifetimePosition position) const;
  bool Covers(LifetimePosition position);
  LifetimePosition FirstIntersection(LiveRange* other);

  // Add a new interval or a new use position to this live range.
  void EnsureInterval(LifetimePosition start,
                      LifetimePosition end,
                      Zone* zone);
  void AddUseInterval(LifetimePosition start,
                      LifetimePosition end,
                      Zone* zone);
  UsePosition* AddUsePosition(LifetimePosition pos,
                              LOperand* operand,
                              Zone* zone);

  // Shorten the most recently added interval by setting a new start.
  void ShortenTo(LifetimePosition start);

#ifdef DEBUG
  // True if target overlaps an existing interval.
  bool HasOverlap(UseInterval* target) const;
  void Verify() const;
#endif

 private:
  void ConvertOperands(Zone* zone);
  UseInterval* FirstSearchIntervalForPosition(LifetimePosition position) const;
  void AdvanceLastProcessedMarker(UseInterval* to_start_of,
                                  LifetimePosition but_not_past) const;

  int id_;
  bool spilled_;
  bool is_double_;
  int assigned_register_;
  UseInterval* last_interval_;
  UseInterval* first_interval_;
  UsePosition* first_pos_;
  LiveRange* parent_;
  LiveRange* next_;
  // This is used as a cache, it doesn't affect correctness.
  mutable UseInterval* current_interval_;
  UsePosition* last_processed_use_;
  LOperand* spill_operand_;
  int spill_start_index_;
};


// A bit vector that grows on demand. The backing BitVector is allocated
// lazily in a Zone and its length is doubled until it covers the value
// being added.
class GrowableBitVector BASE_EMBEDDED {
 public:
  GrowableBitVector() : bits_(NULL) { }

  bool Contains(int value) const {
    if (!InBitsRange(value)) return false;
    return bits_->Contains(value);
  }

  void Add(int value, Zone* zone) {
    EnsureCapacity(value, zone);
    bits_->Add(value);
  }

 private:
  static const int kInitialLength = 1024;

  bool InBitsRange(int value) const {
    return bits_ != NULL && bits_->length() > value;
  }

  void EnsureCapacity(int value, Zone* zone) {
    if (InBitsRange(value)) return;
    int new_length = bits_ == NULL ? kInitialLength : bits_->length();
    // Keep doubling so that value fits strictly inside the new length.
    while (new_length <= value) new_length *= 2;
    BitVector* new_bits = new(zone) BitVector(new_length, zone);
    if (bits_ != NULL) new_bits->CopyFrom(*bits_);
    bits_ = new_bits;
  }

  BitVector* bits_;
};


// Linear-scan register allocator operating on the Lithium representation
// of a Hydrogen graph.
class LAllocator BASE_EMBEDDED {
 public:
  LAllocator(int first_virtual_register, HGraph* graph);

  static void TraceAlloc(const char* msg, ...);

  // Checks whether the value of a given virtual register is tagged.
  bool HasTaggedValue(int virtual_register) const;

  // Returns the register kind required by the given virtual register.
  RegisterKind RequiredRegisterKind(int virtual_register) const;

  // Runs the allocation over the given chunk. Check AllocationOk()
  // afterwards; GetVirtualRegister() may have failed during the run.
  bool Allocate(LChunk* chunk);

  const ZoneList<LiveRange*>* live_ranges() const { return &live_ranges_; }
  const Vector<LiveRange*>* fixed_live_ranges() const {
    return &fixed_live_ranges_;
  }
  const Vector<LiveRange*>* fixed_double_live_ranges() const {
    return &fixed_double_live_ranges_;
  }

  LChunk* chunk() const { return chunk_; }
  HGraph* graph() const { return graph_; }

  // Hands out the next virtual register number. On overflow of the
  // virtual register space the allocation is flagged as failed rather
  // than aborting; callers must eventually check AllocationOk().
  int GetVirtualRegister() {
    if (next_virtual_register_ > LUnallocated::kMaxVirtualRegisters) {
      allocation_ok_ = false;
    }
    return next_virtual_register_++;
  }

  bool AllocationOk() { return allocation_ok_; }

  void MarkAsOsrEntry() {
    // There can be only one.
    ASSERT(!has_osr_entry_);
    // Simply set a flag to find and process instruction later.
    has_osr_entry_ = true;
  }

#ifdef DEBUG
  void Verify() const;
#endif

 private:
  // Top-level phases of the allocation, in the order they run.
  void MeetRegisterConstraints();
  void ResolvePhis();
  void BuildLiveRanges();
  void AllocateGeneralRegisters();
  void AllocateDoubleRegisters();
  void ConnectRanges();
  void ResolveControlFlow();
  void PopulatePointerMaps();
  void ProcessOsrEntry();
  void AllocateRegisters();
  bool CanEagerlyResolveControlFlow(HBasicBlock* block) const;
  inline bool SafePointsAreInOrder() const;

  // Liveness analysis support.
  void InitializeLivenessAnalysis();
  BitVector* ComputeLiveOut(HBasicBlock* block);
  void AddInitialIntervals(HBasicBlock* block, BitVector* live_out);
  void ProcessInstructions(HBasicBlock* block, BitVector* live);
  void MeetRegisterConstraints(HBasicBlock* block);
  void MeetConstraintsBetween(LInstruction* first,
                              LInstruction* second,
                              int gap_index);
  void ResolvePhis(HBasicBlock* block);

  // Helper methods for building intervals.
  LOperand* AllocateFixed(LUnallocated* operand, int pos, bool is_tagged);
  LiveRange* LiveRangeFor(LOperand* operand);
  void Define(LifetimePosition position, LOperand* operand, LOperand* hint);
  void Use(LifetimePosition block_start,
           LifetimePosition position,
           LOperand* operand,
           LOperand* hint);
  void AddConstraintsGapMove(int index, LOperand* from, LOperand* to);

  // Helper methods for updating the live range lists.
  void AddToActive(LiveRange* range);
  void AddToInactive(LiveRange* range);
  void AddToUnhandledSorted(LiveRange* range);
  void AddToUnhandledUnsorted(LiveRange* range);
  void SortUnhandled();
  bool UnhandledIsSorted();
  void ActiveToHandled(LiveRange* range);
  void ActiveToInactive(LiveRange* range);
  void InactiveToHandled(LiveRange* range);
  void InactiveToActive(LiveRange* range);
  void FreeSpillSlot(LiveRange* range);
  LOperand* TryReuseSpillSlot(LiveRange* range);

  // Helper methods for allocating registers.
  bool TryAllocateFreeReg(LiveRange* range);
  void AllocateBlockedReg(LiveRange* range);

  // Live range splitting helpers.

  // Split the given range at the given position.
  // If range starts at or after the given position then the
  // original range is returned.
  // Otherwise returns the live range that starts at pos and contains
  // all uses from the original range that follow pos. Uses at pos will
  // still be owned by the original range after splitting.
  LiveRange* SplitRangeAt(LiveRange* range, LifetimePosition pos);

  // Split the given range at a position from the interval [start, end].
  LiveRange* SplitBetween(LiveRange* range,
                          LifetimePosition start,
                          LifetimePosition end);

  // Find a lifetime position in the interval [start, end] which
  // is optimal for splitting: it is either header of the outermost
  // loop covered by this interval or the latest possible position.
  LifetimePosition FindOptimalSplitPos(LifetimePosition start,
                                       LifetimePosition end);

  // Spill the given live range after position pos.
  void SpillAfter(LiveRange* range, LifetimePosition pos);

  // Spill the given live range after position start and up to position end.
  void SpillBetween(LiveRange* range,
                    LifetimePosition start,
                    LifetimePosition end);

  void SplitAndSpillIntersecting(LiveRange* range);

  void Spill(LiveRange* range);
  bool IsBlockBoundary(LifetimePosition pos);

  // Helper methods for resolving control flow.
  void ResolveControlFlow(LiveRange* range,
                          HBasicBlock* block,
                          HBasicBlock* pred);

  // Returns the parallel move that should be used to connect ranges split
  // at the given position.
  LParallelMove* GetConnectingParallelMove(LifetimePosition pos);

  // Returns the block which contains the given lifetime position.
  HBasicBlock* GetBlock(LifetimePosition pos);

  // Helper methods for the fixed registers.
  int RegisterCount() const;
  // Fixed ranges use negative ids so they never collide with virtual
  // register numbers (see LiveRange::IsFixed).
  static int FixedLiveRangeID(int index) { return -index - 1; }
  static int FixedDoubleLiveRangeID(int index);
  LiveRange* FixedLiveRangeFor(int index);
  LiveRange* FixedDoubleLiveRangeFor(int index);
  LiveRange* LiveRangeFor(int index);
  HPhi* LookupPhi(LOperand* operand) const;
  LGap* GetLastGap(HBasicBlock* block);

  const char* RegisterName(int allocation_index);

  inline bool IsGapAt(int index);

  inline LInstruction* InstructionAt(int index);

  inline LGap* GapAt(int index);

  Zone* zone_;

  LChunk* chunk_;

  // During liveness analysis keep a mapping from block id to live_in sets
  // for blocks already analyzed.
  ZoneList<BitVector*> live_in_sets_;

  // Liveness analysis results.
  ZoneList<LiveRange*> live_ranges_;

  // Lists of live ranges.
  EmbeddedVector<LiveRange*, Register::kNumAllocatableRegisters>
      fixed_live_ranges_;
  EmbeddedVector<LiveRange*, DoubleRegister::kNumAllocatableRegisters>
      fixed_double_live_ranges_;
  ZoneList<LiveRange*> unhandled_live_ranges_;
  ZoneList<LiveRange*> active_live_ranges_;
  ZoneList<LiveRange*> inactive_live_ranges_;
  ZoneList<LiveRange*> reusable_slots_;

  // Next virtual register number to be assigned to temporaries.
  int next_virtual_register_;
  int first_artificial_register_;
  GrowableBitVector double_artificial_registers_;

  // NOTE(review): presumably the register kind and register count of the
  // allocation pass currently running — confirm against the .cc.
  RegisterKind mode_;
  int num_registers_;

  HGraph* graph_;

  bool has_osr_entry_;

  // Indicates success or failure during register allocation.
  bool allocation_ok_;

  DISALLOW_COPY_AND_ASSIGN(LAllocator);
};


} }  // namespace v8::internal

#endif  // V8_LITHIUM_ALLOCATOR_H_