// Copyright 2010 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#ifndef V8_LITHIUM_ALLOCATOR_H_
#define V8_LITHIUM_ALLOCATOR_H_

#include "v8.h"

#include "data-flow.h"
#include "lithium.h"
#include "zone.h"

namespace v8 {
namespace internal {

// Forward declarations.
class HBasicBlock;
class HGraph;
class HInstruction;
class HPhi;
class HTracer;
class HValue;
class BitVector;
class StringStream;

class LArgument;
class LChunk;
class LOperand;
class LUnallocated;
class LConstantOperand;
class LGap;
class LParallelMove;
class LPointerMap;
class LStackSlot;
class LRegister;


// This class represents a single point of a LOperand's lifetime.
// For each lithium instruction there are exactly two lifetime positions:
// the beginning and the end of the instruction. Lifetime positions for
// different lithium instructions are disjoint.
class LifetimePosition {
 public:
  // Return the lifetime position that corresponds to the beginning of
  // the instruction with the given index.
  static LifetimePosition FromInstructionIndex(int index) {
    return LifetimePosition(index * kStep);
  }

  // Returns a numeric representation of this lifetime position.
  int Value() const {
    return value_;
  }

  // Returns the index of the instruction to which this lifetime position
  // corresponds.
  int InstructionIndex() const {
    ASSERT(IsValid());
    return value_ / kStep;
  }

  // Returns true if this lifetime position corresponds to the instruction
  // start.
  bool IsInstructionStart() const {
    return (value_ & (kStep - 1)) == 0;
  }

  // Returns the lifetime position for the start of the instruction which
  // corresponds to this lifetime position.
  LifetimePosition InstructionStart() const {
    ASSERT(IsValid());
    return LifetimePosition(value_ & ~(kStep - 1));
  }

  // Returns the lifetime position for the end of the instruction which
  // corresponds to this lifetime position.
  LifetimePosition InstructionEnd() const {
    ASSERT(IsValid());
    return LifetimePosition(InstructionStart().Value() + kStep/2);
  }

  // Returns the lifetime position for the beginning of the next instruction.
  LifetimePosition NextInstruction() const {
    ASSERT(IsValid());
    return LifetimePosition(InstructionStart().Value() + kStep);
  }

  // Returns the lifetime position for the beginning of the previous
  // instruction.
  LifetimePosition PrevInstruction() const {
    ASSERT(IsValid());
    ASSERT(value_ > 1);
    return LifetimePosition(InstructionStart().Value() - kStep);
  }

  // Constructs the lifetime position which does not correspond to any
  // instruction.
  LifetimePosition() : value_(-1) {}

  // Returns true if this lifetime position corresponds to some
  // instruction.
  bool IsValid() const { return value_ != -1; }

  static inline LifetimePosition Invalid() { return LifetimePosition(); }

  static inline LifetimePosition MaxPosition() {
    // We have to use this kind of getter instead of a static member due to
    // a crash bug in GDB.
    return LifetimePosition(kMaxInt);
  }

 private:
  static const int kStep = 2;

  // Code relies on kStep being a power of two.
  STATIC_ASSERT(IS_POWER_OF_TWO(kStep));

  explicit LifetimePosition(int value) : value_(value) { }

  int value_;
};
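
// Minimal usage sketch (added for illustration; it relies only on the public
// methods declared above): with two lifetime positions per instruction,
// instruction index 3 corresponds to position value 6 for its start and 7 for
// its end.
//
//   LifetimePosition p = LifetimePosition::FromInstructionIndex(3);
//   p.Value();                    // 6
//   p.IsInstructionStart();       // true
//   p.InstructionEnd().Value();   // 7
//   p.NextInstruction().Value();  // 8
//   p.InstructionIndex();         // 3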


enum RegisterKind {
  NONE,
  GENERAL_REGISTERS,
  DOUBLE_REGISTERS
};


// A register-allocator view of a Lithium instruction. It contains the id of
// the output operand and a list of input operand uses.

class LInstruction;
class LEnvironment;

// Iterator for non-null temp operands.
class TempIterator BASE_EMBEDDED {
 public:
  inline explicit TempIterator(LInstruction* instr);
  inline bool HasNext();
  inline LOperand* Next();
  inline void Advance();

 private:
  inline int AdvanceToNext(int start);
  LInstruction* instr_;
  int limit_;
  int current_;
};


// Iterator for non-constant input operands.
class InputIterator BASE_EMBEDDED {
 public:
  inline explicit InputIterator(LInstruction* instr);
  inline bool HasNext();
  inline LOperand* Next();
  inline void Advance();

 private:
  inline int AdvanceToNext(int start);
  LInstruction* instr_;
  int limit_;
  int current_;
};


class UseIterator BASE_EMBEDDED {
 public:
  inline explicit UseIterator(LInstruction* instr);
  inline bool HasNext();
  inline LOperand* Next();
  inline void Advance();

 private:
  InputIterator input_iterator_;
  DeepIterator env_iterator_;
};


// Representation of the non-empty interval [start,end[.
class UseInterval: public ZoneObject {
 public:
  UseInterval(LifetimePosition start, LifetimePosition end)
      : start_(start), end_(end), next_(NULL) {
    ASSERT(start.Value() < end.Value());
  }

  LifetimePosition start() const { return start_; }
  LifetimePosition end() const { return end_; }
  UseInterval* next() const { return next_; }

  // Split this interval at the given position without affecting the
  // live range that owns it. The interval must contain the position.
  void SplitAt(LifetimePosition pos);

  // If this interval intersects with other, returns the smallest position
  // that belongs to both of them.
  LifetimePosition Intersect(const UseInterval* other) const {
    if (other->start().Value() < start_.Value()) return other->Intersect(this);
    if (other->start().Value() < end_.Value()) return other->start();
    return LifetimePosition::Invalid();
  }

  bool Contains(LifetimePosition point) const {
    return start_.Value() <= point.Value() && point.Value() < end_.Value();
  }

 private:
  void set_start(LifetimePosition start) { start_ = start; }
  void set_next(UseInterval* next) { next_ = next; }

  LifetimePosition start_;
  LifetimePosition end_;
  UseInterval* next_;

  friend class LiveRange;  // Assigns to start_.
};
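
// Illustrative note (added): intervals are half-open, so [2,6[ contains the
// positions with values 2 through 5 but not 6. Under Intersect() above,
// [2,6[ and [4,10[ overlap and the result is the position with value 4,
// while [2,6[ and [6,10[ do not overlap and the result is
// LifetimePosition::Invalid().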

// Representation of a use position.
class UsePosition: public ZoneObject {
 public:
  UsePosition(LifetimePosition pos, LOperand* operand);

  LOperand* operand() const { return operand_; }
  bool HasOperand() const { return operand_ != NULL; }

  LOperand* hint() const { return hint_; }
  void set_hint(LOperand* hint) { hint_ = hint; }
  bool HasHint() const;
  bool RequiresRegister() const;
  bool RegisterIsBeneficial() const;

  LifetimePosition pos() const { return pos_; }
  UsePosition* next() const { return next_; }

 private:
  void set_next(UsePosition* next) { next_ = next; }

  LOperand* operand_;
  LOperand* hint_;
  LifetimePosition pos_;
  UsePosition* next_;
  bool requires_reg_;
  bool register_beneficial_;

  friend class LiveRange;
};

// Representation of SSA values' live ranges as a collection of (continuous)
// intervals over the instruction ordering.
class LiveRange: public ZoneObject {
 public:
  static const int kInvalidAssignment = 0x7fffffff;

  explicit LiveRange(int id);

  UseInterval* first_interval() const { return first_interval_; }
  UsePosition* first_pos() const { return first_pos_; }
  LiveRange* parent() const { return parent_; }
  LiveRange* TopLevel() { return (parent_ == NULL) ? this : parent_; }
  LiveRange* next() const { return next_; }
  bool IsChild() const { return parent() != NULL; }
  int id() const { return id_; }
  bool IsFixed() const { return id_ < 0; }
  bool IsEmpty() const { return first_interval() == NULL; }
  LOperand* CreateAssignedOperand();
  int assigned_register() const { return assigned_register_; }
  int spill_start_index() const { return spill_start_index_; }
  void set_assigned_register(int reg, RegisterKind register_kind);
  void MakeSpilled();

  // Returns the use position in this live range that follows both start
  // and the last processed use position.
  // Modifies internal state of live range!
  UsePosition* NextUsePosition(LifetimePosition start);

  // Returns the use position for which a register is required in this live
  // range and which follows both start and the last processed use position.
  // Modifies internal state of live range!
  UsePosition* NextRegisterPosition(LifetimePosition start);

  // Returns the use position for which a register is beneficial in this live
  // range and which follows both start and the last processed use position.
  // Modifies internal state of live range!
  UsePosition* NextUsePositionRegisterIsBeneficial(LifetimePosition start);

  // Can this live range be spilled at this position.
  bool CanBeSpilled(LifetimePosition pos);

  // Split this live range at the given position, which must follow the start
  // of the range.
  // All uses following the given position will be moved from this
  // live range to the result live range.
  void SplitAt(LifetimePosition position, LiveRange* result);
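
  // Illustrative sketch (added; the semantics are those described above, the
  // actual behavior lives in the implementation): splitting a range whose
  // only interval is [0,20[ at position 10 is expected to keep [0,10[ in
  // this range and to move [10,20[, together with the use positions that
  // follow position 10, into the result range.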

  bool IsDouble() const { return assigned_register_kind_ == DOUBLE_REGISTERS; }
  bool HasRegisterAssigned() const {
    return assigned_register_ != kInvalidAssignment;
  }
  bool IsSpilled() const { return spilled_; }
  UsePosition* FirstPosWithHint() const;

  LOperand* FirstHint() const {
    UsePosition* pos = FirstPosWithHint();
    if (pos != NULL) return pos->hint();
    return NULL;
  }

  LifetimePosition Start() const {
    ASSERT(!IsEmpty());
    return first_interval()->start();
  }

  LifetimePosition End() const {
    ASSERT(!IsEmpty());
    return last_interval_->end();
  }

  bool HasAllocatedSpillOperand() const;
  LOperand* GetSpillOperand() const { return spill_operand_; }
  void SetSpillOperand(LOperand* operand);

  void SetSpillStartIndex(int start) {
    spill_start_index_ = Min(start, spill_start_index_);
  }

  bool ShouldBeAllocatedBefore(const LiveRange* other) const;
  bool CanCover(LifetimePosition position) const;
  bool Covers(LifetimePosition position);
  LifetimePosition FirstIntersection(LiveRange* other);

  // Add a new interval or a new use position to this live range.
  void EnsureInterval(LifetimePosition start, LifetimePosition end);
  void AddUseInterval(LifetimePosition start, LifetimePosition end);
  UsePosition* AddUsePosition(LifetimePosition pos, LOperand* operand);

  // Shorten the most recently added interval by setting a new start.
  void ShortenTo(LifetimePosition start);

#ifdef DEBUG
  // True if target overlaps an existing interval.
  bool HasOverlap(UseInterval* target) const;
  void Verify() const;
#endif

 private:
  void ConvertOperands();
  UseInterval* FirstSearchIntervalForPosition(LifetimePosition position) const;
  void AdvanceLastProcessedMarker(UseInterval* to_start_of,
                                  LifetimePosition but_not_past) const;

  int id_;
  bool spilled_;
  int assigned_register_;
  RegisterKind assigned_register_kind_;
  UseInterval* last_interval_;
  UseInterval* first_interval_;
  UsePosition* first_pos_;
  LiveRange* parent_;
  LiveRange* next_;
  // This is used as a cache; it does not affect correctness.
  mutable UseInterval* current_interval_;
  UsePosition* last_processed_use_;
  LOperand* spill_operand_;
  int spill_start_index_;
};


class GrowableBitVector BASE_EMBEDDED {
 public:
  GrowableBitVector() : bits_(NULL) { }

  bool Contains(int value) const {
    if (!InBitsRange(value)) return false;
    return bits_->Contains(value);
  }

  void Add(int value) {
    EnsureCapacity(value);
    bits_->Add(value);
  }

 private:
  static const int kInitialLength = 1024;

  bool InBitsRange(int value) const {
    return bits_ != NULL && bits_->length() > value;
  }

  void EnsureCapacity(int value) {
    if (InBitsRange(value)) return;
    int new_length = bits_ == NULL ? kInitialLength : bits_->length();
    while (new_length <= value) new_length *= 2;
    BitVector* new_bits = new BitVector(new_length);
    if (bits_ != NULL) new_bits->CopyFrom(*bits_);
    bits_ = new_bits;
  }

  BitVector* bits_;
};
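
// Worked example (added for illustration): the vector starts with no storage;
// the first Add() allocates kInitialLength (1024) bits, and EnsureCapacity()
// doubles the length until the value fits, so Add(1500) on a 1024-bit vector
// grows it to 2048 bits.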


class LAllocator BASE_EMBEDDED {
 public:
  LAllocator(int first_virtual_register, HGraph* graph);

  static void TraceAlloc(const char* msg, ...);

  // Lithium translation support.
  // Record a use of an input operand in the current instruction.
  void RecordUse(HValue* value, LUnallocated* operand);
  // Record the definition of the output operand.
  void RecordDefinition(HInstruction* instr, LUnallocated* operand);
  // Record a temporary operand.
  void RecordTemporary(LUnallocated* operand);

  // Checks whether the value of a given virtual register is tagged.
  bool HasTaggedValue(int virtual_register) const;

  // Returns the register kind required by the given virtual register.
  RegisterKind RequiredRegisterKind(int virtual_register) const;

  // Control max function size.
  static int max_initial_value_ids();

  void Allocate(LChunk* chunk);

  const ZoneList<LiveRange*>* live_ranges() const { return &live_ranges_; }
  const Vector<LiveRange*>* fixed_live_ranges() const {
    return &fixed_live_ranges_;
  }
  const Vector<LiveRange*>* fixed_double_live_ranges() const {
    return &fixed_double_live_ranges_;
  }

  LChunk* chunk() const { return chunk_; }
  HGraph* graph() const { return graph_; }

  void MarkAsOsrEntry() {
    // There can be only one.
    ASSERT(!has_osr_entry_);
    // Simply set a flag to find and process the instruction later.
    has_osr_entry_ = true;
  }

#ifdef DEBUG
  void Verify() const;
#endif

 private:
  void MeetRegisterConstraints();
  void ResolvePhis();
  void BuildLiveRanges();
  void AllocateGeneralRegisters();
  void AllocateDoubleRegisters();
  void ConnectRanges();
  void ResolveControlFlow();
  void PopulatePointerMaps();
  void ProcessOsrEntry();
  void AllocateRegisters();
  bool CanEagerlyResolveControlFlow(HBasicBlock* block) const;
  inline bool SafePointsAreInOrder() const;

  // Liveness analysis support.
  void InitializeLivenessAnalysis();
  BitVector* ComputeLiveOut(HBasicBlock* block);
  void AddInitialIntervals(HBasicBlock* block, BitVector* live_out);
  void ProcessInstructions(HBasicBlock* block, BitVector* live);
  void MeetRegisterConstraints(HBasicBlock* block);
  void MeetConstraintsBetween(LInstruction* first,
                              LInstruction* second,
                              int gap_index);
  void ResolvePhis(HBasicBlock* block);

  // Helper methods for building intervals.
  LOperand* AllocateFixed(LUnallocated* operand, int pos, bool is_tagged);
  LiveRange* LiveRangeFor(LOperand* operand);
  void Define(LifetimePosition position, LOperand* operand, LOperand* hint);
  void Use(LifetimePosition block_start,
           LifetimePosition position,
           LOperand* operand,
           LOperand* hint);
  void AddConstraintsGapMove(int index, LOperand* from, LOperand* to);

  // Helper methods for updating the live range lists.
  void AddToActive(LiveRange* range);
  void AddToInactive(LiveRange* range);
  void AddToUnhandledSorted(LiveRange* range);
  void AddToUnhandledUnsorted(LiveRange* range);
  void SortUnhandled();
  bool UnhandledIsSorted();
  void ActiveToHandled(LiveRange* range);
  void ActiveToInactive(LiveRange* range);
  void InactiveToHandled(LiveRange* range);
  void InactiveToActive(LiveRange* range);
  void FreeSpillSlot(LiveRange* range);
  LOperand* TryReuseSpillSlot(LiveRange* range);

  // Helper methods for allocating registers.
  bool TryAllocateFreeReg(LiveRange* range);
  void AllocateBlockedReg(LiveRange* range);

  // Live range splitting helpers.

  // Split the given range at the given position.
  // If range starts at or after the given position then the
  // original range is returned.
  // Otherwise returns the live range that starts at pos and contains
  // all uses from the original range that follow pos. Uses at pos will
  // still be owned by the original range after splitting.
  LiveRange* SplitAt(LiveRange* range, LifetimePosition pos);

  // Split the given range at a position from the interval [start, end].
  LiveRange* SplitBetween(LiveRange* range,
                          LifetimePosition start,
                          LifetimePosition end);

  // Find a lifetime position in the interval [start, end] which
  // is optimal for splitting: it is either the header of the outermost
  // loop covered by this interval or the latest possible position.
  LifetimePosition FindOptimalSplitPos(LifetimePosition start,
                                       LifetimePosition end);

  // Spill the given live range after position pos.
  void SpillAfter(LiveRange* range, LifetimePosition pos);

  // Spill the given live range after position start and up to position end.
  void SpillBetween(LiveRange* range,
                    LifetimePosition start,
                    LifetimePosition end);

  void SplitAndSpillIntersecting(LiveRange* range);

  void Spill(LiveRange* range);
  bool IsBlockBoundary(LifetimePosition pos);

  // Helper methods for resolving control flow.
  void ResolveControlFlow(LiveRange* range,
                          HBasicBlock* block,
                          HBasicBlock* pred);

  // Return the parallel move that should be used to connect ranges split at
  // the given position.
  LParallelMove* GetConnectingParallelMove(LifetimePosition pos);

  // Return the block which contains the given lifetime position.
  HBasicBlock* GetBlock(LifetimePosition pos);

  // Helper methods for the fixed registers.
  int RegisterCount() const;
  static int FixedLiveRangeID(int index) { return -index - 1; }
  static int FixedDoubleLiveRangeID(int index);
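  // Note (added for illustration): FixedLiveRangeID() maps allocatable
  // register index i to the negative live range id -i - 1 (e.g. 0 -> -1,
  // 2 -> -3); LiveRange::IsFixed() recognizes fixed ranges by id_ < 0.
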
  LiveRange* FixedLiveRangeFor(int index);
  LiveRange* FixedDoubleLiveRangeFor(int index);
  LiveRange* LiveRangeFor(int index);
  HPhi* LookupPhi(LOperand* operand) const;
  LGap* GetLastGap(HBasicBlock* block);

  const char* RegisterName(int allocation_index);

  inline bool IsGapAt(int index);

  inline LInstruction* InstructionAt(int index);

  inline LGap* GapAt(int index);

  LChunk* chunk_;

  // During liveness analysis keep a mapping from block id to live_in sets
  // for blocks already analyzed.
  ZoneList<BitVector*> live_in_sets_;

  // Liveness analysis results.
  ZoneList<LiveRange*> live_ranges_;

  // Lists of live ranges
  EmbeddedVector<LiveRange*, Register::kNumAllocatableRegisters>
      fixed_live_ranges_;
  EmbeddedVector<LiveRange*, DoubleRegister::kNumAllocatableRegisters>
      fixed_double_live_ranges_;
  ZoneList<LiveRange*> unhandled_live_ranges_;
  ZoneList<LiveRange*> active_live_ranges_;
  ZoneList<LiveRange*> inactive_live_ranges_;
  ZoneList<LiveRange*> reusable_slots_;

  // Next virtual register number to be assigned to temporaries.
  int next_virtual_register_;
  int first_artificial_register_;
  GrowableBitVector double_artificial_registers_;

  RegisterKind mode_;
  int num_registers_;

  HGraph* graph_;

  bool has_osr_entry_;

  DISALLOW_COPY_AND_ASSIGN(LAllocator);
};


} }  // namespace v8::internal

#endif  // V8_LITHIUM_ALLOCATOR_H_