// Copyright 2014 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef V8_COMPILER_BACKEND_REGISTER_ALLOCATOR_H_
#define V8_COMPILER_BACKEND_REGISTER_ALLOCATOR_H_

#include "src/base/bits.h"
#include "src/base/compiler-specific.h"
#include "src/codegen/register-configuration.h"
#include "src/common/globals.h"
#include "src/compiler/backend/instruction.h"
#include "src/compiler/backend/register-allocation.h"
#include "src/flags/flags.h"
#include "src/utils/ostreams.h"
#include "src/zone/zone-containers.h"

namespace v8 {
namespace internal {

class TickCounter;

namespace compiler {

static const int32_t kUnassignedRegister = RegisterConfiguration::kMaxRegisters;

// This class represents a single point of an InstructionOperand's lifetime.
// For each instruction there are four lifetime positions:
//
//   [[START, END], [START, END]]
//
// Where the first half position corresponds to
//
//  [GapPosition::START, GapPosition::END]
//
// and the second half position corresponds to
//
//  [Lifetime::USED_AT_START, Lifetime::USED_AT_END]
//
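// For example, with the kStep and kHalfStep constants defined below,
// instruction index i occupies the four consecutive integer positions:
//
//   4 * i      gap START          (IsGapPosition() && IsStart())
//   4 * i + 1  gap END            (IsGapPosition() && IsEnd())
//   4 * i + 2  instruction START  (IsInstructionPosition() && IsStart())
//   4 * i + 3  instruction END    (IsInstructionPosition() && IsEnd())
//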
class LifetimePosition final {
 public:
  // Return the lifetime position that corresponds to the beginning of
  // the gap with the given index.
  static LifetimePosition GapFromInstructionIndex(int index) {
    return LifetimePosition(index * kStep);
  }
  // Return the lifetime position that corresponds to the beginning of
  // the instruction with the given index.
  static LifetimePosition InstructionFromInstructionIndex(int index) {
    return LifetimePosition(index * kStep + kHalfStep);
  }

  static bool ExistsGapPositionBetween(LifetimePosition pos1,
                                       LifetimePosition pos2) {
    if (pos1 > pos2) std::swap(pos1, pos2);
    LifetimePosition next(pos1.value_ + 1);
    if (next.IsGapPosition()) return next < pos2;
    return next.NextFullStart() < pos2;
  }

  // Returns a numeric representation of this lifetime position.
  int value() const { return value_; }

  // Returns the index of the instruction to which this lifetime position
  // corresponds.
  int ToInstructionIndex() const {
    DCHECK(IsValid());
    return value_ / kStep;
  }

  // Returns true if this lifetime position corresponds to a START value
  bool IsStart() const { return (value_ & (kHalfStep - 1)) == 0; }
  // Returns true if this lifetime position corresponds to an END value
  bool IsEnd() const { return (value_ & (kHalfStep - 1)) == 1; }
  // Returns true if this lifetime position corresponds to a gap START value
  bool IsFullStart() const { return (value_ & (kStep - 1)) == 0; }

  bool IsGapPosition() const { return (value_ & 0x2) == 0; }
  bool IsInstructionPosition() const { return !IsGapPosition(); }

  // Returns the lifetime position for the current START.
  LifetimePosition Start() const {
    DCHECK(IsValid());
    return LifetimePosition(value_ & ~(kHalfStep - 1));
  }

  // Returns the lifetime position for the current gap START.
  LifetimePosition FullStart() const {
    DCHECK(IsValid());
    return LifetimePosition(value_ & ~(kStep - 1));
  }

  // Returns the lifetime position for the current END.
  LifetimePosition End() const {
    DCHECK(IsValid());
    return LifetimePosition(Start().value_ + kHalfStep / 2);
  }

  // Returns the lifetime position for the beginning of the next START.
  LifetimePosition NextStart() const {
    DCHECK(IsValid());
    return LifetimePosition(Start().value_ + kHalfStep);
  }

  // Returns the lifetime position for the beginning of the next gap START.
  LifetimePosition NextFullStart() const {
    DCHECK(IsValid());
    return LifetimePosition(FullStart().value_ + kStep);
  }

  // Returns the lifetime position for the beginning of the previous START.
  LifetimePosition PrevStart() const {
    DCHECK(IsValid());
    DCHECK_LE(kHalfStep, value_);
    return LifetimePosition(Start().value_ - kHalfStep);
  }

  // Constructs the lifetime position which does not correspond to any
  // instruction.
  LifetimePosition() : value_(-1) {}

  // Returns true if this lifetime position corresponds to some
  // instruction.
  bool IsValid() const { return value_ != -1; }

  bool operator<(const LifetimePosition& that) const {
    return this->value_ < that.value_;
  }

  bool operator<=(const LifetimePosition& that) const {
    return this->value_ <= that.value_;
  }

  bool operator==(const LifetimePosition& that) const {
    return this->value_ == that.value_;
  }

  bool operator!=(const LifetimePosition& that) const {
    return this->value_ != that.value_;
  }

  bool operator>(const LifetimePosition& that) const {
    return this->value_ > that.value_;
  }

  bool operator>=(const LifetimePosition& that) const {
    return this->value_ >= that.value_;
  }

  void Print() const;

  static inline LifetimePosition Invalid() { return LifetimePosition(); }

  static inline LifetimePosition MaxPosition() {
    // We have to use this kind of getter instead of static member due to
    // crash bug in GDB.
    return LifetimePosition(kMaxInt);
  }

  static inline LifetimePosition FromInt(int value) {
    return LifetimePosition(value);
  }

 private:
  static const int kHalfStep = 2;
  static const int kStep = 2 * kHalfStep;

  static_assert(base::bits::IsPowerOfTwo(kHalfStep),
                "Code relies on kStep and kHalfStep being a power of two");

  explicit LifetimePosition(int value) : value_(value) {}

  int value_;
};

std::ostream& operator<<(std::ostream& os, const LifetimePosition pos);

enum class RegisterAllocationFlag : unsigned { kTraceAllocation = 1 << 0 };

using RegisterAllocationFlags = base::Flags<RegisterAllocationFlag>;

class SpillRange;
class LiveRange;
class TopLevelLiveRange;

class TopTierRegisterAllocationData final : public RegisterAllocationData {
 public:
  TopTierRegisterAllocationData(const TopTierRegisterAllocationData&) = delete;
  TopTierRegisterAllocationData& operator=(
      const TopTierRegisterAllocationData&) = delete;

  static const TopTierRegisterAllocationData* cast(
      const RegisterAllocationData* data) {
    DCHECK_EQ(data->type(), Type::kTopTier);
    return static_cast<const TopTierRegisterAllocationData*>(data);
  }

  static TopTierRegisterAllocationData* cast(RegisterAllocationData* data) {
    DCHECK_EQ(data->type(), Type::kTopTier);
    return static_cast<TopTierRegisterAllocationData*>(data);
  }

  static const TopTierRegisterAllocationData& cast(
      const RegisterAllocationData& data) {
    DCHECK_EQ(data.type(), Type::kTopTier);
    return static_cast<const TopTierRegisterAllocationData&>(data);
  }

  // Encodes whether a spill happens in deferred code (kSpillDeferred) or
  // regular code (kSpillAtDefinition).
  enum SpillMode { kSpillAtDefinition, kSpillDeferred };

  bool is_trace_alloc() {
    return flags_ & RegisterAllocationFlag::kTraceAllocation;
  }

  static constexpr int kNumberOfFixedRangesPerRegister = 2;

  class PhiMapValue : public ZoneObject {
   public:
    PhiMapValue(PhiInstruction* phi, const InstructionBlock* block, Zone* zone);

    const PhiInstruction* phi() const { return phi_; }
    const InstructionBlock* block() const { return block_; }

    // For hinting.
    int assigned_register() const { return assigned_register_; }
    void set_assigned_register(int register_code) {
      DCHECK_EQ(assigned_register_, kUnassignedRegister);
      assigned_register_ = register_code;
    }
    void UnsetAssignedRegister() { assigned_register_ = kUnassignedRegister; }

    void AddOperand(InstructionOperand* operand);
    void CommitAssignment(const InstructionOperand& operand);

   private:
    PhiInstruction* const phi_;
    const InstructionBlock* const block_;
    ZoneVector<InstructionOperand*> incoming_operands_;
    int assigned_register_;
  };
  using PhiMap = ZoneMap<int, PhiMapValue*>;

  struct DelayedReference {
    ReferenceMap* map;
    InstructionOperand* operand;
  };
  using DelayedReferences = ZoneVector<DelayedReference>;
  using RangesWithPreassignedSlots =
      ZoneVector<std::pair<TopLevelLiveRange*, int>>;

  TopTierRegisterAllocationData(const RegisterConfiguration* config,
                                Zone* allocation_zone, Frame* frame,
                                InstructionSequence* code,
                                RegisterAllocationFlags flags,
                                TickCounter* tick_counter,
                                const char* debug_name = nullptr);

  const ZoneVector<TopLevelLiveRange*>& live_ranges() const {
    return live_ranges_;
  }
  ZoneVector<TopLevelLiveRange*>& live_ranges() { return live_ranges_; }
  const ZoneVector<TopLevelLiveRange*>& fixed_live_ranges() const {
    return fixed_live_ranges_;
  }
  ZoneVector<TopLevelLiveRange*>& fixed_live_ranges() {
    return fixed_live_ranges_;
  }
  ZoneVector<TopLevelLiveRange*>& fixed_float_live_ranges() {
    return fixed_float_live_ranges_;
  }
  const ZoneVector<TopLevelLiveRange*>& fixed_float_live_ranges() const {
    return fixed_float_live_ranges_;
  }
  ZoneVector<TopLevelLiveRange*>& fixed_double_live_ranges() {
    return fixed_double_live_ranges_;
  }
  const ZoneVector<TopLevelLiveRange*>& fixed_double_live_ranges() const {
    return fixed_double_live_ranges_;
  }
  ZoneVector<TopLevelLiveRange*>& fixed_simd128_live_ranges() {
    return fixed_simd128_live_ranges_;
  }
  const ZoneVector<TopLevelLiveRange*>& fixed_simd128_live_ranges() const {
    return fixed_simd128_live_ranges_;
  }
  ZoneVector<BitVector*>& live_in_sets() { return live_in_sets_; }
  ZoneVector<BitVector*>& live_out_sets() { return live_out_sets_; }
  ZoneVector<SpillRange*>& spill_ranges() { return spill_ranges_; }
  DelayedReferences& delayed_references() { return delayed_references_; }
  InstructionSequence* code() const { return code_; }
  // This zone is for data structures only needed during register allocation
  // phases.
  Zone* allocation_zone() const { return allocation_zone_; }
  // This zone is for InstructionOperands and moves that live beyond register
  // allocation.
  Zone* code_zone() const { return code()->zone(); }
  Frame* frame() const { return frame_; }
  const char* debug_name() const { return debug_name_; }
  const RegisterConfiguration* config() const { return config_; }

  MachineRepresentation RepresentationFor(int virtual_register);

  TopLevelLiveRange* GetOrCreateLiveRangeFor(int index);
  // Creates a new live range.
  TopLevelLiveRange* NewLiveRange(int index, MachineRepresentation rep);

  SpillRange* AssignSpillRangeToLiveRange(TopLevelLiveRange* range,
                                          SpillMode spill_mode);
  SpillRange* CreateSpillRangeForLiveRange(TopLevelLiveRange* range);

  MoveOperands* AddGapMove(int index, Instruction::GapPosition position,
                           const InstructionOperand& from,
                           const InstructionOperand& to);

  bool ExistsUseWithoutDefinition();
  bool RangesDefinedInDeferredStayInDeferred();

  void MarkFixedUse(MachineRepresentation rep, int index);
  bool HasFixedUse(MachineRepresentation rep, int index);

  void MarkAllocated(MachineRepresentation rep, int index);

  PhiMapValue* InitializePhiMap(const InstructionBlock* block,
                                PhiInstruction* phi);
  PhiMapValue* GetPhiMapValueFor(TopLevelLiveRange* top_range);
  PhiMapValue* GetPhiMapValueFor(int virtual_register);
  bool IsBlockBoundary(LifetimePosition pos) const;

  RangesWithPreassignedSlots& preassigned_slot_ranges() {
    return preassigned_slot_ranges_;
  }

  void RememberSpillState(RpoNumber block,
                          const ZoneVector<LiveRange*>& state) {
    spill_state_[block.ToSize()] = state;
  }

  ZoneVector<LiveRange*>& GetSpillState(RpoNumber block) {
    auto& result = spill_state_[block.ToSize()];
    return result;
  }

  void ResetSpillState() {
    for (auto& state : spill_state_) {
      state.clear();
    }
  }

  TickCounter* tick_counter() { return tick_counter_; }

  ZoneMap<TopLevelLiveRange*, AllocatedOperand*>& slot_for_const_range() {
    return slot_for_const_range_;
  }

 private:
  Zone* const allocation_zone_;
  Frame* const frame_;
  InstructionSequence* const code_;
  const char* const debug_name_;
  const RegisterConfiguration* const config_;
  PhiMap phi_map_;
  ZoneVector<BitVector*> live_in_sets_;
  ZoneVector<BitVector*> live_out_sets_;
  ZoneVector<TopLevelLiveRange*> live_ranges_;
  ZoneVector<TopLevelLiveRange*> fixed_live_ranges_;
  ZoneVector<TopLevelLiveRange*> fixed_float_live_ranges_;
  ZoneVector<TopLevelLiveRange*> fixed_double_live_ranges_;
  ZoneVector<TopLevelLiveRange*> fixed_simd128_live_ranges_;
  ZoneVector<SpillRange*> spill_ranges_;
  DelayedReferences delayed_references_;
  BitVector* assigned_registers_;
  BitVector* assigned_double_registers_;
  BitVector* assigned_simd128_registers_;
  BitVector* fixed_register_use_;
  BitVector* fixed_fp_register_use_;
  BitVector* fixed_simd128_register_use_;
  int virtual_register_count_;
  RangesWithPreassignedSlots preassigned_slot_ranges_;
  ZoneVector<ZoneVector<LiveRange*>> spill_state_;
  RegisterAllocationFlags flags_;
  TickCounter* const tick_counter_;
  ZoneMap<TopLevelLiveRange*, AllocatedOperand*> slot_for_const_range_;
};

// Representation of the non-empty interval [start,end[.
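// For example, an interval with start 8 and end 12 covers positions 8
// through 11: Contains() is true for the start position and false for the
// end position.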
class UseInterval final : public ZoneObject {
 public:
  UseInterval(LifetimePosition start, LifetimePosition end)
      : start_(start), end_(end), next_(nullptr) {
    DCHECK(start < end);
  }
  UseInterval(const UseInterval&) = delete;
  UseInterval& operator=(const UseInterval&) = delete;

  LifetimePosition start() const { return start_; }
  void set_start(LifetimePosition start) { start_ = start; }
  LifetimePosition end() const { return end_; }
  void set_end(LifetimePosition end) { end_ = end; }
  UseInterval* next() const { return next_; }
  void set_next(UseInterval* next) { next_ = next; }

  // Split this interval at the given position without affecting the
  // live range that owns it. The interval must contain the position.
  UseInterval* SplitAt(LifetimePosition pos, Zone* zone);

  // If this interval intersects with {other}, returns the smallest position
  // that belongs to both of them.
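  // For example, intersecting [4, 12[ with [8, 16[ yields position 8, while
  // intersecting two disjoint intervals yields LifetimePosition::Invalid().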
  LifetimePosition Intersect(const UseInterval* other) const {
    if (other->start() < start_) return other->Intersect(this);
    if (other->start() < end_) return other->start();
    return LifetimePosition::Invalid();
  }

  bool Contains(LifetimePosition point) const {
    return start_ <= point && point < end_;
  }

  // Returns the index of the first gap covered by this interval.
  int FirstGapIndex() const {
    int ret = start_.ToInstructionIndex();
    if (start_.IsInstructionPosition()) {
      ++ret;
    }
    return ret;
  }

  // Returns the index of the last gap covered by this interval.
  int LastGapIndex() const {
    int ret = end_.ToInstructionIndex();
    if (end_.IsGapPosition() && end_.IsStart()) {
      --ret;
    }
    return ret;
  }

 private:
  LifetimePosition start_;
  LifetimePosition end_;
  UseInterval* next_;
};

enum class UsePositionType : uint8_t {
  kRegisterOrSlot,
  kRegisterOrSlotOrConstant,
  kRequiresRegister,
  kRequiresSlot
};

enum class UsePositionHintType : uint8_t {
  kNone,
  kOperand,
  kUsePos,
  kPhi,
  kUnresolved
};

// Representation of a use position.
class V8_EXPORT_PRIVATE UsePosition final
    : public NON_EXPORTED_BASE(ZoneObject) {
 public:
  UsePosition(LifetimePosition pos, InstructionOperand* operand, void* hint,
              UsePositionHintType hint_type);
  UsePosition(const UsePosition&) = delete;
  UsePosition& operator=(const UsePosition&) = delete;

  InstructionOperand* operand() const { return operand_; }
  bool HasOperand() const { return operand_ != nullptr; }

  bool RegisterIsBeneficial() const {
    return RegisterBeneficialField::decode(flags_);
  }
  bool SpillDetrimental() const {
    return SpillDetrimentalField::decode(flags_);
  }

  UsePositionType type() const { return TypeField::decode(flags_); }
  void set_type(UsePositionType type, bool register_beneficial);

  LifetimePosition pos() const { return pos_; }

  UsePosition* next() const { return next_; }
  void set_next(UsePosition* next) { next_ = next; }

  // For hinting only.
  void set_assigned_register(int register_code) {
    flags_ = AssignedRegisterField::update(flags_, register_code);
  }
  void set_spill_detrimental() {
    flags_ = SpillDetrimentalField::update(flags_, true);
  }

  UsePositionHintType hint_type() const {
    return HintTypeField::decode(flags_);
  }
  bool HasHint() const;
  bool HintRegister(int* register_code) const;
  void SetHint(UsePosition* use_pos);
  void ResolveHint(UsePosition* use_pos);
  bool IsResolved() const {
    return hint_type() != UsePositionHintType::kUnresolved;
  }
  static UsePositionHintType HintTypeForOperand(const InstructionOperand& op);

 private:
  using TypeField = base::BitField<UsePositionType, 0, 2>;
  using HintTypeField = base::BitField<UsePositionHintType, 2, 3>;
  using RegisterBeneficialField = base::BitField<bool, 5, 1>;
  using AssignedRegisterField = base::BitField<int32_t, 6, 6>;
  using SpillDetrimentalField = base::BitField<int32_t, 12, 1>;

  InstructionOperand* const operand_;
  void* hint_;
  UsePosition* next_;
  LifetimePosition const pos_;
  uint32_t flags_;
};

class SpillRange;
class TopTierRegisterAllocationData;
class TopLevelLiveRange;
class LiveRangeBundle;

// Representation of SSA values' live ranges as a collection of (continuous)
// intervals over the instruction ordering.
class V8_EXPORT_PRIVATE LiveRange : public NON_EXPORTED_BASE(ZoneObject) {
 public:
  LiveRange(const LiveRange&) = delete;
  LiveRange& operator=(const LiveRange&) = delete;

  UseInterval* first_interval() const { return first_interval_; }
  UsePosition* first_pos() const { return first_pos_; }
  TopLevelLiveRange* TopLevel() { return top_level_; }
  const TopLevelLiveRange* TopLevel() const { return top_level_; }

  bool IsTopLevel() const;

  LiveRange* next() const { return next_; }

  int relative_id() const { return relative_id_; }

  bool IsEmpty() const { return first_interval() == nullptr; }

  InstructionOperand GetAssignedOperand() const;

  MachineRepresentation representation() const {
    return RepresentationField::decode(bits_);
  }

  int assigned_register() const { return AssignedRegisterField::decode(bits_); }
  bool HasRegisterAssigned() const {
    return assigned_register() != kUnassignedRegister;
  }
  void set_assigned_register(int reg);
  void UnsetAssignedRegister();

  bool ShouldRecombine() const { return RecombineField::decode(bits_); }

  void SetRecombine() { bits_ = RecombineField::update(bits_, true); }
  void set_controlflow_hint(int reg) {
    bits_ = ControlFlowRegisterHint::update(bits_, reg);
  }
  int controlflow_hint() const {
    return ControlFlowRegisterHint::decode(bits_);
  }
  bool RegisterFromControlFlow(int* reg) {
    int hint = controlflow_hint();
    if (hint != kUnassignedRegister) {
      *reg = hint;
      return true;
    }
    return false;
  }
  bool spilled() const { return SpilledField::decode(bits_); }
  void AttachToNext();
  void Unspill();
  void Spill();

  RegisterKind kind() const;

  // Returns the use position in this live range that follows both start
  // and the last processed use position.
  UsePosition* NextUsePosition(LifetimePosition start) const;

  // Returns the use position for which a register is required in this live
  // range and which follows both start and the last processed use position.
  UsePosition* NextRegisterPosition(LifetimePosition start) const;

  // Returns the first use position requiring a stack slot, or nullptr.
  UsePosition* NextSlotPosition(LifetimePosition start) const;

  // Returns the use position for which a register is beneficial in this live
  // range and which follows both start and the last processed use position.
  UsePosition* NextUsePositionRegisterIsBeneficial(
      LifetimePosition start) const;

  // Returns the lifetime position for which a register is beneficial in this
  // live range and which follows both start and the last processed use
  // position.
  LifetimePosition NextLifetimePositionRegisterIsBeneficial(
      const LifetimePosition& start) const;

  // Returns the use position for which a register is beneficial in this live
  // range and which precedes start.
  UsePosition* PreviousUsePositionRegisterIsBeneficial(
      LifetimePosition start) const;

  // Returns the use position for which spilling is detrimental in this live
  // range and which follows both start and the last processed use position.
  UsePosition* NextUsePositionSpillDetrimental(LifetimePosition start) const;

  // Can this live range be spilled at this position?
  bool CanBeSpilled(LifetimePosition pos) const;

  // Splitting primitive used by splitting members.
  // Performs the split, but does not link the resulting ranges.
  // The given position must follow the start of the range.
  // All uses following the given position will be moved from this
  // live range to the result live range.
  // The current range will terminate at position, while result will start
  // from position.
  enum HintConnectionOption : bool {
    DoNotConnectHints = false,
    ConnectHints = true
  };
  UsePosition* DetachAt(LifetimePosition position, LiveRange* result,
                        Zone* zone, HintConnectionOption connect_hints);

  // Detaches at position, and then links the resulting ranges. Returns the
  // child, which starts at position.
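  // For example, splitting a range covering [8, 24[ at position 16 shrinks
  // this range to cover [8, 16[ and returns a child covering [16, 24[.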
  LiveRange* SplitAt(LifetimePosition position, Zone* zone);

  // Returns nullptr when no register is hinted, otherwise sets register_index.
  // Uses {current_hint_position_} as a cache, and tries to update it.
  UsePosition* FirstHintPosition(int* register_index);
  UsePosition* FirstHintPosition() {
    int register_index;
    return FirstHintPosition(&register_index);
  }

  UsePosition* current_hint_position() const {
    return current_hint_position_;
  }

  LifetimePosition Start() const {
    DCHECK(!IsEmpty());
    return first_interval()->start();
  }

  LifetimePosition End() const {
    DCHECK(!IsEmpty());
    return last_interval_->end();
  }

  bool ShouldBeAllocatedBefore(const LiveRange* other) const;
  bool CanCover(LifetimePosition position) const;
  bool Covers(LifetimePosition position) const;
  LifetimePosition NextStartAfter(LifetimePosition position);
  LifetimePosition NextEndAfter(LifetimePosition position) const;
  LifetimePosition FirstIntersection(LiveRange* other) const;
  LifetimePosition NextStart() const { return next_start_; }

  void VerifyChildStructure() const {
    VerifyIntervals();
    VerifyPositions();
  }

  void ConvertUsesToOperand(const InstructionOperand& op,
                            const InstructionOperand& spill_op);
  void SetUseHints(int register_index);
  void UnsetUseHints() { SetUseHints(kUnassignedRegister); }
  void ResetCurrentHintPosition() { current_hint_position_ = first_pos_; }

  void Print(const RegisterConfiguration* config, bool with_children) const;
  void Print(bool with_children) const;

  void set_bundle(LiveRangeBundle* bundle) { bundle_ = bundle; }
  LiveRangeBundle* get_bundle() const { return bundle_; }
  bool RegisterFromBundle(int* hint) const;
  void UpdateBundleRegister(int reg) const;

 private:
  friend class TopLevelLiveRange;
  friend Zone;

  explicit LiveRange(int relative_id, MachineRepresentation rep,
                     TopLevelLiveRange* top_level);

  void UpdateParentForAllChildren(TopLevelLiveRange* new_top_level);

  void set_spilled(bool value) { bits_ = SpilledField::update(bits_, value); }

  UseInterval* FirstSearchIntervalForPosition(LifetimePosition position) const;
  void AdvanceLastProcessedMarker(UseInterval* to_start_of,
                                  LifetimePosition but_not_past) const;

  void VerifyPositions() const;
  void VerifyIntervals() const;

  using SpilledField = base::BitField<bool, 0, 1>;
  // Bits [1, 7[ are used by TopLevelLiveRange.
  using AssignedRegisterField = base::BitField<int32_t, 7, 6>;
  using RepresentationField = base::BitField<MachineRepresentation, 13, 8>;
  using RecombineField = base::BitField<bool, 21, 1>;
  using ControlFlowRegisterHint = base::BitField<uint8_t, 22, 6>;
  // Bits 28-31 are used by TopLevelLiveRange.

  // Unique among children of the same virtual register.
  int relative_id_;
  uint32_t bits_;
  UseInterval* last_interval_;
  UseInterval* first_interval_;
  UsePosition* first_pos_;
  TopLevelLiveRange* top_level_;
  LiveRange* next_;
  // This is used as a cache, it doesn't affect correctness.
  mutable UseInterval* current_interval_;
  // This is used as a cache, it doesn't affect correctness.
  mutable UsePosition* last_processed_use_;
  // This is used as a cache in BuildLiveRanges and during register allocation.
  UsePosition* current_hint_position_;
  LiveRangeBundle* bundle_ = nullptr;
  // Next interval start, relative to the current linear scan position.
  LifetimePosition next_start_;
};

struct LiveRangeOrdering {
  bool operator()(const LiveRange* left, const LiveRange* right) const {
    return left->Start() < right->Start();
  }
};
class LiveRangeBundle : public ZoneObject {
 public:
  void MergeSpillRangesAndClear();

  int id() { return id_; }

  int reg() { return reg_; }

  void set_reg(int reg) {
    DCHECK_EQ(reg_, kUnassignedRegister);
    reg_ = reg;
  }

 private:
  friend class BundleBuilder;
  friend Zone;

  // Representation of the non-empty interval [start,end[.
  class Range {
   public:
    Range(int s, int e) : start(s), end(e) {}
    Range(LifetimePosition s, LifetimePosition e)
        : start(s.value()), end(e.value()) {}
    int start;
    int end;
  };

  struct RangeOrdering {
    bool operator()(const Range left, const Range right) const {
      return left.start < right.start;
    }
  };
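  // Note that both the use interval chain and uses_ are sorted by start
  // position, so a single merge-style sweep suffices to detect any overlap.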
  bool UsesOverlap(UseInterval* interval) {
    auto use = uses_.begin();
    while (interval != nullptr && use != uses_.end()) {
      if (use->end <= interval->start().value()) {
        ++use;
      } else if (interval->end().value() <= use->start) {
        interval = interval->next();
      } else {
        return true;
      }
    }
    return false;
  }
  void InsertUses(UseInterval* interval) {
    while (interval != nullptr) {
      auto done = uses_.insert({interval->start(), interval->end()});
      USE(done);
      DCHECK_EQ(done.second, 1);
      interval = interval->next();
    }
  }
  explicit LiveRangeBundle(Zone* zone, int id)
      : ranges_(zone), uses_(zone), id_(id) {}

  bool TryAddRange(LiveRange* range);

  // If merging is possible, merge either {lhs} into {rhs} or {rhs} into
  // {lhs}, clear the source and return the result. Otherwise return nullptr.
  static LiveRangeBundle* TryMerge(LiveRangeBundle* lhs, LiveRangeBundle* rhs,
                                   bool trace_alloc);

  ZoneSet<LiveRange*, LiveRangeOrdering> ranges_;
  ZoneSet<Range, RangeOrdering> uses_;
  int id_;
  int reg_ = kUnassignedRegister;
};

class V8_EXPORT_PRIVATE TopLevelLiveRange final : public LiveRange {
 public:
  explicit TopLevelLiveRange(int vreg, MachineRepresentation rep);
  TopLevelLiveRange(const TopLevelLiveRange&) = delete;
  TopLevelLiveRange& operator=(const TopLevelLiveRange&) = delete;

  int spill_start_index() const { return spill_start_index_; }

  bool IsFixed() const { return vreg_ < 0; }

  bool IsDeferredFixed() const { return DeferredFixedField::decode(bits_); }
  void set_deferred_fixed() { bits_ = DeferredFixedField::update(bits_, true); }
  bool is_phi() const { return IsPhiField::decode(bits_); }
  void set_is_phi(bool value) { bits_ = IsPhiField::update(bits_, value); }

  bool is_non_loop_phi() const { return IsNonLoopPhiField::decode(bits_); }
  bool is_loop_phi() const { return is_phi() && !is_non_loop_phi(); }
  void set_is_non_loop_phi(bool value) {
    bits_ = IsNonLoopPhiField::update(bits_, value);
  }
  bool SpillAtLoopHeaderNotBeneficial() const {
    return SpillAtLoopHeaderNotBeneficialField::decode(bits_);
  }
  void set_spilling_at_loop_header_not_beneficial() {
    bits_ = SpillAtLoopHeaderNotBeneficialField::update(bits_, true);
  }

  enum SlotUseKind { kNoSlotUse, kDeferredSlotUse, kGeneralSlotUse };

  bool has_slot_use() const {
    return slot_use_kind() > SlotUseKind::kNoSlotUse;
  }

  bool has_non_deferred_slot_use() const {
    return slot_use_kind() == SlotUseKind::kGeneralSlotUse;
  }

  void reset_slot_use() {
    bits_ = HasSlotUseField::update(bits_, SlotUseKind::kNoSlotUse);
  }
  void register_slot_use(SlotUseKind value) {
    bits_ = HasSlotUseField::update(bits_, std::max(slot_use_kind(), value));
  }
  SlotUseKind slot_use_kind() const { return HasSlotUseField::decode(bits_); }

  // Add a new interval or a new use position to this live range.
  void EnsureInterval(LifetimePosition start, LifetimePosition end, Zone* zone,
                      bool trace_alloc);
  void AddUseInterval(LifetimePosition start, LifetimePosition end, Zone* zone,
                      bool trace_alloc);
  void AddUsePosition(UsePosition* pos, bool trace_alloc);

  // Shorten the most recently added interval by setting a new start.
  void ShortenTo(LifetimePosition start, bool trace_alloc);

  // Spill range management.
  void SetSpillRange(SpillRange* spill_range);

  // Encodes whether a range is also available from a memory location:
  //   kNoSpillType: not available in a memory location.
  //   kSpillOperand: computed in a memory location at range start.
  //   kSpillRange: copied (spilled) to memory location at the definition,
  //                or at the beginning of some later blocks if
  //                LateSpillingSelected() is true.
  //   kDeferredSpillRange: copied (spilled) to memory location at entry
  //                        to deferred blocks that have a use from memory.
  //
  // Ranges either start out at kSpillOperand, which is also their final
  // state, or kNoSpillType. When spilled only in deferred code, a range
  // ends up with kDeferredSpillRange, while when spilled in regular code,
  // a range will be tagged as kSpillRange.
  enum class SpillType {
    kNoSpillType,
    kSpillOperand,
    kSpillRange,
    kDeferredSpillRange
  };
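  // Expressed as transitions: kNoSpillType becomes either kSpillRange
  // (spilled in regular code) or kDeferredSpillRange (spilled only in
  // deferred code); kDeferredSpillRange can later be promoted to kSpillRange
  // (see TransitionRangeToSpillAtDefinition); kSpillOperand is both an
  // initial and a final state.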
  void set_spill_type(SpillType value) {
    bits_ = SpillTypeField::update(bits_, value);
  }
  SpillType spill_type() const { return SpillTypeField::decode(bits_); }
  InstructionOperand* GetSpillOperand() const {
    DCHECK_EQ(SpillType::kSpillOperand, spill_type());
    return spill_operand_;
  }

  SpillRange* GetAllocatedSpillRange() const {
    DCHECK_NE(SpillType::kSpillOperand, spill_type());
    return spill_range_;
  }

  SpillRange* GetSpillRange() const {
    DCHECK_GE(spill_type(), SpillType::kSpillRange);
    return spill_range_;
  }
  bool HasNoSpillType() const {
    return spill_type() == SpillType::kNoSpillType;
  }
  bool HasSpillOperand() const {
    return spill_type() == SpillType::kSpillOperand;
  }
  bool HasSpillRange() const { return spill_type() >= SpillType::kSpillRange; }
  bool HasGeneralSpillRange() const {
    return spill_type() == SpillType::kSpillRange;
  }
  AllocatedOperand GetSpillRangeOperand() const;

  void RecordSpillLocation(Zone* zone, int gap_index,
                           InstructionOperand* operand);
  void SetSpillOperand(InstructionOperand* operand);
  void SetSpillStartIndex(int start) {
    spill_start_index_ = std::min(start, spill_start_index_);
  }

  // Omits any moves from spill_move_insertion_locations_ that can be skipped.
  void FilterSpillMoves(TopTierRegisterAllocationData* data,
                        const InstructionOperand& operand);

  // Writes all moves from spill_move_insertion_locations_ to the schedule.
  void CommitSpillMoves(TopTierRegisterAllocationData* data,
                        const InstructionOperand& operand);

  // If all the children of this range are spilled in deferred blocks, and if
  // for any non-spilled child with a use position requiring a slot, that range
  // is contained in a deferred block, mark the range as
  // IsSpilledOnlyInDeferredBlocks, so that we avoid spilling at definition,
  // and instead let the LiveRangeConnector perform the spills within the
  // deferred blocks. If so, we insert spills here for non-spilled ranges
  // with slot use positions.
  void TreatAsSpilledInDeferredBlock(Zone* zone, int total_block_count) {
    spill_start_index_ = -1;
    spilled_in_deferred_blocks_ = true;
    spill_move_insertion_locations_ = nullptr;
    list_of_blocks_requiring_spill_operands_ =
        zone->New<BitVector>(total_block_count, zone);
  }

  // Updates internal data structures to reflect that this range is not
  // spilled at definition but instead spilled in some blocks only.
  void TransitionRangeToDeferredSpill(Zone* zone, int total_block_count) {
    spill_start_index_ = -1;
    spill_move_insertion_locations_ = nullptr;
    list_of_blocks_requiring_spill_operands_ =
        zone->New<BitVector>(total_block_count, zone);
  }

  // Promotes this range to spill at definition if it was marked for spilling
  // in deferred blocks before.
  void TransitionRangeToSpillAtDefinition() {
    DCHECK_NOT_NULL(spill_move_insertion_locations_);
    if (spill_type() == SpillType::kDeferredSpillRange) {
      set_spill_type(SpillType::kSpillRange);
    }
  }

  bool MayRequireSpillRange() const {
    return !HasSpillOperand() && spill_range_ == nullptr;
  }
  void UpdateSpillRangePostMerge(TopLevelLiveRange* merged);
  int vreg() const { return vreg_; }

  void Verify() const;
  void VerifyChildrenInOrder() const;

  // Returns the LiveRange covering the given position, or nullptr if no such
  // range exists. Uses a linear search through child ranges. The range at the
  // previously requested position is cached, so this function will be very
  // fast if you call it with a non-decreasing sequence of positions.
  LiveRange* GetChildCovers(LifetimePosition pos);

  int GetNextChildId() { return ++last_child_id_; }

  int GetMaxChildCount() const { return last_child_id_ + 1; }

  bool IsSpilledOnlyInDeferredBlocks(
      const TopTierRegisterAllocationData* data) const {
    return spill_type() == SpillType::kDeferredSpillRange;
  }

  struct SpillMoveInsertionList;

  SpillMoveInsertionList* GetSpillMoveInsertionLocations(
      const TopTierRegisterAllocationData* data) const {
    DCHECK(!IsSpilledOnlyInDeferredBlocks(data));
    return spill_move_insertion_locations_;
  }

  void MarkHasPreassignedSlot() { has_preassigned_slot_ = true; }
  bool has_preassigned_slot() const { return has_preassigned_slot_; }

  // Late spilling refers to spilling at places after the definition. These
  // spills are guaranteed to cover at least all of the sub-ranges where the
  // register allocator chose to evict the value from a register.
  void SetLateSpillingSelected(bool late_spilling_selected) {
    DCHECK(spill_type() == SpillType::kSpillRange);
    SpillRangeMode new_mode = late_spilling_selected
                                  ? SpillRangeMode::kSpillLater
                                  : SpillRangeMode::kSpillAtDefinition;
    // A single TopLevelLiveRange should never be used in both modes.
    DCHECK(SpillRangeModeField::decode(bits_) == SpillRangeMode::kNotSet ||
           SpillRangeModeField::decode(bits_) == new_mode);
    bits_ = SpillRangeModeField::update(bits_, new_mode);
  }
  bool LateSpillingSelected() const {
    // Nobody should be reading this value until it's been decided.
    DCHECK_IMPLIES(HasGeneralSpillRange(), SpillRangeModeField::decode(bits_) !=
                                               SpillRangeMode::kNotSet);
    return SpillRangeModeField::decode(bits_) == SpillRangeMode::kSpillLater;
  }

  void AddBlockRequiringSpillOperand(
      RpoNumber block_id, const TopTierRegisterAllocationData* data) {
    DCHECK(IsSpilledOnlyInDeferredBlocks(data));
    GetListOfBlocksRequiringSpillOperands(data)->Add(block_id.ToInt());
  }

  BitVector* GetListOfBlocksRequiringSpillOperands(
      const TopTierRegisterAllocationData* data) const {
    DCHECK(IsSpilledOnlyInDeferredBlocks(data));
    return list_of_blocks_requiring_spill_operands_;
  }

 private:
  friend class LiveRange;

  // If spill type is kSpillRange, then this value indicates whether we've
  // chosen to spill at the definition or at some later points.
  enum class SpillRangeMode : uint8_t {
    kNotSet,
    kSpillAtDefinition,
    kSpillLater,
  };

  using HasSlotUseField = base::BitField<SlotUseKind, 1, 2>;
  using IsPhiField = base::BitField<bool, 3, 1>;
  using IsNonLoopPhiField = base::BitField<bool, 4, 1>;
  using SpillTypeField = base::BitField<SpillType, 5, 2>;
  using DeferredFixedField = base::BitField<bool, 28, 1>;
  using SpillAtLoopHeaderNotBeneficialField = base::BitField<bool, 29, 1>;
  using SpillRangeModeField = base::BitField<SpillRangeMode, 30, 2>;

  int vreg_;
  int last_child_id_;
  union {
    // Correct value determined by spill_type().
    InstructionOperand* spill_operand_;
    SpillRange* spill_range_;
  };

  union {
    SpillMoveInsertionList* spill_move_insertion_locations_;
    BitVector* list_of_blocks_requiring_spill_operands_;
  };

  // TODO(mtrofin): generalize spilling after definition, currently specialized
  // just for spill in a single deferred block.
  bool spilled_in_deferred_blocks_;
  bool has_preassigned_slot_;

  int spill_start_index_;
  UsePosition* last_pos_;
  LiveRange* last_child_covers_;
};

struct PrintableLiveRange {
  const RegisterConfiguration* register_configuration_;
  const LiveRange* range_;
};

std::ostream& operator<<(std::ostream& os,
                         const PrintableLiveRange& printable_range);

class SpillRange final : public ZoneObject {
 public:
  static const int kUnassignedSlot = -1;
  SpillRange(TopLevelLiveRange* range, Zone* zone);
  SpillRange(const SpillRange&) = delete;
  SpillRange& operator=(const SpillRange&) = delete;

  UseInterval* interval() const { return use_interval_; }

  bool IsEmpty() const { return live_ranges_.empty(); }
  bool TryMerge(SpillRange* other);
  bool HasSlot() const { return assigned_slot_ != kUnassignedSlot; }

  void set_assigned_slot(int index) {
    DCHECK_EQ(kUnassignedSlot, assigned_slot_);
    assigned_slot_ = index;
  }
  int assigned_slot() {
    DCHECK_NE(kUnassignedSlot, assigned_slot_);
    return assigned_slot_;
  }
  const ZoneVector<TopLevelLiveRange*>& live_ranges() const {
    return live_ranges_;
  }
  ZoneVector<TopLevelLiveRange*>& live_ranges() { return live_ranges_; }
  // Spill slots can be 4, 8, or 16 bytes wide.
  int byte_width() const { return byte_width_; }
  void Print() const;

 private:
  LifetimePosition End() const { return end_position_; }
  bool IsIntersectingWith(SpillRange* other) const;
  // Merge intervals, making sure the use intervals are sorted.
  void MergeDisjointIntervals(UseInterval* other);

  ZoneVector<TopLevelLiveRange*> live_ranges_;
  UseInterval* use_interval_;
  LifetimePosition end_position_;
  int assigned_slot_;
  int byte_width_;
};

class LiveRangeBound {
 public:
  explicit LiveRangeBound(LiveRange* range, bool skip)
      : range_(range), start_(range->Start()), end_(range->End()), skip_(skip) {
    DCHECK(!range->IsEmpty());
  }
  LiveRangeBound(const LiveRangeBound&) = delete;
  LiveRangeBound& operator=(const LiveRangeBound&) = delete;

  bool CanCover(LifetimePosition position) {
    return start_ <= position && position < end_;
  }

  LiveRange* const range_;
  const LifetimePosition start_;
  const LifetimePosition end_;
  const bool skip_;
};

struct FindResult {
  LiveRange* cur_cover_;
  LiveRange* pred_cover_;
};

class LiveRangeBoundArray {
 public:
  LiveRangeBoundArray() : length_(0), start_(nullptr) {}
  LiveRangeBoundArray(const LiveRangeBoundArray&) = delete;
  LiveRangeBoundArray& operator=(const LiveRangeBoundArray&) = delete;

  bool ShouldInitialize() { return start_ == nullptr; }
  void Initialize(Zone* zone, TopLevelLiveRange* range);
  LiveRangeBound* Find(const LifetimePosition position) const;
  LiveRangeBound* FindPred(const InstructionBlock* pred);
  LiveRangeBound* FindSucc(const InstructionBlock* succ);
  bool FindConnectableSubranges(const InstructionBlock* block,
                                const InstructionBlock* pred,
                                FindResult* result) const;

 private:
  size_t length_;
  LiveRangeBound* start_;
};

class LiveRangeFinder {
 public:
  explicit LiveRangeFinder(const TopTierRegisterAllocationData* data,
                           Zone* zone);
  LiveRangeFinder(const LiveRangeFinder&) = delete;
  LiveRangeFinder& operator=(const LiveRangeFinder&) = delete;

  LiveRangeBoundArray* ArrayFor(int operand_index);

 private:
  const TopTierRegisterAllocationData* const data_;
  const int bounds_length_;
  LiveRangeBoundArray* const bounds_;
  Zone* const zone_;
};

class ConstraintBuilder final : public ZoneObject {
 public:
  explicit ConstraintBuilder(TopTierRegisterAllocationData* data);
  ConstraintBuilder(const ConstraintBuilder&) = delete;
  ConstraintBuilder& operator=(const ConstraintBuilder&) = delete;

  // Phase 1: insert moves to account for fixed register operands.
  void MeetRegisterConstraints();

  // Phase 2: deconstruct SSA by inserting moves in successors and the headers
  // of blocks containing phis.
  void ResolvePhis();

 private:
  TopTierRegisterAllocationData* data() const { return data_; }
  InstructionSequence* code() const { return data()->code(); }
  Zone* allocation_zone() const { return data()->allocation_zone(); }

  InstructionOperand* AllocateFixed(UnallocatedOperand* operand, int pos,
                                    bool is_tagged, bool is_input);
  void MeetRegisterConstraints(const InstructionBlock* block);
  void MeetConstraintsBefore(int index);
  void MeetConstraintsAfter(int index);
  void MeetRegisterConstraintsForLastInstructionInBlock(
      const InstructionBlock* block);
  void ResolvePhis(const InstructionBlock* block);

  TopTierRegisterAllocationData* const data_;
};

class LiveRangeBuilder final : public ZoneObject {
 public:
  explicit LiveRangeBuilder(TopTierRegisterAllocationData* data,
                            Zone* local_zone);
  LiveRangeBuilder(const LiveRangeBuilder&) = delete;
  LiveRangeBuilder& operator=(const LiveRangeBuilder&) = delete;

  // Phase 3: compute liveness of all virtual registers.
  void BuildLiveRanges();
  static BitVector* ComputeLiveOut(const InstructionBlock* block,
                                   TopTierRegisterAllocationData* data);

 private:
  using SpillMode = TopTierRegisterAllocationData::SpillMode;
  static constexpr int kNumberOfFixedRangesPerRegister =
      TopTierRegisterAllocationData::kNumberOfFixedRangesPerRegister;

  TopTierRegisterAllocationData* data() const { return data_; }
  InstructionSequence* code() const { return data()->code(); }
  Zone* allocation_zone() const { return data()->allocation_zone(); }
  Zone* code_zone() const { return code()->zone(); }
  const RegisterConfiguration* config() const { return data()->config(); }
  ZoneVector<BitVector*>& live_in_sets() const {
    return data()->live_in_sets();
  }

  // Verification.
  void Verify() const;
  bool IntervalStartsAtBlockBoundary(const UseInterval* interval) const;
  bool IntervalPredecessorsCoveredByRange(const UseInterval* interval,
                                          const TopLevelLiveRange* range) const;
  bool NextIntervalStartsInDifferentBlocks(const UseInterval* interval) const;

  // Liveness analysis support.
  void AddInitialIntervals(const InstructionBlock* block, BitVector* live_out);
  void ProcessInstructions(const InstructionBlock* block, BitVector* live);
  void ProcessPhis(const InstructionBlock* block, BitVector* live);
  void ProcessLoopHeader(const InstructionBlock* block, BitVector* live);

  static int FixedLiveRangeID(int index) { return -index - 1; }
  int FixedFPLiveRangeID(int index, MachineRepresentation rep);
  TopLevelLiveRange* FixedLiveRangeFor(int index, SpillMode spill_mode);
  TopLevelLiveRange* FixedFPLiveRangeFor(int index, MachineRepresentation rep,
                                         SpillMode spill_mode);
  TopLevelLiveRange* FixedSIMD128LiveRangeFor(int index, SpillMode spill_mode);

  void MapPhiHint(InstructionOperand* operand, UsePosition* use_pos);
  void ResolvePhiHint(InstructionOperand* operand, UsePosition* use_pos);

  UsePosition* NewUsePosition(LifetimePosition pos, InstructionOperand* operand,
                              void* hint, UsePositionHintType hint_type);
  UsePosition* NewUsePosition(LifetimePosition pos) {
    return NewUsePosition(pos, nullptr, nullptr, UsePositionHintType::kNone);
  }
  TopLevelLiveRange* LiveRangeFor(InstructionOperand* operand,
                                  SpillMode spill_mode);
  // Helper methods for building intervals.
  UsePosition* Define(LifetimePosition position, InstructionOperand* operand,
                      void* hint, UsePositionHintType hint_type,
                      SpillMode spill_mode);
  void Define(LifetimePosition position, InstructionOperand* operand,
              SpillMode spill_mode) {
    Define(position, operand, nullptr, UsePositionHintType::kNone, spill_mode);
  }
  UsePosition* Use(LifetimePosition block_start, LifetimePosition position,
                   InstructionOperand* operand, void* hint,
                   UsePositionHintType hint_type, SpillMode spill_mode);
  void Use(LifetimePosition block_start, LifetimePosition position,
           InstructionOperand* operand, SpillMode spill_mode) {
    Use(block_start, position, operand, nullptr, UsePositionHintType::kNone,
        spill_mode);
  }
  SpillMode SpillModeForBlock(const InstructionBlock* block) const {
    return block->IsDeferred() ? SpillMode::kSpillDeferred
                               : SpillMode::kSpillAtDefinition;
  }
  TopTierRegisterAllocationData* const data_;
  ZoneMap<InstructionOperand*, UsePosition*> phi_hints_;
};

class BundleBuilder final : public ZoneObject {
 public:
  explicit BundleBuilder(TopTierRegisterAllocationData* data) : data_(data) {}

  void BuildBundles();

 private:
  TopTierRegisterAllocationData* data() const { return data_; }
  InstructionSequence* code() const { return data_->code(); }
  TopTierRegisterAllocationData* data_;
  int next_bundle_id_ = 0;
};

1298 class RegisterAllocator : public ZoneObject {
1299  public:
1300   RegisterAllocator(TopTierRegisterAllocationData* data, RegisterKind kind);
1301   RegisterAllocator(const RegisterAllocator&) = delete;
1302   RegisterAllocator& operator=(const RegisterAllocator&) = delete;
1303 
1304  protected:
1305   using SpillMode = TopTierRegisterAllocationData::SpillMode;
data()1306   TopTierRegisterAllocationData* data() const { return data_; }
code()1307   InstructionSequence* code() const { return data()->code(); }
mode()1308   RegisterKind mode() const { return mode_; }
num_registers()1309   int num_registers() const { return num_registers_; }
num_allocatable_registers()1310   int num_allocatable_registers() const { return num_allocatable_registers_; }
allocatable_register_codes()1311   const int* allocatable_register_codes() const {
1312     return allocatable_register_codes_;
1313   }
1314   // Returns true iff. we must check float register aliasing.
check_fp_aliasing()1315   bool check_fp_aliasing() const { return check_fp_aliasing_; }
1316 
1317   // TODO(mtrofin): explain why splitting in gap START is always OK.
1318   LifetimePosition GetSplitPositionForInstruction(const LiveRange* range,
1319                                                   int instruction_index);

  Zone* allocation_zone() const { return data()->allocation_zone(); }

  // Find the optimal split for ranges defined by a memory operand, e.g.
  // constants or function parameters passed on the stack.
  void SplitAndSpillRangesDefinedByMemoryOperand();

  // Split the given range at the given position.
  // If the range starts at or after the given position, the original range
  // is returned. Otherwise, returns the live range that starts at pos and
  // contains all uses from the original range that follow pos. Uses exactly
  // at pos remain owned by the original range after splitting.
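  // E.g. (positions are illustrative): splitting a range that covers
  // [10, 50) at position 20 shrinks the original to [10, 20) and returns a
  // child covering [20, 50); a use exactly at 20 stays with the original.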
  LiveRange* SplitRangeAt(LiveRange* range, LifetimePosition pos);

  bool CanProcessRange(LiveRange* range) const {
    return range != nullptr && !range->IsEmpty() && range->kind() == mode();
  }

  // Split the given range at a position from the interval [start, end].
  LiveRange* SplitBetween(LiveRange* range, LifetimePosition start,
                          LifetimePosition end);

  // Find a lifetime position in the interval [start, end] which is optimal
  // for splitting: either the header of the outermost loop covered by this
  // interval or the latest possible position.
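  // E.g. if [start, end] spans a loop, splitting at the loop header keeps
  // the resulting spill code out of the loop body; otherwise the latest
  // position is preferred, delaying the split as long as possible.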
  LifetimePosition FindOptimalSplitPos(LifetimePosition start,
                                       LifetimePosition end);

  void Spill(LiveRange* range, SpillMode spill_mode);

  // If we are trying to spill a range inside a loop, try to hoist the spill
  // position out to the point just before the loop.
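  // E.g. a spill that would land inside a loop body may be hoisted to just
  // before the loop, so the store is not re-executed on every iteration.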
  LifetimePosition FindOptimalSpillingPos(LiveRange* range,
                                          LifetimePosition pos,
                                          SpillMode spill_mode,
                                          LiveRange** begin_spill_out);

  const ZoneVector<TopLevelLiveRange*>& GetFixedRegisters() const;
  const char* RegisterName(int allocation_index) const;

 private:
  TopTierRegisterAllocationData* const data_;
  const RegisterKind mode_;
  const int num_registers_;
  int num_allocatable_registers_;
  const int* allocatable_register_codes_;
  bool check_fp_aliasing_;

  bool no_combining_;
};

class LinearScanAllocator final : public RegisterAllocator {
 public:
  LinearScanAllocator(TopTierRegisterAllocationData* data, RegisterKind kind,
                      Zone* local_zone);
  LinearScanAllocator(const LinearScanAllocator&) = delete;
  LinearScanAllocator& operator=(const LinearScanAllocator&) = delete;

  // Phase 4: compute register assignments.
  void AllocateRegisters();

 private:
  struct RangeWithRegister {
    TopLevelLiveRange* range;
    int expected_register;
    struct Hash {
      size_t operator()(const RangeWithRegister item) const {
        return item.range->vreg();
      }
    };
    struct Equals {
      bool operator()(const RangeWithRegister one,
                      const RangeWithRegister two) const {
        return one.range == two.range;
      }
    };

    explicit RangeWithRegister(LiveRange* a_range)
        : range(a_range->TopLevel()),
          expected_register(a_range->assigned_register()) {}
    RangeWithRegister(TopLevelLiveRange* toplevel, int reg)
        : range(toplevel), expected_register(reg) {}
  };

  using RangeWithRegisterSet =
      ZoneUnorderedSet<RangeWithRegister, RangeWithRegister::Hash,
                       RangeWithRegister::Equals>;
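
  // Note: Hash keys on the top-level range's vreg and Equals compares
  // top-level range identity, so a RangeWithRegisterSet records at most one
  // expected register per virtual register.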

  void MaybeSpillPreviousRanges(LiveRange* begin_range,
                                LifetimePosition begin_pos,
                                LiveRange* end_range);
  void MaybeUndoPreviousSplit(LiveRange* range);
  void SpillNotLiveRanges(RangeWithRegisterSet* to_be_live,
                          LifetimePosition position, SpillMode spill_mode);
  LiveRange* AssignRegisterOnReload(LiveRange* range, int reg);
  void ReloadLiveRanges(RangeWithRegisterSet const& to_be_live,
                        LifetimePosition position);

  void UpdateDeferredFixedRanges(SpillMode spill_mode, InstructionBlock* block);
  bool BlockIsDeferredOrImmediatePredecessorIsNotDeferred(
      const InstructionBlock* block);
  bool HasNonDeferredPredecessor(InstructionBlock* block);

  struct UnhandledLiveRangeOrdering {
    bool operator()(const LiveRange* a, const LiveRange* b) const {
      return a->ShouldBeAllocatedBefore(b);
    }
  };

  struct InactiveLiveRangeOrdering {
    bool operator()(const LiveRange* a, const LiveRange* b) const {
      return a->NextStart() < b->NextStart();
    }
  };

  using UnhandledLiveRangeQueue =
      ZoneMultiset<LiveRange*, UnhandledLiveRangeOrdering>;
  using InactiveLiveRangeQueue =
      ZoneMultiset<LiveRange*, InactiveLiveRangeOrdering>;
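
  // The unhandled queue is ordered by ShouldBeAllocatedBefore() (essentially
  // by start position), while each per-register inactive queue is ordered by
  // NextStart(), so scans can stop at the first range that starts beyond the
  // current position.
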
  UnhandledLiveRangeQueue& unhandled_live_ranges() {
    return unhandled_live_ranges_;
  }
  ZoneVector<LiveRange*>& active_live_ranges() { return active_live_ranges_; }
  InactiveLiveRangeQueue& inactive_live_ranges(int reg) {
    return inactive_live_ranges_[reg];
  }

  void SetLiveRangeAssignedRegister(LiveRange* range, int reg);

  // Helper methods for updating the live range lists.
  void AddToActive(LiveRange* range);
  void AddToInactive(LiveRange* range);
  void AddToUnhandled(LiveRange* range);
  ZoneVector<LiveRange*>::iterator ActiveToHandled(
      ZoneVector<LiveRange*>::iterator it);
  ZoneVector<LiveRange*>::iterator ActiveToInactive(
      ZoneVector<LiveRange*>::iterator it, LifetimePosition position);
  InactiveLiveRangeQueue::iterator InactiveToHandled(
      InactiveLiveRangeQueue::iterator it);
  InactiveLiveRangeQueue::iterator InactiveToActive(
      InactiveLiveRangeQueue::iterator it, LifetimePosition position);
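
  // These transitions implement the classic linear-scan state machine: a
  // range is active while it covers the current position, inactive while it
  // has a lifetime hole there, and handled once allocation has moved past
  // its end; ForwardStateTo() advances all lists to a new position.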

  void ForwardStateTo(LifetimePosition position);

  int LastDeferredInstructionIndex(InstructionBlock* start);

  // Helper methods for choosing state after control flow events.

  bool ConsiderBlockForControlFlow(InstructionBlock* current_block,
                                   RpoNumber predecessor);
  RpoNumber ChooseOneOfTwoPredecessorStates(InstructionBlock* current_block,
                                            LifetimePosition boundary);
  bool CheckConflict(MachineRepresentation rep, int reg,
                     RangeWithRegisterSet* to_be_live);
  void ComputeStateFromManyPredecessors(InstructionBlock* current_block,
                                        RangeWithRegisterSet* to_be_live);

  // Helper methods for allocating registers.
  bool TryReuseSpillForPhi(TopLevelLiveRange* range);
  int PickRegisterThatIsAvailableLongest(
      LiveRange* current, int hint_reg,
      const base::Vector<LifetimePosition>& free_until_pos);
  bool TryAllocateFreeReg(LiveRange* range,
                          const base::Vector<LifetimePosition>& free_until_pos);
  bool TryAllocatePreferredReg(
      LiveRange* range, const base::Vector<LifetimePosition>& free_until_pos);
  void GetFPRegisterSet(MachineRepresentation rep, int* num_regs,
                        int* num_codes, const int** codes) const;
  void GetSIMD128RegisterSet(int* num_regs, int* num_codes,
                             const int** codes) const;
  void FindFreeRegistersForRange(LiveRange* range,
                                 base::Vector<LifetimePosition> free_until_pos);
  void ProcessCurrentRange(LiveRange* current, SpillMode spill_mode);
  void AllocateBlockedReg(LiveRange* range, SpillMode spill_mode);
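
  // Sketch of the decision logic implied by the helpers above (the details
  // live in the implementation): free_until_pos[r] holds the position at
  // which register r next becomes unavailable. If some register stays free
  // for the whole current range, it is assigned outright; otherwise the
  // range is split at the last position a register is free, or a blocked
  // register is made available by spilling its current users
  // (AllocateBlockedReg).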

  // Spill the given live range after position pos.
  void SpillAfter(LiveRange* range, LifetimePosition pos, SpillMode spill_mode);

  // Spill the given live range after position [start] and up to position
  // [end].
  void SpillBetween(LiveRange* range, LifetimePosition start,
                    LifetimePosition end, SpillMode spill_mode);

  // Spill the given live range after position [start] and up to position
  // [end]. The range is guaranteed to be spilled at least until position
  // [until].
  void SpillBetweenUntil(LiveRange* range, LifetimePosition start,
                         LifetimePosition until, LifetimePosition end,
                         SpillMode spill_mode);
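  // E.g. a range passed to SpillBetweenUntil() is definitely spilled over
  // [start, until], while its portion between [until] and [end] may be
  // reconsidered for a register; SpillBetween() offers no such window.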
  void SplitAndSpillIntersecting(LiveRange* range, SpillMode spill_mode);

  void PrintRangeRow(std::ostream& os, const TopLevelLiveRange* toplevel);

  void PrintRangeOverview();

  UnhandledLiveRangeQueue unhandled_live_ranges_;
  ZoneVector<LiveRange*> active_live_ranges_;
  ZoneVector<InactiveLiveRangeQueue> inactive_live_ranges_;

  // Approximation of the position at which the set of ranges will change
  // next. Used to avoid scanning for updates even if none are present.
  LifetimePosition next_active_ranges_change_;
  LifetimePosition next_inactive_ranges_change_;

#ifdef DEBUG
  LifetimePosition allocation_finger_;
#endif
};

class OperandAssigner final : public ZoneObject {
 public:
  explicit OperandAssigner(TopTierRegisterAllocationData* data);
  OperandAssigner(const OperandAssigner&) = delete;
  OperandAssigner& operator=(const OperandAssigner&) = delete;

  // Phase 5: final decision on spilling mode.
  void DecideSpillingMode();

  // Phase 6: assign spill slots.
  void AssignSpillSlots();

  // Phase 7: commit assignment.
  void CommitAssignment();

 private:
  TopTierRegisterAllocationData* data() const { return data_; }

  TopTierRegisterAllocationData* const data_;
};

class ReferenceMapPopulator final : public ZoneObject {
 public:
  explicit ReferenceMapPopulator(TopTierRegisterAllocationData* data);
  ReferenceMapPopulator(const ReferenceMapPopulator&) = delete;
  ReferenceMapPopulator& operator=(const ReferenceMapPopulator&) = delete;

  // Phase 10: compute values for pointer maps.
  void PopulateReferenceMaps();

 private:
  TopTierRegisterAllocationData* data() const { return data_; }

  bool SafePointsAreInOrder() const;

  TopTierRegisterAllocationData* const data_;
};

class LiveRangeBoundArray;
// Insert moves of the form
//
//          Operand(child_(k+1)) = Operand(child_k)
//
// where child_k and child_(k+1) are consecutive children of a range (so
// child_k->next() == child_(k+1)), and Operand(...) refers to the
// assigned operand, be it a register or a slot.
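//
// E.g. if child_k was assigned a register and child_(k+1) a stack slot, a
// gap move from that register to the slot is inserted at the position where
// the two children meet (the concrete operands depend on the assignment).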
class LiveRangeConnector final : public ZoneObject {
 public:
  explicit LiveRangeConnector(TopTierRegisterAllocationData* data);
  LiveRangeConnector(const LiveRangeConnector&) = delete;
  LiveRangeConnector& operator=(const LiveRangeConnector&) = delete;

  // Phase 8: reconnect split ranges with moves, when the control flow
  // between the ranges is trivial (no branches).
  void ConnectRanges(Zone* local_zone);

  // Phase 9: insert moves to connect ranges across basic blocks, when the
  // control flow between them cannot be trivially resolved, such as joining
  // branches. Also determines whether to spill at the definition or later,
  // and adds spill moves to the gaps in the schedule.
  void ResolveControlFlow(Zone* local_zone);

 private:
  TopTierRegisterAllocationData* data() const { return data_; }
  InstructionSequence* code() const { return data()->code(); }
  Zone* code_zone() const { return code()->zone(); }

  bool CanEagerlyResolveControlFlow(const InstructionBlock* block) const;

  int ResolveControlFlow(const InstructionBlock* block,
                         const InstructionOperand& cur_op,
                         const InstructionBlock* pred,
                         const InstructionOperand& pred_op);

  void CommitSpillsInDeferredBlocks(TopLevelLiveRange* range,
                                    LiveRangeBoundArray* array,
                                    Zone* temp_zone);

  TopTierRegisterAllocationData* const data_;
};

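// For reference, the allocation phases declared above (see the numbered
// comments on each class):
//   4. LinearScanAllocator::AllocateRegisters
//   5. OperandAssigner::DecideSpillingMode
//   6. OperandAssigner::AssignSpillSlots
//   7. OperandAssigner::CommitAssignment
//   8. LiveRangeConnector::ConnectRanges
//   9. LiveRangeConnector::ResolveControlFlow
//  10. ReferenceMapPopulator::PopulateReferenceMaps
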
}  // namespace compiler
}  // namespace internal
}  // namespace v8

#endif  // V8_COMPILER_BACKEND_REGISTER_ALLOCATOR_H_