// Copyright 2014 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef V8_COMPILER_BACKEND_REGISTER_ALLOCATOR_H_
#define V8_COMPILER_BACKEND_REGISTER_ALLOCATOR_H_

#include "src/base/bits.h"
#include "src/base/compiler-specific.h"
#include "src/codegen/register-configuration.h"
#include "src/common/globals.h"
#include "src/compiler/backend/instruction.h"
#include "src/compiler/backend/register-allocation.h"
#include "src/flags/flags.h"
#include "src/utils/ostreams.h"
#include "src/zone/zone-containers.h"

namespace v8 {
namespace internal {

class TickCounter;

namespace compiler {

static const int32_t kUnassignedRegister = RegisterConfiguration::kMaxRegisters;

// This class represents a single point of an InstructionOperand's lifetime.
// For each instruction there are four lifetime positions:
//
//   [[START, END], [START, END]]
//
// Where the first half position corresponds to
//
//  [GapPosition::START, GapPosition::END]
//
// and the second half position corresponds to
//
//  [Lifetime::USED_AT_START, Lifetime::USED_AT_END]
//
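// Example (illustrative; the concrete values follow from kStep == 4 and
// kHalfStep == 2 defined below): instruction index 1 occupies the four
// consecutive position values
//
//   GapFromInstructionIndex(1)               -> 4  (gap START)
//   GapFromInstructionIndex(1).End()         -> 5  (gap END)
//   InstructionFromInstructionIndex(1)       -> 6  (instruction START)
//   InstructionFromInstructionIndex(1).End() -> 7  (instruction END)
//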
class LifetimePosition final {
 public:
  // Return the lifetime position that corresponds to the beginning of
  // the gap with the given index.
  static LifetimePosition GapFromInstructionIndex(int index) {
    return LifetimePosition(index * kStep);
  }
  // Return the lifetime position that corresponds to the beginning of
  // the instruction with the given index.
  static LifetimePosition InstructionFromInstructionIndex(int index) {
    return LifetimePosition(index * kStep + kHalfStep);
  }

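  // Example (illustrative): with kStep == 4, positions with values 4 (gap
  // START of instruction 1) and 6 (instruction START of 1) have the gap END
  // value 5 between them, so ExistsGapPositionBetween returns true for them;
  // between an instruction's own START (6) and END (7) no gap position
  // exists, so it returns false.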
  static bool ExistsGapPositionBetween(LifetimePosition pos1,
                                       LifetimePosition pos2) {
    if (pos1 > pos2) std::swap(pos1, pos2);
    LifetimePosition next(pos1.value_ + 1);
    if (next.IsGapPosition()) return next < pos2;
    return next.NextFullStart() < pos2;
  }

  // Returns a numeric representation of this lifetime position.
  int value() const { return value_; }

  // Returns the index of the instruction to which this lifetime position
  // corresponds.
  int ToInstructionIndex() const {
    DCHECK(IsValid());
    return value_ / kStep;
  }

  // Returns true if this lifetime position corresponds to a START value
  bool IsStart() const { return (value_ & (kHalfStep - 1)) == 0; }
  // Returns true if this lifetime position corresponds to an END value
  bool IsEnd() const { return (value_ & (kHalfStep - 1)) == 1; }
  // Returns true if this lifetime position corresponds to a gap START value
  bool IsFullStart() const { return (value_ & (kStep - 1)) == 0; }

  bool IsGapPosition() const { return (value_ & 0x2) == 0; }
  bool IsInstructionPosition() const { return !IsGapPosition(); }

  // Returns the lifetime position for the current START.
  LifetimePosition Start() const {
    DCHECK(IsValid());
    return LifetimePosition(value_ & ~(kHalfStep - 1));
  }

  // Returns the lifetime position for the current gap START.
  LifetimePosition FullStart() const {
    DCHECK(IsValid());
    return LifetimePosition(value_ & ~(kStep - 1));
  }

  // Returns the lifetime position for the current END.
  LifetimePosition End() const {
    DCHECK(IsValid());
    return LifetimePosition(Start().value_ + kHalfStep / 2);
  }

  // Returns the lifetime position for the beginning of the next START.
  LifetimePosition NextStart() const {
    DCHECK(IsValid());
    return LifetimePosition(Start().value_ + kHalfStep);
  }

  // Returns the lifetime position for the beginning of the next gap START.
  LifetimePosition NextFullStart() const {
    DCHECK(IsValid());
    return LifetimePosition(FullStart().value_ + kStep);
  }

  // Returns the lifetime position for the beginning of the previous START.
  LifetimePosition PrevStart() const {
    DCHECK(IsValid());
    DCHECK_LE(kHalfStep, value_);
    return LifetimePosition(Start().value_ - kHalfStep);
  }

  // Constructs the lifetime position which does not correspond to any
  // instruction.
  LifetimePosition() : value_(-1) {}

  // Returns true if this lifetime position corresponds to some
  // instruction.
  bool IsValid() const { return value_ != -1; }

  bool operator<(const LifetimePosition& that) const {
    return this->value_ < that.value_;
  }

  bool operator<=(const LifetimePosition& that) const {
    return this->value_ <= that.value_;
  }

  bool operator==(const LifetimePosition& that) const {
    return this->value_ == that.value_;
  }

  bool operator!=(const LifetimePosition& that) const {
    return this->value_ != that.value_;
  }

  bool operator>(const LifetimePosition& that) const {
    return this->value_ > that.value_;
  }

  bool operator>=(const LifetimePosition& that) const {
    return this->value_ >= that.value_;
  }

  void Print() const;

  static inline LifetimePosition Invalid() { return LifetimePosition(); }

  static inline LifetimePosition MaxPosition() {
    // We have to use this kind of getter instead of a static member due to
    // a crash bug in GDB.
    return LifetimePosition(kMaxInt);
  }

  static inline LifetimePosition FromInt(int value) {
    return LifetimePosition(value);
  }

 private:
  static const int kHalfStep = 2;
  static const int kStep = 2 * kHalfStep;

  static_assert(base::bits::IsPowerOfTwo(kHalfStep),
                "Code relies on kStep and kHalfStep being a power of two");

  explicit LifetimePosition(int value) : value_(value) {}

  int value_;
};

std::ostream& operator<<(std::ostream& os, const LifetimePosition pos);

enum class RegisterAllocationFlag : unsigned { kTraceAllocation = 1 << 0 };

using RegisterAllocationFlags = base::Flags<RegisterAllocationFlag>;

class SpillRange;
class LiveRange;
class TopLevelLiveRange;

class TopTierRegisterAllocationData final : public RegisterAllocationData {
 public:
  TopTierRegisterAllocationData(const TopTierRegisterAllocationData&) = delete;
  TopTierRegisterAllocationData& operator=(
      const TopTierRegisterAllocationData&) = delete;

  static const TopTierRegisterAllocationData* cast(
      const RegisterAllocationData* data) {
    DCHECK_EQ(data->type(), Type::kTopTier);
    return static_cast<const TopTierRegisterAllocationData*>(data);
  }

  static TopTierRegisterAllocationData* cast(RegisterAllocationData* data) {
    DCHECK_EQ(data->type(), Type::kTopTier);
    return static_cast<TopTierRegisterAllocationData*>(data);
  }

  static const TopTierRegisterAllocationData& cast(
      const RegisterAllocationData& data) {
    DCHECK_EQ(data.type(), Type::kTopTier);
    return static_cast<const TopTierRegisterAllocationData&>(data);
  }

  // Encodes whether a spill happens in deferred code (kSpillDeferred) or
  // regular code (kSpillAtDefinition).
  enum SpillMode { kSpillAtDefinition, kSpillDeferred };

  bool is_trace_alloc() {
    return flags_ & RegisterAllocationFlag::kTraceAllocation;
  }

  static constexpr int kNumberOfFixedRangesPerRegister = 2;

  class PhiMapValue : public ZoneObject {
   public:
    PhiMapValue(PhiInstruction* phi, const InstructionBlock* block, Zone* zone);

    const PhiInstruction* phi() const { return phi_; }
    const InstructionBlock* block() const { return block_; }

    // For hinting.
    int assigned_register() const { return assigned_register_; }
    void set_assigned_register(int register_code) {
      DCHECK_EQ(assigned_register_, kUnassignedRegister);
      assigned_register_ = register_code;
    }
    void UnsetAssignedRegister() { assigned_register_ = kUnassignedRegister; }

    void AddOperand(InstructionOperand* operand);
    void CommitAssignment(const InstructionOperand& operand);

   private:
    PhiInstruction* const phi_;
    const InstructionBlock* const block_;
    ZoneVector<InstructionOperand*> incoming_operands_;
    int assigned_register_;
  };
  using PhiMap = ZoneMap<int, PhiMapValue*>;

  struct DelayedReference {
    ReferenceMap* map;
    InstructionOperand* operand;
  };
  using DelayedReferences = ZoneVector<DelayedReference>;
  using RangesWithPreassignedSlots =
      ZoneVector<std::pair<TopLevelLiveRange*, int>>;

  TopTierRegisterAllocationData(const RegisterConfiguration* config,
                                Zone* allocation_zone, Frame* frame,
                                InstructionSequence* code,
                                RegisterAllocationFlags flags,
                                TickCounter* tick_counter,
                                const char* debug_name = nullptr);

  const ZoneVector<TopLevelLiveRange*>& live_ranges() const {
    return live_ranges_;
  }
  ZoneVector<TopLevelLiveRange*>& live_ranges() { return live_ranges_; }
  const ZoneVector<TopLevelLiveRange*>& fixed_live_ranges() const {
    return fixed_live_ranges_;
  }
  ZoneVector<TopLevelLiveRange*>& fixed_live_ranges() {
    return fixed_live_ranges_;
  }
  ZoneVector<TopLevelLiveRange*>& fixed_float_live_ranges() {
    return fixed_float_live_ranges_;
  }
  const ZoneVector<TopLevelLiveRange*>& fixed_float_live_ranges() const {
    return fixed_float_live_ranges_;
  }
  ZoneVector<TopLevelLiveRange*>& fixed_double_live_ranges() {
    return fixed_double_live_ranges_;
  }
  const ZoneVector<TopLevelLiveRange*>& fixed_double_live_ranges() const {
    return fixed_double_live_ranges_;
  }
  ZoneVector<TopLevelLiveRange*>& fixed_simd128_live_ranges() {
    return fixed_simd128_live_ranges_;
  }
  const ZoneVector<TopLevelLiveRange*>& fixed_simd128_live_ranges() const {
    return fixed_simd128_live_ranges_;
  }
  ZoneVector<BitVector*>& live_in_sets() { return live_in_sets_; }
  ZoneVector<BitVector*>& live_out_sets() { return live_out_sets_; }
  ZoneVector<SpillRange*>& spill_ranges() { return spill_ranges_; }
  DelayedReferences& delayed_references() { return delayed_references_; }
  InstructionSequence* code() const { return code_; }
  // This zone is for data structures only needed during register allocation
  // phases.
  Zone* allocation_zone() const { return allocation_zone_; }
  // This zone is for InstructionOperands and moves that live beyond register
  // allocation.
  Zone* code_zone() const { return code()->zone(); }
  Frame* frame() const { return frame_; }
  const char* debug_name() const { return debug_name_; }
  const RegisterConfiguration* config() const { return config_; }

  MachineRepresentation RepresentationFor(int virtual_register);

  TopLevelLiveRange* GetOrCreateLiveRangeFor(int index);
  // Creates a new live range.
  TopLevelLiveRange* NewLiveRange(int index, MachineRepresentation rep);
  TopLevelLiveRange* NextLiveRange(MachineRepresentation rep);

  SpillRange* AssignSpillRangeToLiveRange(TopLevelLiveRange* range,
                                          SpillMode spill_mode);
  SpillRange* CreateSpillRangeForLiveRange(TopLevelLiveRange* range);

  MoveOperands* AddGapMove(int index, Instruction::GapPosition position,
                           const InstructionOperand& from,
                           const InstructionOperand& to);

  bool ExistsUseWithoutDefinition();
  bool RangesDefinedInDeferredStayInDeferred();

  void MarkFixedUse(MachineRepresentation rep, int index);
  bool HasFixedUse(MachineRepresentation rep, int index);

  void MarkAllocated(MachineRepresentation rep, int index);

  PhiMapValue* InitializePhiMap(const InstructionBlock* block,
                                PhiInstruction* phi);
  PhiMapValue* GetPhiMapValueFor(TopLevelLiveRange* top_range);
  PhiMapValue* GetPhiMapValueFor(int virtual_register);
  bool IsBlockBoundary(LifetimePosition pos) const;

  RangesWithPreassignedSlots& preassigned_slot_ranges() {
    return preassigned_slot_ranges_;
  }

  void RememberSpillState(RpoNumber block,
                          const ZoneVector<LiveRange*>& state) {
    spill_state_[block.ToSize()] = state;
  }

  ZoneVector<LiveRange*>& GetSpillState(RpoNumber block) {
    auto& result = spill_state_[block.ToSize()];
    return result;
  }

  void ResetSpillState() {
    for (auto& state : spill_state_) {
      state.clear();
    }
  }

  TickCounter* tick_counter() { return tick_counter_; }

 private:
  int GetNextLiveRangeId();

  Zone* const allocation_zone_;
  Frame* const frame_;
  InstructionSequence* const code_;
  const char* const debug_name_;
  const RegisterConfiguration* const config_;
  PhiMap phi_map_;
  ZoneVector<BitVector*> live_in_sets_;
  ZoneVector<BitVector*> live_out_sets_;
  ZoneVector<TopLevelLiveRange*> live_ranges_;
  ZoneVector<TopLevelLiveRange*> fixed_live_ranges_;
  ZoneVector<TopLevelLiveRange*> fixed_float_live_ranges_;
  ZoneVector<TopLevelLiveRange*> fixed_double_live_ranges_;
  ZoneVector<TopLevelLiveRange*> fixed_simd128_live_ranges_;
  ZoneVector<SpillRange*> spill_ranges_;
  DelayedReferences delayed_references_;
  BitVector* assigned_registers_;
  BitVector* assigned_double_registers_;
  BitVector* fixed_register_use_;
  BitVector* fixed_fp_register_use_;
  int virtual_register_count_;
  RangesWithPreassignedSlots preassigned_slot_ranges_;
  ZoneVector<ZoneVector<LiveRange*>> spill_state_;
  RegisterAllocationFlags flags_;
  TickCounter* const tick_counter_;
};

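// Construction sketch (illustrative; the zone, frame, sequence, and tick
// counter are assumed to be provided by whatever pipeline drives allocation,
// and the debug name is hypothetical):
//
//   TopTierRegisterAllocationData data(
//       RegisterConfiguration::Default(), &allocation_zone, &frame,
//       &sequence, RegisterAllocationFlags{}, &tick_counter, "my_function");
//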
// Representation of the non-empty interval [start,end[.
class UseInterval final : public ZoneObject {
 public:
  UseInterval(LifetimePosition start, LifetimePosition end)
      : start_(start), end_(end), next_(nullptr) {
    DCHECK(start < end);
  }
  UseInterval(const UseInterval&) = delete;
  UseInterval& operator=(const UseInterval&) = delete;

  LifetimePosition start() const { return start_; }
  void set_start(LifetimePosition start) { start_ = start; }
  LifetimePosition end() const { return end_; }
  void set_end(LifetimePosition end) { end_ = end; }
  UseInterval* next() const { return next_; }
  void set_next(UseInterval* next) { next_ = next; }

  // Split this interval at the given position without affecting the
  // live range that owns it. The interval must contain the position.
  UseInterval* SplitAt(LifetimePosition pos, Zone* zone);

  // If this interval intersects with {other}, returns the smallest position
  // that belongs to both of them.
  LifetimePosition Intersect(const UseInterval* other) const {
    if (other->start() < start_) return other->Intersect(this);
    if (other->start() < end_) return other->start();
    return LifetimePosition::Invalid();
  }

  bool Contains(LifetimePosition point) const {
    return start_ <= point && point < end_;
  }

  // Returns the index of the first gap covered by this interval.
  int FirstGapIndex() const {
    int ret = start_.ToInstructionIndex();
    if (start_.IsInstructionPosition()) {
      ++ret;
    }
    return ret;
  }

  // Returns the index of the last gap covered by this interval.
  int LastGapIndex() const {
    int ret = end_.ToInstructionIndex();
    if (end_.IsGapPosition() && end_.IsStart()) {
      --ret;
    }
    return ret;
  }

 private:
  LifetimePosition start_;
  LifetimePosition end_;
  UseInterval* next_;
};

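// Usage sketch (illustrative; assumes a Zone* zone): two overlapping
// intervals intersect at the later of their two starts.
//
//   UseInterval* a = zone->New<UseInterval>(LifetimePosition::FromInt(4),
//                                           LifetimePosition::FromInt(8));
//   UseInterval* b = zone->New<UseInterval>(LifetimePosition::FromInt(6),
//                                           LifetimePosition::FromInt(12));
//   a->Intersect(b);  // == LifetimePosition::FromInt(6)
//   b->Intersect(a);  // symmetric: delegates to a->Intersect(b)
//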
enum class UsePositionType : uint8_t {
  kRegisterOrSlot,
  kRegisterOrSlotOrConstant,
  kRequiresRegister,
  kRequiresSlot
};

enum class UsePositionHintType : uint8_t {
  kNone,
  kOperand,
  kUsePos,
  kPhi,
  kUnresolved
};

// Representation of a use position.
class V8_EXPORT_PRIVATE UsePosition final
    : public NON_EXPORTED_BASE(ZoneObject) {
 public:
  UsePosition(LifetimePosition pos, InstructionOperand* operand, void* hint,
              UsePositionHintType hint_type);
  UsePosition(const UsePosition&) = delete;
  UsePosition& operator=(const UsePosition&) = delete;

  InstructionOperand* operand() const { return operand_; }
  bool HasOperand() const { return operand_ != nullptr; }

  bool RegisterIsBeneficial() const {
    return RegisterBeneficialField::decode(flags_);
  }
  bool SpillDetrimental() const {
    return SpillDetrimentalField::decode(flags_);
  }

  UsePositionType type() const { return TypeField::decode(flags_); }
  void set_type(UsePositionType type, bool register_beneficial);

  LifetimePosition pos() const { return pos_; }

  UsePosition* next() const { return next_; }
  void set_next(UsePosition* next) { next_ = next; }

  // For hinting only.
  void set_assigned_register(int register_code) {
    flags_ = AssignedRegisterField::update(flags_, register_code);
  }
  void set_spill_detrimental() {
    flags_ = SpillDetrimentalField::update(flags_, true);
  }

  UsePositionHintType hint_type() const {
    return HintTypeField::decode(flags_);
  }
  bool HasHint() const;
  bool HintRegister(int* register_code) const;
  void SetHint(UsePosition* use_pos);
  void ResolveHint(UsePosition* use_pos);
  bool IsResolved() const {
    return hint_type() != UsePositionHintType::kUnresolved;
  }
  static UsePositionHintType HintTypeForOperand(const InstructionOperand& op);

 private:
  using TypeField = base::BitField<UsePositionType, 0, 2>;
  using HintTypeField = base::BitField<UsePositionHintType, 2, 3>;
  using RegisterBeneficialField = base::BitField<bool, 5, 1>;
  using AssignedRegisterField = base::BitField<int32_t, 6, 6>;
  using SpillDetrimentalField = base::BitField<int32_t, 12, 1>;

  InstructionOperand* const operand_;
  void* hint_;
  UsePosition* next_;
  LifetimePosition const pos_;
  uint32_t flags_;
};

class SpillRange;
class TopTierRegisterAllocationData;
class TopLevelLiveRange;
class LiveRangeBundle;

// Representation of SSA values' live ranges as a collection of (continuous)
// intervals over the instruction ordering.
class V8_EXPORT_PRIVATE LiveRange : public NON_EXPORTED_BASE(ZoneObject) {
 public:
  LiveRange(const LiveRange&) = delete;
  LiveRange& operator=(const LiveRange&) = delete;

  UseInterval* first_interval() const { return first_interval_; }
  UsePosition* first_pos() const { return first_pos_; }
  TopLevelLiveRange* TopLevel() { return top_level_; }
  const TopLevelLiveRange* TopLevel() const { return top_level_; }

  bool IsTopLevel() const;

  LiveRange* next() const { return next_; }

  int relative_id() const { return relative_id_; }

  bool IsEmpty() const { return first_interval() == nullptr; }

  InstructionOperand GetAssignedOperand() const;

  MachineRepresentation representation() const {
    return RepresentationField::decode(bits_);
  }

  int assigned_register() const { return AssignedRegisterField::decode(bits_); }
  bool HasRegisterAssigned() const {
    return assigned_register() != kUnassignedRegister;
  }
  void set_assigned_register(int reg);
  void UnsetAssignedRegister();

  bool ShouldRecombine() const { return RecombineField::decode(bits_); }

  void SetRecombine() { bits_ = RecombineField::update(bits_, true); }
  void set_controlflow_hint(int reg) {
    bits_ = ControlFlowRegisterHint::update(bits_, reg);
  }
  int controlflow_hint() const {
    return ControlFlowRegisterHint::decode(bits_);
  }
  bool RegisterFromControlFlow(int* reg) {
    int hint = controlflow_hint();
    if (hint != kUnassignedRegister) {
      *reg = hint;
      return true;
    }
    return false;
  }
  bool spilled() const { return SpilledField::decode(bits_); }
  void AttachToNext();
  void Unspill();
  void Spill();

  RegisterKind kind() const;

  // Returns use position in this live range that follows both start
  // and last processed use position.
  UsePosition* NextUsePosition(LifetimePosition start) const;

  // Returns use position for which register is required in this live
  // range and which follows both start and last processed use position.
  UsePosition* NextRegisterPosition(LifetimePosition start) const;

  // Returns the first use position requiring stack slot, or nullptr.
  UsePosition* NextSlotPosition(LifetimePosition start) const;

  // Returns use position for which register is beneficial in this live
  // range and which follows both start and last processed use position.
  UsePosition* NextUsePositionRegisterIsBeneficial(
      LifetimePosition start) const;

  // Returns lifetime position for which register is beneficial in this live
  // range and which follows both start and last processed use position.
  LifetimePosition NextLifetimePositionRegisterIsBeneficial(
      const LifetimePosition& start) const;

  // Returns use position for which register is beneficial in this live
  // range and which precedes start.
  UsePosition* PreviousUsePositionRegisterIsBeneficial(
      LifetimePosition start) const;

  // Returns use position for which spilling is detrimental in this live
  // range and which follows both start and last processed use position.
  UsePosition* NextUsePositionSpillDetrimental(LifetimePosition start) const;

  // Can this live range be spilled at this position.
  bool CanBeSpilled(LifetimePosition pos) const;

  // Splitting primitive used by splitting members.
  // Performs the split, but does not link the resulting ranges.
  // The given position must follow the start of the range.
  // All uses following the given position will be moved from this
  // live range to the result live range.
  // The current range will terminate at position, while result will start
  // from position.
  enum HintConnectionOption : bool {
    DoNotConnectHints = false,
    ConnectHints = true
  };
  UsePosition* DetachAt(LifetimePosition position, LiveRange* result,
                        Zone* zone, HintConnectionOption connect_hints);

  // Detaches at position, and then links the resulting ranges. Returns the
  // child, which starts at position.
  LiveRange* SplitAt(LifetimePosition position, Zone* zone);

  // Returns nullptr when no register is hinted, otherwise sets register_index.
  // Uses {current_hint_position_} as a cache, and tries to update it.
  UsePosition* FirstHintPosition(int* register_index);
  UsePosition* FirstHintPosition() {
    int register_index;
    return FirstHintPosition(&register_index);
  }

  UsePosition* current_hint_position() const {
    return current_hint_position_;
  }

  LifetimePosition Start() const {
    DCHECK(!IsEmpty());
    return first_interval()->start();
  }

  LifetimePosition End() const {
    DCHECK(!IsEmpty());
    return last_interval_->end();
  }

  bool ShouldBeAllocatedBefore(const LiveRange* other) const;
  bool CanCover(LifetimePosition position) const;
  bool Covers(LifetimePosition position) const;
  LifetimePosition NextStartAfter(LifetimePosition position);
  LifetimePosition NextEndAfter(LifetimePosition position) const;
  LifetimePosition FirstIntersection(LiveRange* other) const;
  LifetimePosition NextStart() const { return next_start_; }

  void VerifyChildStructure() const {
    VerifyIntervals();
    VerifyPositions();
  }

  void ConvertUsesToOperand(const InstructionOperand& op,
                            const InstructionOperand& spill_op);
  void SetUseHints(int register_index);
  void UnsetUseHints() { SetUseHints(kUnassignedRegister); }
  void ResetCurrentHintPosition() { current_hint_position_ = first_pos_; }

  void Print(const RegisterConfiguration* config, bool with_children) const;
  void Print(bool with_children) const;

  void set_bundle(LiveRangeBundle* bundle) { bundle_ = bundle; }
  LiveRangeBundle* get_bundle() const { return bundle_; }
  bool RegisterFromBundle(int* hint) const;
  void UpdateBundleRegister(int reg) const;

 private:
  friend class TopLevelLiveRange;
  friend Zone;

  explicit LiveRange(int relative_id, MachineRepresentation rep,
                     TopLevelLiveRange* top_level);

  void UpdateParentForAllChildren(TopLevelLiveRange* new_top_level);

  void set_spilled(bool value) { bits_ = SpilledField::update(bits_, value); }

  UseInterval* FirstSearchIntervalForPosition(LifetimePosition position) const;
  void AdvanceLastProcessedMarker(UseInterval* to_start_of,
                                  LifetimePosition but_not_past) const;

  void VerifyPositions() const;
  void VerifyIntervals() const;

  using SpilledField = base::BitField<bool, 0, 1>;
  // Bits [1,7[ are used by TopLevelLiveRange.
  using AssignedRegisterField = base::BitField<int32_t, 7, 6>;
  using RepresentationField = base::BitField<MachineRepresentation, 13, 8>;
  using RecombineField = base::BitField<bool, 21, 1>;
  using ControlFlowRegisterHint = base::BitField<uint8_t, 22, 6>;
  // Bits 28-31 are used by TopLevelLiveRange.

  // Unique among children of the same virtual register.
  int relative_id_;
  uint32_t bits_;
  UseInterval* last_interval_;
  UseInterval* first_interval_;
  UsePosition* first_pos_;
  TopLevelLiveRange* top_level_;
  LiveRange* next_;
  // This is used as a cache, it doesn't affect correctness.
  mutable UseInterval* current_interval_;
  // This is used as a cache, it doesn't affect correctness.
  mutable UsePosition* last_processed_use_;
  // This is used as a cache in BuildLiveRanges and during register allocation.
  UsePosition* current_hint_position_;
  LiveRangeBundle* bundle_ = nullptr;
  // Next interval start, relative to the current linear scan position.
  LifetimePosition next_start_;
};

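// Usage sketch (illustrative; `range` is assumed to be a non-empty
// LiveRange* and `zone` a Zone*): splitting at the START of instruction 6
// yields a linked child range owning the uses from that position onwards.
//
//   LifetimePosition split =
//       LifetimePosition::InstructionFromInstructionIndex(6);
//   LiveRange* child = range->SplitAt(split, zone);
//   // `range` now ends at split, and child->Start() == split.
//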
struct LiveRangeOrdering {
  bool operator()(const LiveRange* left, const LiveRange* right) const {
    return left->Start() < right->Start();
  }
};
class LiveRangeBundle : public ZoneObject {
 public:
  void MergeSpillRanges();

  int id() { return id_; }

  int reg() { return reg_; }

  void set_reg(int reg) {
    DCHECK_EQ(reg_, kUnassignedRegister);
    reg_ = reg;
  }

 private:
  friend class BundleBuilder;
  friend Zone;

  // Representation of the non-empty interval [start,end[.
  class Range {
   public:
    Range(int s, int e) : start(s), end(e) {}
    Range(LifetimePosition s, LifetimePosition e)
        : start(s.value()), end(e.value()) {}
    int start;
    int end;
  };

  struct RangeOrdering {
    bool operator()(const Range left, const Range right) const {
      return left.start < right.start;
    }
  };
  bool UsesOverlap(UseInterval* interval) {
    auto use = uses_.begin();
    while (interval != nullptr && use != uses_.end()) {
      if (use->end <= interval->start().value()) {
        ++use;
      } else if (interval->end().value() <= use->start) {
        interval = interval->next();
      } else {
        return true;
      }
    }
    return false;
  }
  void InsertUses(UseInterval* interval) {
    while (interval != nullptr) {
      auto done = uses_.insert({interval->start(), interval->end()});
      USE(done);
      DCHECK_EQ(done.second, 1);
      interval = interval->next();
    }
  }
  explicit LiveRangeBundle(Zone* zone, int id)
      : ranges_(zone), uses_(zone), id_(id) {}

  bool TryAddRange(LiveRange* range);
  bool TryMerge(LiveRangeBundle* other, bool trace_alloc);

  ZoneSet<LiveRange*, LiveRangeOrdering> ranges_;
  ZoneSet<Range, RangeOrdering> uses_;
  int id_;
  int reg_ = kUnassignedRegister;
};

class V8_EXPORT_PRIVATE TopLevelLiveRange final : public LiveRange {
 public:
  explicit TopLevelLiveRange(int vreg, MachineRepresentation rep);
  TopLevelLiveRange(const TopLevelLiveRange&) = delete;
  TopLevelLiveRange& operator=(const TopLevelLiveRange&) = delete;

  int spill_start_index() const { return spill_start_index_; }

  bool IsFixed() const { return vreg_ < 0; }

  bool IsDeferredFixed() const { return DeferredFixedField::decode(bits_); }
  void set_deferred_fixed() { bits_ = DeferredFixedField::update(bits_, true); }
  bool is_phi() const { return IsPhiField::decode(bits_); }
  void set_is_phi(bool value) { bits_ = IsPhiField::update(bits_, value); }

  bool is_non_loop_phi() const { return IsNonLoopPhiField::decode(bits_); }
  bool is_loop_phi() const { return is_phi() && !is_non_loop_phi(); }
  void set_is_non_loop_phi(bool value) {
    bits_ = IsNonLoopPhiField::update(bits_, value);
  }
  bool SpillAtLoopHeaderNotBeneficial() const {
    return SpillAtLoopHeaderNotBeneficialField::decode(bits_);
  }
  void set_spilling_at_loop_header_not_beneficial() {
    bits_ = SpillAtLoopHeaderNotBeneficialField::update(bits_, true);
  }

  enum SlotUseKind { kNoSlotUse, kDeferredSlotUse, kGeneralSlotUse };

  bool has_slot_use() const {
    return slot_use_kind() > SlotUseKind::kNoSlotUse;
  }

  bool has_non_deferred_slot_use() const {
    return slot_use_kind() == SlotUseKind::kGeneralSlotUse;
  }

  void reset_slot_use() {
    bits_ = HasSlotUseField::update(bits_, SlotUseKind::kNoSlotUse);
  }
  void register_slot_use(SlotUseKind value) {
    bits_ = HasSlotUseField::update(bits_, std::max(slot_use_kind(), value));
  }
  SlotUseKind slot_use_kind() const { return HasSlotUseField::decode(bits_); }

  // Add a new interval or a new use position to this live range.
  void EnsureInterval(LifetimePosition start, LifetimePosition end, Zone* zone,
                      bool trace_alloc);
  void AddUseInterval(LifetimePosition start, LifetimePosition end, Zone* zone,
                      bool trace_alloc);
  void AddUsePosition(UsePosition* pos, bool trace_alloc);

  // Shorten the most recently added interval by setting a new start.
  void ShortenTo(LifetimePosition start, bool trace_alloc);

  // Spill range management.
  void SetSpillRange(SpillRange* spill_range);

  // Encodes whether a range is also available from a memory location:
  //   kNoSpillType: not available in a memory location.
  //   kSpillOperand: computed in a memory location at range start.
  //   kSpillRange: copied (spilled) to a memory location at the definition,
  //                or at the beginning of some later blocks if
  //                LateSpillingSelected() is true.
  //   kDeferredSpillRange: copied (spilled) to a memory location at entry
  //                        to deferred blocks that have a use from memory.
  //
  // Ranges either start out at kSpillOperand, which is also their final
  // state, or kNoSpillType. When spilled only in deferred code, a range
  // ends up with kDeferredSpillRange, while when spilled in regular code,
  // a range will be tagged as kSpillRange.
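  //
  // Example transitions (illustrative): a constant or a stack parameter is
  // defined in memory, so it starts and ends at kSpillOperand; a virtual
  // register spilled in regular code goes kNoSpillType -> kSpillRange, and
  // one spilled only inside deferred code goes kNoSpillType ->
  // kDeferredSpillRange.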
  enum class SpillType {
    kNoSpillType,
    kSpillOperand,
    kSpillRange,
    kDeferredSpillRange
  };
  void set_spill_type(SpillType value) {
    bits_ = SpillTypeField::update(bits_, value);
  }
  SpillType spill_type() const { return SpillTypeField::decode(bits_); }
  InstructionOperand* GetSpillOperand() const {
    DCHECK_EQ(SpillType::kSpillOperand, spill_type());
    return spill_operand_;
  }

  SpillRange* GetAllocatedSpillRange() const {
    DCHECK_NE(SpillType::kSpillOperand, spill_type());
    return spill_range_;
  }

  SpillRange* GetSpillRange() const {
    DCHECK_GE(spill_type(), SpillType::kSpillRange);
    return spill_range_;
  }
  bool HasNoSpillType() const {
    return spill_type() == SpillType::kNoSpillType;
  }
  bool HasSpillOperand() const {
    return spill_type() == SpillType::kSpillOperand;
  }
  bool HasSpillRange() const { return spill_type() >= SpillType::kSpillRange; }
  bool HasGeneralSpillRange() const {
    return spill_type() == SpillType::kSpillRange;
  }
  AllocatedOperand GetSpillRangeOperand() const;

  void RecordSpillLocation(Zone* zone, int gap_index,
                           InstructionOperand* operand);
  void SetSpillOperand(InstructionOperand* operand);
  void SetSpillStartIndex(int start) {
    spill_start_index_ = std::min(start, spill_start_index_);
  }

  // Omits any moves from spill_move_insertion_locations_ that can be skipped.
  void FilterSpillMoves(TopTierRegisterAllocationData* data,
                        const InstructionOperand& operand);

  // Writes all moves from spill_move_insertion_locations_ to the schedule.
  void CommitSpillMoves(TopTierRegisterAllocationData* data,
                        const InstructionOperand& operand);

  // If all the children of this range are spilled in deferred blocks, and if
  // for any non-spilled child with a use position requiring a slot, that range
  // is contained in a deferred block, mark the range as
  // IsSpilledOnlyInDeferredBlocks, so that we avoid spilling at definition,
  // and instead let the LiveRangeConnector perform the spills within the
  // deferred blocks. If so, we insert spills here for non-spilled ranges
  // with slot use positions.
  void TreatAsSpilledInDeferredBlock(Zone* zone, int total_block_count) {
    spill_start_index_ = -1;
    spilled_in_deferred_blocks_ = true;
    spill_move_insertion_locations_ = nullptr;
    list_of_blocks_requiring_spill_operands_ =
        zone->New<BitVector>(total_block_count, zone);
  }

  // Updates internal data structures to reflect that this range is not
  // spilled at definition but instead spilled in some blocks only.
  void TransitionRangeToDeferredSpill(Zone* zone, int total_block_count) {
    spill_start_index_ = -1;
    spill_move_insertion_locations_ = nullptr;
    list_of_blocks_requiring_spill_operands_ =
        zone->New<BitVector>(total_block_count, zone);
  }

  // Promotes this range to spill at definition if it was marked for spilling
  // in deferred blocks before.
  void TransitionRangeToSpillAtDefinition() {
    DCHECK_NOT_NULL(spill_move_insertion_locations_);
    if (spill_type() == SpillType::kDeferredSpillRange) {
      set_spill_type(SpillType::kSpillRange);
    }
  }

  bool MayRequireSpillRange() const {
    return !HasSpillOperand() && spill_range_ == nullptr;
  }
  void UpdateSpillRangePostMerge(TopLevelLiveRange* merged);
  int vreg() const { return vreg_; }

  void Verify() const;
  void VerifyChildrenInOrder() const;

  // Returns the LiveRange covering the given position, or nullptr if no such
  // range exists. Uses a linear search through child ranges. The range at the
  // previously requested position is cached, so this function will be very
  // fast if you call it with a non-decreasing sequence of positions.
  LiveRange* GetChildCovers(LifetimePosition pos);

  int GetNextChildId() { return ++last_child_id_; }

  int GetMaxChildCount() const { return last_child_id_ + 1; }

  bool IsSpilledOnlyInDeferredBlocks(
      const TopTierRegisterAllocationData* data) const {
    return spill_type() == SpillType::kDeferredSpillRange;
  }

  struct SpillMoveInsertionList;

  SpillMoveInsertionList* GetSpillMoveInsertionLocations(
      const TopTierRegisterAllocationData* data) const {
    DCHECK(!IsSpilledOnlyInDeferredBlocks(data));
    return spill_move_insertion_locations_;
  }

  void MarkHasPreassignedSlot() { has_preassigned_slot_ = true; }
  bool has_preassigned_slot() const { return has_preassigned_slot_; }

  // Late spilling refers to spilling at places after the definition. These
  // spills are guaranteed to cover at least all of the sub-ranges where the
  // register allocator chose to evict the value from a register.
  void SetLateSpillingSelected(bool late_spilling_selected) {
    DCHECK(spill_type() == SpillType::kSpillRange);
    SpillRangeMode new_mode = late_spilling_selected
                                  ? SpillRangeMode::kSpillLater
                                  : SpillRangeMode::kSpillAtDefinition;
    // A single TopLevelLiveRange should never be used in both modes.
    DCHECK(SpillRangeModeField::decode(bits_) == SpillRangeMode::kNotSet ||
           SpillRangeModeField::decode(bits_) == new_mode);
    bits_ = SpillRangeModeField::update(bits_, new_mode);
  }
  bool LateSpillingSelected() const {
    // Nobody should be reading this value until it's been decided.
    DCHECK_IMPLIES(HasGeneralSpillRange(), SpillRangeModeField::decode(bits_) !=
                                               SpillRangeMode::kNotSet);
    return SpillRangeModeField::decode(bits_) == SpillRangeMode::kSpillLater;
  }

  void AddBlockRequiringSpillOperand(
      RpoNumber block_id, const TopTierRegisterAllocationData* data) {
    DCHECK(IsSpilledOnlyInDeferredBlocks(data));
    GetListOfBlocksRequiringSpillOperands(data)->Add(block_id.ToInt());
  }

  BitVector* GetListOfBlocksRequiringSpillOperands(
      const TopTierRegisterAllocationData* data) const {
    DCHECK(IsSpilledOnlyInDeferredBlocks(data));
    return list_of_blocks_requiring_spill_operands_;
  }

 private:
  friend class LiveRange;

  // If spill type is kSpillRange, then this value indicates whether we've
  // chosen to spill at the definition or at some later points.
  enum class SpillRangeMode : uint8_t {
    kNotSet,
    kSpillAtDefinition,
    kSpillLater,
  };

  using HasSlotUseField = base::BitField<SlotUseKind, 1, 2>;
  using IsPhiField = base::BitField<bool, 3, 1>;
  using IsNonLoopPhiField = base::BitField<bool, 4, 1>;
  using SpillTypeField = base::BitField<SpillType, 5, 2>;
  using DeferredFixedField = base::BitField<bool, 28, 1>;
  using SpillAtLoopHeaderNotBeneficialField = base::BitField<bool, 29, 1>;
  using SpillRangeModeField = base::BitField<SpillRangeMode, 30, 2>;

  int vreg_;
  int last_child_id_;
  union {
    // Correct value determined by spill_type()
    InstructionOperand* spill_operand_;
    SpillRange* spill_range_;
  };

  union {
    SpillMoveInsertionList* spill_move_insertion_locations_;
    BitVector* list_of_blocks_requiring_spill_operands_;
  };

  // TODO(mtrofin): generalize spilling after definition, currently specialized
  // just for spill in a single deferred block.
  bool spilled_in_deferred_blocks_;
  bool has_preassigned_slot_;

  int spill_start_index_;
  UsePosition* last_pos_;
  LiveRange* last_child_covers_;
};

struct PrintableLiveRange {
  const RegisterConfiguration* register_configuration_;
  const LiveRange* range_;
};

std::ostream& operator<<(std::ostream& os,
                         const PrintableLiveRange& printable_range);

class SpillRange final : public ZoneObject {
 public:
  static const int kUnassignedSlot = -1;
  SpillRange(TopLevelLiveRange* range, Zone* zone);
  SpillRange(const SpillRange&) = delete;
  SpillRange& operator=(const SpillRange&) = delete;

  UseInterval* interval() const { return use_interval_; }

  bool IsEmpty() const { return live_ranges_.empty(); }
  bool TryMerge(SpillRange* other);
  bool HasSlot() const { return assigned_slot_ != kUnassignedSlot; }

  void set_assigned_slot(int index) {
    DCHECK_EQ(kUnassignedSlot, assigned_slot_);
    assigned_slot_ = index;
  }
  int assigned_slot() {
    DCHECK_NE(kUnassignedSlot, assigned_slot_);
    return assigned_slot_;
  }
  const ZoneVector<TopLevelLiveRange*>& live_ranges() const {
    return live_ranges_;
  }
  ZoneVector<TopLevelLiveRange*>& live_ranges() { return live_ranges_; }
  // Spill slots can be 4, 8, or 16 bytes wide.
  int byte_width() const { return byte_width_; }
  void Print() const;

 private:
  LifetimePosition End() const { return end_position_; }
  bool IsIntersectingWith(SpillRange* other) const;
  // Merge intervals, making sure the use intervals are sorted
  void MergeDisjointIntervals(UseInterval* other);

  ZoneVector<TopLevelLiveRange*> live_ranges_;
  UseInterval* use_interval_;
  LifetimePosition end_position_;
  int assigned_slot_;
  int byte_width_;
};

class LiveRangeBound {
 public:
  explicit LiveRangeBound(LiveRange* range, bool skip)
      : range_(range), start_(range->Start()), end_(range->End()), skip_(skip) {
    DCHECK(!range->IsEmpty());
  }
  LiveRangeBound(const LiveRangeBound&) = delete;
  LiveRangeBound& operator=(const LiveRangeBound&) = delete;

  bool CanCover(LifetimePosition position) {
    return start_ <= position && position < end_;
  }

  LiveRange* const range_;
  const LifetimePosition start_;
  const LifetimePosition end_;
  const bool skip_;
};

struct FindResult {
  LiveRange* cur_cover_;
  LiveRange* pred_cover_;
};

class LiveRangeBoundArray {
 public:
  LiveRangeBoundArray() : length_(0), start_(nullptr) {}
  LiveRangeBoundArray(const LiveRangeBoundArray&) = delete;
  LiveRangeBoundArray& operator=(const LiveRangeBoundArray&) = delete;

  bool ShouldInitialize() { return start_ == nullptr; }
  void Initialize(Zone* zone, TopLevelLiveRange* range);
  LiveRangeBound* Find(const LifetimePosition position) const;
  LiveRangeBound* FindPred(const InstructionBlock* pred);
  LiveRangeBound* FindSucc(const InstructionBlock* succ);
  bool FindConnectableSubranges(const InstructionBlock* block,
                                const InstructionBlock* pred,
                                FindResult* result) const;

 private:
  size_t length_;
  LiveRangeBound* start_;
};

class LiveRangeFinder {
 public:
  explicit LiveRangeFinder(const TopTierRegisterAllocationData* data,
                           Zone* zone);
  LiveRangeFinder(const LiveRangeFinder&) = delete;
  LiveRangeFinder& operator=(const LiveRangeFinder&) = delete;

  LiveRangeBoundArray* ArrayFor(int operand_index);

 private:
  const TopTierRegisterAllocationData* const data_;
  const int bounds_length_;
  LiveRangeBoundArray* const bounds_;
  Zone* const zone_;
};

class ConstraintBuilder final : public ZoneObject {
 public:
  explicit ConstraintBuilder(TopTierRegisterAllocationData* data);
  ConstraintBuilder(const ConstraintBuilder&) = delete;
  ConstraintBuilder& operator=(const ConstraintBuilder&) = delete;

  // Phase 1: insert moves to account for fixed register operands.
  void MeetRegisterConstraints();

  // Phase 2: deconstruct SSA by inserting moves in successors and the headers
  // of blocks containing phis.
  void ResolvePhis();

 private:
  TopTierRegisterAllocationData* data() const { return data_; }
  InstructionSequence* code() const { return data()->code(); }
  Zone* allocation_zone() const { return data()->allocation_zone(); }

  InstructionOperand* AllocateFixed(UnallocatedOperand* operand, int pos,
                                    bool is_tagged, bool is_input);
  void MeetRegisterConstraints(const InstructionBlock* block);
  void MeetConstraintsBefore(int index);
  void MeetConstraintsAfter(int index);
  void MeetRegisterConstraintsForLastInstructionInBlock(
      const InstructionBlock* block);
  void ResolvePhis(const InstructionBlock* block);

  TopTierRegisterAllocationData* const data_;
};

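// Taken together, the allocation phases declared in this file run in order
// (summary of the phase comments on the individual classes):
//   Phase 1: ConstraintBuilder::MeetRegisterConstraints
//   Phase 2: ConstraintBuilder::ResolvePhis
//   Phase 3: LiveRangeBuilder::BuildLiveRanges
//   Phase 4: LinearScanAllocator::AllocateRegisters
//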
class LiveRangeBuilder final : public ZoneObject {
 public:
  explicit LiveRangeBuilder(TopTierRegisterAllocationData* data,
                            Zone* local_zone);
  LiveRangeBuilder(const LiveRangeBuilder&) = delete;
  LiveRangeBuilder& operator=(const LiveRangeBuilder&) = delete;

  // Phase 3: compute liveness of all virtual registers.
  void BuildLiveRanges();
  static BitVector* ComputeLiveOut(const InstructionBlock* block,
                                   TopTierRegisterAllocationData* data);

 private:
  using SpillMode = TopTierRegisterAllocationData::SpillMode;
  static constexpr int kNumberOfFixedRangesPerRegister =
      TopTierRegisterAllocationData::kNumberOfFixedRangesPerRegister;

  TopTierRegisterAllocationData* data() const { return data_; }
  InstructionSequence* code() const { return data()->code(); }
  Zone* allocation_zone() const { return data()->allocation_zone(); }
  Zone* code_zone() const { return code()->zone(); }
  const RegisterConfiguration* config() const { return data()->config(); }
  ZoneVector<BitVector*>& live_in_sets() const {
    return data()->live_in_sets();
  }

  // Verification.
  void Verify() const;
  bool IntervalStartsAtBlockBoundary(const UseInterval* interval) const;
  bool IntervalPredecessorsCoveredByRange(const UseInterval* interval,
                                          const TopLevelLiveRange* range) const;
  bool NextIntervalStartsInDifferentBlocks(const UseInterval* interval) const;

  // Liveness analysis support.
  void AddInitialIntervals(const InstructionBlock* block, BitVector* live_out);
  void ProcessInstructions(const InstructionBlock* block, BitVector* live);
  void ProcessPhis(const InstructionBlock* block, BitVector* live);
  void ProcessLoopHeader(const InstructionBlock* block, BitVector* live);

  static int FixedLiveRangeID(int index) { return -index - 1; }
  int FixedFPLiveRangeID(int index, MachineRepresentation rep);
  TopLevelLiveRange* FixedLiveRangeFor(int index, SpillMode spill_mode);
  TopLevelLiveRange* FixedFPLiveRangeFor(int index, MachineRepresentation rep,
                                         SpillMode spill_mode);

  void MapPhiHint(InstructionOperand* operand, UsePosition* use_pos);
  void ResolvePhiHint(InstructionOperand* operand, UsePosition* use_pos);

  UsePosition* NewUsePosition(LifetimePosition pos, InstructionOperand* operand,
                              void* hint, UsePositionHintType hint_type);
  UsePosition* NewUsePosition(LifetimePosition pos) {
    return NewUsePosition(pos, nullptr, nullptr, UsePositionHintType::kNone);
  }
  TopLevelLiveRange* LiveRangeFor(InstructionOperand* operand,
                                  SpillMode spill_mode);
  // Helper methods for building intervals.
  UsePosition* Define(LifetimePosition position, InstructionOperand* operand,
                      void* hint, UsePositionHintType hint_type,
                      SpillMode spill_mode);
  void Define(LifetimePosition position, InstructionOperand* operand,
              SpillMode spill_mode) {
    Define(position, operand, nullptr, UsePositionHintType::kNone, spill_mode);
  }
  UsePosition* Use(LifetimePosition block_start, LifetimePosition position,
                   InstructionOperand* operand, void* hint,
                   UsePositionHintType hint_type, SpillMode spill_mode);
  void Use(LifetimePosition block_start, LifetimePosition position,
           InstructionOperand* operand, SpillMode spill_mode) {
    Use(block_start, position, operand, nullptr, UsePositionHintType::kNone,
        spill_mode);
  }
  SpillMode SpillModeForBlock(const InstructionBlock* block) const {
    return block->IsDeferred() ? SpillMode::kSpillDeferred
                               : SpillMode::kSpillAtDefinition;
  }
  TopTierRegisterAllocationData* const data_;
  ZoneMap<InstructionOperand*, UsePosition*> phi_hints_;
};

class BundleBuilder final : public ZoneObject {
 public:
  explicit BundleBuilder(TopTierRegisterAllocationData* data) : data_(data) {}

  void BuildBundles();

 private:
  TopTierRegisterAllocationData* data() const { return data_; }
  InstructionSequence* code() const { return data_->code(); }
  TopTierRegisterAllocationData* data_;
  int next_bundle_id_ = 0;
};

1289 class RegisterAllocator : public ZoneObject {
1290  public:
1291   RegisterAllocator(TopTierRegisterAllocationData* data, RegisterKind kind);
1292   RegisterAllocator(const RegisterAllocator&) = delete;
1293   RegisterAllocator& operator=(const RegisterAllocator&) = delete;
1294 
1295  protected:
1296   using SpillMode = TopTierRegisterAllocationData::SpillMode;
data()1297   TopTierRegisterAllocationData* data() const { return data_; }
code()1298   InstructionSequence* code() const { return data()->code(); }
mode()1299   RegisterKind mode() const { return mode_; }
num_registers()1300   int num_registers() const { return num_registers_; }
num_allocatable_registers()1301   int num_allocatable_registers() const { return num_allocatable_registers_; }
allocatable_register_codes()1302   const int* allocatable_register_codes() const {
1303     return allocatable_register_codes_;
1304   }
1305   // Returns true iff. we must check float register aliasing.
check_fp_aliasing()1306   bool check_fp_aliasing() const { return check_fp_aliasing_; }
1307 
1308   // TODO(mtrofin): explain why splitting in gap START is always OK.
1309   LifetimePosition GetSplitPositionForInstruction(const LiveRange* range,
1310                                                   int instruction_index);
1311 
allocation_zone()1312   Zone* allocation_zone() const { return data()->allocation_zone(); }
1313 
1314   // Find the optimal split for ranges defined by a memory operand, e.g.
1315   // constants or function parameters passed on the stack.
1316   void SplitAndSpillRangesDefinedByMemoryOperand();
1317 
  // Split the given range at the given position.
  // If the range starts at or after the given position, the original range
  // is returned. Otherwise, returns the live range that starts at pos and
  // contains all uses from the original range that follow pos. Uses at pos
  // will still be owned by the original range after splitting.
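  //
  // A purely illustrative sketch (positions are hypothetical): for a range
  // covering instructions 10..20 with uses at 12 and 16,
  //
  //   LiveRange* tail = SplitRangeAt(
  //       range, LifetimePosition::GapFromInstructionIndex(14));
  //
  // would leave |range| ending at the gap of instruction 14 and |tail|
  // starting there, with the use at 16 moving to |tail|.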
  LiveRange* SplitRangeAt(LiveRange* range, LifetimePosition pos);

  bool CanProcessRange(LiveRange* range) const {
    return range != nullptr && !range->IsEmpty() && range->kind() == mode();
  }

  // Split the given range at a position within the interval [start, end].
  LiveRange* SplitBetween(LiveRange* range, LifetimePosition start,
                          LifetimePosition end);

  // Find a lifetime position in the interval [start, end] which
  // is optimal for splitting: it is either the header of the outermost
  // loop covered by this interval or the latest possible position.
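  //
  // Illustrative example (hypothetical layout): if [start, end] stretches
  // into a loop whose header starts at instruction 8, the position at the
  // start of that header is preferred, so the split does not land in the
  // middle of the loop body.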
  LifetimePosition FindOptimalSplitPos(LifetimePosition start,
                                       LifetimePosition end);

  void Spill(LiveRange* range, SpillMode spill_mode);

  // If we are trying to spill a range inside a loop, try to hoist the spill
  // position out to the point just before the loop.
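  //
  // Sketch (hypothetical positions): asked to spill at a position inside a
  // loop whose header is instruction 10, the allocator may instead answer
  // the position just before instruction 10, so the store is executed once
  // on entry rather than on every iteration.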
  LifetimePosition FindOptimalSpillingPos(LiveRange* range,
                                          LifetimePosition pos,
                                          SpillMode spill_mode,
                                          LiveRange** begin_spill_out);

  const ZoneVector<TopLevelLiveRange*>& GetFixedRegisters() const;
  const char* RegisterName(int allocation_index) const;

 private:
  TopTierRegisterAllocationData* const data_;
  const RegisterKind mode_;
  const int num_registers_;
  int num_allocatable_registers_;
  const int* allocatable_register_codes_;
  bool check_fp_aliasing_;

  bool no_combining_;
};

class LinearScanAllocator final : public RegisterAllocator {
 public:
  LinearScanAllocator(TopTierRegisterAllocationData* data, RegisterKind kind,
                      Zone* local_zone);
  LinearScanAllocator(const LinearScanAllocator&) = delete;
  LinearScanAllocator& operator=(const LinearScanAllocator&) = delete;

  // Phase 4: compute register assignments.
  void AllocateRegisters();

 private:
  struct RangeWithRegister {
    TopLevelLiveRange* range;
    int expected_register;
    struct Hash {
      size_t operator()(const RangeWithRegister item) const {
        return item.range->vreg();
      }
    };
    struct Equals {
      bool operator()(const RangeWithRegister one,
                      const RangeWithRegister two) const {
        return one.range == two.range;
      }
    };

    explicit RangeWithRegister(LiveRange* a_range)
        : range(a_range->TopLevel()),
          expected_register(a_range->assigned_register()) {}
    RangeWithRegister(TopLevelLiveRange* toplevel, int reg)
        : range(toplevel), expected_register(reg) {}
  };

  using RangeWithRegisterSet =
      ZoneUnorderedSet<RangeWithRegister, RangeWithRegister::Hash,
                       RangeWithRegister::Equals>;

  void MaybeSpillPreviousRanges(LiveRange* begin_range,
                                LifetimePosition begin_pos,
                                LiveRange* end_range);
  void MaybeUndoPreviousSplit(LiveRange* range);
  void SpillNotLiveRanges(RangeWithRegisterSet* to_be_live,
                          LifetimePosition position, SpillMode spill_mode);
  LiveRange* AssignRegisterOnReload(LiveRange* range, int reg);
  void ReloadLiveRanges(RangeWithRegisterSet const& to_be_live,
                        LifetimePosition position);

  void UpdateDeferredFixedRanges(SpillMode spill_mode, InstructionBlock* block);
  bool BlockIsDeferredOrImmediatePredecessorIsNotDeferred(
      const InstructionBlock* block);
  bool HasNonDeferredPredecessor(InstructionBlock* block);

  struct UnhandledLiveRangeOrdering {
    bool operator()(const LiveRange* a, const LiveRange* b) const {
      return a->ShouldBeAllocatedBefore(b);
    }
  };

  struct InactiveLiveRangeOrdering {
    bool operator()(const LiveRange* a, const LiveRange* b) const {
      return a->NextStart() < b->NextStart();
    }
  };

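  // Note: the multisets below act as ordered work queues. By construction of
  // the comparators above, the first element of the unhandled queue is the
  // range that ShouldBeAllocatedBefore() all others, and each inactive queue
  // stays sorted by NextStart().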
  using UnhandledLiveRangeQueue =
      ZoneMultiset<LiveRange*, UnhandledLiveRangeOrdering>;
  using InactiveLiveRangeQueue =
      ZoneMultiset<LiveRange*, InactiveLiveRangeOrdering>;
  UnhandledLiveRangeQueue& unhandled_live_ranges() {
    return unhandled_live_ranges_;
  }
  ZoneVector<LiveRange*>& active_live_ranges() { return active_live_ranges_; }
  InactiveLiveRangeQueue& inactive_live_ranges(int reg) {
    return inactive_live_ranges_[reg];
  }

  void SetLiveRangeAssignedRegister(LiveRange* range, int reg);

  // Helper methods for updating the live range lists.
  void AddToActive(LiveRange* range);
  void AddToInactive(LiveRange* range);
  void AddToUnhandled(LiveRange* range);
  ZoneVector<LiveRange*>::iterator ActiveToHandled(
      ZoneVector<LiveRange*>::iterator it);
  ZoneVector<LiveRange*>::iterator ActiveToInactive(
      ZoneVector<LiveRange*>::iterator it, LifetimePosition position);
  InactiveLiveRangeQueue::iterator InactiveToHandled(
      InactiveLiveRangeQueue::iterator it);
  InactiveLiveRangeQueue::iterator InactiveToActive(
      InactiveLiveRangeQueue::iterator it, LifetimePosition position);

  void ForwardStateTo(LifetimePosition position);

  int LastDeferredInstructionIndex(InstructionBlock* start);

  // Helper methods for choosing state after control flow events.

  bool ConsiderBlockForControlFlow(InstructionBlock* current_block,
                                   RpoNumber predecessor);
  RpoNumber ChooseOneOfTwoPredecessorStates(InstructionBlock* current_block,
                                            LifetimePosition boundary);
  bool CheckConflict(MachineRepresentation rep, int reg,
                     RangeWithRegisterSet* to_be_live);
  void ComputeStateFromManyPredecessors(InstructionBlock* current_block,
                                        RangeWithRegisterSet* to_be_live);

  // Helper methods for allocating registers.
  bool TryReuseSpillForPhi(TopLevelLiveRange* range);
  int PickRegisterThatIsAvailableLongest(
      LiveRange* current, int hint_reg,
      const Vector<LifetimePosition>& free_until_pos);
  bool TryAllocateFreeReg(LiveRange* range,
                          const Vector<LifetimePosition>& free_until_pos);
  bool TryAllocatePreferredReg(LiveRange* range,
                               const Vector<LifetimePosition>& free_until_pos);
  void GetFPRegisterSet(MachineRepresentation rep, int* num_regs,
                        int* num_codes, const int** codes) const;
  void FindFreeRegistersForRange(LiveRange* range,
                                 Vector<LifetimePosition> free_until_pos);
  void ProcessCurrentRange(LiveRange* current, SpillMode spill_mode);
  void AllocateBlockedReg(LiveRange* range, SpillMode spill_mode);

  // Spill the given live range after position pos.
  void SpillAfter(LiveRange* range, LifetimePosition pos, SpillMode spill_mode);

  // Spill the given live range after position [start] and up to position
  // [end].
  void SpillBetween(LiveRange* range, LifetimePosition start,
                    LifetimePosition end, SpillMode spill_mode);

  // Spill the given live range after position [start] and up to position
  // [end]. The range is guaranteed to be spilled at least until position
  // [until].
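  //
  // Illustrative reading (hypothetical positions): with start = 10,
  // until = 14 and end = 20, the range is spilled from 10 onwards, must stay
  // spilled through 14, and a register may be found again for the part
  // between 14 and 20.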
  void SpillBetweenUntil(LiveRange* range, LifetimePosition start,
                         LifetimePosition until, LifetimePosition end,
                         SpillMode spill_mode);
  void SplitAndSpillIntersecting(LiveRange* range, SpillMode spill_mode);

  void PrintRangeRow(std::ostream& os, const TopLevelLiveRange* toplevel);

  void PrintRangeOverview(std::ostream& os);

  UnhandledLiveRangeQueue unhandled_live_ranges_;
  ZoneVector<LiveRange*> active_live_ranges_;
  ZoneVector<InactiveLiveRangeQueue> inactive_live_ranges_;

  // Approximation of the next position at which the set of active or
  // inactive ranges will change. Used to avoid scanning for updates when
  // none are present.
  LifetimePosition next_active_ranges_change_;
  LifetimePosition next_inactive_ranges_change_;

#ifdef DEBUG
  LifetimePosition allocation_finger_;
#endif
};

class OperandAssigner final : public ZoneObject {
 public:
  explicit OperandAssigner(TopTierRegisterAllocationData* data);
  OperandAssigner(const OperandAssigner&) = delete;
  OperandAssigner& operator=(const OperandAssigner&) = delete;

  // Phase 5: final decision on spilling mode.
  void DecideSpillingMode();

  // Phase 6: assign spill slots.
  void AssignSpillSlots();

  // Phase 7: commit assignment.
  void CommitAssignment();

 private:
  TopTierRegisterAllocationData* data() const { return data_; }

  TopTierRegisterAllocationData* const data_;
};

class ReferenceMapPopulator final : public ZoneObject {
 public:
  explicit ReferenceMapPopulator(TopTierRegisterAllocationData* data);
  ReferenceMapPopulator(const ReferenceMapPopulator&) = delete;
  ReferenceMapPopulator& operator=(const ReferenceMapPopulator&) = delete;

  // Phase 10: compute values for pointer maps.
  void PopulateReferenceMaps();

 private:
  TopTierRegisterAllocationData* data() const { return data_; }

  bool SafePointsAreInOrder() const;

  TopTierRegisterAllocationData* const data_;
};

class LiveRangeBoundArray;
// Insert moves of the form
//
//          Operand(child_(k+1)) = Operand(child_k)
//
// where child_k and child_(k+1) are consecutive children of a range (so
// child_k->next() == child_(k+1)), and Operand(...) refers to the
// assigned operand, be it a register or a slot.
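//
// For example (purely illustrative register and slot names): if child_k was
// assigned register r1 and child_(k+1) was assigned spill slot [sp+16], the
// connector inserts the gap move [sp+16] = r1 where the two children meet.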
class LiveRangeConnector final : public ZoneObject {
 public:
  explicit LiveRangeConnector(TopTierRegisterAllocationData* data);
  LiveRangeConnector(const LiveRangeConnector&) = delete;
  LiveRangeConnector& operator=(const LiveRangeConnector&) = delete;

  // Phase 8: reconnect split ranges with moves, when the control flow
  // between the ranges is trivial (no branches).
  void ConnectRanges(Zone* local_zone);

  // Phase 9: insert moves to connect ranges across basic blocks, when the
  // control flow between them cannot be trivially resolved, such as joining
  // branches. Also determines whether to spill at the definition or later, and
  // adds spill moves to the gaps in the schedule.
  void ResolveControlFlow(Zone* local_zone);

 private:
  TopTierRegisterAllocationData* data() const { return data_; }
  InstructionSequence* code() const { return data()->code(); }
  Zone* code_zone() const { return code()->zone(); }

  bool CanEagerlyResolveControlFlow(const InstructionBlock* block) const;

  int ResolveControlFlow(const InstructionBlock* block,
                         const InstructionOperand& cur_op,
                         const InstructionBlock* pred,
                         const InstructionOperand& pred_op);

  void CommitSpillsInDeferredBlocks(TopLevelLiveRange* range,
                                    LiveRangeBoundArray* array,
                                    Zone* temp_zone);

  TopTierRegisterAllocationData* const data_;
};

}  // namespace compiler
}  // namespace internal
}  // namespace v8

#endif  // V8_COMPILER_BACKEND_REGISTER_ALLOCATOR_H_