// Copyright 2014 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef V8_COMPILER_REGISTER_ALLOCATOR_H_
#define V8_COMPILER_REGISTER_ALLOCATOR_H_

#include "src/base/bits.h"
#include "src/base/compiler-specific.h"
#include "src/compiler/instruction.h"
#include "src/globals.h"
#include "src/ostreams.h"
#include "src/register-configuration.h"
#include "src/zone/zone-containers.h"

namespace v8 {
namespace internal {
namespace compiler {

enum RegisterKind { GENERAL_REGISTERS, FP_REGISTERS };

// This class represents a single point of an InstructionOperand's lifetime.
// For each instruction there are four lifetime positions:
//
//   [[START, END], [START, END]]
//
// where the first half position corresponds to
//
//   [GapPosition::START, GapPosition::END]
//
// and the second half position corresponds to
//
//   [Lifetime::USED_AT_START, Lifetime::USED_AT_END]
//
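// As a concrete example (with kStep == 4 and kHalfStep == 2, defined below):
// for instruction index 3, the gap START is position 12, the gap END is
// position 13, the instruction START is position 14, and the instruction
// END is position 15.
//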
class LifetimePosition final {
 public:
  // Return the lifetime position that corresponds to the beginning of
  // the gap with the given index.
  static LifetimePosition GapFromInstructionIndex(int index) {
    return LifetimePosition(index * kStep);
  }
  // Return the lifetime position that corresponds to the beginning of
  // the instruction with the given index.
  static LifetimePosition InstructionFromInstructionIndex(int index) {
    return LifetimePosition(index * kStep + kHalfStep);
  }

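  // Returns true if there is a gap position strictly between pos1 and pos2.
  // E.g. (using raw position values, see FromInt()): between the instruction
  // START at 6 and the one at 14 lie the gaps at 8 and 12, so the result is
  // true; between 6 and its END at 7 no gap position fits, so it is false.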
  static bool ExistsGapPositionBetween(LifetimePosition pos1,
                                       LifetimePosition pos2) {
    if (pos1 > pos2) std::swap(pos1, pos2);
    LifetimePosition next(pos1.value_ + 1);
    if (next.IsGapPosition()) return next < pos2;
    return next.NextFullStart() < pos2;
  }

  // Returns a numeric representation of this lifetime position.
  int value() const { return value_; }

  // Returns the index of the instruction to which this lifetime position
  // corresponds.
  int ToInstructionIndex() const {
    DCHECK(IsValid());
    return value_ / kStep;
  }

  // Returns true if this lifetime position corresponds to a START value.
  bool IsStart() const { return (value_ & (kHalfStep - 1)) == 0; }
  // Returns true if this lifetime position corresponds to an END value.
  bool IsEnd() const { return (value_ & (kHalfStep - 1)) == 1; }
  // Returns true if this lifetime position corresponds to a gap START value.
  bool IsFullStart() const { return (value_ & (kStep - 1)) == 0; }

  bool IsGapPosition() const { return (value_ & 0x2) == 0; }
  bool IsInstructionPosition() const { return !IsGapPosition(); }

  // Returns the lifetime position for the current START.
  LifetimePosition Start() const {
    DCHECK(IsValid());
    return LifetimePosition(value_ & ~(kHalfStep - 1));
  }

  // Returns the lifetime position for the current gap START.
  LifetimePosition FullStart() const {
    DCHECK(IsValid());
    return LifetimePosition(value_ & ~(kStep - 1));
  }

  // Returns the lifetime position for the current END.
  LifetimePosition End() const {
    DCHECK(IsValid());
    return LifetimePosition(Start().value_ + kHalfStep / 2);
  }

  // Returns the lifetime position for the beginning of the next START.
  LifetimePosition NextStart() const {
    DCHECK(IsValid());
    return LifetimePosition(Start().value_ + kHalfStep);
  }

  // Returns the lifetime position for the beginning of the next gap START.
  LifetimePosition NextFullStart() const {
    DCHECK(IsValid());
    return LifetimePosition(FullStart().value_ + kStep);
  }

  // Returns the lifetime position for the beginning of the previous START.
  LifetimePosition PrevStart() const {
    DCHECK(IsValid());
    DCHECK_LE(kHalfStep, value_);
    return LifetimePosition(Start().value_ - kHalfStep);
  }

  // Constructs the lifetime position which does not correspond to any
  // instruction.
  LifetimePosition() : value_(-1) {}

  // Returns true if this lifetime position corresponds to some
  // instruction.
  bool IsValid() const { return value_ != -1; }

  bool operator<(const LifetimePosition& that) const {
    return this->value_ < that.value_;
  }

  bool operator<=(const LifetimePosition& that) const {
    return this->value_ <= that.value_;
  }

  bool operator==(const LifetimePosition& that) const {
    return this->value_ == that.value_;
  }

  bool operator!=(const LifetimePosition& that) const {
    return this->value_ != that.value_;
  }

  bool operator>(const LifetimePosition& that) const {
    return this->value_ > that.value_;
  }

  bool operator>=(const LifetimePosition& that) const {
    return this->value_ >= that.value_;
  }

  void Print() const;

  static inline LifetimePosition Invalid() { return LifetimePosition(); }

  static inline LifetimePosition MaxPosition() {
    // We have to use this kind of getter instead of a static member due to
    // a crash bug in GDB.
    return LifetimePosition(kMaxInt);
  }

  static inline LifetimePosition FromInt(int value) {
    return LifetimePosition(value);
  }

 private:
  static const int kHalfStep = 2;
  static const int kStep = 2 * kHalfStep;

  static_assert(base::bits::IsPowerOfTwo(kHalfStep),
                "Code relies on kStep and kHalfStep being a power of two");

  explicit LifetimePosition(int value) : value_(value) {}

  int value_;
};
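
// A minimal usage sketch of the encoding above (values are illustrative,
// derived from kStep == 4):
//
//   LifetimePosition gap = LifetimePosition::GapFromInstructionIndex(3);
//   gap.value();                  // 12: gap START of instruction 3
//   gap.IsGapPosition();          // true
//   LifetimePosition use =
//       LifetimePosition::InstructionFromInstructionIndex(3);
//   use.value();                  // 14: instruction START
//   use.End().value();            // 15: instruction END
//   use.NextFullStart().value();  // 16: gap START of instruction 4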


std::ostream& operator<<(std::ostream& os, const LifetimePosition pos);


// Representation of the non-empty half-open interval [start, end[.
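// E.g. the interval [4, 6[ contains positions 4 and 5, but not 6.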
class UseInterval final : public ZoneObject {
 public:
  UseInterval(LifetimePosition start, LifetimePosition end)
      : start_(start), end_(end), next_(nullptr) {
    DCHECK(start < end);
  }

  LifetimePosition start() const { return start_; }
  void set_start(LifetimePosition start) { start_ = start; }
  LifetimePosition end() const { return end_; }
  void set_end(LifetimePosition end) { end_ = end; }
  UseInterval* next() const { return next_; }
  void set_next(UseInterval* next) { next_ = next; }

  // Split this interval at the given position without affecting the
  // live range that owns it. The interval must contain the position.
  UseInterval* SplitAt(LifetimePosition pos, Zone* zone);

  // If this interval intersects with the other, returns the smallest position
  // that belongs to both of them; otherwise returns an invalid position.
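  // For example, for intervals A = [4, 10[ and B = [8, 14[, both
  // A->Intersect(B) and B->Intersect(A) yield position 8, while the
  // disjoint intervals [4, 8[ and [8, 14[ yield Invalid().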
  LifetimePosition Intersect(const UseInterval* other) const {
    if (other->start() < start_) return other->Intersect(this);
    if (other->start() < end_) return other->start();
    return LifetimePosition::Invalid();
  }

  bool Contains(LifetimePosition point) const {
    return start_ <= point && point < end_;
  }

  // Returns the index of the first gap covered by this interval.
  int FirstGapIndex() const {
    int ret = start_.ToInstructionIndex();
    if (start_.IsInstructionPosition()) {
      ++ret;
    }
    return ret;
  }

  // Returns the index of the last gap covered by this interval.
  int LastGapIndex() const {
    int ret = end_.ToInstructionIndex();
    if (end_.IsGapPosition() && end_.IsStart()) {
      --ret;
    }
    return ret;
  }

 private:
  LifetimePosition start_;
  LifetimePosition end_;
  UseInterval* next_;

  DISALLOW_COPY_AND_ASSIGN(UseInterval);
};

enum class UsePositionType : uint8_t {
  kRegisterOrSlot,
  kRegisterOrSlotOrConstant,
  kRequiresRegister,
  kRequiresSlot
};

enum class UsePositionHintType : uint8_t {
  kNone,
  kOperand,
  kUsePos,
  kPhi,
  kUnresolved
};


static const int32_t kUnassignedRegister =
    RegisterConfiguration::kMaxGeneralRegisters;

static_assert(kUnassignedRegister <= RegisterConfiguration::kMaxFPRegisters,
              "kUnassignedRegister too small");

// Representation of a use position.
class V8_EXPORT_PRIVATE UsePosition final
    : public NON_EXPORTED_BASE(ZoneObject) {
 public:
  UsePosition(LifetimePosition pos, InstructionOperand* operand, void* hint,
              UsePositionHintType hint_type);

  InstructionOperand* operand() const { return operand_; }
  bool HasOperand() const { return operand_ != nullptr; }

  bool RegisterIsBeneficial() const {
    return RegisterBeneficialField::decode(flags_);
  }
  UsePositionType type() const { return TypeField::decode(flags_); }
  void set_type(UsePositionType type, bool register_beneficial);

  LifetimePosition pos() const { return pos_; }

  UsePosition* next() const { return next_; }
  void set_next(UsePosition* next) { next_ = next; }

  // For hinting only.
  void set_assigned_register(int register_code) {
    flags_ = AssignedRegisterField::update(flags_, register_code);
  }

  UsePositionHintType hint_type() const {
    return HintTypeField::decode(flags_);
  }
  bool HasHint() const;
  bool HintRegister(int* register_code) const;
  void SetHint(UsePosition* use_pos);
  void ResolveHint(UsePosition* use_pos);
  bool IsResolved() const {
    return hint_type() != UsePositionHintType::kUnresolved;
  }
  static UsePositionHintType HintTypeForOperand(const InstructionOperand& op);

 private:
  typedef BitField<UsePositionType, 0, 2> TypeField;
  typedef BitField<UsePositionHintType, 2, 3> HintTypeField;
  typedef BitField<bool, 5, 1> RegisterBeneficialField;
  typedef BitField<int32_t, 6, 6> AssignedRegisterField;

  InstructionOperand* const operand_;
  void* hint_;
  UsePosition* next_;
  LifetimePosition const pos_;
  uint32_t flags_;

  DISALLOW_COPY_AND_ASSIGN(UsePosition);
};


class SpillRange;
class RegisterAllocationData;
class TopLevelLiveRange;

// Representation of SSA values' live ranges as a collection of (continuous)
// intervals over the instruction ordering.
class V8_EXPORT_PRIVATE LiveRange : public NON_EXPORTED_BASE(ZoneObject) {
 public:
  UseInterval* first_interval() const { return first_interval_; }
  UsePosition* first_pos() const { return first_pos_; }
  TopLevelLiveRange* TopLevel() { return top_level_; }
  const TopLevelLiveRange* TopLevel() const { return top_level_; }

  bool IsTopLevel() const;

  LiveRange* next() const { return next_; }

  int relative_id() const { return relative_id_; }

  bool IsEmpty() const { return first_interval() == nullptr; }

  InstructionOperand GetAssignedOperand() const;

  MachineRepresentation representation() const {
    return RepresentationField::decode(bits_);
  }

  int assigned_register() const { return AssignedRegisterField::decode(bits_); }
  bool HasRegisterAssigned() const {
    return assigned_register() != kUnassignedRegister;
  }
  void set_assigned_register(int reg);
  void UnsetAssignedRegister();

  bool spilled() const { return SpilledField::decode(bits_); }
  void Spill();

  RegisterKind kind() const;

  // Returns the use position in this live range that follows both start
  // and the last processed use position.
  UsePosition* NextUsePosition(LifetimePosition start) const;

  // Returns the use position for which a register is required in this live
  // range and which follows both start and the last processed use position.
  UsePosition* NextRegisterPosition(LifetimePosition start) const;

  // Returns the first use position requiring a stack slot, or nullptr.
  UsePosition* NextSlotPosition(LifetimePosition start) const;

  // Returns the use position for which a register is beneficial in this live
  // range and which follows both start and the last processed use position.
  UsePosition* NextUsePositionRegisterIsBeneficial(
      LifetimePosition start) const;

  // Returns the lifetime position for which a register is beneficial in this
  // live range and which follows both start and the last processed use
  // position.
  LifetimePosition NextLifetimePositionRegisterIsBeneficial(
      const LifetimePosition& start) const;

  // Returns the use position for which a register is beneficial in this live
  // range and which precedes start.
  UsePosition* PreviousUsePositionRegisterIsBeneficial(
      LifetimePosition start) const;

  // Returns whether this live range can be spilled at the given position.
  bool CanBeSpilled(LifetimePosition pos) const;

  // Splitting primitive used by both splitting and splintering members.
  // Performs the split, but does not link the resulting ranges.
  // The given position must follow the start of the range.
  // All uses following the given position will be moved from this
  // live range to the result live range.
  // The current range will terminate at position, while result will start
  // from position.
  enum HintConnectionOption : bool {
    DoNotConnectHints = false,
    ConnectHints = true
  };
  UsePosition* DetachAt(LifetimePosition position, LiveRange* result,
                        Zone* zone, HintConnectionOption connect_hints);

  // Detaches at position, and then links the resulting ranges. Returns the
  // child, which starts at position.
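  // For example, if this range covers [2, 18[, splitting at position 10
  // leaves this range covering [2, 10[ and returns a child covering
  // [10, 18[ that becomes this range's next().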
  LiveRange* SplitAt(LifetimePosition position, Zone* zone);

  // Returns nullptr when no register is hinted, otherwise sets register_index.
  UsePosition* FirstHintPosition(int* register_index) const;
  UsePosition* FirstHintPosition() const {
    int register_index;
    return FirstHintPosition(&register_index);
  }

  UsePosition* current_hint_position() const {
    DCHECK(current_hint_position_ == FirstHintPosition());
    return current_hint_position_;
  }

  LifetimePosition Start() const {
    DCHECK(!IsEmpty());
    return first_interval()->start();
  }

  LifetimePosition End() const {
    DCHECK(!IsEmpty());
    return last_interval_->end();
  }

  bool ShouldBeAllocatedBefore(const LiveRange* other) const;
  bool CanCover(LifetimePosition position) const;
  bool Covers(LifetimePosition position) const;
  LifetimePosition FirstIntersection(LiveRange* other) const;

  void VerifyChildStructure() const {
    VerifyIntervals();
    VerifyPositions();
  }

  void ConvertUsesToOperand(const InstructionOperand& op,
                            const InstructionOperand& spill_op);
  void SetUseHints(int register_index);
  void UnsetUseHints() { SetUseHints(kUnassignedRegister); }

  void Print(const RegisterConfiguration* config, bool with_children) const;
  void Print(bool with_children) const;

 private:
  friend class TopLevelLiveRange;
  explicit LiveRange(int relative_id, MachineRepresentation rep,
                     TopLevelLiveRange* top_level);

  void UpdateParentForAllChildren(TopLevelLiveRange* new_top_level);

  void set_spilled(bool value) { bits_ = SpilledField::update(bits_, value); }

  UseInterval* FirstSearchIntervalForPosition(LifetimePosition position) const;
  void AdvanceLastProcessedMarker(UseInterval* to_start_of,
                                  LifetimePosition but_not_past) const;

  void VerifyPositions() const;
  void VerifyIntervals() const;

  typedef BitField<bool, 0, 1> SpilledField;
  typedef BitField<int32_t, 6, 6> AssignedRegisterField;
  typedef BitField<MachineRepresentation, 12, 8> RepresentationField;

  // Unique among children and splinters of the same virtual register.
  int relative_id_;
  uint32_t bits_;
  UseInterval* last_interval_;
  UseInterval* first_interval_;
  UsePosition* first_pos_;
  TopLevelLiveRange* top_level_;
  LiveRange* next_;
  // This is used as a cache, it doesn't affect correctness.
  mutable UseInterval* current_interval_;
  // This is used as a cache, it doesn't affect correctness.
  mutable UsePosition* last_processed_use_;
  // This is used as a cache, it's invalid outside of BuildLiveRanges.
  mutable UsePosition* current_hint_position_;
  // Cache the last position splintering stopped at.
  mutable UsePosition* splitting_pointer_;

  DISALLOW_COPY_AND_ASSIGN(LiveRange);
};


class V8_EXPORT_PRIVATE TopLevelLiveRange final : public LiveRange {
 public:
  explicit TopLevelLiveRange(int vreg, MachineRepresentation rep);
  int spill_start_index() const { return spill_start_index_; }

  bool IsFixed() const { return vreg_ < 0; }

  bool is_phi() const { return IsPhiField::decode(bits_); }
  void set_is_phi(bool value) { bits_ = IsPhiField::update(bits_, value); }

  bool is_non_loop_phi() const { return IsNonLoopPhiField::decode(bits_); }
  void set_is_non_loop_phi(bool value) {
    bits_ = IsNonLoopPhiField::update(bits_, value);
  }

  bool has_slot_use() const { return HasSlotUseField::decode(bits_); }
  void set_has_slot_use(bool value) {
    bits_ = HasSlotUseField::update(bits_, value);
  }

  // Add a new interval or a new use position to this live range.
  void EnsureInterval(LifetimePosition start, LifetimePosition end,
                      Zone* zone);
  void AddUseInterval(LifetimePosition start, LifetimePosition end,
                      Zone* zone);
  void AddUsePosition(UsePosition* pos);

  // Shorten the most recently added interval by setting a new start.
  void ShortenTo(LifetimePosition start);

  // Detaches between start and end, and attributes the resulting range to
  // result.
  // The current range is pointed to as "splintered_from". No parent/child
  // relationship is established between this and result.
  void Splinter(LifetimePosition start, LifetimePosition end, Zone* zone);

  // Assuming other was splintered from this range, embeds other and its
  // children as part of the children sequence of this range.
  void Merge(TopLevelLiveRange* other, Zone* zone);
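  // For example, splintering [0, 30[ over [10, 20[ leaves this range
  // covering [0, 10[ and [20, 30[ with the splinter covering [10, 20[;
  // a later Merge() of that splinter folds it back in as children of
  // this range.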

  // Spill range management.
  void SetSpillRange(SpillRange* spill_range);
  enum class SpillType { kNoSpillType, kSpillOperand, kSpillRange };
  void set_spill_type(SpillType value) {
    bits_ = SpillTypeField::update(bits_, value);
  }
  SpillType spill_type() const { return SpillTypeField::decode(bits_); }
  InstructionOperand* GetSpillOperand() const {
    DCHECK_EQ(SpillType::kSpillOperand, spill_type());
    return spill_operand_;
  }

  SpillRange* GetAllocatedSpillRange() const {
    DCHECK_NE(SpillType::kSpillOperand, spill_type());
    return spill_range_;
  }

  SpillRange* GetSpillRange() const {
    DCHECK_EQ(SpillType::kSpillRange, spill_type());
    return spill_range_;
  }
  bool HasNoSpillType() const {
    return spill_type() == SpillType::kNoSpillType;
  }
  bool HasSpillOperand() const {
    return spill_type() == SpillType::kSpillOperand;
  }
  bool HasSpillRange() const { return spill_type() == SpillType::kSpillRange; }

  AllocatedOperand GetSpillRangeOperand() const;

  void RecordSpillLocation(Zone* zone, int gap_index,
                           InstructionOperand* operand);
  void SetSpillOperand(InstructionOperand* operand);
  void SetSpillStartIndex(int start) {
    spill_start_index_ = Min(start, spill_start_index_);
  }

  void CommitSpillMoves(InstructionSequence* sequence,
                        const InstructionOperand& operand,
                        bool might_be_duplicated);

  // If all the children of this range are spilled in deferred blocks, and if
  // for any non-spilled child with a use position requiring a slot, that range
  // is contained in a deferred block, mark the range as
  // IsSpilledOnlyInDeferredBlocks, so that we avoid spilling at definition,
  // and instead let the LiveRangeConnector perform the spills within the
  // deferred blocks. If so, we insert here spills for non-spilled ranges
  // with slot use positions.
  void TreatAsSpilledInDeferredBlock(Zone* zone, int total_block_count) {
    spill_start_index_ = -1;
    spilled_in_deferred_blocks_ = true;
    spill_move_insertion_locations_ = nullptr;
    list_of_blocks_requiring_spill_operands_ =
        new (zone) BitVector(total_block_count, zone);
  }

  void CommitSpillInDeferredBlocks(RegisterAllocationData* data,
                                   const InstructionOperand& spill_operand,
                                   BitVector* necessary_spill_points);

  TopLevelLiveRange* splintered_from() const { return splintered_from_; }
  bool IsSplinter() const { return splintered_from_ != nullptr; }
  bool MayRequireSpillRange() const {
    DCHECK(!IsSplinter());
    return !HasSpillOperand() && spill_range_ == nullptr;
  }
  void UpdateSpillRangePostMerge(TopLevelLiveRange* merged);
  int vreg() const { return vreg_; }

#if DEBUG
  int debug_virt_reg() const;
#endif

  void Verify() const;
  void VerifyChildrenInOrder() const;

  int GetNextChildId() {
    return IsSplinter() ? splintered_from()->GetNextChildId()
                        : ++last_child_id_;
  }

  int GetChildCount() const { return last_child_id_ + 1; }

  bool IsSpilledOnlyInDeferredBlocks() const {
    return spilled_in_deferred_blocks_;
  }

  struct SpillMoveInsertionList;

  SpillMoveInsertionList* GetSpillMoveInsertionLocations() const {
    DCHECK(!IsSpilledOnlyInDeferredBlocks());
    return spill_move_insertion_locations_;
  }
  TopLevelLiveRange* splinter() const { return splinter_; }
  void SetSplinter(TopLevelLiveRange* splinter) {
    DCHECK_NULL(splinter_);
    DCHECK_NOT_NULL(splinter);

    splinter_ = splinter;
    splinter->relative_id_ = GetNextChildId();
    splinter->set_spill_type(spill_type());
    splinter->SetSplinteredFrom(this);
  }

  void MarkHasPreassignedSlot() { has_preassigned_slot_ = true; }
  bool has_preassigned_slot() const { return has_preassigned_slot_; }

  void AddBlockRequiringSpillOperand(RpoNumber block_id) {
    DCHECK(IsSpilledOnlyInDeferredBlocks());
    GetListOfBlocksRequiringSpillOperands()->Add(block_id.ToInt());
  }

  BitVector* GetListOfBlocksRequiringSpillOperands() const {
    DCHECK(IsSpilledOnlyInDeferredBlocks());
    return list_of_blocks_requiring_spill_operands_;
  }

 private:
  void SetSplinteredFrom(TopLevelLiveRange* splinter_parent);

  typedef BitField<bool, 1, 1> HasSlotUseField;
  typedef BitField<bool, 2, 1> IsPhiField;
  typedef BitField<bool, 3, 1> IsNonLoopPhiField;
  typedef BitField<SpillType, 4, 2> SpillTypeField;

  int vreg_;
  int last_child_id_;
  TopLevelLiveRange* splintered_from_;
  union {
    // Correct value determined by spill_type().
    InstructionOperand* spill_operand_;
    SpillRange* spill_range_;
  };

  union {
    SpillMoveInsertionList* spill_move_insertion_locations_;
    BitVector* list_of_blocks_requiring_spill_operands_;
  };

  // TODO(mtrofin): generalize spilling after definition, currently specialized
  // just for spill in a single deferred block.
  bool spilled_in_deferred_blocks_;
  int spill_start_index_;
  UsePosition* last_pos_;
  TopLevelLiveRange* splinter_;
  bool has_preassigned_slot_;

  DISALLOW_COPY_AND_ASSIGN(TopLevelLiveRange);
};


struct PrintableLiveRange {
  const RegisterConfiguration* register_configuration_;
  const LiveRange* range_;
};


std::ostream& operator<<(std::ostream& os,
                         const PrintableLiveRange& printable_range);


class SpillRange final : public ZoneObject {
 public:
  static const int kUnassignedSlot = -1;
  SpillRange(TopLevelLiveRange* range, Zone* zone);

  UseInterval* interval() const { return use_interval_; }

  bool IsEmpty() const { return live_ranges_.empty(); }
  bool TryMerge(SpillRange* other);
  bool HasSlot() const { return assigned_slot_ != kUnassignedSlot; }

  void set_assigned_slot(int index) {
    DCHECK_EQ(kUnassignedSlot, assigned_slot_);
    assigned_slot_ = index;
  }
  int assigned_slot() {
    DCHECK_NE(kUnassignedSlot, assigned_slot_);
    return assigned_slot_;
  }
  const ZoneVector<TopLevelLiveRange*>& live_ranges() const {
    return live_ranges_;
  }
  ZoneVector<TopLevelLiveRange*>& live_ranges() { return live_ranges_; }
  // Spill slots can be 4, 8, or 16 bytes wide.
  int byte_width() const { return byte_width_; }
  void Print() const;

 private:
  LifetimePosition End() const { return end_position_; }
  bool IsIntersectingWith(SpillRange* other) const;
  // Merge intervals, making sure the use intervals are sorted.
  void MergeDisjointIntervals(UseInterval* other);

  ZoneVector<TopLevelLiveRange*> live_ranges_;
  UseInterval* use_interval_;
  LifetimePosition end_position_;
  int assigned_slot_;
  int byte_width_;

  DISALLOW_COPY_AND_ASSIGN(SpillRange);
};


class RegisterAllocationData final : public ZoneObject {
 public:
  class PhiMapValue : public ZoneObject {
   public:
    PhiMapValue(PhiInstruction* phi, const InstructionBlock* block,
                Zone* zone);

    const PhiInstruction* phi() const { return phi_; }
    const InstructionBlock* block() const { return block_; }

    // For hinting.
    int assigned_register() const { return assigned_register_; }
    void set_assigned_register(int register_code) {
      DCHECK_EQ(assigned_register_, kUnassignedRegister);
      assigned_register_ = register_code;
    }
    void UnsetAssignedRegister() { assigned_register_ = kUnassignedRegister; }

    void AddOperand(InstructionOperand* operand);
    void CommitAssignment(const InstructionOperand& operand);

   private:
    PhiInstruction* const phi_;
    const InstructionBlock* const block_;
    ZoneVector<InstructionOperand*> incoming_operands_;
    int assigned_register_;
  };
  typedef ZoneMap<int, PhiMapValue*> PhiMap;

  struct DelayedReference {
    ReferenceMap* map;
    InstructionOperand* operand;
  };
  typedef ZoneVector<DelayedReference> DelayedReferences;
  typedef ZoneVector<std::pair<TopLevelLiveRange*, int>>
      RangesWithPreassignedSlots;

  RegisterAllocationData(const RegisterConfiguration* config,
                         Zone* allocation_zone, Frame* frame,
                         InstructionSequence* code,
                         const char* debug_name = nullptr);

  const ZoneVector<TopLevelLiveRange*>& live_ranges() const {
    return live_ranges_;
  }
  ZoneVector<TopLevelLiveRange*>& live_ranges() { return live_ranges_; }
  const ZoneVector<TopLevelLiveRange*>& fixed_live_ranges() const {
    return fixed_live_ranges_;
  }
  ZoneVector<TopLevelLiveRange*>& fixed_live_ranges() {
    return fixed_live_ranges_;
  }
  ZoneVector<TopLevelLiveRange*>& fixed_float_live_ranges() {
    return fixed_float_live_ranges_;
  }
  const ZoneVector<TopLevelLiveRange*>& fixed_float_live_ranges() const {
    return fixed_float_live_ranges_;
  }
  ZoneVector<TopLevelLiveRange*>& fixed_double_live_ranges() {
    return fixed_double_live_ranges_;
  }
  const ZoneVector<TopLevelLiveRange*>& fixed_double_live_ranges() const {
    return fixed_double_live_ranges_;
  }
  ZoneVector<TopLevelLiveRange*>& fixed_simd128_live_ranges() {
    return fixed_simd128_live_ranges_;
  }
  const ZoneVector<TopLevelLiveRange*>& fixed_simd128_live_ranges() const {
    return fixed_simd128_live_ranges_;
  }
  ZoneVector<BitVector*>& live_in_sets() { return live_in_sets_; }
  ZoneVector<BitVector*>& live_out_sets() { return live_out_sets_; }
  ZoneVector<SpillRange*>& spill_ranges() { return spill_ranges_; }
  DelayedReferences& delayed_references() { return delayed_references_; }
  InstructionSequence* code() const { return code_; }
  // This zone is for data structures only needed during register allocation
  // phases.
  Zone* allocation_zone() const { return allocation_zone_; }
  // This zone is for InstructionOperands and moves that live beyond register
  // allocation.
  Zone* code_zone() const { return code()->zone(); }
  Frame* frame() const { return frame_; }
  const char* debug_name() const { return debug_name_; }
  const RegisterConfiguration* config() const { return config_; }

  MachineRepresentation RepresentationFor(int virtual_register);

  TopLevelLiveRange* GetOrCreateLiveRangeFor(int index);
  // Creates a new live range.
  TopLevelLiveRange* NewLiveRange(int index, MachineRepresentation rep);
  TopLevelLiveRange* NextLiveRange(MachineRepresentation rep);

  SpillRange* AssignSpillRangeToLiveRange(TopLevelLiveRange* range);
  SpillRange* CreateSpillRangeForLiveRange(TopLevelLiveRange* range);

  MoveOperands* AddGapMove(int index, Instruction::GapPosition position,
                           const InstructionOperand& from,
                           const InstructionOperand& to);

  bool IsReference(TopLevelLiveRange* top_range) const {
    return code()->IsReference(top_range->vreg());
  }

  bool ExistsUseWithoutDefinition();
  bool RangesDefinedInDeferredStayInDeferred();

  void MarkAllocated(MachineRepresentation rep, int index);

  PhiMapValue* InitializePhiMap(const InstructionBlock* block,
                                PhiInstruction* phi);
  PhiMapValue* GetPhiMapValueFor(TopLevelLiveRange* top_range);
  PhiMapValue* GetPhiMapValueFor(int virtual_register);
  bool IsBlockBoundary(LifetimePosition pos) const;

  RangesWithPreassignedSlots& preassigned_slot_ranges() {
    return preassigned_slot_ranges_;
  }

 private:
  int GetNextLiveRangeId();

  Zone* const allocation_zone_;
  Frame* const frame_;
  InstructionSequence* const code_;
  const char* const debug_name_;
  const RegisterConfiguration* const config_;
  PhiMap phi_map_;
  ZoneVector<BitVector*> live_in_sets_;
  ZoneVector<BitVector*> live_out_sets_;
  ZoneVector<TopLevelLiveRange*> live_ranges_;
  ZoneVector<TopLevelLiveRange*> fixed_live_ranges_;
  ZoneVector<TopLevelLiveRange*> fixed_float_live_ranges_;
  ZoneVector<TopLevelLiveRange*> fixed_double_live_ranges_;
  ZoneVector<TopLevelLiveRange*> fixed_simd128_live_ranges_;
  ZoneVector<SpillRange*> spill_ranges_;
  DelayedReferences delayed_references_;
  BitVector* assigned_registers_;
  BitVector* assigned_double_registers_;
  int virtual_register_count_;
  RangesWithPreassignedSlots preassigned_slot_ranges_;

  DISALLOW_COPY_AND_ASSIGN(RegisterAllocationData);
};


class ConstraintBuilder final : public ZoneObject {
 public:
  explicit ConstraintBuilder(RegisterAllocationData* data);

  // Phase 1: insert moves to account for fixed register operands.
  void MeetRegisterConstraints();

  // Phase 2: deconstruct SSA by inserting moves in successors and the headers
  // of blocks containing phis.
  void ResolvePhis();

 private:
  RegisterAllocationData* data() const { return data_; }
  InstructionSequence* code() const { return data()->code(); }
  Zone* allocation_zone() const { return data()->allocation_zone(); }

  InstructionOperand* AllocateFixed(UnallocatedOperand* operand, int pos,
                                    bool is_tagged);
  void MeetRegisterConstraints(const InstructionBlock* block);
  void MeetConstraintsBefore(int index);
  void MeetConstraintsAfter(int index);
  void MeetRegisterConstraintsForLastInstructionInBlock(
      const InstructionBlock* block);
  void ResolvePhis(const InstructionBlock* block);

  RegisterAllocationData* const data_;

  DISALLOW_COPY_AND_ASSIGN(ConstraintBuilder);
};


class LiveRangeBuilder final : public ZoneObject {
 public:
  explicit LiveRangeBuilder(RegisterAllocationData* data, Zone* local_zone);

  // Phase 3: compute liveness of all virtual registers.
  void BuildLiveRanges();
  static BitVector* ComputeLiveOut(const InstructionBlock* block,
                                   RegisterAllocationData* data);

 private:
  RegisterAllocationData* data() const { return data_; }
  InstructionSequence* code() const { return data()->code(); }
  Zone* allocation_zone() const { return data()->allocation_zone(); }
  Zone* code_zone() const { return code()->zone(); }
  const RegisterConfiguration* config() const { return data()->config(); }
  ZoneVector<BitVector*>& live_in_sets() const {
    return data()->live_in_sets();
  }

  // Verification.
  void Verify() const;
  bool IntervalStartsAtBlockBoundary(const UseInterval* interval) const;
  bool IntervalPredecessorsCoveredByRange(const UseInterval* interval,
                                          const TopLevelLiveRange* range) const;
  bool NextIntervalStartsInDifferentBlocks(const UseInterval* interval) const;

  // Liveness analysis support.
  void AddInitialIntervals(const InstructionBlock* block, BitVector* live_out);
  void ProcessInstructions(const InstructionBlock* block, BitVector* live);
  void ProcessPhis(const InstructionBlock* block, BitVector* live);
  void ProcessLoopHeader(const InstructionBlock* block, BitVector* live);

  static int FixedLiveRangeID(int index) { return -index - 1; }
  int FixedFPLiveRangeID(int index, MachineRepresentation rep);
  TopLevelLiveRange* FixedLiveRangeFor(int index);
  TopLevelLiveRange* FixedFPLiveRangeFor(int index, MachineRepresentation rep);

  void MapPhiHint(InstructionOperand* operand, UsePosition* use_pos);
  void ResolvePhiHint(InstructionOperand* operand, UsePosition* use_pos);

  UsePosition* NewUsePosition(LifetimePosition pos, InstructionOperand* operand,
                              void* hint, UsePositionHintType hint_type);
  UsePosition* NewUsePosition(LifetimePosition pos) {
    return NewUsePosition(pos, nullptr, nullptr, UsePositionHintType::kNone);
  }
  TopLevelLiveRange* LiveRangeFor(InstructionOperand* operand);
  // Helper methods for building intervals.
  UsePosition* Define(LifetimePosition position, InstructionOperand* operand,
                      void* hint, UsePositionHintType hint_type);
  void Define(LifetimePosition position, InstructionOperand* operand) {
    Define(position, operand, nullptr, UsePositionHintType::kNone);
  }
  UsePosition* Use(LifetimePosition block_start, LifetimePosition position,
                   InstructionOperand* operand, void* hint,
                   UsePositionHintType hint_type);
  void Use(LifetimePosition block_start, LifetimePosition position,
           InstructionOperand* operand) {
    Use(block_start, position, operand, nullptr, UsePositionHintType::kNone);
  }

  RegisterAllocationData* const data_;
  ZoneMap<InstructionOperand*, UsePosition*> phi_hints_;

  DISALLOW_COPY_AND_ASSIGN(LiveRangeBuilder);
};


class RegisterAllocator : public ZoneObject {
 public:
  RegisterAllocator(RegisterAllocationData* data, RegisterKind kind);

 protected:
  RegisterAllocationData* data() const { return data_; }
  InstructionSequence* code() const { return data()->code(); }
  RegisterKind mode() const { return mode_; }
  int num_registers() const { return num_registers_; }
  int num_allocatable_registers() const { return num_allocatable_registers_; }
  const int* allocatable_register_codes() const {
    return allocatable_register_codes_;
  }
  // Returns true iff we must check float register aliasing.
  bool check_fp_aliasing() const { return check_fp_aliasing_; }

  // TODO(mtrofin): explain why splitting in gap START is always OK.
  LifetimePosition GetSplitPositionForInstruction(const LiveRange* range,
                                                  int instruction_index);

  Zone* allocation_zone() const { return data()->allocation_zone(); }

  // Find the optimal split for ranges defined by a memory operand, e.g.
  // constants or function parameters passed on the stack.
  void SplitAndSpillRangesDefinedByMemoryOperand();

  // Split the given range at the given position.
  // If range starts at or after the given position then the
  // original range is returned.
  // Otherwise returns the live range that starts at pos and contains
  // all uses from the original range that follow pos. Uses at pos will
  // still be owned by the original range after splitting.
  LiveRange* SplitRangeAt(LiveRange* range, LifetimePosition pos);

  bool CanProcessRange(LiveRange* range) const {
    return range != nullptr && !range->IsEmpty() && range->kind() == mode();
  }

  // Split the given range in a position from the interval [start, end].
  LiveRange* SplitBetween(LiveRange* range, LifetimePosition start,
                          LifetimePosition end);

  // Find a lifetime position in the interval [start, end] which
  // is optimal for splitting: it is either the header of the outermost
  // loop covered by this interval or the latest possible position.
  LifetimePosition FindOptimalSplitPos(LifetimePosition start,
                                       LifetimePosition end);

  void Spill(LiveRange* range);

  // If we are trying to spill a range inside the loop, try to
  // hoist the spill position out to the point just before the loop.
  LifetimePosition FindOptimalSpillingPos(LiveRange* range,
                                          LifetimePosition pos);

  const ZoneVector<TopLevelLiveRange*>& GetFixedRegisters() const;
  const char* RegisterName(int allocation_index) const;

 private:
  RegisterAllocationData* const data_;
  const RegisterKind mode_;
  const int num_registers_;
  int num_allocatable_registers_;
  const int* allocatable_register_codes_;
  bool check_fp_aliasing_;

 private:
  bool no_combining_;

  DISALLOW_COPY_AND_ASSIGN(RegisterAllocator);
};


class LinearScanAllocator final : public RegisterAllocator {
 public:
  LinearScanAllocator(RegisterAllocationData* data, RegisterKind kind,
                      Zone* local_zone);

  // Phase 4: compute register assignments.
  void AllocateRegisters();

 private:
  struct LiveRangeOrdering {
    bool operator()(LiveRange* a, LiveRange* b) {
      return a->ShouldBeAllocatedBefore(b);
    }
  };
  using LiveRangeQueue = ZoneMultiset<LiveRange*, LiveRangeOrdering>;
  LiveRangeQueue& unhandled_live_ranges() { return unhandled_live_ranges_; }
  ZoneVector<LiveRange*>& active_live_ranges() { return active_live_ranges_; }
  ZoneVector<LiveRange*>& inactive_live_ranges() {
    return inactive_live_ranges_;
  }

  void SetLiveRangeAssignedRegister(LiveRange* range, int reg);

  // Helper methods for updating the live range lists.
  void AddToActive(LiveRange* range);
  void AddToInactive(LiveRange* range);
  void AddToUnhandled(LiveRange* range);
  void ActiveToHandled(LiveRange* range);
  void ActiveToInactive(LiveRange* range);
  void InactiveToHandled(LiveRange* range);
  void InactiveToActive(LiveRange* range);

  // Helper methods for allocating registers.
  bool TryReuseSpillForPhi(TopLevelLiveRange* range);
  bool TryAllocateFreeReg(LiveRange* range,
                          const Vector<LifetimePosition>& free_until_pos);
  bool TryAllocatePreferredReg(LiveRange* range,
                               const Vector<LifetimePosition>& free_until_pos);
  void GetFPRegisterSet(MachineRepresentation rep, int* num_regs,
                        int* num_codes, const int** codes) const;
  void FindFreeRegistersForRange(LiveRange* range,
                                 Vector<LifetimePosition> free_until_pos);
  void ProcessCurrentRange(LiveRange* current);
  void AllocateBlockedReg(LiveRange* range);
  bool TrySplitAndSpillSplinter(LiveRange* range);

  // Spill the given live range after position pos.
  void SpillAfter(LiveRange* range, LifetimePosition pos);

  // Spill the given live range after position [start] and up to position
  // [end].
  void SpillBetween(LiveRange* range, LifetimePosition start,
                    LifetimePosition end);

  // Spill the given live range after position [start] and up to position
  // [end]. The range is guaranteed to be spilled at least until position
  // [until].
  void SpillBetweenUntil(LiveRange* range, LifetimePosition start,
                         LifetimePosition until, LifetimePosition end);

  void SplitAndSpillIntersecting(LiveRange* range);

  LiveRangeQueue unhandled_live_ranges_;
  ZoneVector<LiveRange*> active_live_ranges_;
  ZoneVector<LiveRange*> inactive_live_ranges_;

#ifdef DEBUG
  LifetimePosition allocation_finger_;
#endif

  DISALLOW_COPY_AND_ASSIGN(LinearScanAllocator);
};


class SpillSlotLocator final : public ZoneObject {
 public:
  explicit SpillSlotLocator(RegisterAllocationData* data);

  void LocateSpillSlots();

 private:
  RegisterAllocationData* data() const { return data_; }

  RegisterAllocationData* const data_;

  DISALLOW_COPY_AND_ASSIGN(SpillSlotLocator);
};


class OperandAssigner final : public ZoneObject {
 public:
  explicit OperandAssigner(RegisterAllocationData* data);

  // Phase 5: assign spill slots.
  void AssignSpillSlots();

  // Phase 6: commit assignment.
  void CommitAssignment();

 private:
  RegisterAllocationData* data() const { return data_; }

  RegisterAllocationData* const data_;

  DISALLOW_COPY_AND_ASSIGN(OperandAssigner);
};


class ReferenceMapPopulator final : public ZoneObject {
 public:
  explicit ReferenceMapPopulator(RegisterAllocationData* data);

  // Phase 7: compute values for pointer maps.
  void PopulateReferenceMaps();

 private:
  RegisterAllocationData* data() const { return data_; }

  bool SafePointsAreInOrder() const;

  RegisterAllocationData* const data_;

  DISALLOW_COPY_AND_ASSIGN(ReferenceMapPopulator);
};


class LiveRangeBoundArray;
// Insert moves of the form
//
//   Operand(child_(k+1)) = Operand(child_k)
//
// where child_k and child_(k+1) are consecutive children of a range (so
// child_k->next() == child_(k+1)), and Operand(...) refers to the
// assigned operand, be it a register or a slot.
class LiveRangeConnector final : public ZoneObject {
 public:
  explicit LiveRangeConnector(RegisterAllocationData* data);

  // Phase 8: reconnect split ranges with moves, when the control flow
  // between the ranges is trivial (no branches).
  void ConnectRanges(Zone* local_zone);

  // Phase 9: insert moves to connect ranges across basic blocks, when the
  // control flow between them cannot be trivially resolved, such as joining
  // branches.
  void ResolveControlFlow(Zone* local_zone);

 private:
  RegisterAllocationData* data() const { return data_; }
  InstructionSequence* code() const { return data()->code(); }
  Zone* code_zone() const { return code()->zone(); }

  bool CanEagerlyResolveControlFlow(const InstructionBlock* block) const;

  int ResolveControlFlow(const InstructionBlock* block,
                         const InstructionOperand& cur_op,
                         const InstructionBlock* pred,
                         const InstructionOperand& pred_op);

  void CommitSpillsInDeferredBlocks(TopLevelLiveRange* range,
                                    LiveRangeBoundArray* array,
                                    Zone* temp_zone);

  RegisterAllocationData* const data_;

  DISALLOW_COPY_AND_ASSIGN(LiveRangeConnector);
};
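
// A minimal sketch of how a client might sequence the numbered phases
// declared above (the driver locals "data" and "temp_zone" here are
// illustrative, not part of this header):
//
//   ConstraintBuilder constraints(data);
//   constraints.MeetRegisterConstraints();                    // phase 1
//   constraints.ResolvePhis();                                // phase 2
//   LiveRangeBuilder(data, temp_zone).BuildLiveRanges();      // phase 3
//   LinearScanAllocator(data, GENERAL_REGISTERS, temp_zone)
//       .AllocateRegisters();                                 // phase 4
//   // ... and similarly with FP_REGISTERS for floating-point ranges.
//   OperandAssigner assigner(data);
//   assigner.AssignSpillSlots();                              // phase 5
//   assigner.CommitAssignment();                              // phase 6
//   ReferenceMapPopulator(data).PopulateReferenceMaps();      // phase 7
//   LiveRangeConnector connector(data);
//   connector.ConnectRanges(temp_zone);                       // phase 8
//   connector.ResolveControlFlow(temp_zone);                  // phase 9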

}  // namespace compiler
}  // namespace internal
}  // namespace v8

#endif  // V8_COMPILER_REGISTER_ALLOCATOR_H_