1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4 
5 #include "src/v8.h"
6 #include "src/lithium-allocator-inl.h"
7 
8 #include "src/hydrogen.h"
9 #include "src/string-stream.h"
10 
11 #if V8_TARGET_ARCH_IA32
12 #include "src/ia32/lithium-ia32.h"
13 #elif V8_TARGET_ARCH_X64
14 #include "src/x64/lithium-x64.h"
15 #elif V8_TARGET_ARCH_ARM64
16 #include "src/arm64/lithium-arm64.h"
17 #elif V8_TARGET_ARCH_ARM
18 #include "src/arm/lithium-arm.h"
19 #elif V8_TARGET_ARCH_MIPS
20 #include "src/mips/lithium-mips.h"
21 #elif V8_TARGET_ARCH_X87
22 #include "src/x87/lithium-x87.h"
23 #else
24 #error "Unknown architecture."
25 #endif
26 
27 namespace v8 {
28 namespace internal {
29 
30 static inline LifetimePosition Min(LifetimePosition a, LifetimePosition b) {
31   return a.Value() < b.Value() ? a : b;
32 }
33 
34 
35 static inline LifetimePosition Max(LifetimePosition a, LifetimePosition b) {
36   return a.Value() > b.Value() ? a : b;
37 }
38 
39 
40 UsePosition::UsePosition(LifetimePosition pos,
41                          LOperand* operand,
42                          LOperand* hint)
43     : operand_(operand),
44       hint_(hint),
45       pos_(pos),
46       next_(NULL),
47       requires_reg_(false),
48       register_beneficial_(true) {
49   if (operand_ != NULL && operand_->IsUnallocated()) {
50     LUnallocated* unalloc = LUnallocated::cast(operand_);
51     requires_reg_ = unalloc->HasRegisterPolicy() ||
52         unalloc->HasDoubleRegisterPolicy();
53     register_beneficial_ = !unalloc->HasAnyPolicy();
54   }
55   ASSERT(pos_.IsValid());
56 }
57 
58 
59 bool UsePosition::HasHint() const {
60   return hint_ != NULL && !hint_->IsUnallocated();
61 }
62 
63 
64 bool UsePosition::RequiresRegister() const {
65   return requires_reg_;
66 }
67 
68 
69 bool UsePosition::RegisterIsBeneficial() const {
70   return register_beneficial_;
71 }
72 
73 
74 void UseInterval::SplitAt(LifetimePosition pos, Zone* zone) {
75   ASSERT(Contains(pos) && pos.Value() != start().Value());
76   UseInterval* after = new(zone) UseInterval(pos, end_);
77   after->next_ = next_;
78   next_ = after;
79   end_ = pos;
80 }
81 
82 
83 #ifdef DEBUG
84 
85 
86 void LiveRange::Verify() const {
87   UsePosition* cur = first_pos_;
88   while (cur != NULL) {
89     ASSERT(Start().Value() <= cur->pos().Value() &&
90            cur->pos().Value() <= End().Value());
91     cur = cur->next();
92   }
93 }
94 
95 
96 bool LiveRange::HasOverlap(UseInterval* target) const {
97   UseInterval* current_interval = first_interval_;
98   while (current_interval != NULL) {
99     // Intervals overlap if the start of one is contained in the other.
100     if (current_interval->Contains(target->start()) ||
101         target->Contains(current_interval->start())) {
102       return true;
103     }
104     current_interval = current_interval->next();
105   }
106   return false;
107 }
108 
109 
110 #endif
111 
112 
113 LiveRange::LiveRange(int id, Zone* zone)
114     : id_(id),
115       spilled_(false),
116       kind_(UNALLOCATED_REGISTERS),
117       assigned_register_(kInvalidAssignment),
118       last_interval_(NULL),
119       first_interval_(NULL),
120       first_pos_(NULL),
121       parent_(NULL),
122       next_(NULL),
123       current_interval_(NULL),
124       last_processed_use_(NULL),
125       current_hint_operand_(NULL),
126       spill_operand_(new(zone) LOperand()),
127       spill_start_index_(kMaxInt) { }
128 
129 
130 void LiveRange::set_assigned_register(int reg, Zone* zone) {
131   ASSERT(!HasRegisterAssigned() && !IsSpilled());
132   assigned_register_ = reg;
133   ConvertOperands(zone);
134 }
135 
136 
137 void LiveRange::MakeSpilled(Zone* zone) {
138   ASSERT(!IsSpilled());
139   ASSERT(TopLevel()->HasAllocatedSpillOperand());
140   spilled_ = true;
141   assigned_register_ = kInvalidAssignment;
142   ConvertOperands(zone);
143 }
144 
145 
146 bool LiveRange::HasAllocatedSpillOperand() const {
147   ASSERT(spill_operand_ != NULL);
148   return !spill_operand_->IsIgnored();
149 }
150 
151 
152 void LiveRange::SetSpillOperand(LOperand* operand) {
153   ASSERT(!operand->IsUnallocated());
154   ASSERT(spill_operand_ != NULL);
155   ASSERT(spill_operand_->IsIgnored());
156   spill_operand_->ConvertTo(operand->kind(), operand->index());
157 }
158 
159 
160 UsePosition* LiveRange::NextUsePosition(LifetimePosition start) {
161   UsePosition* use_pos = last_processed_use_;
162   if (use_pos == NULL) use_pos = first_pos();
163   while (use_pos != NULL && use_pos->pos().Value() < start.Value()) {
164     use_pos = use_pos->next();
165   }
166   last_processed_use_ = use_pos;
167   return use_pos;
168 }
169 
170 
171 UsePosition* LiveRange::NextUsePositionRegisterIsBeneficial(
172     LifetimePosition start) {
173   UsePosition* pos = NextUsePosition(start);
174   while (pos != NULL && !pos->RegisterIsBeneficial()) {
175     pos = pos->next();
176   }
177   return pos;
178 }
179 
180 
181 UsePosition* LiveRange::PreviousUsePositionRegisterIsBeneficial(
182     LifetimePosition start) {
183   UsePosition* pos = first_pos();
184   UsePosition* prev = NULL;
185   while (pos != NULL && pos->pos().Value() < start.Value()) {
186     if (pos->RegisterIsBeneficial()) prev = pos;
187     pos = pos->next();
188   }
189   return prev;
190 }
191 
192 
193 UsePosition* LiveRange::NextRegisterPosition(LifetimePosition start) {
194   UsePosition* pos = NextUsePosition(start);
195   while (pos != NULL && !pos->RequiresRegister()) {
196     pos = pos->next();
197   }
198   return pos;
199 }
200 
201 
202 bool LiveRange::CanBeSpilled(LifetimePosition pos) {
203   // We cannot spill a live range that has a use requiring a register
204   // at the current or the immediate next position.
205   UsePosition* use_pos = NextRegisterPosition(pos);
206   if (use_pos == NULL) return true;
207   return
208       use_pos->pos().Value() > pos.NextInstruction().InstructionEnd().Value();
209 }
210 
211 
212 LOperand* LiveRange::CreateAssignedOperand(Zone* zone) {
213   LOperand* op = NULL;
214   if (HasRegisterAssigned()) {
215     ASSERT(!IsSpilled());
216     switch (Kind()) {
217       case GENERAL_REGISTERS:
218         op = LRegister::Create(assigned_register(), zone);
219         break;
220       case DOUBLE_REGISTERS:
221         op = LDoubleRegister::Create(assigned_register(), zone);
222         break;
223       default:
224         UNREACHABLE();
225     }
226   } else if (IsSpilled()) {
227     ASSERT(!HasRegisterAssigned());
228     op = TopLevel()->GetSpillOperand();
229     ASSERT(!op->IsUnallocated());
230   } else {
231     LUnallocated* unalloc = new(zone) LUnallocated(LUnallocated::NONE);
232     unalloc->set_virtual_register(id_);
233     op = unalloc;
234   }
235   return op;
236 }
237 
238 
239 UseInterval* LiveRange::FirstSearchIntervalForPosition(
240     LifetimePosition position) const {
241   if (current_interval_ == NULL) return first_interval_;
242   if (current_interval_->start().Value() > position.Value()) {
243     current_interval_ = NULL;
244     return first_interval_;
245   }
246   return current_interval_;
247 }
248 
249 
250 void LiveRange::AdvanceLastProcessedMarker(
251     UseInterval* to_start_of, LifetimePosition but_not_past) const {
252   if (to_start_of == NULL) return;
253   if (to_start_of->start().Value() > but_not_past.Value()) return;
254   LifetimePosition start =
255       current_interval_ == NULL ? LifetimePosition::Invalid()
256                                 : current_interval_->start();
257   if (to_start_of->start().Value() > start.Value()) {
258     current_interval_ = to_start_of;
259   }
260 }
261 
262 
263 void LiveRange::SplitAt(LifetimePosition position,
264                         LiveRange* result,
265                         Zone* zone) {
266   ASSERT(Start().Value() < position.Value());
267   ASSERT(result->IsEmpty());
268   // Find the last interval that ends before the position. If the
269   // position is contained in one of the intervals in the chain, we
270   // split that interval and use the first part.
271   UseInterval* current = FirstSearchIntervalForPosition(position);
272 
273   // If the split position coincides with the beginning of a use interval
274   // we need to split use positions in a special way.
275   bool split_at_start = false;
276 
277   if (current->start().Value() == position.Value()) {
278     // When splitting at start we need to locate the previous use interval.
279     current = first_interval_;
280   }
281 
282   while (current != NULL) {
283     if (current->Contains(position)) {
284       current->SplitAt(position, zone);
285       break;
286     }
287     UseInterval* next = current->next();
288     if (next->start().Value() >= position.Value()) {
289       split_at_start = (next->start().Value() == position.Value());
290       break;
291     }
292     current = next;
293   }
294 
295   // Partition original use intervals to the two live ranges.
296   UseInterval* before = current;
297   UseInterval* after = before->next();
298   result->last_interval_ = (last_interval_ == before)
299       ? after            // Only interval in the range after split.
300       : last_interval_;  // Last interval of the original range.
301   result->first_interval_ = after;
302   last_interval_ = before;
303 
304   // Find the last use position before the split and the first use
305   // position after it.
306   UsePosition* use_after = first_pos_;
307   UsePosition* use_before = NULL;
308   if (split_at_start) {
309     // The split position coincides with the beginning of a use interval (the
310     // end of a lifetime hole). Use at this position should be attributed to
311     // the split child because split child owns use interval covering it.
312     while (use_after != NULL && use_after->pos().Value() < position.Value()) {
313       use_before = use_after;
314       use_after = use_after->next();
315     }
316   } else {
317     while (use_after != NULL && use_after->pos().Value() <= position.Value()) {
318       use_before = use_after;
319       use_after = use_after->next();
320     }
321   }
322 
323   // Partition original use positions to the two live ranges.
324   if (use_before != NULL) {
325     use_before->next_ = NULL;
326   } else {
327     first_pos_ = NULL;
328   }
329   result->first_pos_ = use_after;
330 
331   // Discard cached iteration state. It might be pointing
332   // to the use that no longer belongs to this live range.
333   last_processed_use_ = NULL;
334   current_interval_ = NULL;
335 
336   // Link the new live range in the chain before any of the other
337   // ranges linked from the range before the split.
338   result->parent_ = (parent_ == NULL) ? this : parent_;
339   result->kind_ = result->parent_->kind_;
340   result->next_ = next_;
341   next_ = result;
342 
343 #ifdef DEBUG
344   Verify();
345   result->Verify();
346 #endif
347 }
348 
349 
350 // This implements an ordering on live ranges so that they are ordered by their
351 // start positions.  This is needed for the correctness of the register
352 // allocation algorithm.  If two live ranges start at the same offset then there
353 // is a tie breaker based on where the value is first used.  This part of the
354 // ordering is merely a heuristic.
355 bool LiveRange::ShouldBeAllocatedBefore(const LiveRange* other) const {
356   LifetimePosition start = Start();
357   LifetimePosition other_start = other->Start();
358   if (start.Value() == other_start.Value()) {
359     UsePosition* pos = first_pos();
360     if (pos == NULL) return false;
361     UsePosition* other_pos = other->first_pos();
362     if (other_pos == NULL) return true;
363     return pos->pos().Value() < other_pos->pos().Value();
364   }
365   return start.Value() < other_start.Value();
366 }
367 
368 
369 void LiveRange::ShortenTo(LifetimePosition start) {
370   LAllocator::TraceAlloc("Shorten live range %d to [%d\n", id_, start.Value());
371   ASSERT(first_interval_ != NULL);
372   ASSERT(first_interval_->start().Value() <= start.Value());
373   ASSERT(start.Value() < first_interval_->end().Value());
374   first_interval_->set_start(start);
375 }
376 
377 
378 void LiveRange::EnsureInterval(LifetimePosition start,
379                                LifetimePosition end,
380                                Zone* zone) {
381   LAllocator::TraceAlloc("Ensure live range %d in interval [%d %d[\n",
382                          id_,
383                          start.Value(),
384                          end.Value());
385   LifetimePosition new_end = end;
386   while (first_interval_ != NULL &&
387          first_interval_->start().Value() <= end.Value()) {
388     if (first_interval_->end().Value() > end.Value()) {
389       new_end = first_interval_->end();
390     }
391     first_interval_ = first_interval_->next();
392   }
393 
394   UseInterval* new_interval = new(zone) UseInterval(start, new_end);
395   new_interval->next_ = first_interval_;
396   first_interval_ = new_interval;
397   if (new_interval->next() == NULL) {
398     last_interval_ = new_interval;
399   }
400 }
401 
402 
403 void LiveRange::AddUseInterval(LifetimePosition start,
404                                LifetimePosition end,
405                                Zone* zone) {
406   LAllocator::TraceAlloc("Add to live range %d interval [%d %d[\n",
407                          id_,
408                          start.Value(),
409                          end.Value());
410   if (first_interval_ == NULL) {
411     UseInterval* interval = new(zone) UseInterval(start, end);
412     first_interval_ = interval;
413     last_interval_ = interval;
414   } else {
415     if (end.Value() == first_interval_->start().Value()) {
416       first_interval_->set_start(start);
417     } else if (end.Value() < first_interval_->start().Value()) {
418       UseInterval* interval = new(zone) UseInterval(start, end);
419       interval->set_next(first_interval_);
420       first_interval_ = interval;
421     } else {
422       // The order in which instructions are processed (see
423       // ProcessInstructions) guarantees that each new use interval either
424       // precedes or intersects with the last added interval.
425       ASSERT(start.Value() < first_interval_->end().Value());
426       first_interval_->start_ = Min(start, first_interval_->start_);
427       first_interval_->end_ = Max(end, first_interval_->end_);
428     }
429   }
430 }
431 
432 
433 void LiveRange::AddUsePosition(LifetimePosition pos,
434                                LOperand* operand,
435                                LOperand* hint,
436                                Zone* zone) {
437   LAllocator::TraceAlloc("Add to live range %d use position %d\n",
438                          id_,
439                          pos.Value());
440   UsePosition* use_pos = new(zone) UsePosition(pos, operand, hint);
441   UsePosition* prev_hint = NULL;
442   UsePosition* prev = NULL;
443   UsePosition* current = first_pos_;
444   while (current != NULL && current->pos().Value() < pos.Value()) {
445     prev_hint = current->HasHint() ? current : prev_hint;
446     prev = current;
447     current = current->next();
448   }
449 
450   if (prev == NULL) {
451     use_pos->set_next(first_pos_);
452     first_pos_ = use_pos;
453   } else {
454     use_pos->next_ = prev->next_;
455     prev->next_ = use_pos;
456   }
457 
458   if (prev_hint == NULL && use_pos->HasHint()) {
459     current_hint_operand_ = hint;
460   }
461 }
462 
463 
464 void LiveRange::ConvertOperands(Zone* zone) {
465   LOperand* op = CreateAssignedOperand(zone);
466   UsePosition* use_pos = first_pos();
467   while (use_pos != NULL) {
468     ASSERT(Start().Value() <= use_pos->pos().Value() &&
469            use_pos->pos().Value() <= End().Value());
470 
471     if (use_pos->HasOperand()) {
472       ASSERT(op->IsRegister() || op->IsDoubleRegister() ||
473              !use_pos->RequiresRegister());
474       use_pos->operand()->ConvertTo(op->kind(), op->index());
475     }
476     use_pos = use_pos->next();
477   }
478 }
479 
480 
481 bool LiveRange::CanCover(LifetimePosition position) const {
482   if (IsEmpty()) return false;
483   return Start().Value() <= position.Value() &&
484          position.Value() < End().Value();
485 }
486 
487 
488 bool LiveRange::Covers(LifetimePosition position) {
489   if (!CanCover(position)) return false;
490   UseInterval* start_search = FirstSearchIntervalForPosition(position);
491   for (UseInterval* interval = start_search;
492        interval != NULL;
493        interval = interval->next()) {
494     ASSERT(interval->next() == NULL ||
495            interval->next()->start().Value() >= interval->start().Value());
496     AdvanceLastProcessedMarker(interval, position);
497     if (interval->Contains(position)) return true;
498     if (interval->start().Value() > position.Value()) return false;
499   }
500   return false;
501 }
502 
503 
504 LifetimePosition LiveRange::FirstIntersection(LiveRange* other) {
505   UseInterval* b = other->first_interval();
506   if (b == NULL) return LifetimePosition::Invalid();
507   LifetimePosition advance_last_processed_up_to = b->start();
508   UseInterval* a = FirstSearchIntervalForPosition(b->start());
509   while (a != NULL && b != NULL) {
510     if (a->start().Value() > other->End().Value()) break;
511     if (b->start().Value() > End().Value()) break;
512     LifetimePosition cur_intersection = a->Intersect(b);
513     if (cur_intersection.IsValid()) {
514       return cur_intersection;
515     }
516     if (a->start().Value() < b->start().Value()) {
517       a = a->next();
518       if (a == NULL || a->start().Value() > other->End().Value()) break;
519       AdvanceLastProcessedMarker(a, advance_last_processed_up_to);
520     } else {
521       b = b->next();
522     }
523   }
524   return LifetimePosition::Invalid();
525 }
526 
527 
528 LAllocator::LAllocator(int num_values, HGraph* graph)
529     : zone_(graph->isolate()),
530       chunk_(NULL),
531       live_in_sets_(graph->blocks()->length(), zone()),
532       live_ranges_(num_values * 2, zone()),
533       fixed_live_ranges_(NULL),
534       fixed_double_live_ranges_(NULL),
535       unhandled_live_ranges_(num_values * 2, zone()),
536       active_live_ranges_(8, zone()),
537       inactive_live_ranges_(8, zone()),
538       reusable_slots_(8, zone()),
539       next_virtual_register_(num_values),
540       first_artificial_register_(num_values),
541       mode_(UNALLOCATED_REGISTERS),
542       num_registers_(-1),
543       graph_(graph),
544       has_osr_entry_(false),
545       allocation_ok_(true) { }
546 
547 
548 void LAllocator::InitializeLivenessAnalysis() {
549   // Initialize the live_in sets for each block to NULL.
550   int block_count = graph_->blocks()->length();
551   live_in_sets_.Initialize(block_count, zone());
552   live_in_sets_.AddBlock(NULL, block_count, zone());
553 }
554 
555 
556 BitVector* LAllocator::ComputeLiveOut(HBasicBlock* block) {
557   // Compute live out for the given block, not including backward
558   // successor edges.
559   BitVector* live_out = new(zone()) BitVector(next_virtual_register_, zone());
560 
561   // Process all successor blocks.
562   for (HSuccessorIterator it(block->end()); !it.Done(); it.Advance()) {
563     // Add values live on entry to the successor. Note the successor's
564     // live_in will not be computed yet for backwards edges.
565     HBasicBlock* successor = it.Current();
566     BitVector* live_in = live_in_sets_[successor->block_id()];
567     if (live_in != NULL) live_out->Union(*live_in);
568 
569     // All phi input operands corresponding to this successor edge are live
570     // out from this block.
571     int index = successor->PredecessorIndexOf(block);
572     const ZoneList<HPhi*>* phis = successor->phis();
573     for (int i = 0; i < phis->length(); ++i) {
574       HPhi* phi = phis->at(i);
575       if (!phi->OperandAt(index)->IsConstant()) {
576         live_out->Add(phi->OperandAt(index)->id());
577       }
578     }
579   }
580 
581   return live_out;
582 }
583 
584 
585 void LAllocator::AddInitialIntervals(HBasicBlock* block,
586                                      BitVector* live_out) {
587   // Add an interval that includes the entire block to the live range for
588   // each live_out value.
589   LifetimePosition start = LifetimePosition::FromInstructionIndex(
590       block->first_instruction_index());
591   LifetimePosition end = LifetimePosition::FromInstructionIndex(
592       block->last_instruction_index()).NextInstruction();
593   BitVector::Iterator iterator(live_out);
594   while (!iterator.Done()) {
595     int operand_index = iterator.Current();
596     LiveRange* range = LiveRangeFor(operand_index);
597     range->AddUseInterval(start, end, zone());
598     iterator.Advance();
599   }
600 }
601 
602 
603 int LAllocator::FixedDoubleLiveRangeID(int index) {
604   return -index - 1 - Register::kMaxNumAllocatableRegisters;
605 }
606 
607 
608 LOperand* LAllocator::AllocateFixed(LUnallocated* operand,
609                                     int pos,
610                                     bool is_tagged) {
611   TraceAlloc("Allocating fixed reg for op %d\n", operand->virtual_register());
612   ASSERT(operand->HasFixedPolicy());
613   if (operand->HasFixedSlotPolicy()) {
614     operand->ConvertTo(LOperand::STACK_SLOT, operand->fixed_slot_index());
615   } else if (operand->HasFixedRegisterPolicy()) {
616     int reg_index = operand->fixed_register_index();
617     operand->ConvertTo(LOperand::REGISTER, reg_index);
618   } else if (operand->HasFixedDoubleRegisterPolicy()) {
619     int reg_index = operand->fixed_register_index();
620     operand->ConvertTo(LOperand::DOUBLE_REGISTER, reg_index);
621   } else {
622     UNREACHABLE();
623   }
624   if (is_tagged) {
625     TraceAlloc("Fixed reg is tagged at %d\n", pos);
626     LInstruction* instr = InstructionAt(pos);
627     if (instr->HasPointerMap()) {
628       instr->pointer_map()->RecordPointer(operand, chunk()->zone());
629     }
630   }
631   return operand;
632 }
633 
634 
635 LiveRange* LAllocator::FixedLiveRangeFor(int index) {
636   ASSERT(index < Register::kMaxNumAllocatableRegisters);
637   LiveRange* result = fixed_live_ranges_[index];
638   if (result == NULL) {
639     result = new(zone()) LiveRange(FixedLiveRangeID(index), chunk()->zone());
640     ASSERT(result->IsFixed());
641     result->kind_ = GENERAL_REGISTERS;
642     SetLiveRangeAssignedRegister(result, index);
643     fixed_live_ranges_[index] = result;
644   }
645   return result;
646 }
647 
648 
649 LiveRange* LAllocator::FixedDoubleLiveRangeFor(int index) {
650   ASSERT(index < DoubleRegister::NumAllocatableRegisters());
651   LiveRange* result = fixed_double_live_ranges_[index];
652   if (result == NULL) {
653     result = new(zone()) LiveRange(FixedDoubleLiveRangeID(index),
654                                    chunk()->zone());
655     ASSERT(result->IsFixed());
656     result->kind_ = DOUBLE_REGISTERS;
657     SetLiveRangeAssignedRegister(result, index);
658     fixed_double_live_ranges_[index] = result;
659   }
660   return result;
661 }
662 
663 
664 LiveRange* LAllocator::LiveRangeFor(int index) {
665   if (index >= live_ranges_.length()) {
666     live_ranges_.AddBlock(NULL, index - live_ranges_.length() + 1, zone());
667   }
668   LiveRange* result = live_ranges_[index];
669   if (result == NULL) {
670     result = new(zone()) LiveRange(index, chunk()->zone());
671     live_ranges_[index] = result;
672   }
673   return result;
674 }
675 
676 
677 LGap* LAllocator::GetLastGap(HBasicBlock* block) {
678   int last_instruction = block->last_instruction_index();
679   int index = chunk_->NearestGapPos(last_instruction);
680   return GapAt(index);
681 }
682 
683 
684 HPhi* LAllocator::LookupPhi(LOperand* operand) const {
685   if (!operand->IsUnallocated()) return NULL;
686   int index = LUnallocated::cast(operand)->virtual_register();
687   HValue* instr = graph_->LookupValue(index);
688   if (instr != NULL && instr->IsPhi()) {
689     return HPhi::cast(instr);
690   }
691   return NULL;
692 }
693 
694 
695 LiveRange* LAllocator::LiveRangeFor(LOperand* operand) {
696   if (operand->IsUnallocated()) {
697     return LiveRangeFor(LUnallocated::cast(operand)->virtual_register());
698   } else if (operand->IsRegister()) {
699     return FixedLiveRangeFor(operand->index());
700   } else if (operand->IsDoubleRegister()) {
701     return FixedDoubleLiveRangeFor(operand->index());
702   } else {
703     return NULL;
704   }
705 }
706 
707 
708 void LAllocator::Define(LifetimePosition position,
709                         LOperand* operand,
710                         LOperand* hint) {
711   LiveRange* range = LiveRangeFor(operand);
712   if (range == NULL) return;
713 
714   if (range->IsEmpty() || range->Start().Value() > position.Value()) {
715     // Can happen if there is a definition without use.
716     range->AddUseInterval(position, position.NextInstruction(), zone());
717     range->AddUsePosition(position.NextInstruction(), NULL, NULL, zone());
718   } else {
719     range->ShortenTo(position);
720   }
721 
722   if (operand->IsUnallocated()) {
723     LUnallocated* unalloc_operand = LUnallocated::cast(operand);
724     range->AddUsePosition(position, unalloc_operand, hint, zone());
725   }
726 }
727 
728 
729 void LAllocator::Use(LifetimePosition block_start,
730                      LifetimePosition position,
731                      LOperand* operand,
732                      LOperand* hint) {
733   LiveRange* range = LiveRangeFor(operand);
734   if (range == NULL) return;
735   if (operand->IsUnallocated()) {
736     LUnallocated* unalloc_operand = LUnallocated::cast(operand);
737     range->AddUsePosition(position, unalloc_operand, hint, zone());
738   }
739   range->AddUseInterval(block_start, position, zone());
740 }
741 
742 
743 void LAllocator::AddConstraintsGapMove(int index,
744                                        LOperand* from,
745                                        LOperand* to) {
746   LGap* gap = GapAt(index);
747   LParallelMove* move = gap->GetOrCreateParallelMove(LGap::START,
748                                                      chunk()->zone());
749   if (from->IsUnallocated()) {
750     const ZoneList<LMoveOperands>* move_operands = move->move_operands();
751     for (int i = 0; i < move_operands->length(); ++i) {
752       LMoveOperands cur = move_operands->at(i);
753       LOperand* cur_to = cur.destination();
754       if (cur_to->IsUnallocated()) {
755         if (LUnallocated::cast(cur_to)->virtual_register() ==
756             LUnallocated::cast(from)->virtual_register()) {
757           move->AddMove(cur.source(), to, chunk()->zone());
758           return;
759         }
760       }
761     }
762   }
763   move->AddMove(from, to, chunk()->zone());
764 }
765 
766 
767 void LAllocator::MeetRegisterConstraints(HBasicBlock* block) {
768   int start = block->first_instruction_index();
769   int end = block->last_instruction_index();
770   if (start == -1) return;
771   for (int i = start; i <= end; ++i) {
772     if (IsGapAt(i)) {
773       LInstruction* instr = NULL;
774       LInstruction* prev_instr = NULL;
775       if (i < end) instr = InstructionAt(i + 1);
776       if (i > start) prev_instr = InstructionAt(i - 1);
777       MeetConstraintsBetween(prev_instr, instr, i);
778       if (!AllocationOk()) return;
779     }
780   }
781 }
782 
783 
784 void LAllocator::MeetConstraintsBetween(LInstruction* first,
785                                         LInstruction* second,
786                                         int gap_index) {
787   // Handle fixed temporaries.
788   if (first != NULL) {
789     for (TempIterator it(first); !it.Done(); it.Advance()) {
790       LUnallocated* temp = LUnallocated::cast(it.Current());
791       if (temp->HasFixedPolicy()) {
792         AllocateFixed(temp, gap_index - 1, false);
793       }
794     }
795   }
796 
797   // Handle fixed output operand.
798   if (first != NULL && first->Output() != NULL) {
799     LUnallocated* first_output = LUnallocated::cast(first->Output());
800     LiveRange* range = LiveRangeFor(first_output->virtual_register());
801     bool assigned = false;
802     if (first_output->HasFixedPolicy()) {
803       LUnallocated* output_copy = first_output->CopyUnconstrained(
804           chunk()->zone());
805       bool is_tagged = HasTaggedValue(first_output->virtual_register());
806       AllocateFixed(first_output, gap_index, is_tagged);
807 
808       // This value is produced on the stack, we never need to spill it.
809       if (first_output->IsStackSlot()) {
810         range->SetSpillOperand(first_output);
811         range->SetSpillStartIndex(gap_index - 1);
812         assigned = true;
813       }
814       chunk_->AddGapMove(gap_index, first_output, output_copy);
815     }
816 
817     if (!assigned) {
818       range->SetSpillStartIndex(gap_index);
819 
820       // This move to the spill operand is not a real use. Liveness analysis
821       // and splitting of live ranges do not account for it.
822       // Thus it should be inserted at a lifetime position corresponding to
823       // the instruction end.
824       LGap* gap = GapAt(gap_index);
825       LParallelMove* move = gap->GetOrCreateParallelMove(LGap::BEFORE,
826                                                          chunk()->zone());
827       move->AddMove(first_output, range->GetSpillOperand(),
828                     chunk()->zone());
829     }
830   }
831 
832   // Handle fixed input operands of second instruction.
833   if (second != NULL) {
834     for (UseIterator it(second); !it.Done(); it.Advance()) {
835       LUnallocated* cur_input = LUnallocated::cast(it.Current());
836       if (cur_input->HasFixedPolicy()) {
837         LUnallocated* input_copy = cur_input->CopyUnconstrained(
838             chunk()->zone());
839         bool is_tagged = HasTaggedValue(cur_input->virtual_register());
840         AllocateFixed(cur_input, gap_index + 1, is_tagged);
841         AddConstraintsGapMove(gap_index, input_copy, cur_input);
842       } else if (cur_input->HasWritableRegisterPolicy()) {
843         // The live range of writable input registers always goes until the end
844         // of the instruction.
845         ASSERT(!cur_input->IsUsedAtStart());
846 
847         LUnallocated* input_copy = cur_input->CopyUnconstrained(
848             chunk()->zone());
849         int vreg = GetVirtualRegister();
850         if (!AllocationOk()) return;
851         cur_input->set_virtual_register(vreg);
852 
853         if (RequiredRegisterKind(input_copy->virtual_register()) ==
854             DOUBLE_REGISTERS) {
855           double_artificial_registers_.Add(
856               cur_input->virtual_register() - first_artificial_register_,
857               zone());
858         }
859 
860         AddConstraintsGapMove(gap_index, input_copy, cur_input);
861       }
862     }
863   }
864 
865   // Handle "output same as input" for second instruction.
866   if (second != NULL && second->Output() != NULL) {
867     LUnallocated* second_output = LUnallocated::cast(second->Output());
868     if (second_output->HasSameAsInputPolicy()) {
869       LUnallocated* cur_input = LUnallocated::cast(second->FirstInput());
870       int output_vreg = second_output->virtual_register();
871       int input_vreg = cur_input->virtual_register();
872 
873       LUnallocated* input_copy = cur_input->CopyUnconstrained(
874           chunk()->zone());
875       cur_input->set_virtual_register(second_output->virtual_register());
876       AddConstraintsGapMove(gap_index, input_copy, cur_input);
877 
878       if (HasTaggedValue(input_vreg) && !HasTaggedValue(output_vreg)) {
879         int index = gap_index + 1;
880         LInstruction* instr = InstructionAt(index);
881         if (instr->HasPointerMap()) {
882           instr->pointer_map()->RecordPointer(input_copy, chunk()->zone());
883         }
884       } else if (!HasTaggedValue(input_vreg) && HasTaggedValue(output_vreg)) {
885         // The input is assumed to immediately have a tagged representation,
886         // before the pointer map can be used. I.e. the pointer map at the
887         // instruction will include the output operand (whose value at the
888         // beginning of the instruction is equal to the input operand). If
889         // this is not desired, then the pointer map at this instruction needs
890         // to be adjusted manually.
891       }
892     }
893   }
894 }
895 
896 
897 void LAllocator::ProcessInstructions(HBasicBlock* block, BitVector* live) {
898   int block_start = block->first_instruction_index();
899   int index = block->last_instruction_index();
900 
901   LifetimePosition block_start_position =
902       LifetimePosition::FromInstructionIndex(block_start);
903 
904   while (index >= block_start) {
905     LifetimePosition curr_position =
906         LifetimePosition::FromInstructionIndex(index);
907 
908     if (IsGapAt(index)) {
909       // We have a gap at this position.
910       LGap* gap = GapAt(index);
911       LParallelMove* move = gap->GetOrCreateParallelMove(LGap::START,
912                                                          chunk()->zone());
913       const ZoneList<LMoveOperands>* move_operands = move->move_operands();
914       for (int i = 0; i < move_operands->length(); ++i) {
915         LMoveOperands* cur = &move_operands->at(i);
916         if (cur->IsIgnored()) continue;
917         LOperand* from = cur->source();
918         LOperand* to = cur->destination();
919         HPhi* phi = LookupPhi(to);
920         LOperand* hint = to;
921         if (phi != NULL) {
922           // This is a phi resolving move.
923           if (!phi->block()->IsLoopHeader()) {
924             hint = LiveRangeFor(phi->id())->current_hint_operand();
925           }
926         } else {
927           if (to->IsUnallocated()) {
928             if (live->Contains(LUnallocated::cast(to)->virtual_register())) {
929               Define(curr_position, to, from);
930               live->Remove(LUnallocated::cast(to)->virtual_register());
931             } else {
932               cur->Eliminate();
933               continue;
934             }
935           } else {
936             Define(curr_position, to, from);
937           }
938         }
939         Use(block_start_position, curr_position, from, hint);
940         if (from->IsUnallocated()) {
941           live->Add(LUnallocated::cast(from)->virtual_register());
942         }
943       }
944     } else {
945       ASSERT(!IsGapAt(index));
946       LInstruction* instr = InstructionAt(index);
947 
948       if (instr != NULL) {
949         LOperand* output = instr->Output();
950         if (output != NULL) {
951           if (output->IsUnallocated()) {
952             live->Remove(LUnallocated::cast(output)->virtual_register());
953           }
954           Define(curr_position, output, NULL);
955         }
956 
957         if (instr->ClobbersRegisters()) {
958           for (int i = 0; i < Register::kMaxNumAllocatableRegisters; ++i) {
959             if (output == NULL || !output->IsRegister() ||
960                 output->index() != i) {
961               LiveRange* range = FixedLiveRangeFor(i);
962               range->AddUseInterval(curr_position,
963                                     curr_position.InstructionEnd(),
964                                     zone());
965             }
966           }
967         }
968 
969         if (instr->ClobbersDoubleRegisters(isolate())) {
970           for (int i = 0; i < DoubleRegister::NumAllocatableRegisters(); ++i) {
971             if (output == NULL || !output->IsDoubleRegister() ||
972                 output->index() != i) {
973               LiveRange* range = FixedDoubleLiveRangeFor(i);
974               range->AddUseInterval(curr_position,
975                                     curr_position.InstructionEnd(),
976                                     zone());
977             }
978           }
979         }
980 
981         for (UseIterator it(instr); !it.Done(); it.Advance()) {
982           LOperand* input = it.Current();
983 
984           LifetimePosition use_pos;
985           if (input->IsUnallocated() &&
986               LUnallocated::cast(input)->IsUsedAtStart()) {
987             use_pos = curr_position;
988           } else {
989             use_pos = curr_position.InstructionEnd();
990           }
991 
992           Use(block_start_position, use_pos, input, NULL);
993           if (input->IsUnallocated()) {
994             live->Add(LUnallocated::cast(input)->virtual_register());
995           }
996         }
997 
998         for (TempIterator it(instr); !it.Done(); it.Advance()) {
999           LOperand* temp = it.Current();
1000           if (instr->ClobbersTemps()) {
1001             if (temp->IsRegister()) continue;
1002             if (temp->IsUnallocated()) {
1003               LUnallocated* temp_unalloc = LUnallocated::cast(temp);
1004               if (temp_unalloc->HasFixedPolicy()) {
1005                 continue;
1006               }
1007             }
1008           }
1009           Use(block_start_position, curr_position.InstructionEnd(), temp, NULL);
1010           Define(curr_position, temp, NULL);
1011 
1012           if (temp->IsUnallocated()) {
1013             LUnallocated* temp_unalloc = LUnallocated::cast(temp);
1014             if (temp_unalloc->HasDoubleRegisterPolicy()) {
1015               double_artificial_registers_.Add(
1016                   temp_unalloc->virtual_register() - first_artificial_register_,
1017                   zone());
1018             }
1019           }
1020         }
1021       }
1022     }
1023 
1024     index = index - 1;
1025   }
1026 }
1027 
1028 
1029 void LAllocator::ResolvePhis(HBasicBlock* block) {
1030   const ZoneList<HPhi*>* phis = block->phis();
1031   for (int i = 0; i < phis->length(); ++i) {
1032     HPhi* phi = phis->at(i);
1033     LUnallocated* phi_operand =
1034         new(chunk()->zone()) LUnallocated(LUnallocated::NONE);
1035     phi_operand->set_virtual_register(phi->id());
1036     for (int j = 0; j < phi->OperandCount(); ++j) {
1037       HValue* op = phi->OperandAt(j);
1038       LOperand* operand = NULL;
1039       if (op->IsConstant() && op->EmitAtUses()) {
1040         HConstant* constant = HConstant::cast(op);
1041         operand = chunk_->DefineConstantOperand(constant);
1042       } else {
1043         ASSERT(!op->EmitAtUses());
1044         LUnallocated* unalloc =
1045             new(chunk()->zone()) LUnallocated(LUnallocated::ANY);
1046         unalloc->set_virtual_register(op->id());
1047         operand = unalloc;
1048       }
1049       HBasicBlock* cur_block = block->predecessors()->at(j);
1050       // The gap move must be added without any of the special processing
1051       // done in AddConstraintsGapMove.
1052       chunk_->AddGapMove(cur_block->last_instruction_index() - 1,
1053                          operand,
1054                          phi_operand);
1055 
1056       // We are going to insert a move before the branch instruction.
1057       // Some branch instructions (e.g. loops' back edges)
1058       // can potentially cause a GC so they have a pointer map.
1059       // By inserting a move we essentially create a copy of a
1060       // value which is invisible to PopulatePointerMaps(), because we store
1061       // it into a location different from the operand of a live range
1062       // covering a branch instruction.
1063       // Thus we need to manually record a pointer.
1064       LInstruction* branch =
1065           InstructionAt(cur_block->last_instruction_index());
1066       if (branch->HasPointerMap()) {
1067         if (phi->representation().IsTagged() && !phi->type().IsSmi()) {
1068           branch->pointer_map()->RecordPointer(phi_operand, chunk()->zone());
1069         } else if (!phi->representation().IsDouble()) {
1070           branch->pointer_map()->RecordUntagged(phi_operand, chunk()->zone());
1071         }
1072       }
1073     }
1074 
1075     LiveRange* live_range = LiveRangeFor(phi->id());
1076     LLabel* label = chunk_->GetLabel(phi->block()->block_id());
1077     label->GetOrCreateParallelMove(LGap::START, chunk()->zone())->
1078         AddMove(phi_operand, live_range->GetSpillOperand(), chunk()->zone());
1079     live_range->SetSpillStartIndex(phi->block()->first_instruction_index());
1080   }
1081 }
1082 
1083 
1084 bool LAllocator::Allocate(LChunk* chunk) {
1085   ASSERT(chunk_ == NULL);
1086   chunk_ = static_cast<LPlatformChunk*>(chunk);
1087   assigned_registers_ =
1088       new(chunk->zone()) BitVector(Register::NumAllocatableRegisters(),
1089                                    chunk->zone());
1090   assigned_double_registers_ =
1091       new(chunk->zone()) BitVector(DoubleRegister::NumAllocatableRegisters(),
1092                                    chunk->zone());
1093   MeetRegisterConstraints();
1094   if (!AllocationOk()) return false;
1095   ResolvePhis();
1096   BuildLiveRanges();
1097   AllocateGeneralRegisters();
1098   if (!AllocationOk()) return false;
1099   AllocateDoubleRegisters();
1100   if (!AllocationOk()) return false;
1101   PopulatePointerMaps();
1102   ConnectRanges();
1103   ResolveControlFlow();
1104   return true;
1105 }
1106 
1107 
1108 void LAllocator::MeetRegisterConstraints() {
1109   LAllocatorPhase phase("L_Register constraints", this);
1110   const ZoneList<HBasicBlock*>* blocks = graph_->blocks();
1111   for (int i = 0; i < blocks->length(); ++i) {
1112     HBasicBlock* block = blocks->at(i);
1113     MeetRegisterConstraints(block);
1114     if (!AllocationOk()) return;
1115   }
1116 }
1117 
1118 
1119 void LAllocator::ResolvePhis() {
1120   LAllocatorPhase phase("L_Resolve phis", this);
1121 
1122   // Process the blocks in reverse order.
1123   const ZoneList<HBasicBlock*>* blocks = graph_->blocks();
1124   for (int block_id = blocks->length() - 1; block_id >= 0; --block_id) {
1125     HBasicBlock* block = blocks->at(block_id);
1126     ResolvePhis(block);
1127   }
1128 }
1129 
1130 
1131 void LAllocator::ResolveControlFlow(LiveRange* range,
1132                                     HBasicBlock* block,
1133                                     HBasicBlock* pred) {
1134   LifetimePosition pred_end =
1135       LifetimePosition::FromInstructionIndex(pred->last_instruction_index());
1136   LifetimePosition cur_start =
1137       LifetimePosition::FromInstructionIndex(block->first_instruction_index());
1138   LiveRange* pred_cover = NULL;
1139   LiveRange* cur_cover = NULL;
1140   LiveRange* cur_range = range;
1141   while (cur_range != NULL && (cur_cover == NULL || pred_cover == NULL)) {
1142     if (cur_range->CanCover(cur_start)) {
1143       ASSERT(cur_cover == NULL);
1144       cur_cover = cur_range;
1145     }
1146     if (cur_range->CanCover(pred_end)) {
1147       ASSERT(pred_cover == NULL);
1148       pred_cover = cur_range;
1149     }
1150     cur_range = cur_range->next();
1151   }
1152 
1153   if (cur_cover->IsSpilled()) return;
1154   ASSERT(pred_cover != NULL && cur_cover != NULL);
1155   if (pred_cover != cur_cover) {
1156     LOperand* pred_op = pred_cover->CreateAssignedOperand(chunk()->zone());
1157     LOperand* cur_op = cur_cover->CreateAssignedOperand(chunk()->zone());
1158     if (!pred_op->Equals(cur_op)) {
1159       LGap* gap = NULL;
1160       if (block->predecessors()->length() == 1) {
1161         gap = GapAt(block->first_instruction_index());
1162       } else {
1163         ASSERT(pred->end()->SecondSuccessor() == NULL);
1164         gap = GetLastGap(pred);
1165 
1166         // We are going to insert a move before the branch instruction.
1167         // Some branch instructions (e.g. loops' back edges)
1168         // can potentially cause a GC so they have a pointer map.
1169         // By inserting a move we essentially create a copy of a
1170         // value which is invisible to PopulatePointerMaps(), because we store
1171         // it into a location different from the operand of a live range
1172         // covering a branch instruction.
1173         // Thus we need to manually record a pointer.
1174         LInstruction* branch = InstructionAt(pred->last_instruction_index());
1175         if (branch->HasPointerMap()) {
1176           if (HasTaggedValue(range->id())) {
1177             branch->pointer_map()->RecordPointer(cur_op, chunk()->zone());
1178           } else if (!cur_op->IsDoubleStackSlot() &&
1179                      !cur_op->IsDoubleRegister()) {
1180             branch->pointer_map()->RemovePointer(cur_op);
1181           }
1182         }
1183       }
1184       gap->GetOrCreateParallelMove(
1185           LGap::START, chunk()->zone())->AddMove(pred_op, cur_op,
1186                                                  chunk()->zone());
1187     }
1188   }
1189 }
1190 
1191 
1192 LParallelMove* LAllocator::GetConnectingParallelMove(LifetimePosition pos) {
1193   int index = pos.InstructionIndex();
1194   if (IsGapAt(index)) {
1195     LGap* gap = GapAt(index);
1196     return gap->GetOrCreateParallelMove(
1197         pos.IsInstructionStart() ? LGap::START : LGap::END, chunk()->zone());
1198   }
1199   int gap_pos = pos.IsInstructionStart() ? (index - 1) : (index + 1);
1200   return GapAt(gap_pos)->GetOrCreateParallelMove(
1201       (gap_pos < index) ? LGap::AFTER : LGap::BEFORE, chunk()->zone());
1202 }
1203 
1204 
1205 HBasicBlock* LAllocator::GetBlock(LifetimePosition pos) {
1206   LGap* gap = GapAt(chunk_->NearestGapPos(pos.InstructionIndex()));
1207   return gap->block();
1208 }
1209 
1210 
1211 void LAllocator::ConnectRanges() {
1212   LAllocatorPhase phase("L_Connect ranges", this);
1213   for (int i = 0; i < live_ranges()->length(); ++i) {
1214     LiveRange* first_range = live_ranges()->at(i);
1215     if (first_range == NULL || first_range->parent() != NULL) continue;
1216 
1217     LiveRange* second_range = first_range->next();
1218     while (second_range != NULL) {
1219       LifetimePosition pos = second_range->Start();
1220 
1221       if (!second_range->IsSpilled()) {
1222         // Add gap move if the two live ranges touch and there is no block
1223         // boundary.
1224         if (first_range->End().Value() == pos.Value()) {
1225           bool should_insert = true;
1226           if (IsBlockBoundary(pos)) {
1227             should_insert = CanEagerlyResolveControlFlow(GetBlock(pos));
1228           }
1229           if (should_insert) {
1230             LParallelMove* move = GetConnectingParallelMove(pos);
1231             LOperand* prev_operand = first_range->CreateAssignedOperand(
1232                 chunk()->zone());
1233             LOperand* cur_operand = second_range->CreateAssignedOperand(
1234                 chunk()->zone());
1235             move->AddMove(prev_operand, cur_operand,
1236                           chunk()->zone());
1237           }
1238         }
1239       }
1240 
1241       first_range = second_range;
1242       second_range = second_range->next();
1243     }
1244   }
1245 }
1246 
1247 
1248 bool LAllocator::CanEagerlyResolveControlFlow(HBasicBlock* block) const {
1249   if (block->predecessors()->length() != 1) return false;
1250   return block->predecessors()->first()->block_id() == block->block_id() - 1;
1251 }
1252 
1253 
1254 void LAllocator::ResolveControlFlow() {
1255   LAllocatorPhase phase("L_Resolve control flow", this);
1256   const ZoneList<HBasicBlock*>* blocks = graph_->blocks();
1257   for (int block_id = 1; block_id < blocks->length(); ++block_id) {
1258     HBasicBlock* block = blocks->at(block_id);
1259     if (CanEagerlyResolveControlFlow(block)) continue;
1260     BitVector* live = live_in_sets_[block->block_id()];
1261     BitVector::Iterator iterator(live);
1262     while (!iterator.Done()) {
1263       int operand_index = iterator.Current();
1264       for (int i = 0; i < block->predecessors()->length(); ++i) {
1265         HBasicBlock* cur = block->predecessors()->at(i);
1266         LiveRange* cur_range = LiveRangeFor(operand_index);
1267         ResolveControlFlow(cur_range, block, cur);
1268       }
1269       iterator.Advance();
1270     }
1271   }
1272 }
1273 
1274 
1275 void LAllocator::BuildLiveRanges() {
1276   LAllocatorPhase phase("L_Build live ranges", this);
1277   InitializeLivenessAnalysis();
1278   // Process the blocks in reverse order.
1279   const ZoneList<HBasicBlock*>* blocks = graph_->blocks();
1280   for (int block_id = blocks->length() - 1; block_id >= 0; --block_id) {
1281     HBasicBlock* block = blocks->at(block_id);
1282     BitVector* live = ComputeLiveOut(block);
1283     // Initially consider all live_out values live for the entire block. We
1284     // will shorten these intervals if necessary.
1285     AddInitialIntervals(block, live);
1286 
1287     // Process the instructions in reverse order, generating and killing
1288     // live values.
1289     ProcessInstructions(block, live);
1290     // All phi output operands are killed by this block.
1291     const ZoneList<HPhi*>* phis = block->phis();
1292     for (int i = 0; i < phis->length(); ++i) {
1293       // The live range interval already ends at the first instruction of the
1294       // block.
1295       HPhi* phi = phis->at(i);
1296       live->Remove(phi->id());
1297 
1298       LOperand* hint = NULL;
1299       LOperand* phi_operand = NULL;
1300       LGap* gap = GetLastGap(phi->block()->predecessors()->at(0));
1301       LParallelMove* move = gap->GetOrCreateParallelMove(LGap::START,
1302                                                          chunk()->zone());
1303       for (int j = 0; j < move->move_operands()->length(); ++j) {
1304         LOperand* to = move->move_operands()->at(j).destination();
1305         if (to->IsUnallocated() &&
1306             LUnallocated::cast(to)->virtual_register() == phi->id()) {
1307           hint = move->move_operands()->at(j).source();
1308           phi_operand = to;
1309           break;
1310         }
1311       }
1312       ASSERT(hint != NULL);
1313 
1314       LifetimePosition block_start = LifetimePosition::FromInstructionIndex(
1315               block->first_instruction_index());
1316       Define(block_start, phi_operand, hint);
1317     }
1318 
1319     // Now live is live_in for this block except not including values live
1320     // out on backward successor edges.
1321     live_in_sets_[block_id] = live;
1322 
1323     // If this block is a loop header go back and patch up the necessary
1324     // predecessor blocks.
1325     if (block->IsLoopHeader()) {
1326       // TODO(kmillikin): Need to be able to get the last block of the loop
1327       // in the loop information. Add a live range stretching from the first
1328       // loop instruction to the last for each value live on entry to the
1329       // header.
1330       HBasicBlock* back_edge = block->loop_information()->GetLastBackEdge();
1331       BitVector::Iterator iterator(live);
1332       LifetimePosition start = LifetimePosition::FromInstructionIndex(
1333           block->first_instruction_index());
1334       LifetimePosition end = LifetimePosition::FromInstructionIndex(
1335           back_edge->last_instruction_index()).NextInstruction();
1336       while (!iterator.Done()) {
1337         int operand_index = iterator.Current();
1338         LiveRange* range = LiveRangeFor(operand_index);
1339         range->EnsureInterval(start, end, zone());
1340         iterator.Advance();
1341       }
1342 
1343       for (int i = block->block_id() + 1; i <= back_edge->block_id(); ++i) {
1344         live_in_sets_[i]->Union(*live);
1345       }
1346     }
1347 
1348 #ifdef DEBUG
1349     if (block_id == 0) {
1350       BitVector::Iterator iterator(live);
1351       bool found = false;
1352       while (!iterator.Done()) {
1353         found = true;
1354         int operand_index = iterator.Current();
1355         if (chunk_->info()->IsStub()) {
1356           CodeStub::Major major_key = chunk_->info()->code_stub()->MajorKey();
1357           PrintF("Function: %s\n", CodeStub::MajorName(major_key, false));
1358         } else {
1359           ASSERT(chunk_->info()->IsOptimizing());
1360           AllowHandleDereference allow_deref;
1361           PrintF("Function: %s\n",
1362                  chunk_->info()->function()->debug_name()->ToCString().get());
1363         }
1364         PrintF("Value %d used before first definition!\n", operand_index);
1365         LiveRange* range = LiveRangeFor(operand_index);
1366         PrintF("First use is at %d\n", range->first_pos()->pos().Value());
1367         iterator.Advance();
1368       }
1369       ASSERT(!found);
1370     }
1371 #endif
1372   }
1373 
1374   for (int i = 0; i < live_ranges_.length(); ++i) {
1375     if (live_ranges_[i] != NULL) {
1376       live_ranges_[i]->kind_ = RequiredRegisterKind(live_ranges_[i]->id());
1377     }
1378   }
1379 }
1380 
1381 
1382 bool LAllocator::SafePointsAreInOrder() const {
1383   const ZoneList<LPointerMap*>* pointer_maps = chunk_->pointer_maps();
1384   int safe_point = 0;
1385   for (int i = 0; i < pointer_maps->length(); ++i) {
1386     LPointerMap* map = pointer_maps->at(i);
1387     if (safe_point > map->lithium_position()) return false;
1388     safe_point = map->lithium_position();
1389   }
1390   return true;
1391 }
1392 
1393 
1394 void LAllocator::PopulatePointerMaps() {
1395   LAllocatorPhase phase("L_Populate pointer maps", this);
1396   const ZoneList<LPointerMap*>* pointer_maps = chunk_->pointer_maps();
1397 
1398   ASSERT(SafePointsAreInOrder());
1399 
1400   // Iterate over all safe point positions and record a pointer
1401   // for all spilled live ranges at this point.
1402   int first_safe_point_index = 0;
1403   int last_range_start = 0;
1404   for (int range_idx = 0; range_idx < live_ranges()->length(); ++range_idx) {
1405     LiveRange* range = live_ranges()->at(range_idx);
1406     if (range == NULL) continue;
1407     // Iterate over the first parts of multi-part live ranges.
1408     if (range->parent() != NULL) continue;
1409     // Skip non-pointer values.
1410     if (!HasTaggedValue(range->id())) continue;
1411     // Skip empty live ranges.
1412     if (range->IsEmpty()) continue;
1413 
1414     // Find the extent of the range and its children.
1415     int start = range->Start().InstructionIndex();
1416     int end = 0;
1417     for (LiveRange* cur = range; cur != NULL; cur = cur->next()) {
1418       LifetimePosition this_end = cur->End();
1419       if (this_end.InstructionIndex() > end) end = this_end.InstructionIndex();
1420       ASSERT(cur->Start().InstructionIndex() >= start);
1421     }
1422 
1423     // Most of the ranges are in order, but not all.  Keep an eye on when
1424     // they step backwards and reset the first_safe_point_index so we don't
1425     // miss any safe points.
1426     if (start < last_range_start) {
1427       first_safe_point_index = 0;
1428     }
1429     last_range_start = start;
1430 
1431     // Step across all the safe points that are before the start of this range,
1432     // recording how far we step in order to save doing this for the next range.
1433     while (first_safe_point_index < pointer_maps->length()) {
1434       LPointerMap* map = pointer_maps->at(first_safe_point_index);
1435       int safe_point = map->lithium_position();
1436       if (safe_point >= start) break;
1437       first_safe_point_index++;
1438     }
1439 
1440     // Step through the safe points to see whether they are in the range.
1441     for (int safe_point_index = first_safe_point_index;
1442          safe_point_index < pointer_maps->length();
1443          ++safe_point_index) {
1444       LPointerMap* map = pointer_maps->at(safe_point_index);
1445       int safe_point = map->lithium_position();
1446 
1447       // The safe points are sorted so we can stop searching here.
1448       if (safe_point - 1 > end) break;
1449 
1450       // Advance to the next active range that covers the current
1451       // safe point position.
1452       LifetimePosition safe_point_pos =
1453           LifetimePosition::FromInstructionIndex(safe_point);
1454       LiveRange* cur = range;
1455       while (cur != NULL && !cur->Covers(safe_point_pos)) {
1456         cur = cur->next();
1457       }
1458       if (cur == NULL) continue;
1459 
1460       // Check if the live range is spilled and the safe point is after
1461       // the spill position.
1462       if (range->HasAllocatedSpillOperand() &&
1463           safe_point >= range->spill_start_index()) {
1464         TraceAlloc("Pointer for range %d (spilled at %d) at safe point %d\n",
1465                    range->id(), range->spill_start_index(), safe_point);
1466         map->RecordPointer(range->GetSpillOperand(), chunk()->zone());
1467       }
1468 
1469       if (!cur->IsSpilled()) {
1470         TraceAlloc("Pointer in register for range %d (start at %d) "
1471                    "at safe point %d\n",
1472                    cur->id(), cur->Start().Value(), safe_point);
1473         LOperand* operand = cur->CreateAssignedOperand(chunk()->zone());
1474         ASSERT(!operand->IsStackSlot());
1475         map->RecordPointer(operand, chunk()->zone());
1476       }
1477     }
1478   }
1479 }
1480 
1481 
1482 void LAllocator::AllocateGeneralRegisters() {
1483   LAllocatorPhase phase("L_Allocate general registers", this);
1484   num_registers_ = Register::NumAllocatableRegisters();
1485   mode_ = GENERAL_REGISTERS;
1486   AllocateRegisters();
1487 }
1488 
1489 
1490 void LAllocator::AllocateDoubleRegisters() {
1491   LAllocatorPhase phase("L_Allocate double registers", this);
1492   num_registers_ = DoubleRegister::NumAllocatableRegisters();
1493   mode_ = DOUBLE_REGISTERS;
1494   AllocateRegisters();
1495 }
1496 
1497 
1498 void LAllocator::AllocateRegisters() {
1499   ASSERT(unhandled_live_ranges_.is_empty());
1500 
1501   for (int i = 0; i < live_ranges_.length(); ++i) {
1502     if (live_ranges_[i] != NULL) {
1503       if (live_ranges_[i]->Kind() == mode_) {
1504         AddToUnhandledUnsorted(live_ranges_[i]);
1505       }
1506     }
1507   }
1508   SortUnhandled();
1509   ASSERT(UnhandledIsSorted());
1510 
1511   ASSERT(reusable_slots_.is_empty());
1512   ASSERT(active_live_ranges_.is_empty());
1513   ASSERT(inactive_live_ranges_.is_empty());
1514 
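  // Fixed live ranges model hard register constraints (values pinned to a
  // particular register). They start out inactive and become active only at
  // the positions they actually cover.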
1515   if (mode_ == DOUBLE_REGISTERS) {
1516     for (int i = 0; i < DoubleRegister::NumAllocatableRegisters(); ++i) {
1517       LiveRange* current = fixed_double_live_ranges_.at(i);
1518       if (current != NULL) {
1519         AddToInactive(current);
1520       }
1521     }
1522   } else {
1523     ASSERT(mode_ == GENERAL_REGISTERS);
1524     for (int i = 0; i < fixed_live_ranges_.length(); ++i) {
1525       LiveRange* current = fixed_live_ranges_.at(i);
1526       if (current != NULL) {
1527         AddToInactive(current);
1528       }
1529     }
1530   }
1531 
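  // Main linear-scan loop: repeatedly take the unhandled range with the
  // smallest start position, retire or deactivate ranges that no longer cover
  // that position, then try to give the range a free register, falling back
  // to evicting from a blocked one.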
1532   while (!unhandled_live_ranges_.is_empty()) {
1533     ASSERT(UnhandledIsSorted());
1534     LiveRange* current = unhandled_live_ranges_.RemoveLast();
1535     ASSERT(UnhandledIsSorted());
1536     LifetimePosition position = current->Start();
1537 #ifdef DEBUG
1538     allocation_finger_ = position;
1539 #endif
1540     TraceAlloc("Processing interval %d start=%d\n",
1541                current->id(),
1542                position.Value());
1543 
1544     if (current->HasAllocatedSpillOperand()) {
1545       TraceAlloc("Live range %d already has a spill operand\n", current->id());
1546       LifetimePosition next_pos = position;
1547       if (IsGapAt(next_pos.InstructionIndex())) {
1548         next_pos = next_pos.NextInstruction();
1549       }
1550       UsePosition* pos = current->NextUsePositionRegisterIsBeneficial(next_pos);
1551       // If the range already has a spill operand and it doesn't need a
1552       // register immediately, split it and spill the first part of the range.
1553       if (pos == NULL) {
1554         Spill(current);
1555         continue;
1556       } else if (pos->pos().Value() >
1557                  current->Start().NextInstruction().Value()) {
1558         // Do not spill the live range eagerly if the use position that can
1559         // benefit from the register is too close to the start of the live range.
1560         SpillBetween(current, current->Start(), pos->pos());
1561         if (!AllocationOk()) return;
1562         ASSERT(UnhandledIsSorted());
1563         continue;
1564       }
1565     }
1566 
1567     for (int i = 0; i < active_live_ranges_.length(); ++i) {
1568       LiveRange* cur_active = active_live_ranges_.at(i);
1569       if (cur_active->End().Value() <= position.Value()) {
1570         ActiveToHandled(cur_active);
1571         --i;  // The live range was removed from the list of active live ranges.
1572       } else if (!cur_active->Covers(position)) {
1573         ActiveToInactive(cur_active);
1574         --i;  // The live range was removed from the list of active live ranges.
1575       }
1576     }
1577 
1578     for (int i = 0; i < inactive_live_ranges_.length(); ++i) {
1579       LiveRange* cur_inactive = inactive_live_ranges_.at(i);
1580       if (cur_inactive->End().Value() <= position.Value()) {
1581         InactiveToHandled(cur_inactive);
1582         --i;  // Live range was removed from the list of inactive live ranges.
1583       } else if (cur_inactive->Covers(position)) {
1584         InactiveToActive(cur_inactive);
1585         --i;  // Live range was removed from the list of inactive live ranges.
1586       }
1587     }
1588 
1589     ASSERT(!current->HasRegisterAssigned() && !current->IsSpilled());
1590 
1591     bool result = TryAllocateFreeReg(current);
1592     if (!AllocationOk()) return;
1593 
1594     if (!result) AllocateBlockedReg(current);
1595     if (!AllocationOk()) return;
1596 
1597     if (current->HasRegisterAssigned()) {
1598       AddToActive(current);
1599     }
1600   }
1601 
1602   reusable_slots_.Rewind(0);
1603   active_live_ranges_.Rewind(0);
1604   inactive_live_ranges_.Rewind(0);
1605 }
1606 
1607 
1608 const char* LAllocator::RegisterName(int allocation_index) {
1609   if (mode_ == GENERAL_REGISTERS) {
1610     return Register::AllocationIndexToString(allocation_index);
1611   } else {
1612     return DoubleRegister::AllocationIndexToString(allocation_index);
1613   }
1614 }
1615 
1616 
1617 void LAllocator::TraceAlloc(const char* msg, ...) {
1618   if (FLAG_trace_alloc) {
1619     va_list arguments;
1620     va_start(arguments, msg);
1621     OS::VPrint(msg, arguments);
1622     va_end(arguments);
1623   }
1624 }
1625 
1626 
1627 bool LAllocator::HasTaggedValue(int virtual_register) const {
1628   HValue* value = graph_->LookupValue(virtual_register);
1629   if (value == NULL) return false;
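  // Smis are stored as immediates rather than heap pointers, so the GC does
  // not need to track them.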
1630   return value->representation().IsTagged() && !value->type().IsSmi();
1631 }
1632 
1633 
1634 RegisterKind LAllocator::RequiredRegisterKind(int virtual_register) const {
1635   if (virtual_register < first_artificial_register_) {
1636     HValue* value = graph_->LookupValue(virtual_register);
1637     if (value != NULL && value->representation().IsDouble()) {
1638       return DOUBLE_REGISTERS;
1639     }
1640   } else if (double_artificial_registers_.Contains(
1641       virtual_register - first_artificial_register_)) {
1642     return DOUBLE_REGISTERS;
1643   }
1644 
1645   return GENERAL_REGISTERS;
1646 }
1647 
1648 
1649 void LAllocator::AddToActive(LiveRange* range) {
1650   TraceAlloc("Add live range %d to active\n", range->id());
1651   active_live_ranges_.Add(range, zone());
1652 }
1653 
1654 
1655 void LAllocator::AddToInactive(LiveRange* range) {
1656   TraceAlloc("Add live range %d to inactive\n", range->id());
1657   inactive_live_ranges_.Add(range, zone());
1658 }
1659 
1660 
1661 void LAllocator::AddToUnhandledSorted(LiveRange* range) {
1662   if (range == NULL || range->IsEmpty()) return;
1663   ASSERT(!range->HasRegisterAssigned() && !range->IsSpilled());
1664   ASSERT(allocation_finger_.Value() <= range->Start().Value());
1665   for (int i = unhandled_live_ranges_.length() - 1; i >= 0; --i) {
1666     LiveRange* cur_range = unhandled_live_ranges_.at(i);
1667     if (range->ShouldBeAllocatedBefore(cur_range)) {
1668       TraceAlloc("Add live range %d to unhandled at %d\n", range->id(), i + 1);
1669       unhandled_live_ranges_.InsertAt(i + 1, range, zone());
1670       ASSERT(UnhandledIsSorted());
1671       return;
1672     }
1673   }
1674   TraceAlloc("Add live range %d to unhandled at start\n", range->id());
1675   unhandled_live_ranges_.InsertAt(0, range, zone());
1676   ASSERT(UnhandledIsSorted());
1677 }
1678 
1679 
1680 void LAllocator::AddToUnhandledUnsorted(LiveRange* range) {
1681   if (range == NULL || range->IsEmpty()) return;
1682   ASSERT(!range->HasRegisterAssigned() && !range->IsSpilled());
1683   TraceAlloc("Add live range %d to unhandled unsorted at end\n", range->id());
1684   unhandled_live_ranges_.Add(range, zone());
1685 }
1686 
1687 
1688 static int UnhandledSortHelper(LiveRange* const* a, LiveRange* const* b) {
1689   ASSERT(!(*a)->ShouldBeAllocatedBefore(*b) ||
1690          !(*b)->ShouldBeAllocatedBefore(*a));
1691   if ((*a)->ShouldBeAllocatedBefore(*b)) return 1;
1692   if ((*b)->ShouldBeAllocatedBefore(*a)) return -1;
1693   return (*a)->id() - (*b)->id();
1694 }
1695 
1696 
1697 // Sort the unhandled live ranges so that the ranges to be processed first are
1698 // at the end of the array list.  This is convenient for the register allocation
1699 // algorithm because it is efficient to remove elements from the end.
1700 void LAllocator::SortUnhandled() {
1701   TraceAlloc("Sort unhandled\n");
1702   unhandled_live_ranges_.Sort(&UnhandledSortHelper);
1703 }
1704 
1705 
1706 bool LAllocator::UnhandledIsSorted() {
1707   int len = unhandled_live_ranges_.length();
1708   for (int i = 1; i < len; i++) {
1709     LiveRange* a = unhandled_live_ranges_.at(i - 1);
1710     LiveRange* b = unhandled_live_ranges_.at(i);
1711     if (a->Start().Value() < b->Start().Value()) return false;
1712   }
1713   return true;
1714 }
1715 
1716 
1717 void LAllocator::FreeSpillSlot(LiveRange* range) {
1718   // Check that we are the last range.
1719   if (range->next() != NULL) return;
1720 
1721   if (!range->TopLevel()->HasAllocatedSpillOperand()) return;
1722 
1723   int index = range->TopLevel()->GetSpillOperand()->index();
1724   if (index >= 0) {
1725     reusable_slots_.Add(range, zone());
1726   }
1727 }
1728 
1729 
1730 LOperand* LAllocator::TryReuseSpillSlot(LiveRange* range) {
1731   if (reusable_slots_.is_empty()) return NULL;
1732   if (reusable_slots_.first()->End().Value() >
1733       range->TopLevel()->Start().Value()) {
1734     return NULL;
1735   }
1736   LOperand* result = reusable_slots_.first()->TopLevel()->GetSpillOperand();
1737   reusable_slots_.Remove(0);
1738   return result;
1739 }
1740 
1741 
1742 void LAllocator::ActiveToHandled(LiveRange* range) {
1743   ASSERT(active_live_ranges_.Contains(range));
1744   active_live_ranges_.RemoveElement(range);
1745   TraceAlloc("Moving live range %d from active to handled\n", range->id());
1746   FreeSpillSlot(range);
1747 }
1748 
1749 
1750 void LAllocator::ActiveToInactive(LiveRange* range) {
1751   ASSERT(active_live_ranges_.Contains(range));
1752   active_live_ranges_.RemoveElement(range);
1753   inactive_live_ranges_.Add(range, zone());
1754   TraceAlloc("Moving live range %d from active to inactive\n", range->id());
1755 }
1756 
1757 
1758 void LAllocator::InactiveToHandled(LiveRange* range) {
1759   ASSERT(inactive_live_ranges_.Contains(range));
1760   inactive_live_ranges_.RemoveElement(range);
1761   TraceAlloc("Moving live range %d from inactive to handled\n", range->id());
1762   FreeSpillSlot(range);
1763 }
1764 
1765 
1766 void LAllocator::InactiveToActive(LiveRange* range) {
1767   ASSERT(inactive_live_ranges_.Contains(range));
1768   inactive_live_ranges_.RemoveElement(range);
1769   active_live_ranges_.Add(range, zone());
1770   TraceAlloc("Moving live range %d from inactive to active\n", range->id());
1771 }
1772 
1773 
1774 // TryAllocateFreeReg and AllocateBlockedReg assume this
1775 // when allocating local arrays.
1776 STATIC_ASSERT(DoubleRegister::kMaxNumAllocatableRegisters >=
1777               Register::kMaxNumAllocatableRegisters);
1778 
1779 
1780 bool LAllocator::TryAllocateFreeReg(LiveRange* current) {
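  // free_until_pos[r] is the first position at which register r stops being
  // usable for the current range; start optimistic and tighten it below using
  // the active and inactive ranges.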
1781   LifetimePosition free_until_pos[DoubleRegister::kMaxNumAllocatableRegisters];
1782 
1783   for (int i = 0; i < num_registers_; i++) {
1784     free_until_pos[i] = LifetimePosition::MaxPosition();
1785   }
1786 
1787   for (int i = 0; i < active_live_ranges_.length(); ++i) {
1788     LiveRange* cur_active = active_live_ranges_.at(i);
1789     free_until_pos[cur_active->assigned_register()] =
1790         LifetimePosition::FromInstructionIndex(0);
1791   }
1792 
1793   for (int i = 0; i < inactive_live_ranges_.length(); ++i) {
1794     LiveRange* cur_inactive = inactive_live_ranges_.at(i);
1795     ASSERT(cur_inactive->End().Value() > current->Start().Value());
1796     LifetimePosition next_intersection =
1797         cur_inactive->FirstIntersection(current);
1798     if (!next_intersection.IsValid()) continue;
1799     int cur_reg = cur_inactive->assigned_register();
1800     free_until_pos[cur_reg] = Min(free_until_pos[cur_reg], next_intersection);
1801   }
1802 
1803   LOperand* hint = current->FirstHint();
1804   if (hint != NULL && (hint->IsRegister() || hint->IsDoubleRegister())) {
1805     int register_index = hint->index();
1806     TraceAlloc(
1807         "Found reg hint %s (free until [%d) for live range %d (end %d[).\n",
1808         RegisterName(register_index),
1809         free_until_pos[register_index].Value(),
1810         current->id(),
1811         current->End().Value());
1812 
1813     // The desired register is free until the end of the current live range.
1814     if (free_until_pos[register_index].Value() >= current->End().Value()) {
1815       TraceAlloc("Assigning preferred reg %s to live range %d\n",
1816                  RegisterName(register_index),
1817                  current->id());
1818       SetLiveRangeAssignedRegister(current, register_index);
1819       return true;
1820     }
1821   }
1822 
1823   // Find the register which stays free for the longest time.
1824   int reg = 0;
1825   for (int i = 1; i < RegisterCount(); ++i) {
1826     if (free_until_pos[i].Value() > free_until_pos[reg].Value()) {
1827       reg = i;
1828     }
1829   }
1830 
1831   LifetimePosition pos = free_until_pos[reg];
1832 
1833   if (pos.Value() <= current->Start().Value()) {
1834     // All registers are blocked.
1835     return false;
1836   }
1837 
1838   if (pos.Value() < current->End().Value()) {
1839     // Register reg is available at the range start but becomes blocked before
1840     // the range end. Split current at the position where it becomes blocked.
1841     LiveRange* tail = SplitRangeAt(current, pos);
1842     if (!AllocationOk()) return false;
1843     AddToUnhandledSorted(tail);
1844   }
1845 
1846 
1847   // Register reg is available at the range start and is free until
1848   // the range end.
1849   ASSERT(pos.Value() >= current->End().Value());
1850   TraceAlloc("Assigning free reg %s to live range %d\n",
1851              RegisterName(reg),
1852              current->id());
1853   SetLiveRangeAssignedRegister(current, reg);
1854 
1855   return true;
1856 }
1857 
1858 
1859 void LAllocator::AllocateBlockedReg(LiveRange* current) {
1860   UsePosition* register_use = current->NextRegisterPosition(current->Start());
1861   if (register_use == NULL) {
1862     // There is no use in the current live range that requires a register.
1863     // We can just spill it.
1864     Spill(current);
1865     return;
1866   }
1867 
1868 
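  // use_pos[r] is the position up to which register r could be taken over from
  // the ranges currently using it (how cheap eviction is); block_pos[r] is
  // where r becomes unconditionally blocked by a fixed or unspillable range.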
1869   LifetimePosition use_pos[DoubleRegister::kMaxNumAllocatableRegisters];
1870   LifetimePosition block_pos[DoubleRegister::kMaxNumAllocatableRegisters];
1871 
1872   for (int i = 0; i < num_registers_; i++) {
1873     use_pos[i] = block_pos[i] = LifetimePosition::MaxPosition();
1874   }
1875 
1876   for (int i = 0; i < active_live_ranges_.length(); ++i) {
1877     LiveRange* range = active_live_ranges_[i];
1878     int cur_reg = range->assigned_register();
1879     if (range->IsFixed() || !range->CanBeSpilled(current->Start())) {
1880       block_pos[cur_reg] = use_pos[cur_reg] =
1881           LifetimePosition::FromInstructionIndex(0);
1882     } else {
1883       UsePosition* next_use = range->NextUsePositionRegisterIsBeneficial(
1884           current->Start());
1885       if (next_use == NULL) {
1886         use_pos[cur_reg] = range->End();
1887       } else {
1888         use_pos[cur_reg] = next_use->pos();
1889       }
1890     }
1891   }
1892 
1893   for (int i = 0; i < inactive_live_ranges_.length(); ++i) {
1894     LiveRange* range = inactive_live_ranges_.at(i);
1895     ASSERT(range->End().Value() > current->Start().Value());
1896     LifetimePosition next_intersection = range->FirstIntersection(current);
1897     if (!next_intersection.IsValid()) continue;
1898     int cur_reg = range->assigned_register();
1899     if (range->IsFixed()) {
1900       block_pos[cur_reg] = Min(block_pos[cur_reg], next_intersection);
1901       use_pos[cur_reg] = Min(block_pos[cur_reg], use_pos[cur_reg]);
1902     } else {
1903       use_pos[cur_reg] = Min(use_pos[cur_reg], next_intersection);
1904     }
1905   }
1906 
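  // Pick the register whose current occupant has the most distant next use;
  // it is the cheapest one to evict.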
1907   int reg = 0;
1908   for (int i = 1; i < RegisterCount(); ++i) {
1909     if (use_pos[i].Value() > use_pos[reg].Value()) {
1910       reg = i;
1911     }
1912   }
1913 
1914   LifetimePosition pos = use_pos[reg];
1915 
1916   if (pos.Value() < register_use->pos().Value()) {
1917     // All registers are blocked before the first use that requires a register.
1918     // Spill the starting part of the live range up to that use.
1919     SpillBetween(current, current->Start(), register_use->pos());
1920     return;
1921   }
1922 
1923   if (block_pos[reg].Value() < current->End().Value()) {
1924     // Register becomes blocked before the current range end. Split before that
1925     // position.
1926     LiveRange* tail = SplitBetween(current,
1927                                    current->Start(),
1928                                    block_pos[reg].InstructionStart());
1929     if (!AllocationOk()) return;
1930     AddToUnhandledSorted(tail);
1931   }
1932 
1933   // Register reg is not blocked for the whole range.
1934   ASSERT(block_pos[reg].Value() >= current->End().Value());
1935   TraceAlloc("Assigning blocked reg %s to live range %d\n",
1936              RegisterName(reg),
1937              current->id());
1938   SetLiveRangeAssignedRegister(current, reg);
1939 
1940   // This register was not free. Thus we need to find and spill
1941   // parts of the active and inactive live ranges that use the same register
1942   // at the same lifetime positions as current.
1943   SplitAndSpillIntersecting(current);
1944 }
1945 
1946 
1947 LifetimePosition LAllocator::FindOptimalSpillingPos(LiveRange* range,
1948                                                     LifetimePosition pos) {
1949   HBasicBlock* block = GetBlock(pos.InstructionStart());
1950   HBasicBlock* loop_header =
1951       block->IsLoopHeader() ? block : block->parent_loop_header();
1952 
1953   if (loop_header == NULL) return pos;
1954 
1955   UsePosition* prev_use =
1956     range->PreviousUsePositionRegisterIsBeneficial(pos);
1957 
1958   while (loop_header != NULL) {
1959     // We are going to spill the live range inside the loop.
1960     // If possible, try to move the spilling position backwards to the loop
1961     // header. This will reduce the number of memory moves on the back edge.
1962     LifetimePosition loop_start = LifetimePosition::FromInstructionIndex(
1963         loop_header->first_instruction_index());
1964 
1965     if (range->Covers(loop_start)) {
1966       if (prev_use == NULL || prev_use->pos().Value() < loop_start.Value()) {
1967         // No register-beneficial use inside the loop before this position.
1968         pos = loop_start;
1969       }
1970     }
1971 
1972     // Try hoisting out to an outer loop.
1973     loop_header = loop_header->parent_loop_header();
1974   }
1975 
1976   return pos;
1977 }
1978 
1979 
1980 void LAllocator::SplitAndSpillIntersecting(LiveRange* current) {
1981   ASSERT(current->HasRegisterAssigned());
1982   int reg = current->assigned_register();
1983   LifetimePosition split_pos = current->Start();
1984   for (int i = 0; i < active_live_ranges_.length(); ++i) {
1985     LiveRange* range = active_live_ranges_[i];
1986     if (range->assigned_register() == reg) {
1987       UsePosition* next_pos = range->NextRegisterPosition(current->Start());
1988       LifetimePosition spill_pos = FindOptimalSpillingPos(range, split_pos);
1989       if (next_pos == NULL) {
1990         SpillAfter(range, spill_pos);
1991       } else {
1992         // When spilling between spill_pos and next_pos ensure that the range
1993         // remains spilled at least until the start of the current live range.
1994         // This guarantees that we will not introduce new unhandled ranges that
1995         // start before the current range, as this would violate the allocation
1996         // invariant and lead to an inconsistent state of the active and inactive
1997         // live ranges: ranges are allocated in order of their start positions, and
1998         // ranges are retired from active/inactive once the start of the current
1999         // live range is larger than their end.
2000         SpillBetweenUntil(range, spill_pos, current->Start(), next_pos->pos());
2001       }
2002       if (!AllocationOk()) return;
2003       ActiveToHandled(range);
2004       --i;
2005     }
2006   }
2007 
2008   for (int i = 0; i < inactive_live_ranges_.length(); ++i) {
2009     LiveRange* range = inactive_live_ranges_[i];
2010     ASSERT(range->End().Value() > current->Start().Value());
2011     if (range->assigned_register() == reg && !range->IsFixed()) {
2012       LifetimePosition next_intersection = range->FirstIntersection(current);
2013       if (next_intersection.IsValid()) {
2014         UsePosition* next_pos = range->NextRegisterPosition(current->Start());
2015         if (next_pos == NULL) {
2016           SpillAfter(range, split_pos);
2017         } else {
2018           next_intersection = Min(next_intersection, next_pos->pos());
2019           SpillBetween(range, split_pos, next_intersection);
2020         }
2021         if (!AllocationOk()) return;
2022         InactiveToHandled(range);
2023         --i;
2024       }
2025     }
2026   }
2027 }
2028 
2029 
2030 bool LAllocator::IsBlockBoundary(LifetimePosition pos) {
2031   return pos.IsInstructionStart() &&
2032       InstructionAt(pos.InstructionIndex())->IsLabel();
2033 }
2034 
2035 
2036 LiveRange* LAllocator::SplitRangeAt(LiveRange* range, LifetimePosition pos) {
2037   ASSERT(!range->IsFixed());
2038   TraceAlloc("Splitting live range %d at %d\n", range->id(), pos.Value());
2039 
2040   if (pos.Value() <= range->Start().Value()) return range;
2041 
2042   // We can't properly connect live ranges if the split occurred at the end
2043   // of a control instruction.
2044   ASSERT(pos.IsInstructionStart() ||
2045          !chunk_->instructions()->at(pos.InstructionIndex())->IsControl());
2046 
2047   int vreg = GetVirtualRegister();
2048   if (!AllocationOk()) return NULL;
2049   LiveRange* result = LiveRangeFor(vreg);
2050   range->SplitAt(pos, result, zone());
2051   return result;
2052 }
2053 
2054 
2055 LiveRange* LAllocator::SplitBetween(LiveRange* range,
2056                                     LifetimePosition start,
2057                                     LifetimePosition end) {
2058   ASSERT(!range->IsFixed());
2059   TraceAlloc("Splitting live range %d in position between [%d, %d]\n",
2060              range->id(),
2061              start.Value(),
2062              end.Value());
2063 
2064   LifetimePosition split_pos = FindOptimalSplitPos(start, end);
2065   ASSERT(split_pos.Value() >= start.Value());
2066   return SplitRangeAt(range, split_pos);
2067 }
2068 
2069 
2070 LifetimePosition LAllocator::FindOptimalSplitPos(LifetimePosition start,
2071                                                  LifetimePosition end) {
2072   int start_instr = start.InstructionIndex();
2073   int end_instr = end.InstructionIndex();
2074   ASSERT(start_instr <= end_instr);
2075 
2076   // We have no choice.
2077   if (start_instr == end_instr) return end;
2078 
2079   HBasicBlock* start_block = GetBlock(start);
2080   HBasicBlock* end_block = GetBlock(end);
2081 
2082   if (end_block == start_block) {
2083     // The interval is split in the same basic block. Split at the latest
2084     // possible position.
2085     return end;
2086   }
2087 
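  // Otherwise prefer to split at a loop boundary: hoisting the split point out
  // of loops keeps the resulting spill and reload code off the hot path.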
2088   HBasicBlock* block = end_block;
2089   // Find the header of the outermost loop.
2090   while (block->parent_loop_header() != NULL &&
2091       block->parent_loop_header()->block_id() > start_block->block_id()) {
2092     block = block->parent_loop_header();
2093   }
2094 
2095   // We did not find any suitable outer loop. Split at the latest possible
2096   // position unless end_block is a loop header itself.
2097   if (block == end_block && !end_block->IsLoopHeader()) return end;
2098 
2099   return LifetimePosition::FromInstructionIndex(
2100       block->first_instruction_index());
2101 }
2102 
2103 
2104 void LAllocator::SpillAfter(LiveRange* range, LifetimePosition pos) {
2105   LiveRange* second_part = SplitRangeAt(range, pos);
2106   if (!AllocationOk()) return;
2107   Spill(second_part);
2108 }
2109 
2110 
2111 void LAllocator::SpillBetween(LiveRange* range,
2112                               LifetimePosition start,
2113                               LifetimePosition end) {
2114   SpillBetweenUntil(range, start, start, end);
2115 }
2116 
2117 
2118 void LAllocator::SpillBetweenUntil(LiveRange* range,
2119                                    LifetimePosition start,
2120                                    LifetimePosition until,
2121                                    LifetimePosition end) {
2122   CHECK(start.Value() < end.Value());
2123   LiveRange* second_part = SplitRangeAt(range, start);
2124   if (!AllocationOk()) return;
2125 
2126   if (second_part->Start().Value() < end.Value()) {
2127     // The split result intersects with [start, end[.
2128     // Split it at a position between ]start+1, end[, spill the middle part,
2129     // and add the rest to the unhandled list.
2130     LiveRange* third_part = SplitBetween(
2131         second_part,
2132         Max(second_part->Start().InstructionEnd(), until),
2133         end.PrevInstruction().InstructionEnd());
2134     if (!AllocationOk()) return;
2135 
2136     ASSERT(third_part != second_part);
2137 
2138     Spill(second_part);
2139     AddToUnhandledSorted(third_part);
2140   } else {
2141     // The split result does not intersect with [start, end[.
2142     // Nothing to spill. Just add it to the unhandled list as a whole.
2143     AddToUnhandledSorted(second_part);
2144   }
2145 }
2146 
2147 
2148 void LAllocator::Spill(LiveRange* range) {
2149   ASSERT(!range->IsSpilled());
2150   TraceAlloc("Spilling live range %d\n", range->id());
2151   LiveRange* first = range->TopLevel();
2152 
2153   if (!first->HasAllocatedSpillOperand()) {
2154     LOperand* op = TryReuseSpillSlot(range);
2155     if (op == NULL) op = chunk_->GetNextSpillSlot(range->Kind());
2156     first->SetSpillOperand(op);
2157   }
2158   range->MakeSpilled(chunk()->zone());
2159 }
2160 
2161 
2162 int LAllocator::RegisterCount() const {
2163   return num_registers_;
2164 }
2165 
2166 
2167 #ifdef DEBUG
2168 
2169 
2170 void LAllocator::Verify() const {
2171   for (int i = 0; i < live_ranges()->length(); ++i) {
2172     LiveRange* current = live_ranges()->at(i);
2173     if (current != NULL) current->Verify();
2174   }
2175 }
2176 
2177 
2178 #endif
2179 
2180 
2181 LAllocatorPhase::LAllocatorPhase(const char* name, LAllocator* allocator)
2182     : CompilationPhase(name, allocator->graph()->info()),
2183       allocator_(allocator) {
2184   if (FLAG_hydrogen_stats) {
2185     allocator_zone_start_allocation_size_ =
2186         allocator->zone()->allocation_size();
2187   }
2188 }
2189 
2190 
2191 LAllocatorPhase::~LAllocatorPhase() {
2192   if (FLAG_hydrogen_stats) {
2193     unsigned size = allocator_->zone()->allocation_size() -
2194                     allocator_zone_start_allocation_size_;
2195     isolate()->GetHStatistics()->SaveTiming(name(), TimeDelta(), size);
2196   }
2197 
2198   if (ShouldProduceTraceOutput()) {
2199     isolate()->GetHTracer()->TraceLithium(name(), allocator_->chunk());
2200     isolate()->GetHTracer()->TraceLiveRanges(name(), allocator_);
2201   }
2202 
2203 #ifdef DEBUG
2204   if (allocator_ != NULL) allocator_->Verify();
2205 #endif
2206 }
2207 
2208 
2209 } }  // namespace v8::internal
2210