1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are
4 // met:
5 //
6 //     * Redistributions of source code must retain the above copyright
7 //       notice, this list of conditions and the following disclaimer.
8 //     * Redistributions in binary form must reproduce the above
9 //       copyright notice, this list of conditions and the following
10 //       disclaimer in the documentation and/or other materials provided
11 //       with the distribution.
12 //     * Neither the name of Google Inc. nor the names of its
13 //       contributors may be used to endorse or promote products derived
14 //       from this software without specific prior written permission.
15 //
16 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27 
28 #include "v8.h"
29 #include "lithium-allocator-inl.h"
30 
31 #include "hydrogen.h"
32 #include "string-stream.h"
33 
34 #if V8_TARGET_ARCH_IA32
35 #include "ia32/lithium-ia32.h"
36 #elif V8_TARGET_ARCH_X64
37 #include "x64/lithium-x64.h"
38 #elif V8_TARGET_ARCH_ARM
39 #include "arm/lithium-arm.h"
40 #elif V8_TARGET_ARCH_MIPS
41 #include "mips/lithium-mips.h"
42 #else
43 #error "Unknown architecture."
44 #endif
45 
46 namespace v8 {
47 namespace internal {
48 
49 static inline LifetimePosition Min(LifetimePosition a, LifetimePosition b) {
50   return a.Value() < b.Value() ? a : b;
51 }
52 
53 
54 static inline LifetimePosition Max(LifetimePosition a, LifetimePosition b) {
55   return a.Value() > b.Value() ? a : b;
56 }
57 
58 
59 UsePosition::UsePosition(LifetimePosition pos, LOperand* operand)
60     : operand_(operand),
61       hint_(NULL),
62       pos_(pos),
63       next_(NULL),
64       requires_reg_(false),
65       register_beneficial_(true) {
66   if (operand_ != NULL && operand_->IsUnallocated()) {
67     LUnallocated* unalloc = LUnallocated::cast(operand_);
68     requires_reg_ = unalloc->HasRegisterPolicy();
69     register_beneficial_ = !unalloc->HasAnyPolicy();
70   }
71   ASSERT(pos_.IsValid());
72 }
73 
74 
75 bool UsePosition::HasHint() const {
76   return hint_ != NULL && !hint_->IsUnallocated();
77 }
78 
79 
80 bool UsePosition::RequiresRegister() const {
81   return requires_reg_;
82 }
83 
84 
85 bool UsePosition::RegisterIsBeneficial() const {
86   return register_beneficial_;
87 }
88 
89 
90 void UseInterval::SplitAt(LifetimePosition pos, Zone* zone) {
91   ASSERT(Contains(pos) && pos.Value() != start().Value());
92   UseInterval* after = new(zone) UseInterval(pos, end_);
93   after->next_ = next_;
94   next_ = after;
95   end_ = pos;
96 }
97 
98 
99 #ifdef DEBUG
100 
101 
102 void LiveRange::Verify() const {
103   UsePosition* cur = first_pos_;
104   while (cur != NULL) {
105     ASSERT(Start().Value() <= cur->pos().Value() &&
106            cur->pos().Value() <= End().Value());
107     cur = cur->next();
108   }
109 }
110 
111 
112 bool LiveRange::HasOverlap(UseInterval* target) const {
113   UseInterval* current_interval = first_interval_;
114   while (current_interval != NULL) {
115     // Intervals overlap if the start of one is contained in the other.
116     if (current_interval->Contains(target->start()) ||
117         target->Contains(current_interval->start())) {
118       return true;
119     }
120     current_interval = current_interval->next();
121   }
122   return false;
123 }
124 
125 
126 #endif
127 
128 
129 LiveRange::LiveRange(int id, Zone* zone)
130     : id_(id),
131       spilled_(false),
132       is_double_(false),
133       assigned_register_(kInvalidAssignment),
134       last_interval_(NULL),
135       first_interval_(NULL),
136       first_pos_(NULL),
137       parent_(NULL),
138       next_(NULL),
139       current_interval_(NULL),
140       last_processed_use_(NULL),
141       spill_operand_(new(zone) LOperand()),
142       spill_start_index_(kMaxInt) { }
143 
144 
145 void LiveRange::set_assigned_register(int reg,
146                                       RegisterKind register_kind,
147                                       Zone* zone) {
148   ASSERT(!HasRegisterAssigned() && !IsSpilled());
149   assigned_register_ = reg;
150   is_double_ = (register_kind == DOUBLE_REGISTERS);
151   ConvertOperands(zone);
152 }
153 
154 
155 void LiveRange::MakeSpilled(Zone* zone) {
156   ASSERT(!IsSpilled());
157   ASSERT(TopLevel()->HasAllocatedSpillOperand());
158   spilled_ = true;
159   assigned_register_ = kInvalidAssignment;
160   ConvertOperands(zone);
161 }
162 
163 
164 bool LiveRange::HasAllocatedSpillOperand() const {
165   ASSERT(spill_operand_ != NULL);
166   return !spill_operand_->IsIgnored();
167 }
168 
169 
170 void LiveRange::SetSpillOperand(LOperand* operand) {
171   ASSERT(!operand->IsUnallocated());
172   ASSERT(spill_operand_ != NULL);
173   ASSERT(spill_operand_->IsIgnored());
174   spill_operand_->ConvertTo(operand->kind(), operand->index());
175 }
176 
177 
178 UsePosition* LiveRange::NextUsePosition(LifetimePosition start) {
179   UsePosition* use_pos = last_processed_use_;
180   if (use_pos == NULL) use_pos = first_pos();
181   while (use_pos != NULL && use_pos->pos().Value() < start.Value()) {
182     use_pos = use_pos->next();
183   }
184   last_processed_use_ = use_pos;
185   return use_pos;
186 }
187 
188 
189 UsePosition* LiveRange::NextUsePositionRegisterIsBeneficial(
190     LifetimePosition start) {
191   UsePosition* pos = NextUsePosition(start);
192   while (pos != NULL && !pos->RegisterIsBeneficial()) {
193     pos = pos->next();
194   }
195   return pos;
196 }
197 
198 
199 UsePosition* LiveRange::NextRegisterPosition(LifetimePosition start) {
200   UsePosition* pos = NextUsePosition(start);
201   while (pos != NULL && !pos->RequiresRegister()) {
202     pos = pos->next();
203   }
204   return pos;
205 }
206 
207 
208 bool LiveRange::CanBeSpilled(LifetimePosition pos) {
209   // TODO(kmillikin): Comment. Now.
210   if (pos.Value() <= Start().Value() && HasRegisterAssigned()) return false;
211 
212   // We cannot spill a live range that has a use requiring a register
213   // at the current or the immediate next position.
214   UsePosition* use_pos = NextRegisterPosition(pos);
215   if (use_pos == NULL) return true;
216   return
217       use_pos->pos().Value() > pos.NextInstruction().InstructionEnd().Value();
218 }
219 
220 
221 UsePosition* LiveRange::FirstPosWithHint() const {
222   UsePosition* pos = first_pos_;
223   while (pos != NULL && !pos->HasHint()) pos = pos->next();
224   return pos;
225 }
226 
227 
228 LOperand* LiveRange::CreateAssignedOperand(Zone* zone) {
229   LOperand* op = NULL;
230   if (HasRegisterAssigned()) {
231     ASSERT(!IsSpilled());
232     if (IsDouble()) {
233       op = LDoubleRegister::Create(assigned_register());
234     } else {
235       op = LRegister::Create(assigned_register());
236     }
237   } else if (IsSpilled()) {
238     ASSERT(!HasRegisterAssigned());
239     op = TopLevel()->GetSpillOperand();
240     ASSERT(!op->IsUnallocated());
241   } else {
242     LUnallocated* unalloc = new(zone) LUnallocated(LUnallocated::NONE);
243     unalloc->set_virtual_register(id_);
244     op = unalloc;
245   }
246   return op;
247 }
248 
249 
250 UseInterval* LiveRange::FirstSearchIntervalForPosition(
251     LifetimePosition position) const {
252   if (current_interval_ == NULL) return first_interval_;
253   if (current_interval_->start().Value() > position.Value()) {
254     current_interval_ = NULL;
255     return first_interval_;
256   }
257   return current_interval_;
258 }
259 
260 
261 void LiveRange::AdvanceLastProcessedMarker(
262     UseInterval* to_start_of, LifetimePosition but_not_past) const {
263   if (to_start_of == NULL) return;
264   if (to_start_of->start().Value() > but_not_past.Value()) return;
265   LifetimePosition start =
266       current_interval_ == NULL ? LifetimePosition::Invalid()
267                                 : current_interval_->start();
268   if (to_start_of->start().Value() > start.Value()) {
269     current_interval_ = to_start_of;
270   }
271 }
272 
273 
274 void LiveRange::SplitAt(LifetimePosition position,
275                         LiveRange* result,
276                         Zone* zone) {
277   ASSERT(Start().Value() < position.Value());
278   ASSERT(result->IsEmpty());
279   // Find the last interval that ends before the position. If the
280   // position is contained in one of the intervals in the chain, we
281   // split that interval and use the first part.
282   UseInterval* current = FirstSearchIntervalForPosition(position);
283 
284   // If the split position coincides with the beginning of a use interval
285   // we need to split use positions in a special way.
286   bool split_at_start = false;
287 
288   if (current->start().Value() == position.Value()) {
289     // When splitting at start we need to locate the previous use interval.
290     current = first_interval_;
291   }
292 
293   while (current != NULL) {
294     if (current->Contains(position)) {
295       current->SplitAt(position, zone);
296       break;
297     }
298     UseInterval* next = current->next();
299     if (next->start().Value() >= position.Value()) {
300       split_at_start = (next->start().Value() == position.Value());
301       break;
302     }
303     current = next;
304   }
305 
306   // Partition original use intervals to the two live ranges.
307   UseInterval* before = current;
308   UseInterval* after = before->next();
309   result->last_interval_ = (last_interval_ == before)
310       ? after            // Only interval in the range after split.
311       : last_interval_;  // Last interval of the original range.
312   result->first_interval_ = after;
313   last_interval_ = before;
314 
315   // Find the last use position before the split and the first use
316   // position after it.
317   UsePosition* use_after = first_pos_;
318   UsePosition* use_before = NULL;
319   if (split_at_start) {
320     // The split position coincides with the beginning of a use interval (the
321     // end of a lifetime hole). Use at this position should be attributed to
322     // the split child because it owns the use interval covering it.
323     while (use_after != NULL && use_after->pos().Value() < position.Value()) {
324       use_before = use_after;
325       use_after = use_after->next();
326     }
327   } else {
328     while (use_after != NULL && use_after->pos().Value() <= position.Value()) {
329       use_before = use_after;
330       use_after = use_after->next();
331     }
332   }
333 
334   // Partition original use positions to the two live ranges.
335   if (use_before != NULL) {
336     use_before->next_ = NULL;
337   } else {
338     first_pos_ = NULL;
339   }
340   result->first_pos_ = use_after;
341 
342   // Discard cached iteration state. It might be pointing
343   // to the use that no longer belongs to this live range.
344   last_processed_use_ = NULL;
345   current_interval_ = NULL;
346 
347   // Link the new live range in the chain before any of the other
348   // ranges linked from the range before the split.
349   result->parent_ = (parent_ == NULL) ? this : parent_;
350   result->next_ = next_;
351   next_ = result;
352 
353 #ifdef DEBUG
354   Verify();
355   result->Verify();
356 #endif
357 }
358 
359 
360 // This implements an ordering on live ranges so that they are ordered by their
361 // start positions.  This is needed for the correctness of the register
362 // allocation algorithm.  If two live ranges start at the same offset then there
363 // is a tie breaker based on where the value is first used.  This part of the
364 // ordering is merely a heuristic.
365 bool LiveRange::ShouldBeAllocatedBefore(const LiveRange* other) const {
366   LifetimePosition start = Start();
367   LifetimePosition other_start = other->Start();
368   if (start.Value() == other_start.Value()) {
369     UsePosition* pos = FirstPosWithHint();
370     if (pos == NULL) return false;
371     UsePosition* other_pos = other->first_pos();
372     if (other_pos == NULL) return true;
373     return pos->pos().Value() < other_pos->pos().Value();
374   }
375   return start.Value() < other_start.Value();
376 }
377 
378 
379 void LiveRange::ShortenTo(LifetimePosition start) {
380   LAllocator::TraceAlloc("Shorten live range %d to [%d\n", id_, start.Value());
381   ASSERT(first_interval_ != NULL);
382   ASSERT(first_interval_->start().Value() <= start.Value());
383   ASSERT(start.Value() < first_interval_->end().Value());
384   first_interval_->set_start(start);
385 }
386 
387 
388 void LiveRange::EnsureInterval(LifetimePosition start,
389                                LifetimePosition end,
390                                Zone* zone) {
391   LAllocator::TraceAlloc("Ensure live range %d in interval [%d %d[\n",
392                          id_,
393                          start.Value(),
394                          end.Value());
395   LifetimePosition new_end = end;
396   while (first_interval_ != NULL &&
397          first_interval_->start().Value() <= end.Value()) {
398     if (first_interval_->end().Value() > end.Value()) {
399       new_end = first_interval_->end();
400     }
401     first_interval_ = first_interval_->next();
402   }
403 
404   UseInterval* new_interval = new(zone) UseInterval(start, new_end);
405   new_interval->next_ = first_interval_;
406   first_interval_ = new_interval;
407   if (new_interval->next() == NULL) {
408     last_interval_ = new_interval;
409   }
410 }
411 
412 
413 void LiveRange::AddUseInterval(LifetimePosition start,
414                                LifetimePosition end,
415                                Zone* zone) {
416   LAllocator::TraceAlloc("Add to live range %d interval [%d %d[\n",
417                          id_,
418                          start.Value(),
419                          end.Value());
420   if (first_interval_ == NULL) {
421     UseInterval* interval = new(zone) UseInterval(start, end);
422     first_interval_ = interval;
423     last_interval_ = interval;
424   } else {
425     if (end.Value() == first_interval_->start().Value()) {
426       first_interval_->set_start(start);
427     } else if (end.Value() < first_interval_->start().Value()) {
428       UseInterval* interval = new(zone) UseInterval(start, end);
429       interval->set_next(first_interval_);
430       first_interval_ = interval;
431     } else {
432       // The order of instruction processing (see ProcessInstructions)
433       // guarantees that each new use interval either precedes or intersects
434       // with the last added interval.
435       ASSERT(start.Value() < first_interval_->end().Value());
436       first_interval_->start_ = Min(start, first_interval_->start_);
437       first_interval_->end_ = Max(end, first_interval_->end_);
438     }
439   }
440 }
441 
442 
443 UsePosition* LiveRange::AddUsePosition(LifetimePosition pos,
444                                        LOperand* operand,
445                                        Zone* zone) {
446   LAllocator::TraceAlloc("Add to live range %d use position %d\n",
447                          id_,
448                          pos.Value());
449   UsePosition* use_pos = new(zone) UsePosition(pos, operand);
450   UsePosition* prev = NULL;
451   UsePosition* current = first_pos_;
452   while (current != NULL && current->pos().Value() < pos.Value()) {
453     prev = current;
454     current = current->next();
455   }
456 
457   if (prev == NULL) {
458     use_pos->set_next(first_pos_);
459     first_pos_ = use_pos;
460   } else {
461     use_pos->next_ = prev->next_;
462     prev->next_ = use_pos;
463   }
464 
465   return use_pos;
466 }
467 
468 
469 void LiveRange::ConvertOperands(Zone* zone) {
470   LOperand* op = CreateAssignedOperand(zone);
471   UsePosition* use_pos = first_pos();
472   while (use_pos != NULL) {
473     ASSERT(Start().Value() <= use_pos->pos().Value() &&
474            use_pos->pos().Value() <= End().Value());
475 
476     if (use_pos->HasOperand()) {
477       ASSERT(op->IsRegister() || op->IsDoubleRegister() ||
478              !use_pos->RequiresRegister());
479       use_pos->operand()->ConvertTo(op->kind(), op->index());
480     }
481     use_pos = use_pos->next();
482   }
483 }
484 
485 
486 bool LiveRange::CanCover(LifetimePosition position) const {
487   if (IsEmpty()) return false;
488   return Start().Value() <= position.Value() &&
489          position.Value() < End().Value();
490 }
491 
492 
493 bool LiveRange::Covers(LifetimePosition position) {
494   if (!CanCover(position)) return false;
495   UseInterval* start_search = FirstSearchIntervalForPosition(position);
496   for (UseInterval* interval = start_search;
497        interval != NULL;
498        interval = interval->next()) {
499     ASSERT(interval->next() == NULL ||
500            interval->next()->start().Value() >= interval->start().Value());
501     AdvanceLastProcessedMarker(interval, position);
502     if (interval->Contains(position)) return true;
503     if (interval->start().Value() > position.Value()) return false;
504   }
505   return false;
506 }
507 
508 
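// Returns the first position at which this range and |other| overlap, or an
// invalid position if the two ranges do not intersect.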
509 LifetimePosition LiveRange::FirstIntersection(LiveRange* other) {
510   UseInterval* b = other->first_interval();
511   if (b == NULL) return LifetimePosition::Invalid();
512   LifetimePosition advance_last_processed_up_to = b->start();
513   UseInterval* a = FirstSearchIntervalForPosition(b->start());
514   while (a != NULL && b != NULL) {
515     if (a->start().Value() > other->End().Value()) break;
516     if (b->start().Value() > End().Value()) break;
517     LifetimePosition cur_intersection = a->Intersect(b);
518     if (cur_intersection.IsValid()) {
519       return cur_intersection;
520     }
521     if (a->start().Value() < b->start().Value()) {
522       a = a->next();
523       if (a == NULL || a->start().Value() > other->End().Value()) break;
524       AdvanceLastProcessedMarker(a, advance_last_processed_up_to);
525     } else {
526       b = b->next();
527     }
528   }
529   return LifetimePosition::Invalid();
530 }
531 
532 
533 LAllocator::LAllocator(int num_values, HGraph* graph)
534     : zone_(graph->zone()),
535       chunk_(NULL),
536       live_in_sets_(graph->blocks()->length()),
537       live_ranges_(num_values * 2),
538       fixed_live_ranges_(NULL),
539       fixed_double_live_ranges_(NULL),
540       unhandled_live_ranges_(num_values * 2),
541       active_live_ranges_(8),
542       inactive_live_ranges_(8),
543       reusable_slots_(8),
544       next_virtual_register_(num_values),
545       first_artificial_register_(num_values),
546       mode_(GENERAL_REGISTERS),
547       num_registers_(-1),
548       graph_(graph),
549       has_osr_entry_(false),
550       allocation_ok_(true) { }
551 
552 
553 void LAllocator::InitializeLivenessAnalysis() {
554   // Initialize the live_in sets for each block to NULL.
555   int block_count = graph_->blocks()->length();
556   live_in_sets_.Initialize(block_count);
557   live_in_sets_.AddBlock(NULL, block_count);
558 }
559 
560 
561 BitVector* LAllocator::ComputeLiveOut(HBasicBlock* block) {
562   // Compute live out for the given block, excluding backward successor
563   // edges.
564   BitVector* live_out = new(zone_) BitVector(next_virtual_register_, zone_);
565 
566   // Process all successor blocks.
567   for (HSuccessorIterator it(block->end()); !it.Done(); it.Advance()) {
568     // Add values live on entry to the successor. Note the successor's
569     // live_in will not be computed yet for backwards edges.
570     HBasicBlock* successor = it.Current();
571     BitVector* live_in = live_in_sets_[successor->block_id()];
572     if (live_in != NULL) live_out->Union(*live_in);
573 
574     // All phi input operands corresponding to this successor edge are live
575     // out from this block.
576     int index = successor->PredecessorIndexOf(block);
577     const ZoneList<HPhi*>* phis = successor->phis();
578     for (int i = 0; i < phis->length(); ++i) {
579       HPhi* phi = phis->at(i);
580       if (!phi->OperandAt(index)->IsConstant()) {
581         live_out->Add(phi->OperandAt(index)->id());
582       }
583     }
584   }
585 
586   return live_out;
587 }
588 
589 
590 void LAllocator::AddInitialIntervals(HBasicBlock* block,
591                                      BitVector* live_out) {
592   // Add an interval that includes the entire block to the live range for
593   // each live_out value.
594   LifetimePosition start = LifetimePosition::FromInstructionIndex(
595       block->first_instruction_index());
596   LifetimePosition end = LifetimePosition::FromInstructionIndex(
597       block->last_instruction_index()).NextInstruction();
598   BitVector::Iterator iterator(live_out);
599   while (!iterator.Done()) {
600     int operand_index = iterator.Current();
601     LiveRange* range = LiveRangeFor(operand_index);
602     range->AddUseInterval(start, end, zone_);
603     iterator.Advance();
604   }
605 }
606 
607 
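// Ids for fixed double register ranges are negative and offset past the ids
// used for fixed general registers, so they never collide with virtual
// register ids.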
608 int LAllocator::FixedDoubleLiveRangeID(int index) {
609   return -index - 1 - Register::kNumAllocatableRegisters;
610 }
611 
612 
613 LOperand* LAllocator::AllocateFixed(LUnallocated* operand,
614                                     int pos,
615                                     bool is_tagged) {
616   TraceAlloc("Allocating fixed reg for op %d\n", operand->virtual_register());
617   ASSERT(operand->HasFixedPolicy());
618   if (operand->policy() == LUnallocated::FIXED_SLOT) {
619     operand->ConvertTo(LOperand::STACK_SLOT, operand->fixed_index());
620   } else if (operand->policy() == LUnallocated::FIXED_REGISTER) {
621     int reg_index = operand->fixed_index();
622     operand->ConvertTo(LOperand::REGISTER, reg_index);
623   } else if (operand->policy() == LUnallocated::FIXED_DOUBLE_REGISTER) {
624     int reg_index = operand->fixed_index();
625     operand->ConvertTo(LOperand::DOUBLE_REGISTER, reg_index);
626   } else {
627     UNREACHABLE();
628   }
629   if (is_tagged) {
630     TraceAlloc("Fixed reg is tagged at %d\n", pos);
631     LInstruction* instr = InstructionAt(pos);
632     if (instr->HasPointerMap()) {
633       instr->pointer_map()->RecordPointer(operand);
634     }
635   }
636   return operand;
637 }
638 
639 
640 LiveRange* LAllocator::FixedLiveRangeFor(int index) {
641   ASSERT(index < Register::kNumAllocatableRegisters);
642   LiveRange* result = fixed_live_ranges_[index];
643   if (result == NULL) {
644     result = new(zone_) LiveRange(FixedLiveRangeID(index), zone_);
645     ASSERT(result->IsFixed());
646     result->set_assigned_register(index, GENERAL_REGISTERS, zone_);
647     fixed_live_ranges_[index] = result;
648   }
649   return result;
650 }
651 
652 
653 LiveRange* LAllocator::FixedDoubleLiveRangeFor(int index) {
654   ASSERT(index < DoubleRegister::kNumAllocatableRegisters);
655   LiveRange* result = fixed_double_live_ranges_[index];
656   if (result == NULL) {
657     result = new(zone_) LiveRange(FixedDoubleLiveRangeID(index), zone_);
658     ASSERT(result->IsFixed());
659     result->set_assigned_register(index, DOUBLE_REGISTERS, zone_);
660     fixed_double_live_ranges_[index] = result;
661   }
662   return result;
663 }
664 
665 
666 LiveRange* LAllocator::LiveRangeFor(int index) {
667   if (index >= live_ranges_.length()) {
668     live_ranges_.AddBlock(NULL, index - live_ranges_.length() + 1);
669   }
670   LiveRange* result = live_ranges_[index];
671   if (result == NULL) {
672     result = new(zone_) LiveRange(index, zone_);
673     live_ranges_[index] = result;
674   }
675   return result;
676 }
677 
678 
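// Returns the gap nearest to the last instruction of the block.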
679 LGap* LAllocator::GetLastGap(HBasicBlock* block) {
680   int last_instruction = block->last_instruction_index();
681   int index = chunk_->NearestGapPos(last_instruction);
682   return GapAt(index);
683 }
684 
685 
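// If the operand is an unallocated use of a phi's virtual register, returns
// that phi; otherwise returns NULL.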
686 HPhi* LAllocator::LookupPhi(LOperand* operand) const {
687   if (!operand->IsUnallocated()) return NULL;
688   int index = LUnallocated::cast(operand)->virtual_register();
689   HValue* instr = graph_->LookupValue(index);
690   if (instr != NULL && instr->IsPhi()) {
691     return HPhi::cast(instr);
692   }
693   return NULL;
694 }
695 
696 
697 LiveRange* LAllocator::LiveRangeFor(LOperand* operand) {
698   if (operand->IsUnallocated()) {
699     return LiveRangeFor(LUnallocated::cast(operand)->virtual_register());
700   } else if (operand->IsRegister()) {
701     return FixedLiveRangeFor(operand->index());
702   } else if (operand->IsDoubleRegister()) {
703     return FixedDoubleLiveRangeFor(operand->index());
704   } else {
705     return NULL;
706   }
707 }
708 
709 
710 void LAllocator::Define(LifetimePosition position,
711                         LOperand* operand,
712                         LOperand* hint) {
713   LiveRange* range = LiveRangeFor(operand);
714   if (range == NULL) return;
715 
716   if (range->IsEmpty() || range->Start().Value() > position.Value()) {
717     // Can happen if there is a definition without use.
718     range->AddUseInterval(position, position.NextInstruction(), zone_);
719     range->AddUsePosition(position.NextInstruction(), NULL, zone_);
720   } else {
721     range->ShortenTo(position);
722   }
723 
724   if (operand->IsUnallocated()) {
725     LUnallocated* unalloc_operand = LUnallocated::cast(operand);
726     range->AddUsePosition(position, unalloc_operand, zone_)->set_hint(hint);
727   }
728 }
729 
730 
731 void LAllocator::Use(LifetimePosition block_start,
732                      LifetimePosition position,
733                      LOperand* operand,
734                      LOperand* hint) {
735   LiveRange* range = LiveRangeFor(operand);
736   if (range == NULL) return;
737   if (operand->IsUnallocated()) {
738     LUnallocated* unalloc_operand = LUnallocated::cast(operand);
739     range->AddUsePosition(position, unalloc_operand, zone_)->set_hint(hint);
740   }
741   range->AddUseInterval(block_start, position, zone_);
742 }
743 
744 
745 void LAllocator::AddConstraintsGapMove(int index,
746                                        LOperand* from,
747                                        LOperand* to) {
748   LGap* gap = GapAt(index);
749   LParallelMove* move = gap->GetOrCreateParallelMove(LGap::START);
750   if (from->IsUnallocated()) {
751     const ZoneList<LMoveOperands>* move_operands = move->move_operands();
752     for (int i = 0; i < move_operands->length(); ++i) {
753       LMoveOperands cur = move_operands->at(i);
754       LOperand* cur_to = cur.destination();
755       if (cur_to->IsUnallocated()) {
756         if (LUnallocated::cast(cur_to)->virtual_register() ==
757             LUnallocated::cast(from)->virtual_register()) {
758           move->AddMove(cur.source(), to);
759           return;
760         }
761       }
762     }
763   }
764   move->AddMove(from, to);
765 }
766 
767 
768 void LAllocator::MeetRegisterConstraints(HBasicBlock* block) {
769   int start = block->first_instruction_index();
770   int end = block->last_instruction_index();
771   for (int i = start; i <= end; ++i) {
772     if (IsGapAt(i)) {
773       LInstruction* instr = NULL;
774       LInstruction* prev_instr = NULL;
775       if (i < end) instr = InstructionAt(i + 1);
776       if (i > start) prev_instr = InstructionAt(i - 1);
777       MeetConstraintsBetween(prev_instr, instr, i);
778       if (!AllocationOk()) return;
779     }
780   }
781 }
782 
783 
784 void LAllocator::MeetConstraintsBetween(LInstruction* first,
785                                         LInstruction* second,
786                                         int gap_index) {
787   // Handle fixed temporaries.
788   if (first != NULL) {
789     for (TempIterator it(first); !it.Done(); it.Advance()) {
790       LUnallocated* temp = LUnallocated::cast(it.Current());
791       if (temp->HasFixedPolicy()) {
792         AllocateFixed(temp, gap_index - 1, false);
793       }
794     }
795   }
796 
797   // Handle fixed output operand.
798   if (first != NULL && first->Output() != NULL) {
799     LUnallocated* first_output = LUnallocated::cast(first->Output());
800     LiveRange* range = LiveRangeFor(first_output->virtual_register());
801     bool assigned = false;
802     if (first_output->HasFixedPolicy()) {
803       LUnallocated* output_copy = first_output->CopyUnconstrained();
804       bool is_tagged = HasTaggedValue(first_output->virtual_register());
805       AllocateFixed(first_output, gap_index, is_tagged);
806 
807       // This value is produced on the stack; we never need to spill it.
808       if (first_output->IsStackSlot()) {
809         range->SetSpillOperand(first_output);
810         range->SetSpillStartIndex(gap_index - 1);
811         assigned = true;
812       }
813       chunk_->AddGapMove(gap_index, first_output, output_copy);
814     }
815 
816     if (!assigned) {
817       range->SetSpillStartIndex(gap_index);
818 
819       // This move to spill operand is not a real use. Liveness analysis
820       // and splitting of live ranges do not account for it.
821       // Thus it should be inserted to a lifetime position corresponding to
822       // the instruction end.
823       LGap* gap = GapAt(gap_index);
824       LParallelMove* move = gap->GetOrCreateParallelMove(LGap::BEFORE);
825       move->AddMove(first_output, range->GetSpillOperand());
826     }
827   }
828 
829   // Handle fixed input operands of second instruction.
830   if (second != NULL) {
831     for (UseIterator it(second); !it.Done(); it.Advance()) {
832       LUnallocated* cur_input = LUnallocated::cast(it.Current());
833       if (cur_input->HasFixedPolicy()) {
834         LUnallocated* input_copy = cur_input->CopyUnconstrained();
835         bool is_tagged = HasTaggedValue(cur_input->virtual_register());
836         AllocateFixed(cur_input, gap_index + 1, is_tagged);
837         AddConstraintsGapMove(gap_index, input_copy, cur_input);
838       } else if (cur_input->policy() == LUnallocated::WRITABLE_REGISTER) {
839         // The live range of writable input registers always goes until the end
840         // of the instruction.
841         ASSERT(!cur_input->IsUsedAtStart());
842 
843         LUnallocated* input_copy = cur_input->CopyUnconstrained();
844         cur_input->set_virtual_register(GetVirtualRegister());
845         if (!AllocationOk()) return;
846 
847         if (RequiredRegisterKind(input_copy->virtual_register()) ==
848             DOUBLE_REGISTERS) {
849           double_artificial_registers_.Add(
850               cur_input->virtual_register() - first_artificial_register_,
851               zone_);
852         }
853 
854         AddConstraintsGapMove(gap_index, input_copy, cur_input);
855       }
856     }
857   }
858 
859   // Handle "output same as input" for second instruction.
860   if (second != NULL && second->Output() != NULL) {
861     LUnallocated* second_output = LUnallocated::cast(second->Output());
862     if (second_output->HasSameAsInputPolicy()) {
863       LUnallocated* cur_input = LUnallocated::cast(second->FirstInput());
864       int output_vreg = second_output->virtual_register();
865       int input_vreg = cur_input->virtual_register();
866 
867       LUnallocated* input_copy = cur_input->CopyUnconstrained();
868       cur_input->set_virtual_register(second_output->virtual_register());
869       AddConstraintsGapMove(gap_index, input_copy, cur_input);
870 
871       if (HasTaggedValue(input_vreg) && !HasTaggedValue(output_vreg)) {
872         int index = gap_index + 1;
873         LInstruction* instr = InstructionAt(index);
874         if (instr->HasPointerMap()) {
875           instr->pointer_map()->RecordPointer(input_copy);
876         }
877       } else if (!HasTaggedValue(input_vreg) && HasTaggedValue(output_vreg)) {
878         // The input is assumed to immediately have a tagged representation,
879         // before the pointer map can be used. I.e. the pointer map at the
880         // instruction will include the output operand (whose value at the
881         // beginning of the instruction is equal to the input operand). If
882         // this is not desired, then the pointer map at this instruction needs
883         // to be adjusted manually.
884       }
885     }
886   }
887 }
888 
889 
890 void LAllocator::ProcessInstructions(HBasicBlock* block, BitVector* live) {
891   int block_start = block->first_instruction_index();
892   int index = block->last_instruction_index();
893 
894   LifetimePosition block_start_position =
895       LifetimePosition::FromInstructionIndex(block_start);
896 
897   while (index >= block_start) {
898     LifetimePosition curr_position =
899         LifetimePosition::FromInstructionIndex(index);
900 
901     if (IsGapAt(index)) {
902       // We have a gap at this position.
903       LGap* gap = GapAt(index);
904       LParallelMove* move = gap->GetOrCreateParallelMove(LGap::START);
905       const ZoneList<LMoveOperands>* move_operands = move->move_operands();
906       for (int i = 0; i < move_operands->length(); ++i) {
907         LMoveOperands* cur = &move_operands->at(i);
908         if (cur->IsIgnored()) continue;
909         LOperand* from = cur->source();
910         LOperand* to = cur->destination();
911         HPhi* phi = LookupPhi(to);
912         LOperand* hint = to;
913         if (phi != NULL) {
914           // This is a phi resolving move.
915           if (!phi->block()->IsLoopHeader()) {
916             hint = LiveRangeFor(phi->id())->FirstHint();
917           }
918         } else {
919           if (to->IsUnallocated()) {
920             if (live->Contains(LUnallocated::cast(to)->virtual_register())) {
921               Define(curr_position, to, from);
922               live->Remove(LUnallocated::cast(to)->virtual_register());
923             } else {
924               cur->Eliminate();
925               continue;
926             }
927           } else {
928             Define(curr_position, to, from);
929           }
930         }
931         Use(block_start_position, curr_position, from, hint);
932         if (from->IsUnallocated()) {
933           live->Add(LUnallocated::cast(from)->virtual_register());
934         }
935       }
936     } else {
937       ASSERT(!IsGapAt(index));
938       LInstruction* instr = InstructionAt(index);
939 
940       if (instr != NULL) {
941         LOperand* output = instr->Output();
942         if (output != NULL) {
943           if (output->IsUnallocated()) {
944             live->Remove(LUnallocated::cast(output)->virtual_register());
945           }
946           Define(curr_position, output, NULL);
947         }
948 
949         if (instr->IsMarkedAsCall()) {
950           for (int i = 0; i < Register::kNumAllocatableRegisters; ++i) {
951             if (output == NULL || !output->IsRegister() ||
952                 output->index() != i) {
953               LiveRange* range = FixedLiveRangeFor(i);
954               range->AddUseInterval(curr_position,
955                                     curr_position.InstructionEnd(),
956                                     zone_);
957             }
958           }
959         }
960 
961         if (instr->IsMarkedAsCall() || instr->IsMarkedAsSaveDoubles()) {
962           for (int i = 0; i < DoubleRegister::kNumAllocatableRegisters; ++i) {
963             if (output == NULL || !output->IsDoubleRegister() ||
964                 output->index() != i) {
965               LiveRange* range = FixedDoubleLiveRangeFor(i);
966               range->AddUseInterval(curr_position,
967                                     curr_position.InstructionEnd(),
968                                     zone_);
969             }
970           }
971         }
972 
973         for (UseIterator it(instr); !it.Done(); it.Advance()) {
974           LOperand* input = it.Current();
975 
976           LifetimePosition use_pos;
977           if (input->IsUnallocated() &&
978               LUnallocated::cast(input)->IsUsedAtStart()) {
979             use_pos = curr_position;
980           } else {
981             use_pos = curr_position.InstructionEnd();
982           }
983 
984           Use(block_start_position, use_pos, input, NULL);
985           if (input->IsUnallocated()) {
986             live->Add(LUnallocated::cast(input)->virtual_register());
987           }
988         }
989 
990         for (TempIterator it(instr); !it.Done(); it.Advance()) {
991           LOperand* temp = it.Current();
992           if (instr->IsMarkedAsCall()) {
993             if (temp->IsRegister()) continue;
994             if (temp->IsUnallocated()) {
995               LUnallocated* temp_unalloc = LUnallocated::cast(temp);
996               if (temp_unalloc->HasFixedPolicy()) {
997                 continue;
998               }
999             }
1000           }
1001           Use(block_start_position, curr_position.InstructionEnd(), temp, NULL);
1002           Define(curr_position, temp, NULL);
1003         }
1004       }
1005     }
1006 
1007     index = index - 1;
1008   }
1009 }
1010 
1011 
1012 void LAllocator::ResolvePhis(HBasicBlock* block) {
1013   const ZoneList<HPhi*>* phis = block->phis();
1014   for (int i = 0; i < phis->length(); ++i) {
1015     HPhi* phi = phis->at(i);
1016     LUnallocated* phi_operand = new(zone_) LUnallocated(LUnallocated::NONE);
1017     phi_operand->set_virtual_register(phi->id());
1018     for (int j = 0; j < phi->OperandCount(); ++j) {
1019       HValue* op = phi->OperandAt(j);
1020       LOperand* operand = NULL;
1021       if (op->IsConstant() && op->EmitAtUses()) {
1022         HConstant* constant = HConstant::cast(op);
1023         operand = chunk_->DefineConstantOperand(constant);
1024       } else {
1025         ASSERT(!op->EmitAtUses());
1026         LUnallocated* unalloc = new(zone_) LUnallocated(LUnallocated::ANY);
1027         unalloc->set_virtual_register(op->id());
1028         operand = unalloc;
1029       }
1030       HBasicBlock* cur_block = block->predecessors()->at(j);
1031       // The gap move must be added without the special processing done in
1032       // AddConstraintsGapMove.
1033       chunk_->AddGapMove(cur_block->last_instruction_index() - 1,
1034                          operand,
1035                          phi_operand);
1036 
1037       // We are going to insert a move before the branch instruction.
1038       // Some branch instructions (e.g. loops' back edges)
1039       // can potentially cause a GC so they have a pointer map.
1040       // By inserting a move we essentially create a copy of a
1041       // value which is invisible to PopulatePointerMaps(), because we store
1042       // it into a location different from the operand of a live range
1043       // covering a branch instruction.
1044       // Thus we need to manually record a pointer.
1045       LInstruction* branch =
1046           InstructionAt(cur_block->last_instruction_index());
1047       if (branch->HasPointerMap()) {
1048         if (phi->representation().IsTagged()) {
1049           branch->pointer_map()->RecordPointer(phi_operand);
1050         } else if (!phi->representation().IsDouble()) {
1051           branch->pointer_map()->RecordUntagged(phi_operand);
1052         }
1053       }
1054     }
1055 
1056     LiveRange* live_range = LiveRangeFor(phi->id());
1057     LLabel* label = chunk_->GetLabel(phi->block()->block_id());
1058     label->GetOrCreateParallelMove(LGap::START)->
1059         AddMove(phi_operand, live_range->GetSpillOperand());
1060     live_range->SetSpillStartIndex(phi->block()->first_instruction_index());
1061   }
1062 }
1063 
1064 
1065 bool LAllocator::Allocate(LChunk* chunk) {
1066   ASSERT(chunk_ == NULL);
1067   chunk_ = chunk;
1068   MeetRegisterConstraints();
1069   if (!AllocationOk()) return false;
1070   ResolvePhis();
1071   BuildLiveRanges();
1072   AllocateGeneralRegisters();
1073   if (!AllocationOk()) return false;
1074   AllocateDoubleRegisters();
1075   if (!AllocationOk()) return false;
1076   PopulatePointerMaps();
1077   if (has_osr_entry_) ProcessOsrEntry();
1078   ConnectRanges();
1079   ResolveControlFlow();
1080   return true;
1081 }
1082 
1083 
1084 void LAllocator::MeetRegisterConstraints() {
1085   HPhase phase("L_Register constraints", chunk_);
1086   first_artificial_register_ = next_virtual_register_;
1087   const ZoneList<HBasicBlock*>* blocks = graph_->blocks();
1088   for (int i = 0; i < blocks->length(); ++i) {
1089     HBasicBlock* block = blocks->at(i);
1090     MeetRegisterConstraints(block);
1091     if (!AllocationOk()) return;
1092   }
1093 }
1094 
1095 
1096 void LAllocator::ResolvePhis() {
1097   HPhase phase("L_Resolve phis", chunk_);
1098 
1099   // Process the blocks in reverse order.
1100   const ZoneList<HBasicBlock*>* blocks = graph_->blocks();
1101   for (int block_id = blocks->length() - 1; block_id >= 0; --block_id) {
1102     HBasicBlock* block = blocks->at(block_id);
1103     ResolvePhis(block);
1104   }
1105 }
1106 
1107 
1108 void LAllocator::ResolveControlFlow(LiveRange* range,
1109                                     HBasicBlock* block,
1110                                     HBasicBlock* pred) {
1111   LifetimePosition pred_end =
1112       LifetimePosition::FromInstructionIndex(pred->last_instruction_index());
1113   LifetimePosition cur_start =
1114       LifetimePosition::FromInstructionIndex(block->first_instruction_index());
1115   LiveRange* pred_cover = NULL;
1116   LiveRange* cur_cover = NULL;
1117   LiveRange* cur_range = range;
1118   while (cur_range != NULL && (cur_cover == NULL || pred_cover == NULL)) {
1119     if (cur_range->CanCover(cur_start)) {
1120       ASSERT(cur_cover == NULL);
1121       cur_cover = cur_range;
1122     }
1123     if (cur_range->CanCover(pred_end)) {
1124       ASSERT(pred_cover == NULL);
1125       pred_cover = cur_range;
1126     }
1127     cur_range = cur_range->next();
1128   }
1129 
1130   if (cur_cover->IsSpilled()) return;
1131   ASSERT(pred_cover != NULL && cur_cover != NULL);
1132   if (pred_cover != cur_cover) {
1133     LOperand* pred_op = pred_cover->CreateAssignedOperand(zone_);
1134     LOperand* cur_op = cur_cover->CreateAssignedOperand(zone_);
1135     if (!pred_op->Equals(cur_op)) {
1136       LGap* gap = NULL;
1137       if (block->predecessors()->length() == 1) {
1138         gap = GapAt(block->first_instruction_index());
1139       } else {
1140         ASSERT(pred->end()->SecondSuccessor() == NULL);
1141         gap = GetLastGap(pred);
1142 
1143         // We are going to insert a move before the branch instruction.
1144         // Some branch instructions (e.g. loops' back edges)
1145         // can potentially cause a GC so they have a pointer map.
1146         // By inserting a move we essentially create a copy of a
1147         // value which is invisible to PopulatePointerMaps(), because we store
1148         // it into a location different from the operand of a live range
1149         // covering a branch instruction.
1150         // Thus we need to manually record a pointer.
1151         LInstruction* branch = InstructionAt(pred->last_instruction_index());
1152         if (branch->HasPointerMap()) {
1153           if (HasTaggedValue(range->id())) {
1154             branch->pointer_map()->RecordPointer(cur_op);
1155           } else if (!cur_op->IsDoubleStackSlot() &&
1156                      !cur_op->IsDoubleRegister()) {
1157             branch->pointer_map()->RemovePointer(cur_op);
1158           }
1159         }
1160       }
1161       gap->GetOrCreateParallelMove(LGap::START)->AddMove(pred_op, cur_op);
1162     }
1163   }
1164 }
1165 
1166 
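// Returns the parallel move that should receive a move connecting two pieces
// of a split live range that meet at the given position.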
1167 LParallelMove* LAllocator::GetConnectingParallelMove(LifetimePosition pos) {
1168   int index = pos.InstructionIndex();
1169   if (IsGapAt(index)) {
1170     LGap* gap = GapAt(index);
1171     return gap->GetOrCreateParallelMove(
1172         pos.IsInstructionStart() ? LGap::START : LGap::END);
1173   }
1174   int gap_pos = pos.IsInstructionStart() ? (index - 1) : (index + 1);
1175   return GapAt(gap_pos)->GetOrCreateParallelMove(
1176       (gap_pos < index) ? LGap::AFTER : LGap::BEFORE);
1177 }
1178 
1179 
1180 HBasicBlock* LAllocator::GetBlock(LifetimePosition pos) {
1181   LGap* gap = GapAt(chunk_->NearestGapPos(pos.InstructionIndex()));
1182   return gap->block();
1183 }
1184 
1185 
1186 void LAllocator::ConnectRanges() {
1187   HPhase phase("L_Connect ranges", this);
1188   for (int i = 0; i < live_ranges()->length(); ++i) {
1189     LiveRange* first_range = live_ranges()->at(i);
1190     if (first_range == NULL || first_range->parent() != NULL) continue;
1191 
1192     LiveRange* second_range = first_range->next();
1193     while (second_range != NULL) {
1194       LifetimePosition pos = second_range->Start();
1195 
1196       if (!second_range->IsSpilled()) {
1197         // Add gap move if the two live ranges touch and there is no block
1198         // boundary.
1199         if (first_range->End().Value() == pos.Value()) {
1200           bool should_insert = true;
1201           if (IsBlockBoundary(pos)) {
1202             should_insert = CanEagerlyResolveControlFlow(GetBlock(pos));
1203           }
1204           if (should_insert) {
1205             LParallelMove* move = GetConnectingParallelMove(pos);
1206             LOperand* prev_operand = first_range->CreateAssignedOperand(zone_);
1207             LOperand* cur_operand = second_range->CreateAssignedOperand(zone_);
1208             move->AddMove(prev_operand, cur_operand);
1209           }
1210         }
1211       }
1212 
1213       first_range = second_range;
1214       second_range = second_range->next();
1215     }
1216   }
1217 }
1218 
1219 
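// Control flow into a block can be resolved eagerly only when the block has a
// single predecessor and that predecessor immediately precedes it in the
// block order.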
1220 bool LAllocator::CanEagerlyResolveControlFlow(HBasicBlock* block) const {
1221   if (block->predecessors()->length() != 1) return false;
1222   return block->predecessors()->first()->block_id() == block->block_id() - 1;
1223 }
1224 
1225 
1226 void LAllocator::ResolveControlFlow() {
1227   HPhase phase("L_Resolve control flow", this);
1228   const ZoneList<HBasicBlock*>* blocks = graph_->blocks();
1229   for (int block_id = 1; block_id < blocks->length(); ++block_id) {
1230     HBasicBlock* block = blocks->at(block_id);
1231     if (CanEagerlyResolveControlFlow(block)) continue;
1232     BitVector* live = live_in_sets_[block->block_id()];
1233     BitVector::Iterator iterator(live);
1234     while (!iterator.Done()) {
1235       int operand_index = iterator.Current();
1236       for (int i = 0; i < block->predecessors()->length(); ++i) {
1237         HBasicBlock* cur = block->predecessors()->at(i);
1238         LiveRange* cur_range = LiveRangeFor(operand_index);
1239         ResolveControlFlow(cur_range, block, cur);
1240       }
1241       iterator.Advance();
1242     }
1243   }
1244 }
1245 
1246 
1247 void LAllocator::BuildLiveRanges() {
1248   HPhase phase("L_Build live ranges", this);
1249   InitializeLivenessAnalysis();
1250   // Process the blocks in reverse order.
1251   const ZoneList<HBasicBlock*>* blocks = graph_->blocks();
1252   for (int block_id = blocks->length() - 1; block_id >= 0; --block_id) {
1253     HBasicBlock* block = blocks->at(block_id);
1254     BitVector* live = ComputeLiveOut(block);
1255     // Initially consider all live_out values live for the entire block. We
1256     // will shorten these intervals if necessary.
1257     AddInitialIntervals(block, live);
1258 
1259     // Process the instructions in reverse order, generating and killing
1260     // live values.
1261     ProcessInstructions(block, live);
1262     // All phi output operands are killed by this block.
1263     const ZoneList<HPhi*>* phis = block->phis();
1264     for (int i = 0; i < phis->length(); ++i) {
1265       // The live range interval already ends at the first instruction of the
1266       // block.
1267       HPhi* phi = phis->at(i);
1268       live->Remove(phi->id());
1269 
1270       LOperand* hint = NULL;
1271       LOperand* phi_operand = NULL;
1272       LGap* gap = GetLastGap(phi->block()->predecessors()->at(0));
1273       LParallelMove* move = gap->GetOrCreateParallelMove(LGap::START);
1274       for (int j = 0; j < move->move_operands()->length(); ++j) {
1275         LOperand* to = move->move_operands()->at(j).destination();
1276         if (to->IsUnallocated() &&
1277             LUnallocated::cast(to)->virtual_register() == phi->id()) {
1278           hint = move->move_operands()->at(j).source();
1279           phi_operand = to;
1280           break;
1281         }
1282       }
1283       ASSERT(hint != NULL);
1284 
1285       LifetimePosition block_start = LifetimePosition::FromInstructionIndex(
1286               block->first_instruction_index());
1287       Define(block_start, phi_operand, hint);
1288     }
1289 
1290     // Now live is live_in for this block except not including values live
1291     // out on backward successor edges.
1292     live_in_sets_[block_id] = live;
1293 
1294     // If this block is a loop header go back and patch up the necessary
1295     // predecessor blocks.
1296     if (block->IsLoopHeader()) {
1297       // TODO(kmillikin): Need to be able to get the last block of the loop
1298       // in the loop information. Add a live range stretching from the first
1299       // loop instruction to the last for each value live on entry to the
1300       // header.
1301       HBasicBlock* back_edge = block->loop_information()->GetLastBackEdge();
1302       BitVector::Iterator iterator(live);
1303       LifetimePosition start = LifetimePosition::FromInstructionIndex(
1304           block->first_instruction_index());
1305       LifetimePosition end = LifetimePosition::FromInstructionIndex(
1306           back_edge->last_instruction_index()).NextInstruction();
1307       while (!iterator.Done()) {
1308         int operand_index = iterator.Current();
1309         LiveRange* range = LiveRangeFor(operand_index);
1310         range->EnsureInterval(start, end, zone_);
1311         iterator.Advance();
1312       }
1313 
1314       for (int i = block->block_id() + 1; i <= back_edge->block_id(); ++i) {
1315         live_in_sets_[i]->Union(*live);
1316       }
1317     }
1318 
1319 #ifdef DEBUG
1320     if (block_id == 0) {
1321       BitVector::Iterator iterator(live);
1322       bool found = false;
1323       while (!iterator.Done()) {
1324         found = true;
1325         int operand_index = iterator.Current();
1326         PrintF("Function: %s\n",
1327                *chunk_->info()->function()->debug_name()->ToCString());
1328         PrintF("Value %d used before first definition!\n", operand_index);
1329         LiveRange* range = LiveRangeFor(operand_index);
1330         PrintF("First use is at %d\n", range->first_pos()->pos().Value());
1331         iterator.Advance();
1332       }
1333       ASSERT(!found);
1334     }
1335 #endif
1336   }
1337 }
1338 
1339 
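// Checks that the safe points recorded in the pointer maps appear in
// non-decreasing order of lithium position.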
1340 bool LAllocator::SafePointsAreInOrder() const {
1341   const ZoneList<LPointerMap*>* pointer_maps = chunk_->pointer_maps();
1342   int safe_point = 0;
1343   for (int i = 0; i < pointer_maps->length(); ++i) {
1344     LPointerMap* map = pointer_maps->at(i);
1345     if (safe_point > map->lithium_position()) return false;
1346     safe_point = map->lithium_position();
1347   }
1348   return true;
1349 }
1350 
1351 
1352 void LAllocator::PopulatePointerMaps() {
1353   HPhase phase("L_Populate pointer maps", this);
1354   const ZoneList<LPointerMap*>* pointer_maps = chunk_->pointer_maps();
1355 
1356   ASSERT(SafePointsAreInOrder());
1357 
1358   // Iterate over all safe point positions and record a pointer
1359   // for all spilled live ranges at this point.
1360   int first_safe_point_index = 0;
1361   int last_range_start = 0;
1362   for (int range_idx = 0; range_idx < live_ranges()->length(); ++range_idx) {
1363     LiveRange* range = live_ranges()->at(range_idx);
1364     if (range == NULL) continue;
1365     // Iterate over the first parts of multi-part live ranges.
1366     if (range->parent() != NULL) continue;
1367     // Skip non-pointer values.
1368     if (!HasTaggedValue(range->id())) continue;
1369     // Skip empty live ranges.
1370     if (range->IsEmpty()) continue;
1371 
1372     // Find the extent of the range and its children.
1373     int start = range->Start().InstructionIndex();
1374     int end = 0;
1375     for (LiveRange* cur = range; cur != NULL; cur = cur->next()) {
1376       LifetimePosition this_end = cur->End();
1377       if (this_end.InstructionIndex() > end) end = this_end.InstructionIndex();
1378       ASSERT(cur->Start().InstructionIndex() >= start);
1379     }
1380 
1381     // Most of the ranges are in order, but not all.  Keep an eye on when
1382     // they step backwards and reset the first_safe_point_index so we don't
1383     // miss any safe points.
1384     if (start < last_range_start) {
1385       first_safe_point_index = 0;
1386     }
1387     last_range_start = start;
1388 
1389     // Step across all the safe points that are before the start of this range,
1390     // recording how far we step in order to save doing this for the next range.
1391     while (first_safe_point_index < pointer_maps->length()) {
1392       LPointerMap* map = pointer_maps->at(first_safe_point_index);
1393       int safe_point = map->lithium_position();
1394       if (safe_point >= start) break;
1395       first_safe_point_index++;
1396     }
1397 
1398     // Step through the safe points to see whether they are in the range.
1399     for (int safe_point_index = first_safe_point_index;
1400          safe_point_index < pointer_maps->length();
1401          ++safe_point_index) {
1402       LPointerMap* map = pointer_maps->at(safe_point_index);
1403       int safe_point = map->lithium_position();
1404 
1405       // The safe points are sorted so we can stop searching here.
1406       if (safe_point - 1 > end) break;
1407 
1408       // Advance to the next active range that covers the current
1409       // safe point position.
1410       LifetimePosition safe_point_pos =
1411           LifetimePosition::FromInstructionIndex(safe_point);
1412       LiveRange* cur = range;
1413       while (cur != NULL && !cur->Covers(safe_point_pos.PrevInstruction())) {
1414         cur = cur->next();
1415       }
1416       if (cur == NULL) continue;
1417 
1418       // Check if the live range is spilled and the safe point is after
1419       // the spill position.
1420       if (range->HasAllocatedSpillOperand() &&
1421           safe_point >= range->spill_start_index()) {
1422         TraceAlloc("Pointer for range %d (spilled at %d) at safe point %d\n",
1423                    range->id(), range->spill_start_index(), safe_point);
1424         map->RecordPointer(range->GetSpillOperand());
1425       }
1426 
1427       if (!cur->IsSpilled()) {
1428         TraceAlloc("Pointer in register for range %d (start at %d) "
1429                    "at safe point %d\n",
1430                    cur->id(), cur->Start().Value(), safe_point);
1431         LOperand* operand = cur->CreateAssignedOperand(zone_);
1432         ASSERT(!operand->IsStackSlot());
1433         map->RecordPointer(operand);
1434       }
1435     }
1436   }
1437 }
1438 
1439 
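// Find the single OSR entry instruction and, for every live range that
// covers it while assigned to a register and whose top level also owns a
// spill slot, note the register/spill-slot pair on the LOsrEntry instruction.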
1440 void LAllocator::ProcessOsrEntry() {
1441   const ZoneList<LInstruction*>* instrs = chunk_->instructions();
1442 
1443   // Linear search for the OSR entry instruction in the chunk.
1444   int index = -1;
1445   while (++index < instrs->length() &&
1446          !instrs->at(index)->IsOsrEntry()) {
1447   }
1448   ASSERT(index < instrs->length());
1449   LOsrEntry* instruction = LOsrEntry::cast(instrs->at(index));
1450 
1451   LifetimePosition position = LifetimePosition::FromInstructionIndex(index);
1452   for (int i = 0; i < live_ranges()->length(); ++i) {
1453     LiveRange* range = live_ranges()->at(i);
1454     if (range != NULL) {
1455       if (range->Covers(position) &&
1456           range->HasRegisterAssigned() &&
1457           range->TopLevel()->HasAllocatedSpillOperand()) {
1458         int reg_index = range->assigned_register();
1459         LOperand* spill_operand = range->TopLevel()->GetSpillOperand();
1460         if (range->IsDouble()) {
1461           instruction->MarkSpilledDoubleRegister(reg_index, spill_operand);
1462         } else {
1463           instruction->MarkSpilledRegister(reg_index, spill_operand);
1464         }
1465       }
1466     }
1467   }
1468 }
1469 
1470 
1471 void LAllocator::AllocateGeneralRegisters() {
1472   HPhase phase("L_Allocate general registers", this);
1473   num_registers_ = Register::kNumAllocatableRegisters;
1474   AllocateRegisters();
1475 }
1476 
1477 
1478 void LAllocator::AllocateDoubleRegisters() {
1479   HPhase phase("L_Allocate double registers", this);
1480   num_registers_ = DoubleRegister::kNumAllocatableRegisters;
1481   mode_ = DOUBLE_REGISTERS;
1482   AllocateRegisters();
1483 }
1484 
1485 
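// Linear-scan allocation for the register kind selected by mode_: collect
// the matching live ranges as "unhandled" (sorted so the next range to be
// processed is at the end of the list), seed "inactive" with the fixed
// ranges of this kind, then repeatedly pop the unhandled range with the
// smallest start, retire or reclassify active/inactive ranges at that
// position, and give the range a register via TryAllocateFreeReg or, failing
// that, AllocateBlockedReg.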
1486 void LAllocator::AllocateRegisters() {
1487   ASSERT(unhandled_live_ranges_.is_empty());
1488 
1489   for (int i = 0; i < live_ranges_.length(); ++i) {
1490     if (live_ranges_[i] != NULL) {
1491       if (RequiredRegisterKind(live_ranges_[i]->id()) == mode_) {
1492         AddToUnhandledUnsorted(live_ranges_[i]);
1493       }
1494     }
1495   }
1496   SortUnhandled();
1497   ASSERT(UnhandledIsSorted());
1498 
1499   ASSERT(reusable_slots_.is_empty());
1500   ASSERT(active_live_ranges_.is_empty());
1501   ASSERT(inactive_live_ranges_.is_empty());
1502 
1503   if (mode_ == DOUBLE_REGISTERS) {
1504     for (int i = 0; i < fixed_double_live_ranges_.length(); ++i) {
1505       LiveRange* current = fixed_double_live_ranges_.at(i);
1506       if (current != NULL) {
1507         AddToInactive(current);
1508       }
1509     }
1510   } else {
1511     for (int i = 0; i < fixed_live_ranges_.length(); ++i) {
1512       LiveRange* current = fixed_live_ranges_.at(i);
1513       if (current != NULL) {
1514         AddToInactive(current);
1515       }
1516     }
1517   }
1518 
1519   while (!unhandled_live_ranges_.is_empty()) {
1520     ASSERT(UnhandledIsSorted());
1521     LiveRange* current = unhandled_live_ranges_.RemoveLast();
1522     ASSERT(UnhandledIsSorted());
1523     LifetimePosition position = current->Start();
1524     TraceAlloc("Processing interval %d start=%d\n",
1525                current->id(),
1526                position.Value());
1527 
1528     if (current->HasAllocatedSpillOperand()) {
1529       TraceAlloc("Live range %d already has a spill operand\n", current->id());
1530       LifetimePosition next_pos = position;
1531       if (IsGapAt(next_pos.InstructionIndex())) {
1532         next_pos = next_pos.NextInstruction();
1533       }
1534       UsePosition* pos = current->NextUsePositionRegisterIsBeneficial(next_pos);
1535       // If the range already has a spill operand and it doesn't need a
1536       // register immediately, split it and spill the first part of the range.
1537       if (pos == NULL) {
1538         Spill(current);
1539         continue;
1540       } else if (pos->pos().Value() >
1541                  current->Start().NextInstruction().Value()) {
1542         // Do not spill the live range eagerly if the use position that can
1543         // benefit from the register is too close to the start of the live range.
1544         SpillBetween(current, current->Start(), pos->pos());
1545         if (!AllocationOk()) return;
1546         ASSERT(UnhandledIsSorted());
1547         continue;
1548       }
1549     }
1550 
1551     for (int i = 0; i < active_live_ranges_.length(); ++i) {
1552       LiveRange* cur_active = active_live_ranges_.at(i);
1553       if (cur_active->End().Value() <= position.Value()) {
1554         ActiveToHandled(cur_active);
1555         --i;  // The live range was removed from the list of active live ranges.
1556       } else if (!cur_active->Covers(position)) {
1557         ActiveToInactive(cur_active);
1558         --i;  // The live range was removed from the list of active live ranges.
1559       }
1560     }
1561 
1562     for (int i = 0; i < inactive_live_ranges_.length(); ++i) {
1563       LiveRange* cur_inactive = inactive_live_ranges_.at(i);
1564       if (cur_inactive->End().Value() <= position.Value()) {
1565         InactiveToHandled(cur_inactive);
1566         --i;  // Live range was removed from the list of inactive live ranges.
1567       } else if (cur_inactive->Covers(position)) {
1568         InactiveToActive(cur_inactive);
1569         --i;  // Live range was removed from the list of inactive live ranges.
1570       }
1571     }
1572 
1573     ASSERT(!current->HasRegisterAssigned() && !current->IsSpilled());
1574 
1575     bool result = TryAllocateFreeReg(current);
1576     if (!AllocationOk()) return;
1577 
1578     if (!result) AllocateBlockedReg(current);
1579     if (!AllocationOk()) return;
1580 
1581     if (current->HasRegisterAssigned()) {
1582       AddToActive(current);
1583     }
1584   }
1585 
1586   reusable_slots_.Rewind(0);
1587   active_live_ranges_.Rewind(0);
1588   inactive_live_ranges_.Rewind(0);
1589 }
1590 
1591 
1592 const char* LAllocator::RegisterName(int allocation_index) {
1593   if (mode_ == GENERAL_REGISTERS) {
1594     return Register::AllocationIndexToString(allocation_index);
1595   } else {
1596     return DoubleRegister::AllocationIndexToString(allocation_index);
1597   }
1598 }
1599 
1600 
1601 void LAllocator::TraceAlloc(const char* msg, ...) {
1602   if (FLAG_trace_alloc) {
1603     va_list arguments;
1604     va_start(arguments, msg);
1605     OS::VPrint(msg, arguments);
1606     va_end(arguments);
1607   }
1608 }
1609 
1610 
1611 bool LAllocator::HasTaggedValue(int virtual_register) const {
1612   HValue* value = graph_->LookupValue(virtual_register);
1613   if (value == NULL) return false;
1614   return value->representation().IsTagged();
1615 }
1616 
1617 
1618 RegisterKind LAllocator::RequiredRegisterKind(int virtual_register) const {
1619   if (virtual_register < first_artificial_register_) {
1620     HValue* value = graph_->LookupValue(virtual_register);
1621     if (value != NULL && value->representation().IsDouble()) {
1622       return DOUBLE_REGISTERS;
1623     }
1624   } else if (double_artificial_registers_.Contains(
1625       virtual_register - first_artificial_register_)) {
1626     return DOUBLE_REGISTERS;
1627   }
1628 
1629   return GENERAL_REGISTERS;
1630 }
1631 
1632 
1633 void LAllocator::AddToActive(LiveRange* range) {
1634   TraceAlloc("Add live range %d to active\n", range->id());
1635   active_live_ranges_.Add(range);
1636 }
1637 
1638 
1639 void LAllocator::AddToInactive(LiveRange* range) {
1640   TraceAlloc("Add live range %d to inactive\n", range->id());
1641   inactive_live_ranges_.Add(range);
1642 }
1643 
1644 
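// The unhandled list is kept sorted so that the range to be processed next
// (the one with the smallest start position) sits at the end of the list.
// AddToUnhandledSorted preserves that order by scanning from the end, while
// AddToUnhandledUnsorted relies on a later call to SortUnhandled.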
1645 void LAllocator::AddToUnhandledSorted(LiveRange* range) {
1646   if (range == NULL || range->IsEmpty()) return;
1647   ASSERT(!range->HasRegisterAssigned() && !range->IsSpilled());
1648   for (int i = unhandled_live_ranges_.length() - 1; i >= 0; --i) {
1649     LiveRange* cur_range = unhandled_live_ranges_.at(i);
1650     if (range->ShouldBeAllocatedBefore(cur_range)) {
1651       TraceAlloc("Add live range %d to unhandled at %d\n", range->id(), i + 1);
1652       unhandled_live_ranges_.InsertAt(i + 1, range);
1653       ASSERT(UnhandledIsSorted());
1654       return;
1655     }
1656   }
1657   TraceAlloc("Add live range %d to unhandled at start\n", range->id());
1658   unhandled_live_ranges_.InsertAt(0, range);
1659   ASSERT(UnhandledIsSorted());
1660 }
1661 
1662 
1663 void LAllocator::AddToUnhandledUnsorted(LiveRange* range) {
1664   if (range == NULL || range->IsEmpty()) return;
1665   ASSERT(!range->HasRegisterAssigned() && !range->IsSpilled());
1666   TraceAlloc("Add live range %d to unhandled unsorted at end\n", range->id());
1667   unhandled_live_ranges_.Add(range);
1668 }
1669 
1670 
1671 static int UnhandledSortHelper(LiveRange* const* a, LiveRange* const* b) {
1672   ASSERT(!(*a)->ShouldBeAllocatedBefore(*b) ||
1673          !(*b)->ShouldBeAllocatedBefore(*a));
1674   if ((*a)->ShouldBeAllocatedBefore(*b)) return 1;
1675   if ((*b)->ShouldBeAllocatedBefore(*a)) return -1;
1676   return (*a)->id() - (*b)->id();
1677 }
1678 
1679 
1680 // Sort the unhandled live ranges so that the ranges to be processed first are
1681 // at the end of the array list.  This is convenient for the register allocation
1682 // algorithm because it is efficient to remove elements from the end.
1683 void LAllocator::SortUnhandled() {
1684   TraceAlloc("Sort unhandled\n");
1685   unhandled_live_ranges_.Sort(&UnhandledSortHelper);
1686 }
1687 
1688 
1689 bool LAllocator::UnhandledIsSorted() {
1690   int len = unhandled_live_ranges_.length();
1691   for (int i = 1; i < len; i++) {
1692     LiveRange* a = unhandled_live_ranges_.at(i - 1);
1693     LiveRange* b = unhandled_live_ranges_.at(i);
1694     if (a->Start().Value() < b->Start().Value()) return false;
1695   }
1696   return true;
1697 }
1698 
1699 
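// Spill slot reuse: when the last part of a range whose top level owns a
// stack slot becomes handled, the range is remembered in reusable_slots_ so
// that a later range starting after it has ended can take over the same slot
// (see TryReuseSpillSlot below).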
1700 void LAllocator::FreeSpillSlot(LiveRange* range) {
1701   // Check that we are the last range.
1702   if (range->next() != NULL) return;
1703 
1704   if (!range->TopLevel()->HasAllocatedSpillOperand()) return;
1705 
1706   int index = range->TopLevel()->GetSpillOperand()->index();
1707   if (index >= 0) {
1708     reusable_slots_.Add(range);
1709   }
1710 }
1711 
1712 
1713 LOperand* LAllocator::TryReuseSpillSlot(LiveRange* range) {
1714   if (reusable_slots_.is_empty()) return NULL;
1715   if (reusable_slots_.first()->End().Value() >
1716       range->TopLevel()->Start().Value()) {
1717     return NULL;
1718   }
1719   LOperand* result = reusable_slots_.first()->TopLevel()->GetSpillOperand();
1720   reusable_slots_.Remove(0);
1721   return result;
1722 }
1723 
1724 
1725 void LAllocator::ActiveToHandled(LiveRange* range) {
1726   ASSERT(active_live_ranges_.Contains(range));
1727   active_live_ranges_.RemoveElement(range);
1728   TraceAlloc("Moving live range %d from active to handled\n", range->id());
1729   FreeSpillSlot(range);
1730 }
1731 
1732 
1733 void LAllocator::ActiveToInactive(LiveRange* range) {
1734   ASSERT(active_live_ranges_.Contains(range));
1735   active_live_ranges_.RemoveElement(range);
1736   inactive_live_ranges_.Add(range);
1737   TraceAlloc("Moving live range %d from active to inactive\n", range->id());
1738 }
1739 
1740 
1741 void LAllocator::InactiveToHandled(LiveRange* range) {
1742   ASSERT(inactive_live_ranges_.Contains(range));
1743   inactive_live_ranges_.RemoveElement(range);
1744   TraceAlloc("Moving live range %d from inactive to handled\n", range->id());
1745   FreeSpillSlot(range);
1746 }
1747 
1748 
1749 void LAllocator::InactiveToActive(LiveRange* range) {
1750   ASSERT(inactive_live_ranges_.Contains(range));
1751   inactive_live_ranges_.RemoveElement(range);
1752   active_live_ranges_.Add(range);
1753   TraceAlloc("Moving live range %d from inactive to active\n", range->id());
1754 }
1755 
1756 
1757 // TryAllocateFreeReg and AllocateBlockedReg assume this
1758 // when allocating local arrays.
1759 STATIC_ASSERT(DoubleRegister::kNumAllocatableRegisters >=
1760               Register::kNumAllocatableRegisters);
1761 
1762 
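// Try to give |current| a register that stays free for as much of its
// lifetime as possible.  free_until_pos[r] is the position up to which
// register r is free with respect to |current|: 0 for registers held by
// active ranges, and the first intersection point for inactive ones.  A
// register hinted at by the first use is preferred if it is free until the
// end of the range; otherwise the register free the longest is chosen and
// |current| is split where that register becomes blocked.  Returns false
// when every register is already taken at the range start.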
1763 bool LAllocator::TryAllocateFreeReg(LiveRange* current) {
1764   LifetimePosition free_until_pos[DoubleRegister::kNumAllocatableRegisters];
1765 
1766   for (int i = 0; i < DoubleRegister::kNumAllocatableRegisters; i++) {
1767     free_until_pos[i] = LifetimePosition::MaxPosition();
1768   }
1769 
1770   for (int i = 0; i < active_live_ranges_.length(); ++i) {
1771     LiveRange* cur_active = active_live_ranges_.at(i);
1772     free_until_pos[cur_active->assigned_register()] =
1773         LifetimePosition::FromInstructionIndex(0);
1774   }
1775 
1776   for (int i = 0; i < inactive_live_ranges_.length(); ++i) {
1777     LiveRange* cur_inactive = inactive_live_ranges_.at(i);
1778     ASSERT(cur_inactive->End().Value() > current->Start().Value());
1779     LifetimePosition next_intersection =
1780         cur_inactive->FirstIntersection(current);
1781     if (!next_intersection.IsValid()) continue;
1782     int cur_reg = cur_inactive->assigned_register();
1783     free_until_pos[cur_reg] = Min(free_until_pos[cur_reg], next_intersection);
1784   }
1785 
1786   UsePosition* hinted_use = current->FirstPosWithHint();
1787   if (hinted_use != NULL) {
1788     LOperand* hint = hinted_use->hint();
1789     if (hint->IsRegister() || hint->IsDoubleRegister()) {
1790       int register_index = hint->index();
1791       TraceAlloc(
1792           "Found reg hint %s (free until [%d) for live range %d (end %d[).\n",
1793           RegisterName(register_index),
1794           free_until_pos[register_index].Value(),
1795           current->id(),
1796           current->End().Value());
1797 
1798       // The desired register is free until the end of the current live range.
1799       if (free_until_pos[register_index].Value() >= current->End().Value()) {
1800         TraceAlloc("Assigning preferred reg %s to live range %d\n",
1801                    RegisterName(register_index),
1802                    current->id());
1803         current->set_assigned_register(register_index, mode_, zone_);
1804         return true;
1805       }
1806     }
1807   }
1808 
1809   // Find the register which stays free for the longest time.
1810   int reg = 0;
1811   for (int i = 1; i < RegisterCount(); ++i) {
1812     if (free_until_pos[i].Value() > free_until_pos[reg].Value()) {
1813       reg = i;
1814     }
1815   }
1816 
1817   LifetimePosition pos = free_until_pos[reg];
1818 
1819   if (pos.Value() <= current->Start().Value()) {
1820     // All registers are blocked.
1821     return false;
1822   }
1823 
1824   if (pos.Value() < current->End().Value()) {
1825     // Register reg is available at the range start but becomes blocked before
1826     // the range end. Split current at position where it becomes blocked.
1827     LiveRange* tail = SplitRangeAt(current, pos);
1828     if (!AllocationOk()) return false;
1829     AddToUnhandledSorted(tail);
1830   }
1831 
1832 
1833   // Register reg is available at the range start and is free until
1834   // the range end.
1835   ASSERT(pos.Value() >= current->End().Value());
1836   TraceAlloc("Assigning free reg %s to live range %d\n",
1837              RegisterName(reg),
1838              current->id());
1839   current->set_assigned_register(reg, mode_, zone_);
1840 
1841   return true;
1842 }
1843 
1844 
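// Called when no register is free at the start of |current|.  For every
// register, use_pos is the next position at which its current holder really
// needs it, and block_pos is where a fixed range blocks it outright.  Pick
// the register whose holder can do without it the longest; if even that is
// before the first use of |current| that requires a register, spill the head
// of |current| instead.  Otherwise assign the register (splitting |current|
// before block_pos if needed) and evict the intersecting holders.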
1845 void LAllocator::AllocateBlockedReg(LiveRange* current) {
1846   UsePosition* register_use = current->NextRegisterPosition(current->Start());
1847   if (register_use == NULL) {
1848     // There is no use in the current live range that requires a register.
1849     // We can just spill it.
1850     Spill(current);
1851     return;
1852   }
1853 
1854 
1855   LifetimePosition use_pos[DoubleRegister::kNumAllocatableRegisters];
1856   LifetimePosition block_pos[DoubleRegister::kNumAllocatableRegisters];
1857 
1858   for (int i = 0; i < DoubleRegister::kNumAllocatableRegisters; i++) {
1859     use_pos[i] = block_pos[i] = LifetimePosition::MaxPosition();
1860   }
1861 
1862   for (int i = 0; i < active_live_ranges_.length(); ++i) {
1863     LiveRange* range = active_live_ranges_[i];
1864     int cur_reg = range->assigned_register();
1865     if (range->IsFixed() || !range->CanBeSpilled(current->Start())) {
1866       block_pos[cur_reg] = use_pos[cur_reg] =
1867           LifetimePosition::FromInstructionIndex(0);
1868     } else {
1869       UsePosition* next_use = range->NextUsePositionRegisterIsBeneficial(
1870           current->Start());
1871       if (next_use == NULL) {
1872         use_pos[cur_reg] = range->End();
1873       } else {
1874         use_pos[cur_reg] = next_use->pos();
1875       }
1876     }
1877   }
1878 
1879   for (int i = 0; i < inactive_live_ranges_.length(); ++i) {
1880     LiveRange* range = inactive_live_ranges_.at(i);
1881     ASSERT(range->End().Value() > current->Start().Value());
1882     LifetimePosition next_intersection = range->FirstIntersection(current);
1883     if (!next_intersection.IsValid()) continue;
1884     int cur_reg = range->assigned_register();
1885     if (range->IsFixed()) {
1886       block_pos[cur_reg] = Min(block_pos[cur_reg], next_intersection);
1887       use_pos[cur_reg] = Min(block_pos[cur_reg], use_pos[cur_reg]);
1888     } else {
1889       use_pos[cur_reg] = Min(use_pos[cur_reg], next_intersection);
1890     }
1891   }
1892 
1893   int reg = 0;
1894   for (int i = 1; i < RegisterCount(); ++i) {
1895     if (use_pos[i].Value() > use_pos[reg].Value()) {
1896       reg = i;
1897     }
1898   }
1899 
1900   LifetimePosition pos = use_pos[reg];
1901 
1902   if (pos.Value() < register_use->pos().Value()) {
1903     // All registers are blocked before the first use that requires a register.
1904     // Spill starting part of live range up to that use.
1905     //
1906     // Corner case: the first use position is equal to the start of the range.
1907     // In this case we have nothing to spill and SpillBetween will just return
1908     // this range to the list of unhandled ones, which would lead to an
1909     // infinite loop.
1910     ASSERT(current->Start().Value() < register_use->pos().Value());
1911     SpillBetween(current, current->Start(), register_use->pos());
1912     return;
1913   }
1914 
1915   if (block_pos[reg].Value() < current->End().Value()) {
1916     // Register becomes blocked before the current range end. Split before that
1917     // position.
1918     LiveRange* tail = SplitBetween(current,
1919                                    current->Start(),
1920                                    block_pos[reg].InstructionStart());
1921     AddToUnhandledSorted(tail);
1922   }
1923 
1924   // Register reg is not blocked for the whole range.
1925   ASSERT(block_pos[reg].Value() >= current->End().Value());
1926   TraceAlloc("Assigning blocked reg %s to live range %d\n",
1927              RegisterName(reg),
1928              current->id());
1929   current->set_assigned_register(reg, mode_, zone_);
1930 
1931   // This register was not free. Thus we need to find and spill
1932   // parts of active and inactive live regions that use the same register
1933   // at the same lifetime positions as current.
1934   SplitAndSpillIntersecting(current);
1935 }
1936 
1937 
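// |current| has just been assigned a register that other active or inactive
// ranges also hold.  Split those ranges at |current|'s start and spill the
// conflicting parts (up to their next use that requires a register, if any),
// moving them out of the active/inactive sets.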
1938 void LAllocator::SplitAndSpillIntersecting(LiveRange* current) {
1939   ASSERT(current->HasRegisterAssigned());
1940   int reg = current->assigned_register();
1941   LifetimePosition split_pos = current->Start();
1942   for (int i = 0; i < active_live_ranges_.length(); ++i) {
1943     LiveRange* range = active_live_ranges_[i];
1944     if (range->assigned_register() == reg) {
1945       UsePosition* next_pos = range->NextRegisterPosition(current->Start());
1946       if (next_pos == NULL) {
1947         SpillAfter(range, split_pos);
1948       } else {
1949         SpillBetween(range, split_pos, next_pos->pos());
1950       }
1951       ActiveToHandled(range);
1952       --i;
1953     }
1954   }
1955 
1956   for (int i = 0; i < inactive_live_ranges_.length(); ++i) {
1957     LiveRange* range = inactive_live_ranges_[i];
1958     ASSERT(range->End().Value() > current->Start().Value());
1959     if (range->assigned_register() == reg && !range->IsFixed()) {
1960       LifetimePosition next_intersection = range->FirstIntersection(current);
1961       if (next_intersection.IsValid()) {
1962         UsePosition* next_pos = range->NextRegisterPosition(current->Start());
1963         if (next_pos == NULL) {
1964           SpillAfter(range, split_pos);
1965         } else {
1966           next_intersection = Min(next_intersection, next_pos->pos());
1967           SpillBetween(range, split_pos, next_intersection);
1968         }
1969         InactiveToHandled(range);
1970         --i;
1971       }
1972     }
1973   }
1974 }
1975 
1976 
1977 bool LAllocator::IsBlockBoundary(LifetimePosition pos) {
1978   return pos.IsInstructionStart() &&
1979       InstructionAt(pos.InstructionIndex())->IsLabel();
1980 }
1981 
1982 
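// Split |range| at |pos| and return the new tail range, which is given a
// fresh virtual register.  If |pos| is not strictly after the start of
// |range|, the range is returned unchanged.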
1983 LiveRange* LAllocator::SplitRangeAt(LiveRange* range, LifetimePosition pos) {
1984   ASSERT(!range->IsFixed());
1985   TraceAlloc("Splitting live range %d at %d\n", range->id(), pos.Value());
1986 
1987   if (pos.Value() <= range->Start().Value()) return range;
1988 
1989   // We can't properly connect live ranges if the split occurred at the end
1990   // of a control instruction.
1991   ASSERT(pos.IsInstructionStart() ||
1992          !chunk_->instructions()->at(pos.InstructionIndex())->IsControl());
1993 
1994   LiveRange* result = LiveRangeFor(GetVirtualRegister());
1995   if (!AllocationOk()) return NULL;
1996   range->SplitAt(pos, result, zone_);
1997   return result;
1998 }
1999 
2000 
2001 LiveRange* LAllocator::SplitBetween(LiveRange* range,
2002                                     LifetimePosition start,
2003                                     LifetimePosition end) {
2004   ASSERT(!range->IsFixed());
2005   TraceAlloc("Splitting live range %d in position between [%d, %d]\n",
2006              range->id(),
2007              start.Value(),
2008              end.Value());
2009 
2010   LifetimePosition split_pos = FindOptimalSplitPos(start, end);
2011   ASSERT(split_pos.Value() >= start.Value());
2012   return SplitRangeAt(range, split_pos);
2013 }
2014 
2015 
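// Choose a split position in [start, end].  Splitting as late as possible is
// preferred, but when start and end lie in different blocks the position is
// hoisted up to the first instruction of the outermost loop header found
// between them, so that the split does not land deep inside a loop.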
2016 LifetimePosition LAllocator::FindOptimalSplitPos(LifetimePosition start,
2017                                                  LifetimePosition end) {
2018   int start_instr = start.InstructionIndex();
2019   int end_instr = end.InstructionIndex();
2020   ASSERT(start_instr <= end_instr);
2021 
2022   // We have no choice: start and end are within the same instruction.
2023   if (start_instr == end_instr) return end;
2024 
2025   HBasicBlock* start_block = GetBlock(start);
2026   HBasicBlock* end_block = GetBlock(end);
2027 
2028   if (end_block == start_block) {
2029     // The interval is split in the same basic block. Split at the latest
2030     // possible position.
2031     return end;
2032   }
2033 
2034   HBasicBlock* block = end_block;
2035   // Find header of outermost loop.
2036   while (block->parent_loop_header() != NULL &&
2037       block->parent_loop_header()->block_id() > start_block->block_id()) {
2038     block = block->parent_loop_header();
2039   }
2040 
2041   // We did not find any suitable outer loop. Split at the latest possible
2042   // position unless end_block is a loop header itself.
2043   if (block == end_block && !end_block->IsLoopHeader()) return end;
2044 
2045   return LifetimePosition::FromInstructionIndex(
2046       block->first_instruction_index());
2047 }
2048 
2049 
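// SpillAfter spills everything from |pos| to the end of the range.
// SpillBetween spills only the part overlapping [start, end[ and puts the
// remainder back on the unhandled list.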
2050 void LAllocator::SpillAfter(LiveRange* range, LifetimePosition pos) {
2051   LiveRange* second_part = SplitRangeAt(range, pos);
2052   if (!AllocationOk()) return;
2053   Spill(second_part);
2054 }
2055 
2056 
2057 void LAllocator::SpillBetween(LiveRange* range,
2058                               LifetimePosition start,
2059                               LifetimePosition end) {
2060   ASSERT(start.Value() < end.Value());
2061   LiveRange* second_part = SplitRangeAt(range, start);
2062   if (!AllocationOk()) return;
2063 
2064   if (second_part->Start().Value() < end.Value()) {
2065     // The split result intersects with [start, end[.
2066     // Split it at position between ]start+1, end[, spill the middle part
2067     // and put the rest to unhandled.
2068     LiveRange* third_part = SplitBetween(
2069         second_part,
2070         second_part->Start().InstructionEnd(),
2071         end.PrevInstruction().InstructionEnd());
2072 
2073     ASSERT(third_part != second_part);
2074 
2075     Spill(second_part);
2076     AddToUnhandledSorted(third_part);
2077   } else {
2078     // The split result does not intersect with [start, end[.
2079     // Nothing to spill. Just put it to unhandled as whole.
2080     AddToUnhandledSorted(second_part);
2081   }
2082 }
2083 
2084 
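// Assign a stack slot to the top-level range (reusing a slot from
// reusable_slots_ when possible) and mark this part of the range as spilled.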
2085 void LAllocator::Spill(LiveRange* range) {
2086   ASSERT(!range->IsSpilled());
2087   TraceAlloc("Spilling live range %d\n", range->id());
2088   LiveRange* first = range->TopLevel();
2089 
2090   if (!first->HasAllocatedSpillOperand()) {
2091     LOperand* op = TryReuseSpillSlot(range);
2092     if (op == NULL) op = chunk_->GetNextSpillSlot(mode_ == DOUBLE_REGISTERS);
2093     first->SetSpillOperand(op);
2094   }
2095   range->MakeSpilled(zone_);
2096 }
2097 
2098 
2099 int LAllocator::RegisterCount() const {
2100   return num_registers_;
2101 }
2102 
2103 
2104 #ifdef DEBUG
2105 
2106 
2107 void LAllocator::Verify() const {
2108   for (int i = 0; i < live_ranges()->length(); ++i) {
2109     LiveRange* current = live_ranges()->at(i);
2110     if (current != NULL) current->Verify();
2111   }
2112 }
2113 
2114 
2115 #endif
2116 
2117 
2118 } }  // namespace v8::internal
2119