1 // Copyright 2010 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are
4 // met:
5 //
6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided
11 // with the distribution.
12 // * Neither the name of Google Inc. nor the names of its
13 // contributors may be used to endorse or promote products derived
14 // from this software without specific prior written permission.
15 //
16 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27
28 #include "v8.h"
29 #include "lithium-allocator-inl.h"
30
31 #include "hydrogen.h"
32 #include "string-stream.h"
33
34 #if V8_TARGET_ARCH_IA32
35 #include "ia32/lithium-ia32.h"
36 #elif V8_TARGET_ARCH_X64
37 #include "x64/lithium-x64.h"
38 #elif V8_TARGET_ARCH_ARM
39 #include "arm/lithium-arm.h"
40 #elif V8_TARGET_ARCH_MIPS
41 #include "mips/lithium-mips.h"
42 #else
43 #error "Unknown architecture."
44 #endif
45
46 namespace v8 {
47 namespace internal {
48
49
50 #define DEFINE_OPERAND_CACHE(name, type) \
51 name name::cache[name::kNumCachedOperands]; \
52 void name::SetupCache() { \
53 for (int i = 0; i < kNumCachedOperands; i++) { \
54 cache[i].ConvertTo(type, i); \
55 } \
56 } \
57 static bool name##_initialize() { \
58 name::SetupCache(); \
59 return true; \
60 } \
61 static bool name##_cache_initialized = name##_initialize();
62
DEFINE_OPERAND_CACHE(LConstantOperand,CONSTANT_OPERAND)63 DEFINE_OPERAND_CACHE(LConstantOperand, CONSTANT_OPERAND)
64 DEFINE_OPERAND_CACHE(LStackSlot, STACK_SLOT)
65 DEFINE_OPERAND_CACHE(LDoubleStackSlot, DOUBLE_STACK_SLOT)
66 DEFINE_OPERAND_CACHE(LRegister, REGISTER)
67 DEFINE_OPERAND_CACHE(LDoubleRegister, DOUBLE_REGISTER)
68
69 #undef DEFINE_OPERAND_CACHE
70
71
72 static inline LifetimePosition Min(LifetimePosition a, LifetimePosition b) {
73 return a.Value() < b.Value() ? a : b;
74 }
75
76
Max(LifetimePosition a,LifetimePosition b)77 static inline LifetimePosition Max(LifetimePosition a, LifetimePosition b) {
78 return a.Value() > b.Value() ? a : b;
79 }
80
81
UsePosition(LifetimePosition pos,LOperand * operand)82 UsePosition::UsePosition(LifetimePosition pos, LOperand* operand)
83 : operand_(operand),
84 hint_(NULL),
85 pos_(pos),
86 next_(NULL),
87 requires_reg_(false),
88 register_beneficial_(true) {
89 if (operand_ != NULL && operand_->IsUnallocated()) {
90 LUnallocated* unalloc = LUnallocated::cast(operand_);
91 requires_reg_ = unalloc->HasRegisterPolicy();
92 register_beneficial_ = !unalloc->HasAnyPolicy();
93 }
94 ASSERT(pos_.IsValid());
95 }
96
97
HasHint() const98 bool UsePosition::HasHint() const {
99 return hint_ != NULL && !hint_->IsUnallocated();
100 }
101
102
RequiresRegister() const103 bool UsePosition::RequiresRegister() const {
104 return requires_reg_;
105 }
106
107
RegisterIsBeneficial() const108 bool UsePosition::RegisterIsBeneficial() const {
109 return register_beneficial_;
110 }
111
112
SplitAt(LifetimePosition pos)113 void UseInterval::SplitAt(LifetimePosition pos) {
114 ASSERT(Contains(pos) && pos.Value() != start().Value());
115 UseInterval* after = new UseInterval(pos, end_);
116 after->next_ = next_;
117 next_ = after;
118 end_ = pos;
119 }
120
121
122 #ifdef DEBUG
123
124
Verify() const125 void LiveRange::Verify() const {
126 UsePosition* cur = first_pos_;
127 while (cur != NULL) {
128 ASSERT(Start().Value() <= cur->pos().Value() &&
129 cur->pos().Value() <= End().Value());
130 cur = cur->next();
131 }
132 }
133
134
HasOverlap(UseInterval * target) const135 bool LiveRange::HasOverlap(UseInterval* target) const {
136 UseInterval* current_interval = first_interval_;
137 while (current_interval != NULL) {
138 // Intervals overlap if the start of one is contained in the other.
139 if (current_interval->Contains(target->start()) ||
140 target->Contains(current_interval->start())) {
141 return true;
142 }
143 current_interval = current_interval->next();
144 }
145 return false;
146 }
147
148
149 #endif
150
151
LiveRange(int id)152 LiveRange::LiveRange(int id)
153 : id_(id),
154 spilled_(false),
155 assigned_register_(kInvalidAssignment),
156 assigned_register_kind_(NONE),
157 last_interval_(NULL),
158 first_interval_(NULL),
159 first_pos_(NULL),
160 parent_(NULL),
161 next_(NULL),
162 current_interval_(NULL),
163 last_processed_use_(NULL),
164 spill_start_index_(kMaxInt) {
165 spill_operand_ = new LUnallocated(LUnallocated::IGNORE);
166 }
167
168
set_assigned_register(int reg,RegisterKind register_kind)169 void LiveRange::set_assigned_register(int reg, RegisterKind register_kind) {
170 ASSERT(!HasRegisterAssigned() && !IsSpilled());
171 assigned_register_ = reg;
172 assigned_register_kind_ = register_kind;
173 ConvertOperands();
174 }
175
176
MakeSpilled()177 void LiveRange::MakeSpilled() {
178 ASSERT(!IsSpilled());
179 ASSERT(TopLevel()->HasAllocatedSpillOperand());
180 spilled_ = true;
181 assigned_register_ = kInvalidAssignment;
182 ConvertOperands();
183 }
184
185
HasAllocatedSpillOperand() const186 bool LiveRange::HasAllocatedSpillOperand() const {
187 return spill_operand_ != NULL && !spill_operand_->IsUnallocated();
188 }
189
190
SetSpillOperand(LOperand * operand)191 void LiveRange::SetSpillOperand(LOperand* operand) {
192 ASSERT(!operand->IsUnallocated());
193 ASSERT(spill_operand_ != NULL);
194 ASSERT(spill_operand_->IsUnallocated());
195 spill_operand_->ConvertTo(operand->kind(), operand->index());
196 }
197
198
NextUsePosition(LifetimePosition start)199 UsePosition* LiveRange::NextUsePosition(LifetimePosition start) {
200 UsePosition* use_pos = last_processed_use_;
201 if (use_pos == NULL) use_pos = first_pos();
202 while (use_pos != NULL && use_pos->pos().Value() < start.Value()) {
203 use_pos = use_pos->next();
204 }
205 last_processed_use_ = use_pos;
206 return use_pos;
207 }
208
209
NextUsePositionRegisterIsBeneficial(LifetimePosition start)210 UsePosition* LiveRange::NextUsePositionRegisterIsBeneficial(
211 LifetimePosition start) {
212 UsePosition* pos = NextUsePosition(start);
213 while (pos != NULL && !pos->RegisterIsBeneficial()) {
214 pos = pos->next();
215 }
216 return pos;
217 }
218
219
NextRegisterPosition(LifetimePosition start)220 UsePosition* LiveRange::NextRegisterPosition(LifetimePosition start) {
221 UsePosition* pos = NextUsePosition(start);
222 while (pos != NULL && !pos->RequiresRegister()) {
223 pos = pos->next();
224 }
225 return pos;
226 }
227
228
CanBeSpilled(LifetimePosition pos)229 bool LiveRange::CanBeSpilled(LifetimePosition pos) {
230 // TODO(kmillikin): Comment. Now.
231 if (pos.Value() <= Start().Value() && HasRegisterAssigned()) return false;
232
233 // We cannot spill a live range that has a use requiring a register
234 // at the current or the immediate next position.
235 UsePosition* use_pos = NextRegisterPosition(pos);
236 if (use_pos == NULL) return true;
237 return use_pos->pos().Value() > pos.NextInstruction().Value();
238 }
239
240
FirstPosWithHint() const241 UsePosition* LiveRange::FirstPosWithHint() const {
242 UsePosition* pos = first_pos_;
243 while (pos != NULL && !pos->HasHint()) pos = pos->next();
244 return pos;
245 }
246
247
CreateAssignedOperand()248 LOperand* LiveRange::CreateAssignedOperand() {
249 LOperand* op = NULL;
250 if (HasRegisterAssigned()) {
251 ASSERT(!IsSpilled());
252 if (IsDouble()) {
253 op = LDoubleRegister::Create(assigned_register());
254 } else {
255 op = LRegister::Create(assigned_register());
256 }
257 } else if (IsSpilled()) {
258 ASSERT(!HasRegisterAssigned());
259 op = TopLevel()->GetSpillOperand();
260 ASSERT(!op->IsUnallocated());
261 } else {
262 LUnallocated* unalloc = new LUnallocated(LUnallocated::NONE);
263 unalloc->set_virtual_register(id_);
264 op = unalloc;
265 }
266 return op;
267 }
268
269
FirstSearchIntervalForPosition(LifetimePosition position) const270 UseInterval* LiveRange::FirstSearchIntervalForPosition(
271 LifetimePosition position) const {
272 if (current_interval_ == NULL) return first_interval_;
273 if (current_interval_->start().Value() > position.Value()) {
274 current_interval_ = NULL;
275 return first_interval_;
276 }
277 return current_interval_;
278 }
279
280
AdvanceLastProcessedMarker(UseInterval * to_start_of,LifetimePosition but_not_past) const281 void LiveRange::AdvanceLastProcessedMarker(
282 UseInterval* to_start_of, LifetimePosition but_not_past) const {
283 if (to_start_of == NULL) return;
284 if (to_start_of->start().Value() > but_not_past.Value()) return;
285 LifetimePosition start =
286 current_interval_ == NULL ? LifetimePosition::Invalid()
287 : current_interval_->start();
288 if (to_start_of->start().Value() > start.Value()) {
289 current_interval_ = to_start_of;
290 }
291 }
292
293
SplitAt(LifetimePosition position,LiveRange * result)294 void LiveRange::SplitAt(LifetimePosition position, LiveRange* result) {
295 ASSERT(Start().Value() < position.Value());
296 ASSERT(result->IsEmpty());
297 // Find the last interval that ends before the position. If the
298 // position is contained in one of the intervals in the chain, we
299 // split that interval and use the first part.
300 UseInterval* current = FirstSearchIntervalForPosition(position);
301
302 // If the split position coincides with the beginning of a use interval
303 // we need to split use positons in a special way.
304 bool split_at_start = false;
305
306 while (current != NULL) {
307 if (current->Contains(position)) {
308 current->SplitAt(position);
309 break;
310 }
311 UseInterval* next = current->next();
312 if (next->start().Value() >= position.Value()) {
313 split_at_start = (next->start().Value() == position.Value());
314 break;
315 }
316 current = next;
317 }
318
319 // Partition original use intervals to the two live ranges.
320 UseInterval* before = current;
321 UseInterval* after = before->next();
322 result->last_interval_ = (last_interval_ == before)
323 ? after // Only interval in the range after split.
324 : last_interval_; // Last interval of the original range.
325 result->first_interval_ = after;
326 last_interval_ = before;
327
328 // Find the last use position before the split and the first use
329 // position after it.
330 UsePosition* use_after = first_pos_;
331 UsePosition* use_before = NULL;
332 if (split_at_start) {
333 // The split position coincides with the beginning of a use interval (the
334 // end of a lifetime hole). Use at this position should be attributed to
335 // the split child because split child owns use interval covering it.
336 while (use_after != NULL && use_after->pos().Value() < position.Value()) {
337 use_before = use_after;
338 use_after = use_after->next();
339 }
340 } else {
341 while (use_after != NULL && use_after->pos().Value() <= position.Value()) {
342 use_before = use_after;
343 use_after = use_after->next();
344 }
345 }
346
347 // Partition original use positions to the two live ranges.
348 if (use_before != NULL) {
349 use_before->next_ = NULL;
350 } else {
351 first_pos_ = NULL;
352 }
353 result->first_pos_ = use_after;
354
355 // Link the new live range in the chain before any of the other
356 // ranges linked from the range before the split.
357 result->parent_ = (parent_ == NULL) ? this : parent_;
358 result->next_ = next_;
359 next_ = result;
360
361 #ifdef DEBUG
362 Verify();
363 result->Verify();
364 #endif
365 }
366
367
368 // This implements an ordering on live ranges so that they are ordered by their
369 // start positions. This is needed for the correctness of the register
370 // allocation algorithm. If two live ranges start at the same offset then there
371 // is a tie breaker based on where the value is first used. This part of the
372 // ordering is merely a heuristic.
ShouldBeAllocatedBefore(const LiveRange * other) const373 bool LiveRange::ShouldBeAllocatedBefore(const LiveRange* other) const {
374 LifetimePosition start = Start();
375 LifetimePosition other_start = other->Start();
376 if (start.Value() == other_start.Value()) {
377 UsePosition* pos = FirstPosWithHint();
378 if (pos == NULL) return false;
379 UsePosition* other_pos = other->first_pos();
380 if (other_pos == NULL) return true;
381 return pos->pos().Value() < other_pos->pos().Value();
382 }
383 return start.Value() < other_start.Value();
384 }
385
386
ShortenTo(LifetimePosition start)387 void LiveRange::ShortenTo(LifetimePosition start) {
388 LAllocator::TraceAlloc("Shorten live range %d to [%d\n", id_, start.Value());
389 ASSERT(first_interval_ != NULL);
390 ASSERT(first_interval_->start().Value() <= start.Value());
391 ASSERT(start.Value() < first_interval_->end().Value());
392 first_interval_->set_start(start);
393 }
394
395
EnsureInterval(LifetimePosition start,LifetimePosition end)396 void LiveRange::EnsureInterval(LifetimePosition start, LifetimePosition end) {
397 LAllocator::TraceAlloc("Ensure live range %d in interval [%d %d[\n",
398 id_,
399 start.Value(),
400 end.Value());
401 LifetimePosition new_end = end;
402 while (first_interval_ != NULL &&
403 first_interval_->start().Value() <= end.Value()) {
404 if (first_interval_->end().Value() > end.Value()) {
405 new_end = first_interval_->end();
406 }
407 first_interval_ = first_interval_->next();
408 }
409
410 UseInterval* new_interval = new UseInterval(start, new_end);
411 new_interval->next_ = first_interval_;
412 first_interval_ = new_interval;
413 if (new_interval->next() == NULL) {
414 last_interval_ = new_interval;
415 }
416 }
417
418
AddUseInterval(LifetimePosition start,LifetimePosition end)419 void LiveRange::AddUseInterval(LifetimePosition start, LifetimePosition end) {
420 LAllocator::TraceAlloc("Add to live range %d interval [%d %d[\n",
421 id_,
422 start.Value(),
423 end.Value());
424 if (first_interval_ == NULL) {
425 UseInterval* interval = new UseInterval(start, end);
426 first_interval_ = interval;
427 last_interval_ = interval;
428 } else {
429 if (end.Value() == first_interval_->start().Value()) {
430 first_interval_->set_start(start);
431 } else if (end.Value() < first_interval_->start().Value()) {
432 UseInterval* interval = new UseInterval(start, end);
433 interval->set_next(first_interval_);
434 first_interval_ = interval;
435 } else {
436 // Order of instruction's processing (see ProcessInstructions) guarantees
437 // that each new use interval either precedes or intersects with
438 // last added interval.
439 ASSERT(start.Value() < first_interval_->end().Value());
440 first_interval_->start_ = Min(start, first_interval_->start_);
441 first_interval_->end_ = Max(end, first_interval_->end_);
442 }
443 }
444 }
445
446
AddUsePosition(LifetimePosition pos,LOperand * operand)447 UsePosition* LiveRange::AddUsePosition(LifetimePosition pos,
448 LOperand* operand) {
449 LAllocator::TraceAlloc("Add to live range %d use position %d\n",
450 id_,
451 pos.Value());
452 UsePosition* use_pos = new UsePosition(pos, operand);
453 UsePosition* prev = NULL;
454 UsePosition* current = first_pos_;
455 while (current != NULL && current->pos().Value() < pos.Value()) {
456 prev = current;
457 current = current->next();
458 }
459
460 if (prev == NULL) {
461 use_pos->set_next(first_pos_);
462 first_pos_ = use_pos;
463 } else {
464 use_pos->next_ = prev->next_;
465 prev->next_ = use_pos;
466 }
467
468 return use_pos;
469 }
470
471
ConvertOperands()472 void LiveRange::ConvertOperands() {
473 LOperand* op = CreateAssignedOperand();
474 UsePosition* use_pos = first_pos();
475 while (use_pos != NULL) {
476 ASSERT(Start().Value() <= use_pos->pos().Value() &&
477 use_pos->pos().Value() <= End().Value());
478
479 if (use_pos->HasOperand()) {
480 ASSERT(op->IsRegister() || op->IsDoubleRegister() ||
481 !use_pos->RequiresRegister());
482 use_pos->operand()->ConvertTo(op->kind(), op->index());
483 }
484 use_pos = use_pos->next();
485 }
486 }
487
488
CanCover(LifetimePosition position) const489 bool LiveRange::CanCover(LifetimePosition position) const {
490 if (IsEmpty()) return false;
491 return Start().Value() <= position.Value() &&
492 position.Value() < End().Value();
493 }
494
495
Covers(LifetimePosition position)496 bool LiveRange::Covers(LifetimePosition position) {
497 if (!CanCover(position)) return false;
498 UseInterval* start_search = FirstSearchIntervalForPosition(position);
499 for (UseInterval* interval = start_search;
500 interval != NULL;
501 interval = interval->next()) {
502 ASSERT(interval->next() == NULL ||
503 interval->next()->start().Value() >= interval->start().Value());
504 AdvanceLastProcessedMarker(interval, position);
505 if (interval->Contains(position)) return true;
506 if (interval->start().Value() > position.Value()) return false;
507 }
508 return false;
509 }
510
511
FirstIntersection(LiveRange * other)512 LifetimePosition LiveRange::FirstIntersection(LiveRange* other) {
513 UseInterval* b = other->first_interval();
514 if (b == NULL) return LifetimePosition::Invalid();
515 LifetimePosition advance_last_processed_up_to = b->start();
516 UseInterval* a = FirstSearchIntervalForPosition(b->start());
517 while (a != NULL && b != NULL) {
518 if (a->start().Value() > other->End().Value()) break;
519 if (b->start().Value() > End().Value()) break;
520 LifetimePosition cur_intersection = a->Intersect(b);
521 if (cur_intersection.IsValid()) {
522 return cur_intersection;
523 }
524 if (a->start().Value() < b->start().Value()) {
525 a = a->next();
526 if (a == NULL || a->start().Value() > other->End().Value()) break;
527 AdvanceLastProcessedMarker(a, advance_last_processed_up_to);
528 } else {
529 b = b->next();
530 }
531 }
532 return LifetimePosition::Invalid();
533 }
534
535
LAllocator(int num_values,HGraph * graph)536 LAllocator::LAllocator(int num_values, HGraph* graph)
537 : chunk_(NULL),
538 live_in_sets_(graph->blocks()->length()),
539 live_ranges_(num_values * 2),
540 fixed_live_ranges_(NULL),
541 fixed_double_live_ranges_(NULL),
542 unhandled_live_ranges_(num_values * 2),
543 active_live_ranges_(8),
544 inactive_live_ranges_(8),
545 reusable_slots_(8),
546 next_virtual_register_(num_values),
547 first_artificial_register_(num_values),
548 mode_(NONE),
549 num_registers_(-1),
550 graph_(graph),
551 has_osr_entry_(false) {}
552
553
InitializeLivenessAnalysis()554 void LAllocator::InitializeLivenessAnalysis() {
555 // Initialize the live_in sets for each block to NULL.
556 int block_count = graph_->blocks()->length();
557 live_in_sets_.Initialize(block_count);
558 live_in_sets_.AddBlock(NULL, block_count);
559 }
560
561
ComputeLiveOut(HBasicBlock * block)562 BitVector* LAllocator::ComputeLiveOut(HBasicBlock* block) {
563 // Compute live out for the given block, except not including backward
564 // successor edges.
565 BitVector* live_out = new BitVector(next_virtual_register_);
566
567 // Process all successor blocks.
568 HBasicBlock* successor = block->end()->FirstSuccessor();
569 while (successor != NULL) {
570 // Add values live on entry to the successor. Note the successor's
571 // live_in will not be computed yet for backwards edges.
572 BitVector* live_in = live_in_sets_[successor->block_id()];
573 if (live_in != NULL) live_out->Union(*live_in);
574
575 // All phi input operands corresponding to this successor edge are live
576 // out from this block.
577 int index = successor->PredecessorIndexOf(block);
578 const ZoneList<HPhi*>* phis = successor->phis();
579 for (int i = 0; i < phis->length(); ++i) {
580 HPhi* phi = phis->at(i);
581 if (!phi->OperandAt(index)->IsConstant()) {
582 live_out->Add(phi->OperandAt(index)->id());
583 }
584 }
585
586 // Check if we are done with second successor.
587 if (successor == block->end()->SecondSuccessor()) break;
588
589 successor = block->end()->SecondSuccessor();
590 }
591
592 return live_out;
593 }
594
595
AddInitialIntervals(HBasicBlock * block,BitVector * live_out)596 void LAllocator::AddInitialIntervals(HBasicBlock* block,
597 BitVector* live_out) {
598 // Add an interval that includes the entire block to the live range for
599 // each live_out value.
600 LifetimePosition start = LifetimePosition::FromInstructionIndex(
601 block->first_instruction_index());
602 LifetimePosition end = LifetimePosition::FromInstructionIndex(
603 block->last_instruction_index()).NextInstruction();
604 BitVector::Iterator iterator(live_out);
605 while (!iterator.Done()) {
606 int operand_index = iterator.Current();
607 LiveRange* range = LiveRangeFor(operand_index);
608 range->AddUseInterval(start, end);
609 iterator.Advance();
610 }
611 }
612
613
FixedDoubleLiveRangeID(int index)614 int LAllocator::FixedDoubleLiveRangeID(int index) {
615 return -index - 1 - Register::kNumAllocatableRegisters;
616 }
617
618
AllocateFixed(LUnallocated * operand,int pos,bool is_tagged)619 LOperand* LAllocator::AllocateFixed(LUnallocated* operand,
620 int pos,
621 bool is_tagged) {
622 TraceAlloc("Allocating fixed reg for op %d\n", operand->virtual_register());
623 ASSERT(operand->HasFixedPolicy());
624 if (operand->policy() == LUnallocated::FIXED_SLOT) {
625 operand->ConvertTo(LOperand::STACK_SLOT, operand->fixed_index());
626 } else if (operand->policy() == LUnallocated::FIXED_REGISTER) {
627 int reg_index = operand->fixed_index();
628 operand->ConvertTo(LOperand::REGISTER, reg_index);
629 } else if (operand->policy() == LUnallocated::FIXED_DOUBLE_REGISTER) {
630 int reg_index = operand->fixed_index();
631 operand->ConvertTo(LOperand::DOUBLE_REGISTER, reg_index);
632 } else {
633 UNREACHABLE();
634 }
635 if (is_tagged) {
636 TraceAlloc("Fixed reg is tagged at %d\n", pos);
637 LInstruction* instr = InstructionAt(pos);
638 if (instr->HasPointerMap()) {
639 instr->pointer_map()->RecordPointer(operand);
640 }
641 }
642 return operand;
643 }
644
645
FixedLiveRangeFor(int index)646 LiveRange* LAllocator::FixedLiveRangeFor(int index) {
647 ASSERT(index < Register::kNumAllocatableRegisters);
648 LiveRange* result = fixed_live_ranges_[index];
649 if (result == NULL) {
650 result = new LiveRange(FixedLiveRangeID(index));
651 ASSERT(result->IsFixed());
652 result->set_assigned_register(index, GENERAL_REGISTERS);
653 fixed_live_ranges_[index] = result;
654 }
655 return result;
656 }
657
658
FixedDoubleLiveRangeFor(int index)659 LiveRange* LAllocator::FixedDoubleLiveRangeFor(int index) {
660 ASSERT(index < DoubleRegister::kNumAllocatableRegisters);
661 LiveRange* result = fixed_double_live_ranges_[index];
662 if (result == NULL) {
663 result = new LiveRange(FixedDoubleLiveRangeID(index));
664 ASSERT(result->IsFixed());
665 result->set_assigned_register(index, DOUBLE_REGISTERS);
666 fixed_double_live_ranges_[index] = result;
667 }
668 return result;
669 }
670
671
LiveRangeFor(int index)672 LiveRange* LAllocator::LiveRangeFor(int index) {
673 if (index >= live_ranges_.length()) {
674 live_ranges_.AddBlock(NULL, index - live_ranges_.length() + 1);
675 }
676 LiveRange* result = live_ranges_[index];
677 if (result == NULL) {
678 result = new LiveRange(index);
679 live_ranges_[index] = result;
680 }
681 return result;
682 }
683
684
GetLastGap(HBasicBlock * block)685 LGap* LAllocator::GetLastGap(HBasicBlock* block) {
686 int last_instruction = block->last_instruction_index();
687 int index = chunk_->NearestGapPos(last_instruction);
688 return GapAt(index);
689 }
690
691
LookupPhi(LOperand * operand) const692 HPhi* LAllocator::LookupPhi(LOperand* operand) const {
693 if (!operand->IsUnallocated()) return NULL;
694 int index = operand->VirtualRegister();
695 HValue* instr = graph_->LookupValue(index);
696 if (instr != NULL && instr->IsPhi()) {
697 return HPhi::cast(instr);
698 }
699 return NULL;
700 }
701
702
LiveRangeFor(LOperand * operand)703 LiveRange* LAllocator::LiveRangeFor(LOperand* operand) {
704 if (operand->IsUnallocated()) {
705 return LiveRangeFor(LUnallocated::cast(operand)->virtual_register());
706 } else if (operand->IsRegister()) {
707 return FixedLiveRangeFor(operand->index());
708 } else if (operand->IsDoubleRegister()) {
709 return FixedDoubleLiveRangeFor(operand->index());
710 } else {
711 return NULL;
712 }
713 }
714
715
Define(LifetimePosition position,LOperand * operand,LOperand * hint)716 void LAllocator::Define(LifetimePosition position,
717 LOperand* operand,
718 LOperand* hint) {
719 LiveRange* range = LiveRangeFor(operand);
720 if (range == NULL) return;
721
722 if (range->IsEmpty() || range->Start().Value() > position.Value()) {
723 // Can happen if there is a definition without use.
724 range->AddUseInterval(position, position.NextInstruction());
725 range->AddUsePosition(position.NextInstruction(), NULL);
726 } else {
727 range->ShortenTo(position);
728 }
729
730 if (operand->IsUnallocated()) {
731 LUnallocated* unalloc_operand = LUnallocated::cast(operand);
732 range->AddUsePosition(position, unalloc_operand)->set_hint(hint);
733 }
734 }
735
736
Use(LifetimePosition block_start,LifetimePosition position,LOperand * operand,LOperand * hint)737 void LAllocator::Use(LifetimePosition block_start,
738 LifetimePosition position,
739 LOperand* operand,
740 LOperand* hint) {
741 LiveRange* range = LiveRangeFor(operand);
742 if (range == NULL) return;
743 if (operand->IsUnallocated()) {
744 LUnallocated* unalloc_operand = LUnallocated::cast(operand);
745 range->AddUsePosition(position, unalloc_operand)->set_hint(hint);
746 }
747 range->AddUseInterval(block_start, position);
748 }
749
750
AddConstraintsGapMove(int index,LOperand * from,LOperand * to)751 void LAllocator::AddConstraintsGapMove(int index,
752 LOperand* from,
753 LOperand* to) {
754 LGap* gap = GapAt(index);
755 LParallelMove* move = gap->GetOrCreateParallelMove(LGap::START);
756 if (from->IsUnallocated()) {
757 const ZoneList<LMoveOperands>* move_operands = move->move_operands();
758 for (int i = 0; i < move_operands->length(); ++i) {
759 LMoveOperands cur = move_operands->at(i);
760 LOperand* cur_to = cur.destination();
761 if (cur_to->IsUnallocated()) {
762 if (cur_to->VirtualRegister() == from->VirtualRegister()) {
763 move->AddMove(cur.source(), to);
764 return;
765 }
766 }
767 }
768 }
769 move->AddMove(from, to);
770 }
771
772
MeetRegisterConstraints(HBasicBlock * block)773 void LAllocator::MeetRegisterConstraints(HBasicBlock* block) {
774 int start = block->first_instruction_index();
775 int end = block->last_instruction_index();
776 for (int i = start; i <= end; ++i) {
777 if (IsGapAt(i)) {
778 LInstruction* instr = NULL;
779 LInstruction* prev_instr = NULL;
780 if (i < end) instr = InstructionAt(i + 1);
781 if (i > start) prev_instr = InstructionAt(i - 1);
782 MeetConstraintsBetween(prev_instr, instr, i);
783 }
784 }
785 }
786
787
MeetConstraintsBetween(LInstruction * first,LInstruction * second,int gap_index)788 void LAllocator::MeetConstraintsBetween(LInstruction* first,
789 LInstruction* second,
790 int gap_index) {
791 // Handle fixed temporaries.
792 if (first != NULL) {
793 for (TempIterator it(first); it.HasNext(); it.Advance()) {
794 LUnallocated* temp = LUnallocated::cast(it.Next());
795 if (temp->HasFixedPolicy()) {
796 AllocateFixed(temp, gap_index - 1, false);
797 }
798 }
799 }
800
801 // Handle fixed output operand.
802 if (first != NULL && first->Output() != NULL) {
803 LUnallocated* first_output = LUnallocated::cast(first->Output());
804 LiveRange* range = LiveRangeFor(first_output->VirtualRegister());
805 bool assigned = false;
806 if (first_output->HasFixedPolicy()) {
807 LUnallocated* output_copy = first_output->CopyUnconstrained();
808 bool is_tagged = HasTaggedValue(first_output->VirtualRegister());
809 AllocateFixed(first_output, gap_index, is_tagged);
810
811 // This value is produced on the stack, we never need to spill it.
812 if (first_output->IsStackSlot()) {
813 range->SetSpillOperand(first_output);
814 range->SetSpillStartIndex(gap_index - 1);
815 assigned = true;
816 }
817 chunk_->AddGapMove(gap_index, first_output, output_copy);
818 }
819
820 if (!assigned) {
821 range->SetSpillStartIndex(gap_index);
822
823 // This move to spill operand is not a real use. Liveness analysis
824 // and splitting of live ranges do not account for it.
825 // Thus it should be inserted to a lifetime position corresponding to
826 // the instruction end.
827 LGap* gap = GapAt(gap_index);
828 LParallelMove* move = gap->GetOrCreateParallelMove(LGap::BEFORE);
829 move->AddMove(first_output, range->GetSpillOperand());
830 }
831 }
832
833 // Handle fixed input operands of second instruction.
834 if (second != NULL) {
835 for (UseIterator it(second); it.HasNext(); it.Advance()) {
836 LUnallocated* cur_input = LUnallocated::cast(it.Next());
837 if (cur_input->HasFixedPolicy()) {
838 LUnallocated* input_copy = cur_input->CopyUnconstrained();
839 bool is_tagged = HasTaggedValue(cur_input->VirtualRegister());
840 AllocateFixed(cur_input, gap_index + 1, is_tagged);
841 AddConstraintsGapMove(gap_index, input_copy, cur_input);
842 } else if (cur_input->policy() == LUnallocated::WRITABLE_REGISTER) {
843 // The live range of writable input registers always goes until the end
844 // of the instruction.
845 ASSERT(!cur_input->IsUsedAtStart());
846
847 LUnallocated* input_copy = cur_input->CopyUnconstrained();
848 cur_input->set_virtual_register(next_virtual_register_++);
849
850 if (RequiredRegisterKind(input_copy->virtual_register()) ==
851 DOUBLE_REGISTERS) {
852 double_artificial_registers_.Add(
853 cur_input->virtual_register() - first_artificial_register_);
854 }
855
856 AddConstraintsGapMove(gap_index, input_copy, cur_input);
857 }
858 }
859 }
860
861 // Handle "output same as input" for second instruction.
862 if (second != NULL && second->Output() != NULL) {
863 LUnallocated* second_output = LUnallocated::cast(second->Output());
864 if (second_output->HasSameAsInputPolicy()) {
865 LUnallocated* cur_input = LUnallocated::cast(second->FirstInput());
866 int output_vreg = second_output->VirtualRegister();
867 int input_vreg = cur_input->VirtualRegister();
868
869 LUnallocated* input_copy = cur_input->CopyUnconstrained();
870 cur_input->set_virtual_register(second_output->virtual_register());
871 AddConstraintsGapMove(gap_index, input_copy, cur_input);
872
873 if (HasTaggedValue(input_vreg) && !HasTaggedValue(output_vreg)) {
874 int index = gap_index + 1;
875 LInstruction* instr = InstructionAt(index);
876 if (instr->HasPointerMap()) {
877 instr->pointer_map()->RecordPointer(input_copy);
878 }
879 } else if (!HasTaggedValue(input_vreg) && HasTaggedValue(output_vreg)) {
880 // The input is assumed to immediately have a tagged representation,
881 // before the pointer map can be used. I.e. the pointer map at the
882 // instruction will include the output operand (whose value at the
883 // beginning of the instruction is equal to the input operand). If
884 // this is not desired, then the pointer map at this instruction needs
885 // to be adjusted manually.
886 }
887 }
888 }
889 }
890
891
ProcessInstructions(HBasicBlock * block,BitVector * live)892 void LAllocator::ProcessInstructions(HBasicBlock* block, BitVector* live) {
893 int block_start = block->first_instruction_index();
894 int index = block->last_instruction_index();
895
896 LifetimePosition block_start_position =
897 LifetimePosition::FromInstructionIndex(block_start);
898
899 while (index >= block_start) {
900 LifetimePosition curr_position =
901 LifetimePosition::FromInstructionIndex(index);
902
903 if (IsGapAt(index)) {
904 // We have a gap at this position.
905 LGap* gap = GapAt(index);
906 LParallelMove* move = gap->GetOrCreateParallelMove(LGap::START);
907 const ZoneList<LMoveOperands>* move_operands = move->move_operands();
908 for (int i = 0; i < move_operands->length(); ++i) {
909 LMoveOperands* cur = &move_operands->at(i);
910 if (cur->IsIgnored()) continue;
911 LOperand* from = cur->source();
912 LOperand* to = cur->destination();
913 HPhi* phi = LookupPhi(to);
914 LOperand* hint = to;
915 if (phi != NULL) {
916 // This is a phi resolving move.
917 if (!phi->block()->IsLoopHeader()) {
918 hint = LiveRangeFor(phi->id())->FirstHint();
919 }
920 } else {
921 if (to->IsUnallocated()) {
922 if (live->Contains(to->VirtualRegister())) {
923 Define(curr_position, to, from);
924 live->Remove(to->VirtualRegister());
925 } else {
926 cur->Eliminate();
927 continue;
928 }
929 } else {
930 Define(curr_position, to, from);
931 }
932 }
933 Use(block_start_position, curr_position, from, hint);
934 if (from->IsUnallocated()) {
935 live->Add(from->VirtualRegister());
936 }
937 }
938 } else {
939 ASSERT(!IsGapAt(index));
940 LInstruction* instr = InstructionAt(index);
941
942 if (instr != NULL) {
943 LOperand* output = instr->Output();
944 if (output != NULL) {
945 if (output->IsUnallocated()) live->Remove(output->VirtualRegister());
946 Define(curr_position, output, NULL);
947 }
948
949 if (instr->IsMarkedAsCall()) {
950 for (int i = 0; i < Register::kNumAllocatableRegisters; ++i) {
951 if (output == NULL || !output->IsRegister() ||
952 output->index() != i) {
953 LiveRange* range = FixedLiveRangeFor(i);
954 range->AddUseInterval(curr_position,
955 curr_position.InstructionEnd());
956 }
957 }
958 }
959
960 if (instr->IsMarkedAsCall() || instr->IsMarkedAsSaveDoubles()) {
961 for (int i = 0; i < DoubleRegister::kNumAllocatableRegisters; ++i) {
962 if (output == NULL || !output->IsDoubleRegister() ||
963 output->index() != i) {
964 LiveRange* range = FixedDoubleLiveRangeFor(i);
965 range->AddUseInterval(curr_position,
966 curr_position.InstructionEnd());
967 }
968 }
969 }
970
971 for (UseIterator it(instr); it.HasNext(); it.Advance()) {
972 LOperand* input = it.Next();
973
974 LifetimePosition use_pos;
975 if (input->IsUnallocated() &&
976 LUnallocated::cast(input)->IsUsedAtStart()) {
977 use_pos = curr_position;
978 } else {
979 use_pos = curr_position.InstructionEnd();
980 }
981
982 Use(block_start_position, use_pos, input, NULL);
983 if (input->IsUnallocated()) live->Add(input->VirtualRegister());
984 }
985
986 for (TempIterator it(instr); it.HasNext(); it.Advance()) {
987 LOperand* temp = it.Next();
988 if (instr->IsMarkedAsCall()) {
989 if (temp->IsRegister()) continue;
990 if (temp->IsUnallocated()) {
991 LUnallocated* temp_unalloc = LUnallocated::cast(temp);
992 if (temp_unalloc->HasFixedPolicy()) {
993 continue;
994 }
995 }
996 }
997 Use(block_start_position, curr_position.InstructionEnd(), temp, NULL);
998 Define(curr_position, temp, NULL);
999 }
1000 }
1001 }
1002
1003 index = index - 1;
1004 }
1005 }
1006
1007
ResolvePhis(HBasicBlock * block)1008 void LAllocator::ResolvePhis(HBasicBlock* block) {
1009 const ZoneList<HPhi*>* phis = block->phis();
1010 for (int i = 0; i < phis->length(); ++i) {
1011 HPhi* phi = phis->at(i);
1012 LUnallocated* phi_operand = new LUnallocated(LUnallocated::NONE);
1013 phi_operand->set_virtual_register(phi->id());
1014 for (int j = 0; j < phi->OperandCount(); ++j) {
1015 HValue* op = phi->OperandAt(j);
1016 LOperand* operand = NULL;
1017 if (op->IsConstant() && op->EmitAtUses()) {
1018 HConstant* constant = HConstant::cast(op);
1019 operand = chunk_->DefineConstantOperand(constant);
1020 } else {
1021 ASSERT(!op->EmitAtUses());
1022 LUnallocated* unalloc = new LUnallocated(LUnallocated::NONE);
1023 unalloc->set_virtual_register(op->id());
1024 operand = unalloc;
1025 }
1026 HBasicBlock* cur_block = block->predecessors()->at(j);
1027 // The gap move must be added without any special processing as in
1028 // the AddConstraintsGapMove.
1029 chunk_->AddGapMove(cur_block->last_instruction_index() - 1,
1030 operand,
1031 phi_operand);
1032 }
1033
1034 LiveRange* live_range = LiveRangeFor(phi->id());
1035 LLabel* label = chunk_->GetLabel(phi->block()->block_id());
1036 label->GetOrCreateParallelMove(LGap::START)->
1037 AddMove(phi_operand, live_range->GetSpillOperand());
1038 live_range->SetSpillStartIndex(phi->block()->first_instruction_index());
1039 }
1040 }
1041
1042
Allocate(LChunk * chunk)1043 void LAllocator::Allocate(LChunk* chunk) {
1044 ASSERT(chunk_ == NULL);
1045 chunk_ = chunk;
1046 MeetRegisterConstraints();
1047 ResolvePhis();
1048 BuildLiveRanges();
1049 AllocateGeneralRegisters();
1050 AllocateDoubleRegisters();
1051 PopulatePointerMaps();
1052 if (has_osr_entry_) ProcessOsrEntry();
1053 ConnectRanges();
1054 ResolveControlFlow();
1055 }
1056
1057
MeetRegisterConstraints()1058 void LAllocator::MeetRegisterConstraints() {
1059 HPhase phase("Register constraints", chunk_);
1060 first_artificial_register_ = next_virtual_register_;
1061 const ZoneList<HBasicBlock*>* blocks = graph_->blocks();
1062 for (int i = 0; i < blocks->length(); ++i) {
1063 HBasicBlock* block = blocks->at(i);
1064 MeetRegisterConstraints(block);
1065 }
1066 }
1067
1068
ResolvePhis()1069 void LAllocator::ResolvePhis() {
1070 HPhase phase("Resolve phis", chunk_);
1071
1072 // Process the blocks in reverse order.
1073 const ZoneList<HBasicBlock*>* blocks = graph_->blocks();
1074 for (int block_id = blocks->length() - 1; block_id >= 0; --block_id) {
1075 HBasicBlock* block = blocks->at(block_id);
1076 ResolvePhis(block);
1077 }
1078 }
1079
1080
ResolveControlFlow(LiveRange * range,HBasicBlock * block,HBasicBlock * pred)1081 void LAllocator::ResolveControlFlow(LiveRange* range,
1082 HBasicBlock* block,
1083 HBasicBlock* pred) {
1084 LifetimePosition pred_end =
1085 LifetimePosition::FromInstructionIndex(pred->last_instruction_index());
1086 LifetimePosition cur_start =
1087 LifetimePosition::FromInstructionIndex(block->first_instruction_index());
1088 LiveRange* pred_cover = NULL;
1089 LiveRange* cur_cover = NULL;
1090 LiveRange* cur_range = range;
1091 while (cur_range != NULL && (cur_cover == NULL || pred_cover == NULL)) {
1092 if (cur_range->CanCover(cur_start)) {
1093 ASSERT(cur_cover == NULL);
1094 cur_cover = cur_range;
1095 }
1096 if (cur_range->CanCover(pred_end)) {
1097 ASSERT(pred_cover == NULL);
1098 pred_cover = cur_range;
1099 }
1100 cur_range = cur_range->next();
1101 }
1102
1103 if (cur_cover->IsSpilled()) return;
1104 ASSERT(pred_cover != NULL && cur_cover != NULL);
1105 if (pred_cover != cur_cover) {
1106 LOperand* pred_op = pred_cover->CreateAssignedOperand();
1107 LOperand* cur_op = cur_cover->CreateAssignedOperand();
1108 if (!pred_op->Equals(cur_op)) {
1109 LGap* gap = NULL;
1110 if (block->predecessors()->length() == 1) {
1111 gap = GapAt(block->first_instruction_index());
1112 } else {
1113 ASSERT(pred->end()->SecondSuccessor() == NULL);
1114 gap = GetLastGap(pred);
1115
1116 // We are going to insert a move before the branch instruction.
1117 // Some branch instructions (e.g. loops' back edges)
1118 // can potentially cause a GC so they have a pointer map.
1119 // By insterting a move we essentially create a copy of a
1120 // value which is invisible to PopulatePointerMaps(), because we store
1121 // it into a location different from the operand of a live range
1122 // covering a branch instruction.
1123 // Thus we need to manually record a pointer.
1124 if (HasTaggedValue(range->id())) {
1125 LInstruction* branch = InstructionAt(pred->last_instruction_index());
1126 if (branch->HasPointerMap()) {
1127 branch->pointer_map()->RecordPointer(cur_op);
1128 }
1129 }
1130 }
1131 gap->GetOrCreateParallelMove(LGap::START)->AddMove(pred_op, cur_op);
1132 }
1133 }
1134 }
1135
1136
GetConnectingParallelMove(LifetimePosition pos)1137 LParallelMove* LAllocator::GetConnectingParallelMove(LifetimePosition pos) {
1138 int index = pos.InstructionIndex();
1139 if (IsGapAt(index)) {
1140 LGap* gap = GapAt(index);
1141 return gap->GetOrCreateParallelMove(
1142 pos.IsInstructionStart() ? LGap::START : LGap::END);
1143 }
1144 int gap_pos = pos.IsInstructionStart() ? (index - 1) : (index + 1);
1145 return GapAt(gap_pos)->GetOrCreateParallelMove(
1146 (gap_pos < index) ? LGap::AFTER : LGap::BEFORE);
1147 }
1148
1149
GetBlock(LifetimePosition pos)1150 HBasicBlock* LAllocator::GetBlock(LifetimePosition pos) {
1151 LGap* gap = GapAt(chunk_->NearestGapPos(pos.InstructionIndex()));
1152 return gap->block();
1153 }
1154
1155
ConnectRanges()1156 void LAllocator::ConnectRanges() {
1157 HPhase phase("Connect ranges", this);
1158 for (int i = 0; i < live_ranges()->length(); ++i) {
1159 LiveRange* first_range = live_ranges()->at(i);
1160 if (first_range == NULL || first_range->parent() != NULL) continue;
1161
1162 LiveRange* second_range = first_range->next();
1163 while (second_range != NULL) {
1164 LifetimePosition pos = second_range->Start();
1165
1166 if (!second_range->IsSpilled()) {
1167 // Add gap move if the two live ranges touch and there is no block
1168 // boundary.
1169 if (first_range->End().Value() == pos.Value()) {
1170 bool should_insert = true;
1171 if (IsBlockBoundary(pos)) {
1172 should_insert = CanEagerlyResolveControlFlow(GetBlock(pos));
1173 }
1174 if (should_insert) {
1175 LParallelMove* move = GetConnectingParallelMove(pos);
1176 LOperand* prev_operand = first_range->CreateAssignedOperand();
1177 LOperand* cur_operand = second_range->CreateAssignedOperand();
1178 move->AddMove(prev_operand, cur_operand);
1179 }
1180 }
1181 }
1182
1183 first_range = second_range;
1184 second_range = second_range->next();
1185 }
1186 }
1187 }
1188
1189
CanEagerlyResolveControlFlow(HBasicBlock * block) const1190 bool LAllocator::CanEagerlyResolveControlFlow(HBasicBlock* block) const {
1191 if (block->predecessors()->length() != 1) return false;
1192 return block->predecessors()->first()->block_id() == block->block_id() - 1;
1193 }
1194
1195
ResolveControlFlow()1196 void LAllocator::ResolveControlFlow() {
1197 HPhase phase("Resolve control flow", this);
1198 const ZoneList<HBasicBlock*>* blocks = graph_->blocks();
1199 for (int block_id = 1; block_id < blocks->length(); ++block_id) {
1200 HBasicBlock* block = blocks->at(block_id);
1201 if (CanEagerlyResolveControlFlow(block)) continue;
1202 BitVector* live = live_in_sets_[block->block_id()];
1203 BitVector::Iterator iterator(live);
1204 while (!iterator.Done()) {
1205 int operand_index = iterator.Current();
1206 for (int i = 0; i < block->predecessors()->length(); ++i) {
1207 HBasicBlock* cur = block->predecessors()->at(i);
1208 LiveRange* cur_range = LiveRangeFor(operand_index);
1209 ResolveControlFlow(cur_range, block, cur);
1210 }
1211 iterator.Advance();
1212 }
1213 }
1214 }
1215
1216
BuildLiveRanges()1217 void LAllocator::BuildLiveRanges() {
1218 HPhase phase("Build live ranges", this);
1219 InitializeLivenessAnalysis();
1220 // Process the blocks in reverse order.
1221 const ZoneList<HBasicBlock*>* blocks = graph_->blocks();
1222 for (int block_id = blocks->length() - 1; block_id >= 0; --block_id) {
1223 HBasicBlock* block = blocks->at(block_id);
1224 BitVector* live = ComputeLiveOut(block);
1225 // Initially consider all live_out values live for the entire block. We
1226 // will shorten these intervals if necessary.
1227 AddInitialIntervals(block, live);
1228
1229 // Process the instructions in reverse order, generating and killing
1230 // live values.
1231 ProcessInstructions(block, live);
1232 // All phi output operands are killed by this block.
1233 const ZoneList<HPhi*>* phis = block->phis();
1234 for (int i = 0; i < phis->length(); ++i) {
1235 // The live range interval already ends at the first instruction of the
1236 // block.
1237 HPhi* phi = phis->at(i);
1238 live->Remove(phi->id());
1239
1240 LOperand* hint = NULL;
1241 LOperand* phi_operand = NULL;
1242 LGap* gap = GetLastGap(phi->block()->predecessors()->at(0));
1243 LParallelMove* move = gap->GetOrCreateParallelMove(LGap::START);
1244 for (int j = 0; j < move->move_operands()->length(); ++j) {
1245 LOperand* to = move->move_operands()->at(j).destination();
1246 if (to->IsUnallocated() && to->VirtualRegister() == phi->id()) {
1247 hint = move->move_operands()->at(j).source();
1248 phi_operand = to;
1249 break;
1250 }
1251 }
1252 ASSERT(hint != NULL);
1253
1254 LifetimePosition block_start = LifetimePosition::FromInstructionIndex(
1255 block->first_instruction_index());
1256 Define(block_start, phi_operand, hint);
1257 }
1258
1259 // Now live is live_in for this block except not including values live
1260 // out on backward successor edges.
1261 live_in_sets_[block_id] = live;
1262
1263 // If this block is a loop header go back and patch up the necessary
1264 // predecessor blocks.
1265 if (block->IsLoopHeader()) {
1266 // TODO(kmillikin): Need to be able to get the last block of the loop
1267 // in the loop information. Add a live range stretching from the first
1268 // loop instruction to the last for each value live on entry to the
1269 // header.
1270 HBasicBlock* back_edge = block->loop_information()->GetLastBackEdge();
1271 BitVector::Iterator iterator(live);
1272 LifetimePosition start = LifetimePosition::FromInstructionIndex(
1273 block->first_instruction_index());
1274 LifetimePosition end = LifetimePosition::FromInstructionIndex(
1275 back_edge->last_instruction_index()).NextInstruction();
1276 while (!iterator.Done()) {
1277 int operand_index = iterator.Current();
1278 LiveRange* range = LiveRangeFor(operand_index);
1279 range->EnsureInterval(start, end);
1280 iterator.Advance();
1281 }
1282
1283 for (int i = block->block_id() + 1; i <= back_edge->block_id(); ++i) {
1284 live_in_sets_[i]->Union(*live);
1285 }
1286 }
1287
1288 #ifdef DEBUG
1289 if (block_id == 0) {
1290 BitVector::Iterator iterator(live);
1291 bool found = false;
1292 while (!iterator.Done()) {
1293 found = true;
1294 int operand_index = iterator.Current();
1295 PrintF("Function: %s\n",
1296 *chunk_->info()->function()->debug_name()->ToCString());
1297 PrintF("Value %d used before first definition!\n", operand_index);
1298 LiveRange* range = LiveRangeFor(operand_index);
1299 PrintF("First use is at %d\n", range->first_pos()->pos().Value());
1300 iterator.Advance();
1301 }
1302 ASSERT(!found);
1303 }
1304 #endif
1305 }
1306 }
1307
1308
SafePointsAreInOrder() const1309 bool LAllocator::SafePointsAreInOrder() const {
1310 const ZoneList<LPointerMap*>* pointer_maps = chunk_->pointer_maps();
1311 int safe_point = 0;
1312 for (int i = 0; i < pointer_maps->length(); ++i) {
1313 LPointerMap* map = pointer_maps->at(i);
1314 if (safe_point > map->lithium_position()) return false;
1315 safe_point = map->lithium_position();
1316 }
1317 return true;
1318 }
1319
1320
PopulatePointerMaps()1321 void LAllocator::PopulatePointerMaps() {
1322 HPhase phase("Populate pointer maps", this);
1323 const ZoneList<LPointerMap*>* pointer_maps = chunk_->pointer_maps();
1324
1325 ASSERT(SafePointsAreInOrder());
1326
1327 // Iterate over all safe point positions and record a pointer
1328 // for all spilled live ranges at this point.
1329 int first_safe_point_index = 0;
1330 int last_range_start = 0;
1331 for (int range_idx = 0; range_idx < live_ranges()->length(); ++range_idx) {
1332 LiveRange* range = live_ranges()->at(range_idx);
1333 if (range == NULL) continue;
1334 // Iterate over the first parts of multi-part live ranges.
1335 if (range->parent() != NULL) continue;
1336 // Skip non-pointer values.
1337 if (!HasTaggedValue(range->id())) continue;
1338 // Skip empty live ranges.
1339 if (range->IsEmpty()) continue;
1340
1341 // Find the extent of the range and its children.
1342 int start = range->Start().InstructionIndex();
1343 int end = 0;
1344 for (LiveRange* cur = range; cur != NULL; cur = cur->next()) {
1345 LifetimePosition this_end = cur->End();
1346 if (this_end.InstructionIndex() > end) end = this_end.InstructionIndex();
1347 ASSERT(cur->Start().InstructionIndex() >= start);
1348 }
1349
1350 // Most of the ranges are in order, but not all. Keep an eye on when
1351 // they step backwards and reset the first_safe_point_index so we don't
1352 // miss any safe points.
1353 if (start < last_range_start) {
1354 first_safe_point_index = 0;
1355 }
1356 last_range_start = start;
1357
1358 // Step across all the safe points that are before the start of this range,
1359 // recording how far we step in order to save doing this for the next range.
1360 while (first_safe_point_index < pointer_maps->length()) {
1361 LPointerMap* map = pointer_maps->at(first_safe_point_index);
1362 int safe_point = map->lithium_position();
1363 if (safe_point >= start) break;
1364 first_safe_point_index++;
1365 }
1366
1367 // Step through the safe points to see whether they are in the range.
1368 for (int safe_point_index = first_safe_point_index;
1369 safe_point_index < pointer_maps->length();
1370 ++safe_point_index) {
1371 LPointerMap* map = pointer_maps->at(safe_point_index);
1372 int safe_point = map->lithium_position();
1373
1374 // The safe points are sorted so we can stop searching here.
1375 if (safe_point - 1 > end) break;
1376
1377 // Advance to the next active range that covers the current
1378 // safe point position.
1379 LifetimePosition safe_point_pos =
1380 LifetimePosition::FromInstructionIndex(safe_point);
1381 LiveRange* cur = range;
1382 while (cur != NULL && !cur->Covers(safe_point_pos.PrevInstruction())) {
1383 cur = cur->next();
1384 }
1385 if (cur == NULL) continue;
1386
1387 // Check if the live range is spilled and the safe point is after
1388 // the spill position.
1389 if (range->HasAllocatedSpillOperand() &&
1390 safe_point >= range->spill_start_index()) {
1391 TraceAlloc("Pointer for range %d (spilled at %d) at safe point %d\n",
1392 range->id(), range->spill_start_index(), safe_point);
1393 map->RecordPointer(range->GetSpillOperand());
1394 }
1395
1396 if (!cur->IsSpilled()) {
1397 TraceAlloc("Pointer in register for range %d (start at %d) "
1398 "at safe point %d\n",
1399 cur->id(), cur->Start().Value(), safe_point);
1400 LOperand* operand = cur->CreateAssignedOperand();
1401 ASSERT(!operand->IsStackSlot());
1402 map->RecordPointer(operand);
1403 }
1404 }
1405 }
1406 }
1407
1408
ProcessOsrEntry()1409 void LAllocator::ProcessOsrEntry() {
1410 const ZoneList<LInstruction*>* instrs = chunk_->instructions();
1411
1412 // Linear search for the OSR entry instruction in the chunk.
1413 int index = -1;
1414 while (++index < instrs->length() &&
1415 !instrs->at(index)->IsOsrEntry()) {
1416 }
1417 ASSERT(index < instrs->length());
1418 LOsrEntry* instruction = LOsrEntry::cast(instrs->at(index));
1419
1420 LifetimePosition position = LifetimePosition::FromInstructionIndex(index);
1421 for (int i = 0; i < live_ranges()->length(); ++i) {
1422 LiveRange* range = live_ranges()->at(i);
1423 if (range != NULL) {
1424 if (range->Covers(position) &&
1425 range->HasRegisterAssigned() &&
1426 range->TopLevel()->HasAllocatedSpillOperand()) {
1427 int reg_index = range->assigned_register();
1428 LOperand* spill_operand = range->TopLevel()->GetSpillOperand();
1429 if (range->IsDouble()) {
1430 instruction->MarkSpilledDoubleRegister(reg_index, spill_operand);
1431 } else {
1432 instruction->MarkSpilledRegister(reg_index, spill_operand);
1433 }
1434 }
1435 }
1436 }
1437 }
1438
1439
AllocateGeneralRegisters()1440 void LAllocator::AllocateGeneralRegisters() {
1441 HPhase phase("Allocate general registers", this);
1442 num_registers_ = Register::kNumAllocatableRegisters;
1443 mode_ = GENERAL_REGISTERS;
1444 AllocateRegisters();
1445 }
1446
1447
AllocateDoubleRegisters()1448 void LAllocator::AllocateDoubleRegisters() {
1449 HPhase phase("Allocate double registers", this);
1450 num_registers_ = DoubleRegister::kNumAllocatableRegisters;
1451 mode_ = DOUBLE_REGISTERS;
1452 AllocateRegisters();
1453 }
1454
1455
AllocateRegisters()1456 void LAllocator::AllocateRegisters() {
1457 ASSERT(mode_ != NONE);
1458 ASSERT(unhandled_live_ranges_.is_empty());
1459
1460 for (int i = 0; i < live_ranges_.length(); ++i) {
1461 if (live_ranges_[i] != NULL) {
1462 if (RequiredRegisterKind(live_ranges_[i]->id()) == mode_) {
1463 AddToUnhandledUnsorted(live_ranges_[i]);
1464 }
1465 }
1466 }
1467 SortUnhandled();
1468 ASSERT(UnhandledIsSorted());
1469
1470 ASSERT(reusable_slots_.is_empty());
1471 ASSERT(active_live_ranges_.is_empty());
1472 ASSERT(inactive_live_ranges_.is_empty());
1473
1474 if (mode_ == DOUBLE_REGISTERS) {
1475 for (int i = 0; i < fixed_double_live_ranges_.length(); ++i) {
1476 LiveRange* current = fixed_double_live_ranges_.at(i);
1477 if (current != NULL) {
1478 AddToInactive(current);
1479 }
1480 }
1481 } else {
1482 for (int i = 0; i < fixed_live_ranges_.length(); ++i) {
1483 LiveRange* current = fixed_live_ranges_.at(i);
1484 if (current != NULL) {
1485 AddToInactive(current);
1486 }
1487 }
1488 }
1489
1490 while (!unhandled_live_ranges_.is_empty()) {
1491 ASSERT(UnhandledIsSorted());
1492 LiveRange* current = unhandled_live_ranges_.RemoveLast();
1493 ASSERT(UnhandledIsSorted());
1494 LifetimePosition position = current->Start();
1495 TraceAlloc("Processing interval %d start=%d\n",
1496 current->id(),
1497 position.Value());
1498
1499 if (current->HasAllocatedSpillOperand()) {
1500 TraceAlloc("Live range %d already has a spill operand\n", current->id());
1501 LifetimePosition next_pos = position;
1502 if (IsGapAt(next_pos.InstructionIndex())) {
1503 next_pos = next_pos.NextInstruction();
1504 }
1505 UsePosition* pos = current->NextUsePositionRegisterIsBeneficial(next_pos);
1506 // If the range already has a spill operand and it doesn't need a
1507 // register immediately, split it and spill the first part of the range.
1508 if (pos == NULL) {
1509 Spill(current);
1510 continue;
1511 } else if (pos->pos().Value() >
1512 current->Start().NextInstruction().Value()) {
1513 // Do not spill live range eagerly if use position that can benefit from
1514 // the register is too close to the start of live range.
1515 SpillBetween(current, current->Start(), pos->pos());
1516 ASSERT(UnhandledIsSorted());
1517 continue;
1518 }
1519 }
1520
1521 for (int i = 0; i < active_live_ranges_.length(); ++i) {
1522 LiveRange* cur_active = active_live_ranges_.at(i);
1523 if (cur_active->End().Value() <= position.Value()) {
1524 ActiveToHandled(cur_active);
1525 --i; // The live range was removed from the list of active live ranges.
1526 } else if (!cur_active->Covers(position)) {
1527 ActiveToInactive(cur_active);
1528 --i; // The live range was removed from the list of active live ranges.
1529 }
1530 }
1531
1532 for (int i = 0; i < inactive_live_ranges_.length(); ++i) {
1533 LiveRange* cur_inactive = inactive_live_ranges_.at(i);
1534 if (cur_inactive->End().Value() <= position.Value()) {
1535 InactiveToHandled(cur_inactive);
1536 --i; // Live range was removed from the list of inactive live ranges.
1537 } else if (cur_inactive->Covers(position)) {
1538 InactiveToActive(cur_inactive);
1539 --i; // Live range was removed from the list of inactive live ranges.
1540 }
1541 }
1542
1543 ASSERT(!current->HasRegisterAssigned() && !current->IsSpilled());
1544
1545 bool result = TryAllocateFreeReg(current);
1546 if (!result) {
1547 AllocateBlockedReg(current);
1548 }
1549
1550 if (current->HasRegisterAssigned()) {
1551 AddToActive(current);
1552 }
1553 }
1554
1555 reusable_slots_.Rewind(0);
1556 active_live_ranges_.Rewind(0);
1557 inactive_live_ranges_.Rewind(0);
1558 }
1559
1560
RegisterName(int allocation_index)1561 const char* LAllocator::RegisterName(int allocation_index) {
1562 ASSERT(mode_ != NONE);
1563 if (mode_ == GENERAL_REGISTERS) {
1564 return Register::AllocationIndexToString(allocation_index);
1565 } else {
1566 return DoubleRegister::AllocationIndexToString(allocation_index);
1567 }
1568 }
1569
1570
TraceAlloc(const char * msg,...)1571 void LAllocator::TraceAlloc(const char* msg, ...) {
1572 if (FLAG_trace_alloc) {
1573 va_list arguments;
1574 va_start(arguments, msg);
1575 OS::VPrint(msg, arguments);
1576 va_end(arguments);
1577 }
1578 }
1579
1580
HasTaggedValue(int virtual_register) const1581 bool LAllocator::HasTaggedValue(int virtual_register) const {
1582 HValue* value = graph_->LookupValue(virtual_register);
1583 if (value == NULL) return false;
1584 return value->representation().IsTagged();
1585 }
1586
1587
RequiredRegisterKind(int virtual_register) const1588 RegisterKind LAllocator::RequiredRegisterKind(int virtual_register) const {
1589 if (virtual_register < first_artificial_register_) {
1590 HValue* value = graph_->LookupValue(virtual_register);
1591 if (value != NULL && value->representation().IsDouble()) {
1592 return DOUBLE_REGISTERS;
1593 }
1594 } else if (double_artificial_registers_.Contains(
1595 virtual_register - first_artificial_register_)) {
1596 return DOUBLE_REGISTERS;
1597 }
1598
1599 return GENERAL_REGISTERS;
1600 }
1601
1602
1603 void LAllocator::RecordDefinition(HInstruction* instr, LUnallocated* operand) {
1604 operand->set_virtual_register(instr->id());
1605 }
1606
1607
1608 void LAllocator::RecordTemporary(LUnallocated* operand) {
1609 ASSERT(next_virtual_register_ < LUnallocated::kMaxVirtualRegisters);
1610 if (!operand->HasFixedPolicy()) {
1611 operand->set_virtual_register(next_virtual_register_++);
1612 }
1613 }
1614
1615
1616 void LAllocator::RecordUse(HValue* value, LUnallocated* operand) {
1617 operand->set_virtual_register(value->id());
1618 }
1619
1620
1621 int LAllocator::max_initial_value_ids() {
1622 return LUnallocated::kMaxVirtualRegisters / 32;
1623 }
1624
1625
1626 void LAllocator::AddToActive(LiveRange* range) {
1627 TraceAlloc("Add live range %d to active\n", range->id());
1628 active_live_ranges_.Add(range);
1629 }
1630
1631
1632 void LAllocator::AddToInactive(LiveRange* range) {
1633 TraceAlloc("Add live range %d to inactive\n", range->id());
1634 inactive_live_ranges_.Add(range);
1635 }
1636
1637
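// Insert a live range into the unhandled list, keeping the list sorted so
// that ranges to be processed first stay close to the end of the array.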
1638 void LAllocator::AddToUnhandledSorted(LiveRange* range) {
1639 if (range == NULL || range->IsEmpty()) return;
1640 ASSERT(!range->HasRegisterAssigned() && !range->IsSpilled());
1641 for (int i = unhandled_live_ranges_.length() - 1; i >= 0; --i) {
1642 LiveRange* cur_range = unhandled_live_ranges_.at(i);
1643 if (range->ShouldBeAllocatedBefore(cur_range)) {
1644 TraceAlloc("Add live range %d to unhandled at %d\n", range->id(), i + 1);
1645 unhandled_live_ranges_.InsertAt(i + 1, range);
1646 ASSERT(UnhandledIsSorted());
1647 return;
1648 }
1649 }
1650 TraceAlloc("Add live range %d to unhandled at start\n", range->id());
1651 unhandled_live_ranges_.InsertAt(0, range);
1652 ASSERT(UnhandledIsSorted());
1653 }
1654
1655
1656 void LAllocator::AddToUnhandledUnsorted(LiveRange* range) {
1657 if (range == NULL || range->IsEmpty()) return;
1658 ASSERT(!range->HasRegisterAssigned() && !range->IsSpilled());
1659 TraceAlloc("Add live range %d to unhandled unsorted at end\n", range->id());
1660 unhandled_live_ranges_.Add(range);
1661 }
1662
1663
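// Comparator used to sort the unhandled list: ranges that should be
// allocated first compare as greater, so they end up at the end of the
// array, where removal is cheap.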
1664 static int UnhandledSortHelper(LiveRange* const* a, LiveRange* const* b) {
1665 ASSERT(!(*a)->ShouldBeAllocatedBefore(*b) ||
1666 !(*b)->ShouldBeAllocatedBefore(*a));
1667 if ((*a)->ShouldBeAllocatedBefore(*b)) return 1;
1668 if ((*b)->ShouldBeAllocatedBefore(*a)) return -1;
1669 return (*a)->id() - (*b)->id();
1670 }
1671
1672
1673 // Sort the unhandled live ranges so that the ranges to be processed first are
1674 // at the end of the array list. This is convenient for the register allocation
1675 // algorithm because it is efficient to remove elements from the end.
1676 void LAllocator::SortUnhandled() {
1677 TraceAlloc("Sort unhandled\n");
1678 unhandled_live_ranges_.Sort(&UnhandledSortHelper);
1679 }
1680
1681
1682 bool LAllocator::UnhandledIsSorted() {
1683 int len = unhandled_live_ranges_.length();
1684 for (int i = 1; i < len; i++) {
1685 LiveRange* a = unhandled_live_ranges_.at(i - 1);
1686 LiveRange* b = unhandled_live_ranges_.at(i);
1687 if (a->Start().Value() < b->Start().Value()) return false;
1688 }
1689 return true;
1690 }
1691
1692
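// Called when a range becomes handled. If it is the last live range of its
// chain and its top-level range owns a stack slot, remember the range so the
// slot can later be reused by a non-overlapping range.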
1693 void LAllocator::FreeSpillSlot(LiveRange* range) {
1694 // Check that we are the last range.
1695 if (range->next() != NULL) return;
1696
1697 if (!range->TopLevel()->HasAllocatedSpillOperand()) return;
1698
1699 int index = range->TopLevel()->GetSpillOperand()->index();
1700 if (index >= 0) {
1701 reusable_slots_.Add(range);
1702 }
1703 }
1704
1705
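// Hand out a previously released spill slot if the range that released it
// ended before the requesting range starts; otherwise return NULL.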
1706 LOperand* LAllocator::TryReuseSpillSlot(LiveRange* range) {
1707 if (reusable_slots_.is_empty()) return NULL;
1708 if (reusable_slots_.first()->End().Value() >
1709 range->TopLevel()->Start().Value()) {
1710 return NULL;
1711 }
1712 LOperand* result = reusable_slots_.first()->TopLevel()->GetSpillOperand();
1713 reusable_slots_.Remove(0);
1714 return result;
1715 }
1716
1717
1718 void LAllocator::ActiveToHandled(LiveRange* range) {
1719 ASSERT(active_live_ranges_.Contains(range));
1720 active_live_ranges_.RemoveElement(range);
1721 TraceAlloc("Moving live range %d from active to handled\n", range->id());
1722 FreeSpillSlot(range);
1723 }
1724
1725
1726 void LAllocator::ActiveToInactive(LiveRange* range) {
1727 ASSERT(active_live_ranges_.Contains(range));
1728 active_live_ranges_.RemoveElement(range);
1729 inactive_live_ranges_.Add(range);
1730 TraceAlloc("Moving live range %d from active to inactive\n", range->id());
1731 }
1732
1733
1734 void LAllocator::InactiveToHandled(LiveRange* range) {
1735 ASSERT(inactive_live_ranges_.Contains(range));
1736 inactive_live_ranges_.RemoveElement(range);
1737 TraceAlloc("Moving live range %d from inactive to handled\n", range->id());
1738 FreeSpillSlot(range);
1739 }
1740
1741
1742 void LAllocator::InactiveToActive(LiveRange* range) {
1743 ASSERT(inactive_live_ranges_.Contains(range));
1744 inactive_live_ranges_.RemoveElement(range);
1745 active_live_ranges_.Add(range);
1746 TraceAlloc("Moving live range %d from inactive to active\n", range->id());
1747 }
1748
1749
1750 // TryAllocateFreeReg and AllocateBlockedReg assume this
1751 // when allocating local arrays.
1752 STATIC_ASSERT(DoubleRegister::kNumAllocatableRegisters >=
1753 Register::kNumAllocatableRegisters);
1754
1755
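// Try to allocate a register that is free for the whole current range.
// For every register we compute the position up to which it stays free,
// based on the active and inactive sets. A register hint from a use position
// is preferred when it stays free long enough; otherwise the register that
// stays free longest is chosen. If that register becomes blocked before the
// range ends, the range is split and the tail is re-queued.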
1756 bool LAllocator::TryAllocateFreeReg(LiveRange* current) {
1757 LifetimePosition free_until_pos[DoubleRegister::kNumAllocatableRegisters];
1758
1759 for (int i = 0; i < DoubleRegister::kNumAllocatableRegisters; i++) {
1760 free_until_pos[i] = LifetimePosition::MaxPosition();
1761 }
1762
1763 for (int i = 0; i < active_live_ranges_.length(); ++i) {
1764 LiveRange* cur_active = active_live_ranges_.at(i);
1765 free_until_pos[cur_active->assigned_register()] =
1766 LifetimePosition::FromInstructionIndex(0);
1767 }
1768
1769 for (int i = 0; i < inactive_live_ranges_.length(); ++i) {
1770 LiveRange* cur_inactive = inactive_live_ranges_.at(i);
1771 ASSERT(cur_inactive->End().Value() > current->Start().Value());
1772 LifetimePosition next_intersection =
1773 cur_inactive->FirstIntersection(current);
1774 if (!next_intersection.IsValid()) continue;
1775 int cur_reg = cur_inactive->assigned_register();
1776 free_until_pos[cur_reg] = Min(free_until_pos[cur_reg], next_intersection);
1777 }
1778
1779 UsePosition* hinted_use = current->FirstPosWithHint();
1780 if (hinted_use != NULL) {
1781 LOperand* hint = hinted_use->hint();
1782 if (hint->IsRegister() || hint->IsDoubleRegister()) {
1783 int register_index = hint->index();
1784 TraceAlloc(
1785 "Found reg hint %s (free until [%d) for live range %d (end %d[).\n",
1786 RegisterName(register_index),
1787 free_until_pos[register_index].Value(),
1788 current->id(),
1789 current->End().Value());
1790
1791 // The desired register is free until the end of the current live range.
1792 if (free_until_pos[register_index].Value() >= current->End().Value()) {
1793 TraceAlloc("Assigning preferred reg %s to live range %d\n",
1794 RegisterName(register_index),
1795 current->id());
1796 current->set_assigned_register(register_index, mode_);
1797 return true;
1798 }
1799 }
1800 }
1801
1802 // Find the register which stays free for the longest time.
1803 int reg = 0;
1804 for (int i = 1; i < RegisterCount(); ++i) {
1805 if (free_until_pos[i].Value() > free_until_pos[reg].Value()) {
1806 reg = i;
1807 }
1808 }
1809
1810 LifetimePosition pos = free_until_pos[reg];
1811
1812 if (pos.Value() <= current->Start().Value()) {
1813 // All registers are blocked.
1814 return false;
1815 }
1816
1817 if (pos.Value() < current->End().Value()) {
1818     // Register reg is available at the range start but becomes blocked before
1819     // the range end. Split current at the position where it becomes blocked.
1820 LiveRange* tail = SplitAt(current, pos);
1821 AddToUnhandledSorted(tail);
1822 }
1823
1824
1825 // Register reg is available at the range start and is free until
1826 // the range end.
1827 ASSERT(pos.Value() >= current->End().Value());
1828 TraceAlloc("Assigning free reg %s to live range %d\n",
1829 RegisterName(reg),
1830 current->id());
1831 current->set_assigned_register(reg, mode_);
1832
1833 return true;
1834 }
1835
1836
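// Called when no register is free for the whole current range. For each
// register we compute when it is next needed by its current owner (use_pos)
// and when it becomes blocked by a fixed or unspillable range (block_pos),
// then pick the register whose next use is furthest away. The current range
// is either spilled up to its first use that needs a register, or it gets
// that register and the intersecting ranges are split and spilled.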
1837 void LAllocator::AllocateBlockedReg(LiveRange* current) {
1838 UsePosition* register_use = current->NextRegisterPosition(current->Start());
1839 if (register_use == NULL) {
1840 // There is no use in the current live range that requires a register.
1841 // We can just spill it.
1842 Spill(current);
1843 return;
1844 }
1845
1846
1847 LifetimePosition use_pos[DoubleRegister::kNumAllocatableRegisters];
1848 LifetimePosition block_pos[DoubleRegister::kNumAllocatableRegisters];
1849
1850 for (int i = 0; i < DoubleRegister::kNumAllocatableRegisters; i++) {
1851 use_pos[i] = block_pos[i] = LifetimePosition::MaxPosition();
1852 }
1853
1854 for (int i = 0; i < active_live_ranges_.length(); ++i) {
1855 LiveRange* range = active_live_ranges_[i];
1856 int cur_reg = range->assigned_register();
1857 if (range->IsFixed() || !range->CanBeSpilled(current->Start())) {
1858 block_pos[cur_reg] = use_pos[cur_reg] =
1859 LifetimePosition::FromInstructionIndex(0);
1860 } else {
1861 UsePosition* next_use = range->NextUsePositionRegisterIsBeneficial(
1862 current->Start());
1863 if (next_use == NULL) {
1864 use_pos[cur_reg] = range->End();
1865 } else {
1866 use_pos[cur_reg] = next_use->pos();
1867 }
1868 }
1869 }
1870
1871 for (int i = 0; i < inactive_live_ranges_.length(); ++i) {
1872 LiveRange* range = inactive_live_ranges_.at(i);
1873 ASSERT(range->End().Value() > current->Start().Value());
1874 LifetimePosition next_intersection = range->FirstIntersection(current);
1875 if (!next_intersection.IsValid()) continue;
1876 int cur_reg = range->assigned_register();
1877 if (range->IsFixed()) {
1878 block_pos[cur_reg] = Min(block_pos[cur_reg], next_intersection);
1879 use_pos[cur_reg] = Min(block_pos[cur_reg], use_pos[cur_reg]);
1880 } else {
1881 use_pos[cur_reg] = Min(use_pos[cur_reg], next_intersection);
1882 }
1883 }
1884
1885 int reg = 0;
1886 for (int i = 1; i < RegisterCount(); ++i) {
1887 if (use_pos[i].Value() > use_pos[reg].Value()) {
1888 reg = i;
1889 }
1890 }
1891
1892 LifetimePosition pos = use_pos[reg];
1893
1894 if (pos.Value() < register_use->pos().Value()) {
1895     // All registers are blocked before the first use that requires a register.
1896     // Spill the starting part of the live range up to that use.
1897     //
1898     // Corner case: the first use position is equal to the start of the range.
1899     // In that case there is nothing to spill and SpillBetween would just return
1900     // this range to the list of unhandled ones, which would lead to an
1901     // infinite loop.
1902 ASSERT(current->Start().Value() < register_use->pos().Value());
1903 SpillBetween(current, current->Start(), register_use->pos());
1904 return;
1905 }
1906
1907 if (block_pos[reg].Value() < current->End().Value()) {
1908 // Register becomes blocked before the current range end. Split before that
1909 // position.
1910 LiveRange* tail = SplitBetween(current,
1911 current->Start(),
1912 block_pos[reg].InstructionStart());
1913 AddToUnhandledSorted(tail);
1914 }
1915
1916 // Register reg is not blocked for the whole range.
1917 ASSERT(block_pos[reg].Value() >= current->End().Value());
1918 TraceAlloc("Assigning blocked reg %s to live range %d\n",
1919 RegisterName(reg),
1920 current->id());
1921 current->set_assigned_register(reg, mode_);
1922
1923   // This register was not free. Thus we need to find and spill
1924   // parts of active and inactive live ranges that use the same register
1925   // at the same lifetime positions as current.
1926 SplitAndSpillIntersecting(current);
1927 }
1928
1929
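// Called after the current range was assigned a register that other active
// or inactive ranges already hold. Those ranges are split at the start of
// the current range and their conflicting parts are spilled (up to their
// next use that needs a register), then moved to the handled set.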
1930 void LAllocator::SplitAndSpillIntersecting(LiveRange* current) {
1931 ASSERT(current->HasRegisterAssigned());
1932 int reg = current->assigned_register();
1933 LifetimePosition split_pos = current->Start();
1934 for (int i = 0; i < active_live_ranges_.length(); ++i) {
1935 LiveRange* range = active_live_ranges_[i];
1936 if (range->assigned_register() == reg) {
1937 UsePosition* next_pos = range->NextRegisterPosition(current->Start());
1938 if (next_pos == NULL) {
1939 SpillAfter(range, split_pos);
1940 } else {
1941 SpillBetween(range, split_pos, next_pos->pos());
1942 }
1943 ActiveToHandled(range);
1944 --i;
1945 }
1946 }
1947
1948 for (int i = 0; i < inactive_live_ranges_.length(); ++i) {
1949 LiveRange* range = inactive_live_ranges_[i];
1950 ASSERT(range->End().Value() > current->Start().Value());
1951 if (range->assigned_register() == reg && !range->IsFixed()) {
1952 LifetimePosition next_intersection = range->FirstIntersection(current);
1953 if (next_intersection.IsValid()) {
1954 UsePosition* next_pos = range->NextRegisterPosition(current->Start());
1955 if (next_pos == NULL) {
1956 SpillAfter(range, split_pos);
1957 } else {
1958 next_intersection = Min(next_intersection, next_pos->pos());
1959 SpillBetween(range, split_pos, next_intersection);
1960 }
1961 InactiveToHandled(range);
1962 --i;
1963 }
1964 }
1965 }
1966 }
1967
1968
1969 bool LAllocator::IsBlockBoundary(LifetimePosition pos) {
1970 return pos.IsInstructionStart() &&
1971 InstructionAt(pos.InstructionIndex())->IsLabel();
1972 }
1973
1974
1975 LiveRange* LAllocator::SplitAt(LiveRange* range, LifetimePosition pos) {
1976 ASSERT(!range->IsFixed());
1977 TraceAlloc("Splitting live range %d at %d\n", range->id(), pos.Value());
1978
1979 if (pos.Value() <= range->Start().Value()) return range;
1980
1981   // We can't properly connect live ranges if the split occurred at the end
1982   // of a control instruction.
1983 ASSERT(pos.IsInstructionStart() ||
1984 !chunk_->instructions()->at(pos.InstructionIndex())->IsControl());
1985
1986 LiveRange* result = LiveRangeFor(next_virtual_register_++);
1987 range->SplitAt(pos, result);
1988 return result;
1989 }
1990
1991
1992 LiveRange* LAllocator::SplitBetween(LiveRange* range,
1993 LifetimePosition start,
1994 LifetimePosition end) {
1995 ASSERT(!range->IsFixed());
1996 TraceAlloc("Splitting live range %d in position between [%d, %d]\n",
1997 range->id(),
1998 start.Value(),
1999 end.Value());
2000
2001 LifetimePosition split_pos = FindOptimalSplitPos(start, end);
2002 ASSERT(split_pos.Value() >= start.Value());
2003 return SplitAt(range, split_pos);
2004 }
2005
2006
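// Choose a split position inside [start, end]. Splitting at the latest
// possible position is preferred, but when the interval crosses loop
// boundaries the split is hoisted to the first instruction of the outermost
// loop header inside the interval, so spill code stays out of loop bodies.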
2007 LifetimePosition LAllocator::FindOptimalSplitPos(LifetimePosition start,
2008 LifetimePosition end) {
2009 int start_instr = start.InstructionIndex();
2010 int end_instr = end.InstructionIndex();
2011 ASSERT(start_instr <= end_instr);
2012
2013   // We have no choice: start and end share the same instruction.
2014 if (start_instr == end_instr) return end;
2015
2016   HBasicBlock* start_block = GetBlock(start);
2017   HBasicBlock* end_block = GetBlock(end);
2018
2019 if (end_block == start_block) {
2020     // The interval is split in the same basic block. Split at the latest
2021     // possible position.
2022 return end;
2023 }
2024
2025 HBasicBlock* block = end_block;
2026 // Find header of outermost loop.
2027 while (block->parent_loop_header() != NULL &&
2028 block->parent_loop_header()->block_id() > start_block->block_id()) {
2029 block = block->parent_loop_header();
2030 }
2031
2032 if (block == end_block) return end;
2033
2034 return LifetimePosition::FromInstructionIndex(
2035 block->first_instruction_index());
2036 }
2037
2038
2039 void LAllocator::SpillAfter(LiveRange* range, LifetimePosition pos) {
2040 LiveRange* second_part = SplitAt(range, pos);
2041 Spill(second_part);
2042 }
2043
2044
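// Spill the given range between start and end: the range is split at start,
// and if the remainder still reaches into [start, end[ it is split once more
// so only the overlapping middle piece is spilled while the tail goes back
// to the unhandled list.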
2045 void LAllocator::SpillBetween(LiveRange* range,
2046 LifetimePosition start,
2047 LifetimePosition end) {
2048 ASSERT(start.Value() < end.Value());
2049 LiveRange* second_part = SplitAt(range, start);
2050
2051 if (second_part->Start().Value() < end.Value()) {
2052     // The split result intersects with [start, end[.
2053     // Split it at a position between ]start+1, end[, spill the middle part,
2054     // and put the rest back on the unhandled list.
2055 LiveRange* third_part = SplitBetween(
2056 second_part,
2057 second_part->Start().InstructionEnd(),
2058 end.PrevInstruction().InstructionEnd());
2059
2060 ASSERT(third_part != second_part);
2061
2062 Spill(second_part);
2063 AddToUnhandledSorted(third_part);
2064 } else {
2065     // The split result does not intersect with [start, end[.
2066     // Nothing to spill. Just add it to the unhandled list as a whole.
2067 AddToUnhandledSorted(second_part);
2068 }
2069 }
2070
2071
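// Assign a stack slot to the top-level range (reusing a released slot when
// possible) and mark this range as spilled.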
2072 void LAllocator::Spill(LiveRange* range) {
2073 ASSERT(!range->IsSpilled());
2074 TraceAlloc("Spilling live range %d\n", range->id());
2075 LiveRange* first = range->TopLevel();
2076
2077 if (!first->HasAllocatedSpillOperand()) {
2078 LOperand* op = TryReuseSpillSlot(range);
2079 if (op == NULL) op = chunk_->GetNextSpillSlot(mode_ == DOUBLE_REGISTERS);
2080 first->SetSpillOperand(op);
2081 }
2082 range->MakeSpilled();
2083 }
2084
2085
2086 int LAllocator::RegisterCount() const {
2087 return num_registers_;
2088 }
2089
2090
2091 #ifdef DEBUG
2092
2093
2094 void LAllocator::Verify() const {
2095 for (int i = 0; i < live_ranges()->length(); ++i) {
2096 LiveRange* current = live_ranges()->at(i);
2097 if (current != NULL) current->Verify();
2098 }
2099 }
2100
2101
2102 #endif
2103
2104
2105 } } // namespace v8::internal
2106