// Copyright 2014 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/crankshaft/ppc/lithium-gap-resolver-ppc.h"

#include "src/crankshaft/ppc/lithium-codegen-ppc.h"

namespace v8 {
namespace internal {

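// A gap resolver lowers an LParallelMove -- a set of moves that are
// semantically performed in parallel at a gap position -- into an
// equivalent sequence of individual machine moves.  r11 below is the
// general-purpose scratch register that carries a spilled value while a
// cycle is being broken; kScratchDoubleReg plays the same role for
// double values.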
static const Register kSavedValueRegister = {11};

LGapResolver::LGapResolver(LCodeGen* owner)
    : cgen_(owner),
      moves_(32, owner->zone()),
      root_index_(0),
      in_cycle_(false),
      saved_destination_(NULL) {}


void LGapResolver::Resolve(LParallelMove* parallel_move) {
  DCHECK(moves_.is_empty());
  // Build up a worklist of moves.
  BuildInitialMoveList(parallel_move);

  for (int i = 0; i < moves_.length(); ++i) {
    LMoveOperands move = moves_[i];
    // Skip constants to perform them last.  They don't block other moves
    // and skipping such moves with register destinations keeps those
    // registers free for the whole algorithm.
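    // (For instance, a hypothetical move #42 -> r5 performed up front
    // would occupy r5 for the rest of the resolution; deferring it keeps
    // r5 usable as a temporary.  The operands here are illustrative only.)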
    if (!move.IsEliminated() && !move.source()->IsConstantOperand()) {
      root_index_ = i;  // Any cycle is found by reaching this move again.
      PerformMove(i);
      if (in_cycle_) {
        RestoreValue();
      }
    }
  }

  // Perform the moves with constant sources.
  for (int i = 0; i < moves_.length(); ++i) {
    if (!moves_[i].IsEliminated()) {
      DCHECK(moves_[i].source()->IsConstantOperand());
      EmitMove(i);
    }
  }

  moves_.Rewind(0);
}


void LGapResolver::BuildInitialMoveList(LParallelMove* parallel_move) {
  // Perform a linear sweep of the moves to add them to the initial list of
  // moves to perform, ignoring any move that is redundant (the source is
  // the same as the destination, the destination is ignored and
  // unallocated, or the move was already eliminated).
  const ZoneList<LMoveOperands>* moves = parallel_move->move_operands();
  for (int i = 0; i < moves->length(); ++i) {
    LMoveOperands move = moves->at(i);
    if (!move.IsRedundant()) moves_.Add(move, cgen_->zone());
  }
  Verify();
}


void LGapResolver::PerformMove(int index) {
  // Each call to this function performs a move and deletes it from the move
  // graph.  We first recursively perform any move blocking this one.  We
  // mark a move as "pending" on entry to PerformMove in order to detect
  // cycles in the move graph.

  // We can only find a cycle, when doing a depth-first traversal of moves,
  // by encountering the starting move again.  So by spilling the source of
  // the starting move, we break the cycle.  All moves are then unblocked,
  // and the starting move is completed by writing the spilled value to
  // its destination.  All other moves from the spilled source have been
  // completed prior to breaking the cycle.
  // An additional complication is that moves to MemOperands with large
  // offsets (more than 1K or 4K) require us to spill this spilled value to
  // the stack, to free up the register.
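  //
  // Example (operands illustrative): for the swap {r3 -> r4, r4 -> r3},
  // PerformMove on r3 -> r4 recurses into r4 -> r3, which is blocked by
  // the pending root move.  BreakCycle then saves r4's value in
  // kSavedValueRegister, r3 -> r4 is emitted, and RestoreValue() finally
  // writes the saved value into r3.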
  DCHECK(!moves_[index].IsPending());
  DCHECK(!moves_[index].IsRedundant());

  // Clear this move's destination to indicate a pending move.  The actual
  // destination is saved in a stack-allocated local.  Multiple moves can
  // be pending because this function is recursive.
  DCHECK(moves_[index].source() != NULL);  // Or else it will look eliminated.
  LOperand* destination = moves_[index].destination();
  moves_[index].set_destination(NULL);

  // Perform a depth-first traversal of the move graph to resolve
  // dependencies.  Any unperformed, unpending move with a source the same
  // as this one's destination blocks this one so recursively perform all
  // such moves.
  for (int i = 0; i < moves_.length(); ++i) {
    LMoveOperands other_move = moves_[i];
    if (other_move.Blocks(destination) && !other_move.IsPending()) {
      PerformMove(i);
      // If there is a blocking, pending move it must be moves_[root_index_]
      // and all other moves with the same source as moves_[root_index_] are
      // successfully executed (because they are cycle-free) by this loop.
    }
  }

  // We are about to resolve this move and don't need it marked as
  // pending, so restore its destination.
  moves_[index].set_destination(destination);

  // The move may be blocked on a pending move, which must be the starting
  // move.  In this case, we have a cycle, and we save the source of this
  // move to a scratch register to break it.
  LMoveOperands other_move = moves_[root_index_];
  if (other_move.Blocks(destination)) {
    DCHECK(other_move.IsPending());
    BreakCycle(index);
    return;
  }

  // This move is no longer blocked.
  EmitMove(index);
}


void LGapResolver::Verify() {
#ifdef ENABLE_SLOW_DCHECKS
  // No operand should be the destination for more than one move.
  for (int i = 0; i < moves_.length(); ++i) {
    LOperand* destination = moves_[i].destination();
    for (int j = i + 1; j < moves_.length(); ++j) {
      SLOW_DCHECK(!destination->Equals(moves_[j].destination()));
    }
  }
#endif
}

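// The `__` macro is V8's usual shorthand for emitting instructions through
// the code generator's MacroAssembler.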
#define __ ACCESS_MASM(cgen_->masm())

void LGapResolver::BreakCycle(int index) {
  // We save in a register the value that should end up in the source of
  // moves_[root_index].  After performing all moves in the tree rooted
  // in that move, we save the value to that source.
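  // The saved value is written back by RestoreValue() once control
  // returns to Resolve().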
  DCHECK(moves_[index].destination()->Equals(moves_[root_index_].source()));
  DCHECK(!in_cycle_);
  in_cycle_ = true;
  LOperand* source = moves_[index].source();
  saved_destination_ = moves_[index].destination();
  if (source->IsRegister()) {
    __ mr(kSavedValueRegister, cgen_->ToRegister(source));
  } else if (source->IsStackSlot()) {
    __ LoadP(kSavedValueRegister, cgen_->ToMemOperand(source));
  } else if (source->IsDoubleRegister()) {
    __ fmr(kScratchDoubleReg, cgen_->ToDoubleRegister(source));
  } else if (source->IsDoubleStackSlot()) {
    __ lfd(kScratchDoubleReg, cgen_->ToMemOperand(source));
  } else {
    UNREACHABLE();
  }
  // This move will be done by restoring the saved value to the destination.
  moves_[index].Eliminate();
}


void LGapResolver::RestoreValue() {
  DCHECK(in_cycle_);
  DCHECK(saved_destination_ != NULL);

  // Spilled value is in kSavedValueRegister or kScratchDoubleReg.
  if (saved_destination_->IsRegister()) {
    __ mr(cgen_->ToRegister(saved_destination_), kSavedValueRegister);
  } else if (saved_destination_->IsStackSlot()) {
    __ StoreP(kSavedValueRegister, cgen_->ToMemOperand(saved_destination_));
  } else if (saved_destination_->IsDoubleRegister()) {
    __ fmr(cgen_->ToDoubleRegister(saved_destination_), kScratchDoubleReg);
  } else if (saved_destination_->IsDoubleStackSlot()) {
    __ stfd(kScratchDoubleReg, cgen_->ToMemOperand(saved_destination_));
  } else {
    UNREACHABLE();
  }

  in_cycle_ = false;
  saved_destination_ = NULL;
}


void LGapResolver::EmitMove(int index) {
  LOperand* source = moves_[index].source();
  LOperand* destination = moves_[index].destination();

  // Dispatch on the source and destination operand kinds.  Not all
  // combinations are possible.
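  // The cases below cover register, stack-slot, constant, double-register,
  // and double-stack-slot sources, each paired only with the destination
  // kinds that can occur for it.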

  if (source->IsRegister()) {
    Register source_register = cgen_->ToRegister(source);
    if (destination->IsRegister()) {
      __ mr(cgen_->ToRegister(destination), source_register);
    } else {
      DCHECK(destination->IsStackSlot());
      __ StoreP(source_register, cgen_->ToMemOperand(destination));
    }
  } else if (source->IsStackSlot()) {
    MemOperand source_operand = cgen_->ToMemOperand(source);
    if (destination->IsRegister()) {
      __ LoadP(cgen_->ToRegister(destination), source_operand);
    } else {
      DCHECK(destination->IsStackSlot());
      MemOperand destination_operand = cgen_->ToMemOperand(destination);
      if (in_cycle_) {
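        // kSavedValueRegister currently holds the value saved by
        // BreakCycle, so use ip as the temporary for this memory-to-memory
        // copy instead.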
        __ LoadP(ip, source_operand);
        __ StoreP(ip, destination_operand);
      } else {
        __ LoadP(kSavedValueRegister, source_operand);
        __ StoreP(kSavedValueRegister, destination_operand);
      }
    }

  } else if (source->IsConstantOperand()) {
    LConstantOperand* constant_source = LConstantOperand::cast(source);
    if (destination->IsRegister()) {
      Register dst = cgen_->ToRegister(destination);
      if (cgen_->IsInteger32(constant_source)) {
        cgen_->EmitLoadIntegerConstant(constant_source, dst);
      } else {
        __ Move(dst, cgen_->ToHandle(constant_source));
      }
    } else if (destination->IsDoubleRegister()) {
      DoubleRegister result = cgen_->ToDoubleRegister(destination);
      double v = cgen_->ToDouble(constant_source);
      __ LoadDoubleLiteral(result, v, ip);
    } else {
      DCHECK(destination->IsStackSlot());
      DCHECK(!in_cycle_);  // Constant moves happen after all cycles are gone.
      if (cgen_->IsInteger32(constant_source)) {
        cgen_->EmitLoadIntegerConstant(constant_source, kSavedValueRegister);
      } else {
        __ Move(kSavedValueRegister, cgen_->ToHandle(constant_source));
      }
      __ StoreP(kSavedValueRegister, cgen_->ToMemOperand(destination));
    }

  } else if (source->IsDoubleRegister()) {
    DoubleRegister source_register = cgen_->ToDoubleRegister(source);
    if (destination->IsDoubleRegister()) {
      __ fmr(cgen_->ToDoubleRegister(destination), source_register);
    } else {
      DCHECK(destination->IsDoubleStackSlot());
      __ stfd(source_register, cgen_->ToMemOperand(destination));
    }

  } else if (source->IsDoubleStackSlot()) {
    MemOperand source_operand = cgen_->ToMemOperand(source);
    if (destination->IsDoubleRegister()) {
      __ lfd(cgen_->ToDoubleRegister(destination), source_operand);
    } else {
      DCHECK(destination->IsDoubleStackSlot());
      MemOperand destination_operand = cgen_->ToMemOperand(destination);
      if (in_cycle_) {
        // kScratchDoubleReg was used to break the cycle,
        // but kSavedValueRegister is free.
#if V8_TARGET_ARCH_PPC64
        __ ld(kSavedValueRegister, source_operand);
        __ std(kSavedValueRegister, destination_operand);
#else
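        // On 32-bit PPC the 64-bit double spans two stack words, so the
        // low and high halves are copied separately.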
        MemOperand source_high_operand = cgen_->ToHighMemOperand(source);
        MemOperand destination_high_operand =
            cgen_->ToHighMemOperand(destination);
        __ lwz(kSavedValueRegister, source_operand);
        __ stw(kSavedValueRegister, destination_operand);
        __ lwz(kSavedValueRegister, source_high_operand);
        __ stw(kSavedValueRegister, destination_high_operand);
#endif
      } else {
        __ lfd(kScratchDoubleReg, source_operand);
        __ stfd(kScratchDoubleReg, destination_operand);
      }
    }
  } else {
    UNREACHABLE();
  }

  moves_[index].Eliminate();
}


#undef __
}  // namespace internal
}  // namespace v8