• Home
  • Line#
  • Scopes#
  • Navigate#
  • Raw
  • Download
1 // Copyright 2014 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4 
5 #include "src/base/adapters.h"
6 #include "src/compiler/instruction-selector-impl.h"
7 #include "src/compiler/node-matchers.h"
8 #include "src/compiler/node-properties.h"
9 #include "src/ppc/frames-ppc.h"
10 
11 namespace v8 {
12 namespace internal {
13 namespace compiler {
14 
// Classes of immediate operands the PPC instruction selector understands.
// Each mode describes which constant values may be encoded directly into an
// instruction instead of being materialized into a register (see
// PPCOperandGenerator::CanBeImmediate for the exact acceptance tests).
enum ImmediateMode {
  kInt16Imm,               // signed 16-bit value
  kInt16Imm_Unsigned,      // unsigned 16-bit value
  kInt16Imm_Negate,        // value whose negation fits in signed 16 bits
  kInt16Imm_4ByteAligned,  // signed 16-bit value with the low two bits clear
  kShift32Imm,             // 32-bit shift amount: [0, 32)
  kShift64Imm,             // 64-bit shift amount: [0, 64)
  kNoImmediate             // operand must always be in a register
};
24 
25 
26 // Adds PPC-specific methods for generating operands.
27 class PPCOperandGenerator final : public OperandGenerator {
28  public:
PPCOperandGenerator(InstructionSelector * selector)29   explicit PPCOperandGenerator(InstructionSelector* selector)
30       : OperandGenerator(selector) {}
31 
UseOperand(Node * node,ImmediateMode mode)32   InstructionOperand UseOperand(Node* node, ImmediateMode mode) {
33     if (CanBeImmediate(node, mode)) {
34       return UseImmediate(node);
35     }
36     return UseRegister(node);
37   }
38 
CanBeImmediate(Node * node,ImmediateMode mode)39   bool CanBeImmediate(Node* node, ImmediateMode mode) {
40     int64_t value;
41     if (node->opcode() == IrOpcode::kInt32Constant)
42       value = OpParameter<int32_t>(node);
43     else if (node->opcode() == IrOpcode::kInt64Constant)
44       value = OpParameter<int64_t>(node);
45     else
46       return false;
47     return CanBeImmediate(value, mode);
48   }
49 
CanBeImmediate(int64_t value,ImmediateMode mode)50   bool CanBeImmediate(int64_t value, ImmediateMode mode) {
51     switch (mode) {
52       case kInt16Imm:
53         return is_int16(value);
54       case kInt16Imm_Unsigned:
55         return is_uint16(value);
56       case kInt16Imm_Negate:
57         return is_int16(-value);
58       case kInt16Imm_4ByteAligned:
59         return is_int16(value) && !(value & 3);
60       case kShift32Imm:
61         return 0 <= value && value < 32;
62       case kShift64Imm:
63         return 0 <= value && value < 64;
64       case kNoImmediate:
65         return false;
66     }
67     return false;
68   }
69 };
70 
71 
72 namespace {
73 
VisitRR(InstructionSelector * selector,InstructionCode opcode,Node * node)74 void VisitRR(InstructionSelector* selector, InstructionCode opcode,
75              Node* node) {
76   PPCOperandGenerator g(selector);
77   selector->Emit(opcode, g.DefineAsRegister(node),
78                  g.UseRegister(node->InputAt(0)));
79 }
80 
VisitRRR(InstructionSelector * selector,InstructionCode opcode,Node * node)81 void VisitRRR(InstructionSelector* selector, InstructionCode opcode,
82               Node* node) {
83   PPCOperandGenerator g(selector);
84   selector->Emit(opcode, g.DefineAsRegister(node),
85                  g.UseRegister(node->InputAt(0)),
86                  g.UseRegister(node->InputAt(1)));
87 }
88 
VisitRRO(InstructionSelector * selector,InstructionCode opcode,Node * node,ImmediateMode operand_mode)89 void VisitRRO(InstructionSelector* selector, InstructionCode opcode, Node* node,
90               ImmediateMode operand_mode) {
91   PPCOperandGenerator g(selector);
92   selector->Emit(opcode, g.DefineAsRegister(node),
93                  g.UseRegister(node->InputAt(0)),
94                  g.UseOperand(node->InputAt(1), operand_mode));
95 }
96 
97 
98 #if V8_TARGET_ARCH_PPC64
VisitTryTruncateDouble(InstructionSelector * selector,InstructionCode opcode,Node * node)99 void VisitTryTruncateDouble(InstructionSelector* selector,
100                             InstructionCode opcode, Node* node) {
101   PPCOperandGenerator g(selector);
102   InstructionOperand inputs[] = {g.UseRegister(node->InputAt(0))};
103   InstructionOperand outputs[2];
104   size_t output_count = 0;
105   outputs[output_count++] = g.DefineAsRegister(node);
106 
107   Node* success_output = NodeProperties::FindProjection(node, 1);
108   if (success_output) {
109     outputs[output_count++] = g.DefineAsRegister(success_output);
110   }
111 
112   selector->Emit(opcode, output_count, outputs, 1, inputs);
113 }
114 #endif
115 
116 
// Shared routine for multiple binary operations.
//
// Emits |opcode| for the binary operation |node| combined with the flags
// continuation |cont| (none / branch / deoptimize / set).  The right operand
// may be encoded as an immediate when |operand_mode| allows it.  The operand
// ordering below is significant: the code generator interprets inputs and
// outputs positionally.
template <typename Matcher>
void VisitBinop(InstructionSelector* selector, Node* node,
                InstructionCode opcode, ImmediateMode operand_mode,
                FlagsContinuation* cont) {
  PPCOperandGenerator g(selector);
  Matcher m(node);
  InstructionOperand inputs[4];
  size_t input_count = 0;
  InstructionOperand outputs[2];
  size_t output_count = 0;

  inputs[input_count++] = g.UseRegister(m.left().node());
  inputs[input_count++] = g.UseOperand(m.right().node(), operand_mode);

  // A branch continuation consumes its true/false target blocks as two
  // additional inputs.
  if (cont->IsBranch()) {
    inputs[input_count++] = g.Label(cont->true_block());
    inputs[input_count++] = g.Label(cont->false_block());
  }

  if (cont->IsDeoptimize()) {
    // If we can deoptimize as a result of the binop, we need to make sure that
    // the deopt inputs are not overwritten by the binop result. One way
    // to achieve that is to declare the output register as same-as-first.
    outputs[output_count++] = g.DefineSameAsFirst(node);
  } else {
    outputs[output_count++] = g.DefineAsRegister(node);
  }
  // A "set" continuation additionally materializes the flags result as a
  // value in a register.
  if (cont->IsSet()) {
    outputs[output_count++] = g.DefineAsRegister(cont->result());
  }

  DCHECK_NE(0u, input_count);
  DCHECK_NE(0u, output_count);
  DCHECK_GE(arraysize(inputs), input_count);
  DCHECK_GE(arraysize(outputs), output_count);

  // Fold the continuation's condition/reason into the opcode bits.
  opcode = cont->Encode(opcode);
  if (cont->IsDeoptimize()) {
    selector->EmitDeoptimize(opcode, output_count, outputs, input_count, inputs,
                             cont->reason(), cont->frame_state());
  } else {
    selector->Emit(opcode, output_count, outputs, input_count, inputs);
  }
}
162 
163 
164 // Shared routine for multiple binary operations.
165 template <typename Matcher>
VisitBinop(InstructionSelector * selector,Node * node,InstructionCode opcode,ImmediateMode operand_mode)166 void VisitBinop(InstructionSelector* selector, Node* node,
167                 InstructionCode opcode, ImmediateMode operand_mode) {
168   FlagsContinuation cont;
169   VisitBinop<Matcher>(selector, node, opcode, operand_mode, &cont);
170 }
171 
172 }  // namespace
173 
174 
// Selects a load instruction for |node|, choosing the opcode from the loaded
// machine representation and the addressing mode from whether base or offset
// can be encoded as an immediate displacement.
void InstructionSelector::VisitLoad(Node* node) {
  LoadRepresentation load_rep = LoadRepresentationOf(node->op());
  PPCOperandGenerator g(this);
  Node* base = node->InputAt(0);
  Node* offset = node->InputAt(1);
  ArchOpcode opcode = kArchNop;
  ImmediateMode mode = kInt16Imm;
  switch (load_rep.representation()) {
    case MachineRepresentation::kFloat32:
      opcode = kPPC_LoadFloat32;
      break;
    case MachineRepresentation::kFloat64:
      opcode = kPPC_LoadDouble;
      break;
    case MachineRepresentation::kBit:  // Fall through.
    case MachineRepresentation::kWord8:
      opcode = load_rep.IsSigned() ? kPPC_LoadWordS8 : kPPC_LoadWordU8;
      break;
    case MachineRepresentation::kWord16:
      opcode = load_rep.IsSigned() ? kPPC_LoadWordS16 : kPPC_LoadWordU16;
      break;
#if !V8_TARGET_ARCH_PPC64
    // On 32-bit targets, tagged values are loaded as 32-bit words.
    case MachineRepresentation::kTaggedSigned:   // Fall through.
    case MachineRepresentation::kTaggedPointer:  // Fall through.
    case MachineRepresentation::kTagged:  // Fall through.
#endif
    case MachineRepresentation::kWord32:
      opcode = kPPC_LoadWordU32;
      break;
#if V8_TARGET_ARCH_PPC64
    // On 64-bit targets, tagged values are full 64-bit words.
    case MachineRepresentation::kTaggedSigned:   // Fall through.
    case MachineRepresentation::kTaggedPointer:  // Fall through.
    case MachineRepresentation::kTagged:  // Fall through.
    case MachineRepresentation::kWord64:
      opcode = kPPC_LoadWord64;
      // 64-bit loads require the 4-byte-aligned immediate form
      // (kInt16Imm_4ByteAligned).
      mode = kInt16Imm_4ByteAligned;
      break;
#else
    case MachineRepresentation::kWord64:  // Fall through.
#endif
    case MachineRepresentation::kSimd128:  // Fall through.
    case MachineRepresentation::kNone:
      UNREACHABLE();
      return;
  }
  if (g.CanBeImmediate(offset, mode)) {
    // Base register + immediate offset.
    Emit(opcode | AddressingModeField::encode(kMode_MRI),
         g.DefineAsRegister(node), g.UseRegister(base), g.UseImmediate(offset));
  } else if (g.CanBeImmediate(base, mode)) {
    // Commuted form: offset register + immediate base.
    Emit(opcode | AddressingModeField::encode(kMode_MRI),
         g.DefineAsRegister(node), g.UseRegister(offset), g.UseImmediate(base));
  } else {
    // Neither part is an encodable constant: register + register.
    Emit(opcode | AddressingModeField::encode(kMode_MRR),
         g.DefineAsRegister(node), g.UseRegister(base), g.UseRegister(offset));
  }
}
231 
// Protected loads are not implemented for PPC yet.
void InstructionSelector::VisitProtectedLoad(Node* node) {
  // TODO(eholk): implement protected loads on PPC.
  UNIMPLEMENTED();
}
236 
// Selects a store instruction for |node|.  Stores requiring a write barrier
// are lowered to kArchStoreWithWriteBarrier; all other stores pick a
// representation-specific PPC store opcode and an addressing mode analogous
// to VisitLoad.
void InstructionSelector::VisitStore(Node* node) {
  PPCOperandGenerator g(this);
  Node* base = node->InputAt(0);
  Node* offset = node->InputAt(1);
  Node* value = node->InputAt(2);

  StoreRepresentation store_rep = StoreRepresentationOf(node->op());
  WriteBarrierKind write_barrier_kind = store_rep.write_barrier_kind();
  MachineRepresentation rep = store_rep.representation();

  if (write_barrier_kind != kNoWriteBarrier) {
    DCHECK(CanBeTaggedPointer(rep));
    AddressingMode addressing_mode;
    InstructionOperand inputs[3];
    size_t input_count = 0;
    // UseUniqueRegister keeps these inputs distinct from other operands of
    // the write-barrier instruction.
    inputs[input_count++] = g.UseUniqueRegister(base);
    // OutOfLineRecordWrite uses the offset in an 'add' instruction as well as
    // for the store itself, so we must check compatibility with both.
    if (g.CanBeImmediate(offset, kInt16Imm)
#if V8_TARGET_ARCH_PPC64
        && g.CanBeImmediate(offset, kInt16Imm_4ByteAligned)
#endif
            ) {
      inputs[input_count++] = g.UseImmediate(offset);
      addressing_mode = kMode_MRI;
    } else {
      inputs[input_count++] = g.UseUniqueRegister(offset);
      addressing_mode = kMode_MRR;
    }
    inputs[input_count++] = g.UseUniqueRegister(value);
    // Map the barrier kind to the record-write mode encoded into MiscField.
    RecordWriteMode record_write_mode = RecordWriteMode::kValueIsAny;
    switch (write_barrier_kind) {
      case kNoWriteBarrier:
        UNREACHABLE();
        break;
      case kMapWriteBarrier:
        record_write_mode = RecordWriteMode::kValueIsMap;
        break;
      case kPointerWriteBarrier:
        record_write_mode = RecordWriteMode::kValueIsPointer;
        break;
      case kFullWriteBarrier:
        record_write_mode = RecordWriteMode::kValueIsAny;
        break;
    }
    InstructionOperand temps[] = {g.TempRegister(), g.TempRegister()};
    size_t const temp_count = arraysize(temps);
    InstructionCode code = kArchStoreWithWriteBarrier;
    code |= AddressingModeField::encode(addressing_mode);
    code |= MiscField::encode(static_cast<int>(record_write_mode));
    Emit(code, 0, nullptr, input_count, inputs, temp_count, temps);
  } else {
    // Plain store: select the opcode by representation, as in VisitLoad.
    ArchOpcode opcode = kArchNop;
    ImmediateMode mode = kInt16Imm;
    switch (rep) {
      case MachineRepresentation::kFloat32:
        opcode = kPPC_StoreFloat32;
        break;
      case MachineRepresentation::kFloat64:
        opcode = kPPC_StoreDouble;
        break;
      case MachineRepresentation::kBit:  // Fall through.
      case MachineRepresentation::kWord8:
        opcode = kPPC_StoreWord8;
        break;
      case MachineRepresentation::kWord16:
        opcode = kPPC_StoreWord16;
        break;
#if !V8_TARGET_ARCH_PPC64
      case MachineRepresentation::kTaggedSigned:   // Fall through.
      case MachineRepresentation::kTaggedPointer:  // Fall through.
      case MachineRepresentation::kTagged:  // Fall through.
#endif
      case MachineRepresentation::kWord32:
        opcode = kPPC_StoreWord32;
        break;
#if V8_TARGET_ARCH_PPC64
      case MachineRepresentation::kTaggedSigned:   // Fall through.
      case MachineRepresentation::kTaggedPointer:  // Fall through.
      case MachineRepresentation::kTagged:  // Fall through.
      case MachineRepresentation::kWord64:
        opcode = kPPC_StoreWord64;
        // 64-bit stores require the 4-byte-aligned immediate form.
        mode = kInt16Imm_4ByteAligned;
        break;
#else
      case MachineRepresentation::kWord64:  // Fall through.
#endif
      case MachineRepresentation::kSimd128:  // Fall through.
      case MachineRepresentation::kNone:
        UNREACHABLE();
        return;
    }
    if (g.CanBeImmediate(offset, mode)) {
      Emit(opcode | AddressingModeField::encode(kMode_MRI), g.NoOutput(),
           g.UseRegister(base), g.UseImmediate(offset), g.UseRegister(value));
    } else if (g.CanBeImmediate(base, mode)) {
      // Commuted form: offset register + immediate base.
      Emit(opcode | AddressingModeField::encode(kMode_MRI), g.NoOutput(),
           g.UseRegister(offset), g.UseImmediate(base), g.UseRegister(value));
    } else {
      Emit(opcode | AddressingModeField::encode(kMode_MRR), g.NoOutput(),
           g.UseRegister(base), g.UseRegister(offset), g.UseRegister(value));
    }
  }
}
341 
// Architecture supports unaligned access, therefore VisitLoad is used instead.
void InstructionSelector::VisitUnalignedLoad(Node* node) { UNREACHABLE(); }
344 
// Architecture supports unaligned access, therefore VisitStore is used instead.
void InstructionSelector::VisitUnalignedStore(Node* node) { UNREACHABLE(); }
347 
// Selects a bounds-checked load.  The emitted instruction receives base,
// offset and length; the length may be encoded as an unsigned 16-bit
// immediate.
void InstructionSelector::VisitCheckedLoad(Node* node) {
  CheckedLoadRepresentation load_rep = CheckedLoadRepresentationOf(node->op());
  PPCOperandGenerator g(this);
  Node* const base = node->InputAt(0);
  Node* const offset = node->InputAt(1);
  Node* const length = node->InputAt(2);
  ArchOpcode opcode = kArchNop;
  // Select the checked-load opcode by representation.
  switch (load_rep.representation()) {
    case MachineRepresentation::kWord8:
      opcode = load_rep.IsSigned() ? kCheckedLoadInt8 : kCheckedLoadUint8;
      break;
    case MachineRepresentation::kWord16:
      opcode = load_rep.IsSigned() ? kCheckedLoadInt16 : kCheckedLoadUint16;
      break;
    case MachineRepresentation::kWord32:
      opcode = kCheckedLoadWord32;
      break;
#if V8_TARGET_ARCH_PPC64
    case MachineRepresentation::kWord64:
      opcode = kCheckedLoadWord64;
      break;
#endif
    case MachineRepresentation::kFloat32:
      opcode = kCheckedLoadFloat32;
      break;
    case MachineRepresentation::kFloat64:
      opcode = kCheckedLoadFloat64;
      break;
    // No checked-load variants exist for the representations below.
    case MachineRepresentation::kBit:     // Fall through.
    case MachineRepresentation::kTaggedSigned:   // Fall through.
    case MachineRepresentation::kTaggedPointer:  // Fall through.
    case MachineRepresentation::kTagged:  // Fall through.
#if !V8_TARGET_ARCH_PPC64
    case MachineRepresentation::kWord64:  // Fall through.
#endif
    case MachineRepresentation::kSimd128:  // Fall through.
    case MachineRepresentation::kNone:
      UNREACHABLE();
      return;
  }
  // Checked loads always use register+register addressing.
  AddressingMode addressingMode = kMode_MRR;
  Emit(opcode | AddressingModeField::encode(addressingMode),
       g.DefineAsRegister(node), g.UseRegister(base), g.UseRegister(offset),
       g.UseOperand(length, kInt16Imm_Unsigned));
}
393 
394 
// Selects a bounds-checked store.  The emitted instruction receives base,
// offset, length and value; the length may be encoded as an unsigned 16-bit
// immediate.
void InstructionSelector::VisitCheckedStore(Node* node) {
  MachineRepresentation rep = CheckedStoreRepresentationOf(node->op());
  PPCOperandGenerator g(this);
  Node* const base = node->InputAt(0);
  Node* const offset = node->InputAt(1);
  Node* const length = node->InputAt(2);
  Node* const value = node->InputAt(3);
  ArchOpcode opcode = kArchNop;
  // Select the checked-store opcode by representation.
  switch (rep) {
    case MachineRepresentation::kWord8:
      opcode = kCheckedStoreWord8;
      break;
    case MachineRepresentation::kWord16:
      opcode = kCheckedStoreWord16;
      break;
    case MachineRepresentation::kWord32:
      opcode = kCheckedStoreWord32;
      break;
#if V8_TARGET_ARCH_PPC64
    case MachineRepresentation::kWord64:
      opcode = kCheckedStoreWord64;
      break;
#endif
    case MachineRepresentation::kFloat32:
      opcode = kCheckedStoreFloat32;
      break;
    case MachineRepresentation::kFloat64:
      opcode = kCheckedStoreFloat64;
      break;
    // No checked-store variants exist for the representations below.
    case MachineRepresentation::kBit:     // Fall through.
    case MachineRepresentation::kTaggedSigned:   // Fall through.
    case MachineRepresentation::kTaggedPointer:  // Fall through.
    case MachineRepresentation::kTagged:  // Fall through.
#if !V8_TARGET_ARCH_PPC64
    case MachineRepresentation::kWord64:  // Fall through.
#endif
    case MachineRepresentation::kSimd128:  // Fall through.
    case MachineRepresentation::kNone:
      UNREACHABLE();
      return;
  }
  // Checked stores always use register+register addressing.
  AddressingMode addressingMode = kMode_MRR;
  Emit(opcode | AddressingModeField::encode(addressingMode), g.NoOutput(),
       g.UseRegister(base), g.UseRegister(offset),
       g.UseOperand(length, kInt16Imm_Unsigned), g.UseRegister(value));
}
441 
442 
// Shared routine for And/Or, fusing an inverted (xor -1) input into the
// complemented form of the operation (e.g. kPPC_AndComplement) when the
// xor node can be covered (i.e. absorbed into this instruction).
template <typename Matcher>
static void VisitLogical(InstructionSelector* selector, Node* node, Matcher* m,
                         ArchOpcode opcode, bool left_can_cover,
                         bool right_can_cover, ImmediateMode imm_mode) {
  PPCOperandGenerator g(selector);

  // Map instruction to equivalent operation with inverted right input.
  ArchOpcode inv_opcode = opcode;
  switch (opcode) {
    case kPPC_And:
      inv_opcode = kPPC_AndComplement;
      break;
    case kPPC_Or:
      inv_opcode = kPPC_OrComplement;
      break;
    default:
      UNREACHABLE();
  }

  // Select Logical(y, ~x) for Logical(Xor(x, -1), y).
  if ((m->left().IsWord32Xor() || m->left().IsWord64Xor()) && left_can_cover) {
    Matcher mleft(m->left().node());
    if (mleft.right().Is(-1)) {
      // Note the operand swap: the complemented operand goes second.
      selector->Emit(inv_opcode, g.DefineAsRegister(node),
                     g.UseRegister(m->right().node()),
                     g.UseRegister(mleft.left().node()));
      return;
    }
  }

  // Select Logical(x, ~y) for Logical(x, Xor(y, -1)).
  if ((m->right().IsWord32Xor() || m->right().IsWord64Xor()) &&
      right_can_cover) {
    Matcher mright(m->right().node());
    if (mright.right().Is(-1)) {
      // TODO(all): support shifted operand on right.
      selector->Emit(inv_opcode, g.DefineAsRegister(node),
                     g.UseRegister(m->left().node()),
                     g.UseRegister(mright.left().node()));
      return;
    }
  }

  // No fusion opportunity: emit the plain binary operation.
  VisitBinop<Matcher>(selector, node, opcode, imm_mode);
}
488 
489 
IsContiguousMask32(uint32_t value,int * mb,int * me)490 static inline bool IsContiguousMask32(uint32_t value, int* mb, int* me) {
491   int mask_width = base::bits::CountPopulation32(value);
492   int mask_msb = base::bits::CountLeadingZeros32(value);
493   int mask_lsb = base::bits::CountTrailingZeros32(value);
494   if ((mask_width == 0) || (mask_msb + mask_width + mask_lsb != 32))
495     return false;
496   *mb = mask_lsb + mask_width - 1;
497   *me = mask_lsb;
498   return true;
499 }
500 
501 
502 #if V8_TARGET_ARCH_PPC64
IsContiguousMask64(uint64_t value,int * mb,int * me)503 static inline bool IsContiguousMask64(uint64_t value, int* mb, int* me) {
504   int mask_width = base::bits::CountPopulation64(value);
505   int mask_msb = base::bits::CountLeadingZeros64(value);
506   int mask_lsb = base::bits::CountTrailingZeros64(value);
507   if ((mask_width == 0) || (mask_msb + mask_width + mask_lsb != 64))
508     return false;
509   *mb = mask_lsb + mask_width - 1;
510   *me = mask_lsb;
511   return true;
512 }
513 #endif
514 
515 
// TODO(mbrandy): Absorb rotate-right into rlwinm?
//
// Selects Word32And.  When the right operand is a constant contiguous mask,
// the and (and possibly a covered shift of the left operand) is folded into
// a single rotate-left-and-mask (kPPC_RotLeftAndMask32).  Otherwise, falls
// back to VisitLogical, which can fuse complemented operands.
void InstructionSelector::VisitWord32And(Node* node) {
  PPCOperandGenerator g(this);
  Int32BinopMatcher m(node);
  int mb = 0;
  int me = 0;
  if (m.right().HasValue() && IsContiguousMask32(m.right().Value(), &mb, &me)) {
    int sh = 0;
    Node* left = m.left().node();
    if ((m.left().IsWord32Shr() || m.left().IsWord32Shl()) &&
        CanCover(node, left)) {
      // Try to absorb left/right shift into rlwinm
      Int32BinopMatcher mleft(m.left().node());
      if (mleft.right().IsInRange(0, 31)) {
        left = mleft.left().node();
        sh = mleft.right().Value();
        if (m.left().IsWord32Shr()) {
          // Adjust the mask such that it doesn't include any rotated bits.
          if (mb > 31 - sh) mb = 31 - sh;
          // A logical right shift by sh equals a rotate left by (32 - sh)
          // once the rotated-in bits are masked off.
          sh = (32 - sh) & 0x1f;
        } else {
          // Adjust the mask such that it doesn't include any rotated bits.
          if (me < sh) me = sh;
        }
      }
    }
    // Only emit if the adjusted mask bounds are still a valid run.
    if (mb >= me) {
      Emit(kPPC_RotLeftAndMask32, g.DefineAsRegister(node), g.UseRegister(left),
           g.TempImmediate(sh), g.TempImmediate(mb), g.TempImmediate(me));
      return;
    }
  }
  VisitLogical<Int32BinopMatcher>(
      this, node, &m, kPPC_And, CanCover(node, m.left().node()),
      CanCover(node, m.right().node()), kInt16Imm_Unsigned);
}
552 
553 
554 #if V8_TARGET_ARCH_PPC64
// TODO(mbrandy): Absorb rotate-right into rldic?
//
// Selects Word64And.  Like VisitWord32And, folds a constant contiguous mask
// (and possibly a covered shift) into a single rotate-and-clear instruction;
// otherwise falls back to VisitLogical.
void InstructionSelector::VisitWord64And(Node* node) {
  PPCOperandGenerator g(this);
  Int64BinopMatcher m(node);
  int mb = 0;
  int me = 0;
  if (m.right().HasValue() && IsContiguousMask64(m.right().Value(), &mb, &me)) {
    int sh = 0;
    Node* left = m.left().node();
    if ((m.left().IsWord64Shr() || m.left().IsWord64Shl()) &&
        CanCover(node, left)) {
      // Try to absorb left/right shift into rldic
      Int64BinopMatcher mleft(m.left().node());
      if (mleft.right().IsInRange(0, 63)) {
        left = mleft.left().node();
        sh = mleft.right().Value();
        if (m.left().IsWord64Shr()) {
          // Adjust the mask such that it doesn't include any rotated bits.
          if (mb > 63 - sh) mb = 63 - sh;
          // Convert the right shift to the equivalent left-rotate amount.
          sh = (64 - sh) & 0x3f;
        } else {
          // Adjust the mask such that it doesn't include any rotated bits.
          if (me < sh) me = sh;
        }
      }
    }
    if (mb >= me) {
      // Pick the 64-bit rotate-and-clear variant that matches the mask shape.
      bool match = false;
      ArchOpcode opcode;
      int mask;
      if (me == 0) {
        // Mask reaches bit 0: only high bits are cleared.
        match = true;
        opcode = kPPC_RotLeftAndClearLeft64;
        mask = mb;
      } else if (mb == 63) {
        // Mask reaches bit 63: only low bits are cleared.
        match = true;
        opcode = kPPC_RotLeftAndClearRight64;
        mask = me;
      } else if (sh && me <= sh && m.left().IsWord64Shl()) {
        // Interior mask, representable when the left-shift amount bounds it.
        match = true;
        opcode = kPPC_RotLeftAndClear64;
        mask = mb;
      }
      if (match) {
        Emit(opcode, g.DefineAsRegister(node), g.UseRegister(left),
             g.TempImmediate(sh), g.TempImmediate(mask));
        return;
      }
    }
  }
  VisitLogical<Int64BinopMatcher>(
      this, node, &m, kPPC_And, CanCover(node, m.left().node()),
      CanCover(node, m.right().node()), kInt16Imm_Unsigned);
}
609 #endif
610 
611 
VisitWord32Or(Node * node)612 void InstructionSelector::VisitWord32Or(Node* node) {
613   Int32BinopMatcher m(node);
614   VisitLogical<Int32BinopMatcher>(
615       this, node, &m, kPPC_Or, CanCover(node, m.left().node()),
616       CanCover(node, m.right().node()), kInt16Imm_Unsigned);
617 }
618 
619 
620 #if V8_TARGET_ARCH_PPC64
VisitWord64Or(Node * node)621 void InstructionSelector::VisitWord64Or(Node* node) {
622   Int64BinopMatcher m(node);
623   VisitLogical<Int64BinopMatcher>(
624       this, node, &m, kPPC_Or, CanCover(node, m.left().node()),
625       CanCover(node, m.right().node()), kInt16Imm_Unsigned);
626 }
627 #endif
628 
629 
VisitWord32Xor(Node * node)630 void InstructionSelector::VisitWord32Xor(Node* node) {
631   PPCOperandGenerator g(this);
632   Int32BinopMatcher m(node);
633   if (m.right().Is(-1)) {
634     Emit(kPPC_Not, g.DefineAsRegister(node), g.UseRegister(m.left().node()));
635   } else {
636     VisitBinop<Int32BinopMatcher>(this, node, kPPC_Xor, kInt16Imm_Unsigned);
637   }
638 }
639 
640 
641 #if V8_TARGET_ARCH_PPC64
VisitWord64Xor(Node * node)642 void InstructionSelector::VisitWord64Xor(Node* node) {
643   PPCOperandGenerator g(this);
644   Int64BinopMatcher m(node);
645   if (m.right().Is(-1)) {
646     Emit(kPPC_Not, g.DefineAsRegister(node), g.UseRegister(m.left().node()));
647   } else {
648     VisitBinop<Int64BinopMatcher>(this, node, kPPC_Xor, kInt16Imm_Unsigned);
649   }
650 }
651 #endif
652 
653 
// Selects Word32Shl, folding (x & contiguous_mask) << sh into a single
// rotate-left-and-mask (kPPC_RotLeftAndMask32) when the shifted mask is
// still contiguous.
void InstructionSelector::VisitWord32Shl(Node* node) {
  PPCOperandGenerator g(this);
  Int32BinopMatcher m(node);
  if (m.left().IsWord32And() && m.right().IsInRange(0, 31)) {
    // Try to absorb logical-and into rlwinm
    Int32BinopMatcher mleft(m.left().node());
    int sh = m.right().Value();
    int mb;
    int me;
    // Test the mask as it appears after shifting.
    if (mleft.right().HasValue() &&
        IsContiguousMask32(mleft.right().Value() << sh, &mb, &me)) {
      // Adjust the mask such that it doesn't include any rotated bits.
      if (me < sh) me = sh;
      if (mb >= me) {
        Emit(kPPC_RotLeftAndMask32, g.DefineAsRegister(node),
             g.UseRegister(mleft.left().node()), g.TempImmediate(sh),
             g.TempImmediate(mb), g.TempImmediate(me));
        return;
      }
    }
  }
  // Generic shift; the amount may be a [0, 32) immediate.
  VisitRRO(this, kPPC_ShiftLeft32, node, kShift32Imm);
}
677 
678 
679 #if V8_TARGET_ARCH_PPC64
// Selects Word64Shl, folding (x & contiguous_mask) << sh into one of the
// 64-bit rotate-and-clear instructions when the shifted mask remains
// contiguous and matches a representable shape.
void InstructionSelector::VisitWord64Shl(Node* node) {
  PPCOperandGenerator g(this);
  Int64BinopMatcher m(node);
  // TODO(mbrandy): eliminate left sign extension if right >= 32
  if (m.left().IsWord64And() && m.right().IsInRange(0, 63)) {
    // Try to absorb logical-and into rldic
    Int64BinopMatcher mleft(m.left().node());
    int sh = m.right().Value();
    int mb;
    int me;
    // Test the mask as it appears after shifting.
    if (mleft.right().HasValue() &&
        IsContiguousMask64(mleft.right().Value() << sh, &mb, &me)) {
      // Adjust the mask such that it doesn't include any rotated bits.
      if (me < sh) me = sh;
      if (mb >= me) {
        // Pick the rotate-and-clear variant matching the mask shape.
        bool match = false;
        ArchOpcode opcode;
        int mask;
        if (me == 0) {
          // Mask reaches bit 0: only high bits are cleared.
          match = true;
          opcode = kPPC_RotLeftAndClearLeft64;
          mask = mb;
        } else if (mb == 63) {
          // Mask reaches bit 63: only low bits are cleared.
          match = true;
          opcode = kPPC_RotLeftAndClearRight64;
          mask = me;
        } else if (sh && me <= sh) {
          // Interior mask bounded by the shift amount.
          match = true;
          opcode = kPPC_RotLeftAndClear64;
          mask = mb;
        }
        if (match) {
          Emit(opcode, g.DefineAsRegister(node),
               g.UseRegister(mleft.left().node()), g.TempImmediate(sh),
               g.TempImmediate(mask));
          return;
        }
      }
    }
  }
  // Generic shift; the amount may be a [0, 64) immediate.
  VisitRRO(this, kPPC_ShiftLeft64, node, kShift64Imm);
}
722 #endif
723 
724 
// Selects Word32Shr, folding (x & contiguous_mask) >> sh into a single
// rotate-left-and-mask (kPPC_RotLeftAndMask32) when the shifted mask is
// still contiguous.
void InstructionSelector::VisitWord32Shr(Node* node) {
  PPCOperandGenerator g(this);
  Int32BinopMatcher m(node);
  if (m.left().IsWord32And() && m.right().IsInRange(0, 31)) {
    // Try to absorb logical-and into rlwinm
    Int32BinopMatcher mleft(m.left().node());
    int sh = m.right().Value();
    int mb;
    int me;
    // Test the mask as it appears after the (unsigned) shift.
    if (mleft.right().HasValue() &&
        IsContiguousMask32((uint32_t)(mleft.right().Value()) >> sh, &mb, &me)) {
      // Adjust the mask such that it doesn't include any rotated bits.
      if (mb > 31 - sh) mb = 31 - sh;
      // A right shift by sh equals a left rotate by (32 - sh) under the mask.
      sh = (32 - sh) & 0x1f;
      if (mb >= me) {
        Emit(kPPC_RotLeftAndMask32, g.DefineAsRegister(node),
             g.UseRegister(mleft.left().node()), g.TempImmediate(sh),
             g.TempImmediate(mb), g.TempImmediate(me));
        return;
      }
    }
  }
  // Generic shift; the amount may be a [0, 32) immediate.
  VisitRRO(this, kPPC_ShiftRight32, node, kShift32Imm);
}
749 
750 #if V8_TARGET_ARCH_PPC64
// Selects Word64Shr, folding (x & contiguous_mask) >> sh into a 64-bit
// rotate-and-clear instruction when the shifted mask remains contiguous and
// touches one end of the word.
void InstructionSelector::VisitWord64Shr(Node* node) {
  PPCOperandGenerator g(this);
  Int64BinopMatcher m(node);
  if (m.left().IsWord64And() && m.right().IsInRange(0, 63)) {
    // Try to absorb logical-and into rldic
    Int64BinopMatcher mleft(m.left().node());
    int sh = m.right().Value();
    int mb;
    int me;
    // Test the mask as it appears after the (unsigned) shift.
    if (mleft.right().HasValue() &&
        IsContiguousMask64((uint64_t)(mleft.right().Value()) >> sh, &mb, &me)) {
      // Adjust the mask such that it doesn't include any rotated bits.
      if (mb > 63 - sh) mb = 63 - sh;
      // Convert the right shift to the equivalent left-rotate amount.
      sh = (64 - sh) & 0x3f;
      if (mb >= me) {
        // Only end-anchored masks are representable here (no interior case,
        // unlike VisitWord64Shl).
        bool match = false;
        ArchOpcode opcode;
        int mask;
        if (me == 0) {
          // Mask reaches bit 0: only high bits are cleared.
          match = true;
          opcode = kPPC_RotLeftAndClearLeft64;
          mask = mb;
        } else if (mb == 63) {
          // Mask reaches bit 63: only low bits are cleared.
          match = true;
          opcode = kPPC_RotLeftAndClearRight64;
          mask = me;
        }
        if (match) {
          Emit(opcode, g.DefineAsRegister(node),
               g.UseRegister(mleft.left().node()), g.TempImmediate(sh),
               g.TempImmediate(mask));
          return;
        }
      }
    }
  }
  // Generic shift; the amount may be a [0, 64) immediate.
  VisitRRO(this, kPPC_ShiftRight64, node, kShift64Imm);
}
789 #endif
790 
791 
// Selects Word32Sar, recognizing the (x << K) >> K sign-extension idiom for
// K == 16 (halfword) and K == 24 (byte).
void InstructionSelector::VisitWord32Sar(Node* node) {
  PPCOperandGenerator g(this);
  Int32BinopMatcher m(node);
  // Replace with sign extension for (x << K) >> K where K is 16 or 24.
  if (CanCover(node, m.left().node()) && m.left().IsWord32Shl()) {
    Int32BinopMatcher mleft(m.left().node());
    if (mleft.right().Is(16) && m.right().Is(16)) {
      Emit(kPPC_ExtendSignWord16, g.DefineAsRegister(node),
           g.UseRegister(mleft.left().node()));
      return;
    } else if (mleft.right().Is(24) && m.right().Is(24)) {
      Emit(kPPC_ExtendSignWord8, g.DefineAsRegister(node),
           g.UseRegister(mleft.left().node()));
      return;
    }
  }
  // Generic arithmetic shift; the amount may be a [0, 32) immediate.
  VisitRRO(this, kPPC_ShiftRightAlg32, node, kShift32Imm);
}
810 
811 #if !V8_TARGET_ARCH_PPC64
VisitPairBinop(InstructionSelector * selector,InstructionCode opcode,InstructionCode opcode2,Node * node)812 void VisitPairBinop(InstructionSelector* selector, InstructionCode opcode,
813                     InstructionCode opcode2, Node* node) {
814   PPCOperandGenerator g(selector);
815 
816   Node* projection1 = NodeProperties::FindProjection(node, 1);
817   if (projection1) {
818     // We use UseUniqueRegister here to avoid register sharing with the output
819     // registers.
820     InstructionOperand inputs[] = {
821         g.UseRegister(node->InputAt(0)), g.UseUniqueRegister(node->InputAt(1)),
822         g.UseRegister(node->InputAt(2)), g.UseUniqueRegister(node->InputAt(3))};
823 
824     InstructionOperand outputs[] = {
825         g.DefineAsRegister(node),
826         g.DefineAsRegister(NodeProperties::FindProjection(node, 1))};
827 
828     selector->Emit(opcode, 2, outputs, 4, inputs);
829   } else {
830     // The high word of the result is not used, so we emit the standard 32 bit
831     // instruction.
832     selector->Emit(opcode2, g.DefineSameAsFirst(node),
833                    g.UseRegister(node->InputAt(0)),
834                    g.UseRegister(node->InputAt(2)));
835   }
836 }
837 
// 64-bit add on a 32-bit target: pair instruction, or plain kPPC_Add when the
// high-word projection is unused (see VisitPairBinop).
void InstructionSelector::VisitInt32PairAdd(Node* node) {
  VisitPairBinop(this, kPPC_AddPair, kPPC_Add, node);
}

// 64-bit subtract on a 32-bit target: pair instruction, or plain kPPC_Sub
// when the high-word projection is unused.
void InstructionSelector::VisitInt32PairSub(Node* node) {
  VisitPairBinop(this, kPPC_SubPair, kPPC_Sub, node);
}
845 
VisitInt32PairMul(Node * node)846 void InstructionSelector::VisitInt32PairMul(Node* node) {
847   PPCOperandGenerator g(this);
848   Node* projection1 = NodeProperties::FindProjection(node, 1);
849   if (projection1) {
850     InstructionOperand inputs[] = {g.UseUniqueRegister(node->InputAt(0)),
851                                    g.UseUniqueRegister(node->InputAt(1)),
852                                    g.UseUniqueRegister(node->InputAt(2)),
853                                    g.UseUniqueRegister(node->InputAt(3))};
854 
855     InstructionOperand outputs[] = {
856         g.DefineAsRegister(node),
857         g.DefineAsRegister(NodeProperties::FindProjection(node, 1))};
858 
859     InstructionOperand temps[] = {g.TempRegister(), g.TempRegister()};
860 
861     Emit(kPPC_MulPair, 2, outputs, 4, inputs, 2, temps);
862   } else {
863     // The high word of the result is not used, so we emit the standard 32 bit
864     // instruction.
865     Emit(kPPC_Mul32, g.DefineSameAsFirst(node), g.UseRegister(node->InputAt(0)),
866          g.UseRegister(node->InputAt(2)));
867   }
868 }
869 
namespace {
// Shared routine for multiple shift operations.
// Lowers a 64-bit shift on a 32-bit target; inputs 0/1 are the low/high
// words, input 2 is the shift amount.
void VisitPairShift(InstructionSelector* selector, InstructionCode opcode,
                    Node* node) {
  PPCOperandGenerator g(selector);
  // We use g.UseUniqueRegister here to guarantee that there is
  // no register aliasing of input registers with output registers.
  Int32Matcher m(node->InputAt(2));
  InstructionOperand shift_operand;
  if (m.HasValue()) {
    // A constant shift amount can be encoded in the instruction directly.
    shift_operand = g.UseImmediate(m.node());
  } else {
    shift_operand = g.UseUniqueRegister(m.node());
  }

  InstructionOperand inputs[] = {g.UseUniqueRegister(node->InputAt(0)),
                                 g.UseUniqueRegister(node->InputAt(1)),
                                 shift_operand};

  Node* projection1 = NodeProperties::FindProjection(node, 1);

  InstructionOperand outputs[2];
  InstructionOperand temps[1];
  int32_t output_count = 0;
  int32_t temp_count = 0;

  outputs[output_count++] = g.DefineAsRegister(node);
  if (projection1) {
    outputs[output_count++] = g.DefineAsRegister(projection1);
  } else {
    // The high word is unused; give the instruction a scratch register for it.
    temps[temp_count++] = g.TempRegister();
  }

  selector->Emit(opcode, output_count, outputs, 3, inputs, temp_count, temps);
}
}  // namespace
906 
// 64-bit shifts on a 32-bit target, operating on a low/high register pair.
void InstructionSelector::VisitWord32PairShl(Node* node) {
  VisitPairShift(this, kPPC_ShiftLeftPair, node);
}

void InstructionSelector::VisitWord32PairShr(Node* node) {
  VisitPairShift(this, kPPC_ShiftRightPair, node);
}

void InstructionSelector::VisitWord32PairSar(Node* node) {
  VisitPairShift(this, kPPC_ShiftRightAlgPair, node);
}
918 #endif
919 
920 #if V8_TARGET_ARCH_PPC64
// Selects the instruction for a 64-bit arithmetic right shift. A (load >> 32)
// pattern is folded into a single sign-extending 32-bit load.
void InstructionSelector::VisitWord64Sar(Node* node) {
  PPCOperandGenerator g(this);
  Int64BinopMatcher m(node);
  if (CanCover(m.node(), m.left().node()) && m.left().IsLoad() &&
      m.right().Is(32)) {
    // Just load and sign-extend the interesting 4 bytes instead. This happens,
    // for example, when we're loading and untagging SMIs.
    BaseWithIndexAndDisplacement64Matcher mleft(m.left().node(),
                                                AddressOption::kAllowAll);
    if (mleft.matches() && mleft.index() == nullptr) {
      int64_t offset = 0;
      Node* displacement = mleft.displacement();
      if (displacement != nullptr) {
        Int64Matcher mdisplacement(displacement);
        DCHECK(mdisplacement.HasValue());
        offset = mdisplacement.Value();
      }
      // Shift the displacement to address the word holding the upper 32 bits
      // (presumably endian-dependent — see SmiWordOffset in frames-ppc.h).
      offset = SmiWordOffset(offset);
      if (g.CanBeImmediate(offset, kInt16Imm_4ByteAligned)) {
        Emit(kPPC_LoadWordS32 | AddressingModeField::encode(kMode_MRI),
             g.DefineAsRegister(node), g.UseRegister(mleft.base()),
             g.TempImmediate(offset));
        return;
      }
    }
  }
  // No load to fold: emit a plain 64-bit arithmetic right shift.
  VisitRRO(this, kPPC_ShiftRightAlg64, node, kShift64Imm);
}
949 #endif
950 
951 
// TODO(mbrandy): Absorb logical-and into rlwinm?
// 32-bit rotate right; immediate or register rotate amount.
void InstructionSelector::VisitWord32Ror(Node* node) {
  VisitRRO(this, kPPC_RotRight32, node, kShift32Imm);
}
956 
957 
958 #if V8_TARGET_ARCH_PPC64
// TODO(mbrandy): Absorb logical-and into rldic?
// 64-bit rotate right; immediate or register rotate amount.
void InstructionSelector::VisitWord64Ror(Node* node) {
  VisitRRO(this, kPPC_RotRight64, node, kShift64Imm);
}
963 #endif
964 
965 
// 32-bit count-leading-zeros.
void InstructionSelector::VisitWord32Clz(Node* node) {
  PPCOperandGenerator g(this);
  Emit(kPPC_Cntlz32, g.DefineAsRegister(node), g.UseRegister(node->InputAt(0)));
}
970 
971 
972 #if V8_TARGET_ARCH_PPC64
// 64-bit count-leading-zeros.
void InstructionSelector::VisitWord64Clz(Node* node) {
  PPCOperandGenerator g(this);
  Emit(kPPC_Cntlz64, g.DefineAsRegister(node), g.UseRegister(node->InputAt(0)));
}
977 #endif
978 
979 
// 32-bit population count.
void InstructionSelector::VisitWord32Popcnt(Node* node) {
  PPCOperandGenerator g(this);
  Emit(kPPC_Popcnt32, g.DefineAsRegister(node),
       g.UseRegister(node->InputAt(0)));
}
985 
986 
987 #if V8_TARGET_ARCH_PPC64
// 64-bit population count.
void InstructionSelector::VisitWord64Popcnt(Node* node) {
  PPCOperandGenerator g(this);
  Emit(kPPC_Popcnt64, g.DefineAsRegister(node),
       g.UseRegister(node->InputAt(0)));
}
993 #endif
994 
995 
VisitWord32Ctz(Node * node)996 void InstructionSelector::VisitWord32Ctz(Node* node) { UNREACHABLE(); }
997 
998 
999 #if V8_TARGET_ARCH_PPC64
VisitWord64Ctz(Node * node)1000 void InstructionSelector::VisitWord64Ctz(Node* node) { UNREACHABLE(); }
1001 #endif
1002 
1003 
VisitWord32ReverseBits(Node * node)1004 void InstructionSelector::VisitWord32ReverseBits(Node* node) { UNREACHABLE(); }
1005 
1006 
1007 #if V8_TARGET_ARCH_PPC64
VisitWord64ReverseBits(Node * node)1008 void InstructionSelector::VisitWord64ReverseBits(Node* node) { UNREACHABLE(); }
1009 #endif
1010 
// Byte reversal is not selected on PPC; reaching here is a bug.
void InstructionSelector::VisitWord64ReverseBytes(Node* node) { UNREACHABLE(); }

void InstructionSelector::VisitWord32ReverseBytes(Node* node) { UNREACHABLE(); }
1014 
// 32-bit add; allows a signed 16-bit immediate on either operand.
void InstructionSelector::VisitInt32Add(Node* node) {
  VisitBinop<Int32BinopMatcher>(this, node, kPPC_Add, kInt16Imm);
}
1018 
1019 
1020 #if V8_TARGET_ARCH_PPC64
// 64-bit add; allows a signed 16-bit immediate on either operand.
void InstructionSelector::VisitInt64Add(Node* node) {
  VisitBinop<Int64BinopMatcher>(this, node, kPPC_Add, kInt16Imm);
}
1024 #endif
1025 
VisitInt32Sub(Node * node)1026 void InstructionSelector::VisitInt32Sub(Node* node) {
1027   PPCOperandGenerator g(this);
1028   Int32BinopMatcher m(node);
1029   if (m.left().Is(0)) {
1030     Emit(kPPC_Neg, g.DefineAsRegister(node), g.UseRegister(m.right().node()));
1031   } else {
1032     VisitBinop<Int32BinopMatcher>(this, node, kPPC_Sub, kInt16Imm_Negate);
1033   }
1034 }
1035 
1036 
1037 #if V8_TARGET_ARCH_PPC64
VisitInt64Sub(Node * node)1038 void InstructionSelector::VisitInt64Sub(Node* node) {
1039   PPCOperandGenerator g(this);
1040   Int64BinopMatcher m(node);
1041   if (m.left().Is(0)) {
1042     Emit(kPPC_Neg, g.DefineAsRegister(node), g.UseRegister(m.right().node()));
1043   } else {
1044     VisitBinop<Int64BinopMatcher>(this, node, kPPC_Sub, kInt16Imm_Negate);
1045   }
1046 }
1047 #endif
1048 
namespace {

// Forward declaration; the definition lives with the other compare helpers
// further down in this file.
void VisitCompare(InstructionSelector* selector, InstructionCode opcode,
                  InstructionOperand left, InstructionOperand right,
                  FlagsContinuation* cont);
// Emits a multiply producing both the low and high 32 bits, then detects
// signed overflow: the product overflowed iff the high word differs from the
// sign-extension of the low word (low >> 31). The comparison result is handed
// to |cont|.
void EmitInt32MulWithOverflow(InstructionSelector* selector, Node* node,
                              FlagsContinuation* cont) {
  PPCOperandGenerator g(selector);
  Int32BinopMatcher m(node);
  InstructionOperand result_operand = g.DefineAsRegister(node);
  InstructionOperand high32_operand = g.TempRegister();
  InstructionOperand temp_operand = g.TempRegister();
  {
    // Low 32 bits into the node's result, high 32 bits into a temp.
    InstructionOperand outputs[] = {result_operand, high32_operand};
    InstructionOperand inputs[] = {g.UseRegister(m.left().node()),
                                   g.UseRegister(m.right().node())};
    selector->Emit(kPPC_Mul32WithHigh32, 2, outputs, 2, inputs);
  }
  {
    // temp = low >> 31, i.e. the expected sign-extension of the low word.
    InstructionOperand shift_31 = g.UseImmediate(31);
    InstructionOperand outputs[] = {temp_operand};
    InstructionOperand inputs[] = {result_operand, shift_31};
    selector->Emit(kPPC_ShiftRightAlg32, 1, outputs, 2, inputs);
  }

  VisitCompare(selector, kPPC_Cmp32, high32_operand, temp_operand, cont);
}

}  // namespace
1078 
1079 
// 32-bit multiply (low word only).
void InstructionSelector::VisitInt32Mul(Node* node) {
  VisitRRR(this, kPPC_Mul32, node);
}
1083 
1084 
1085 #if V8_TARGET_ARCH_PPC64
// 64-bit multiply (low word only).
void InstructionSelector::VisitInt64Mul(Node* node) {
  VisitRRR(this, kPPC_Mul64, node);
}
1089 #endif
1090 
1091 
// Signed 32x32->64 multiply, returning only the high 32 bits.
void InstructionSelector::VisitInt32MulHigh(Node* node) {
  PPCOperandGenerator g(this);
  Emit(kPPC_MulHigh32, g.DefineAsRegister(node),
       g.UseRegister(node->InputAt(0)), g.UseRegister(node->InputAt(1)));
}


// Unsigned 32x32->64 multiply, returning only the high 32 bits.
void InstructionSelector::VisitUint32MulHigh(Node* node) {
  PPCOperandGenerator g(this);
  Emit(kPPC_MulHighU32, g.DefineAsRegister(node),
       g.UseRegister(node->InputAt(0)), g.UseRegister(node->InputAt(1)));
}
1104 
1105 
// Integer division and modulus: each lowers directly to the corresponding
// PPC opcode with register-register-register operands.
void InstructionSelector::VisitInt32Div(Node* node) {
  VisitRRR(this, kPPC_Div32, node);
}


#if V8_TARGET_ARCH_PPC64
void InstructionSelector::VisitInt64Div(Node* node) {
  VisitRRR(this, kPPC_Div64, node);
}
#endif


void InstructionSelector::VisitUint32Div(Node* node) {
  VisitRRR(this, kPPC_DivU32, node);
}


#if V8_TARGET_ARCH_PPC64
void InstructionSelector::VisitUint64Div(Node* node) {
  VisitRRR(this, kPPC_DivU64, node);
}
#endif


void InstructionSelector::VisitInt32Mod(Node* node) {
  VisitRRR(this, kPPC_Mod32, node);
}


#if V8_TARGET_ARCH_PPC64
void InstructionSelector::VisitInt64Mod(Node* node) {
  VisitRRR(this, kPPC_Mod64, node);
}
#endif


void InstructionSelector::VisitUint32Mod(Node* node) {
  VisitRRR(this, kPPC_ModU32, node);
}


#if V8_TARGET_ARCH_PPC64
void InstructionSelector::VisitUint64Mod(Node* node) {
  VisitRRR(this, kPPC_ModU64, node);
}
#endif
1152 
1153 
// Numeric conversions: each lowers to a single PPC conversion opcode via the
// shared register-register helper.
void InstructionSelector::VisitChangeFloat32ToFloat64(Node* node) {
  VisitRR(this, kPPC_Float32ToDouble, node);
}


void InstructionSelector::VisitRoundInt32ToFloat32(Node* node) {
  VisitRR(this, kPPC_Int32ToFloat32, node);
}


void InstructionSelector::VisitRoundUint32ToFloat32(Node* node) {
  VisitRR(this, kPPC_Uint32ToFloat32, node);
}


void InstructionSelector::VisitChangeInt32ToFloat64(Node* node) {
  VisitRR(this, kPPC_Int32ToDouble, node);
}


void InstructionSelector::VisitChangeUint32ToFloat64(Node* node) {
  VisitRR(this, kPPC_Uint32ToDouble, node);
}


void InstructionSelector::VisitChangeFloat64ToInt32(Node* node) {
  VisitRR(this, kPPC_DoubleToInt32, node);
}


void InstructionSelector::VisitChangeFloat64ToUint32(Node* node) {
  VisitRR(this, kPPC_DoubleToUint32, node);
}

// Same lowering as ChangeFloat64ToUint32 on this architecture.
void InstructionSelector::VisitTruncateFloat64ToUint32(Node* node) {
  VisitRR(this, kPPC_DoubleToUint32, node);
}
1191 
1192 #if V8_TARGET_ARCH_PPC64
// Checked float->int64 truncations; the "try" variants also produce a success
// projection (handled by VisitTryTruncateDouble).
void InstructionSelector::VisitTryTruncateFloat32ToInt64(Node* node) {
  VisitTryTruncateDouble(this, kPPC_DoubleToInt64, node);
}


void InstructionSelector::VisitTryTruncateFloat64ToInt64(Node* node) {
  VisitTryTruncateDouble(this, kPPC_DoubleToInt64, node);
}


void InstructionSelector::VisitTryTruncateFloat32ToUint64(Node* node) {
  VisitTryTruncateDouble(this, kPPC_DoubleToUint64, node);
}


void InstructionSelector::VisitTryTruncateFloat64ToUint64(Node* node) {
  VisitTryTruncateDouble(this, kPPC_DoubleToUint64, node);
}


// Widening 32->64-bit integer conversions.
void InstructionSelector::VisitChangeInt32ToInt64(Node* node) {
  // TODO(mbrandy): inspect input to see if nop is appropriate.
  VisitRR(this, kPPC_ExtendSignWord32, node);
}


void InstructionSelector::VisitChangeUint32ToUint64(Node* node) {
  // TODO(mbrandy): inspect input to see if nop is appropriate.
  VisitRR(this, kPPC_Uint32ToUint64, node);
}
1223 #endif
1224 
1225 
// Narrowing/truncating conversions.
void InstructionSelector::VisitTruncateFloat64ToFloat32(Node* node) {
  VisitRR(this, kPPC_DoubleToFloat32, node);
}

// Uses the architecture-independent JS-style truncation opcode.
void InstructionSelector::VisitTruncateFloat64ToWord32(Node* node) {
  VisitRR(this, kArchTruncateDoubleToI, node);
}

void InstructionSelector::VisitRoundFloat64ToInt32(Node* node) {
  VisitRR(this, kPPC_DoubleToInt32, node);
}


void InstructionSelector::VisitTruncateFloat32ToInt32(Node* node) {
  VisitRR(this, kPPC_DoubleToInt32, node);
}


void InstructionSelector::VisitTruncateFloat32ToUint32(Node* node) {
  VisitRR(this, kPPC_DoubleToUint32, node);
}
1247 
1248 
1249 #if V8_TARGET_ARCH_PPC64
// 64-bit-only conversions between int64 and floating point.
void InstructionSelector::VisitTruncateInt64ToInt32(Node* node) {
  // TODO(mbrandy): inspect input to see if nop is appropriate.
  VisitRR(this, kPPC_Int64ToInt32, node);
}


void InstructionSelector::VisitRoundInt64ToFloat32(Node* node) {
  VisitRR(this, kPPC_Int64ToFloat32, node);
}


void InstructionSelector::VisitRoundInt64ToFloat64(Node* node) {
  VisitRR(this, kPPC_Int64ToDouble, node);
}


void InstructionSelector::VisitRoundUint64ToFloat32(Node* node) {
  VisitRR(this, kPPC_Uint64ToFloat32, node);
}


void InstructionSelector::VisitRoundUint64ToFloat64(Node* node) {
  VisitRR(this, kPPC_Uint64ToDouble, node);
}
1274 #endif
1275 
1276 
// Reinterpreting bitcasts between integer and floating-point registers.
void InstructionSelector::VisitBitcastFloat32ToInt32(Node* node) {
  VisitRR(this, kPPC_BitcastFloat32ToInt32, node);
}


#if V8_TARGET_ARCH_PPC64
void InstructionSelector::VisitBitcastFloat64ToInt64(Node* node) {
  VisitRR(this, kPPC_BitcastDoubleToInt64, node);
}
#endif


void InstructionSelector::VisitBitcastInt32ToFloat32(Node* node) {
  VisitRR(this, kPPC_BitcastInt32ToFloat32, node);
}


#if V8_TARGET_ARCH_PPC64
void InstructionSelector::VisitBitcastInt64ToFloat64(Node* node) {
  VisitRR(this, kPPC_BitcastInt64ToDouble, node);
}
#endif
1299 
1300 
// Floating-point arithmetic. The float32 visitors reuse the double opcodes
// with MiscField::encode(1), which (per the pattern here) selects the
// single-precision variant in the code generator.
void InstructionSelector::VisitFloat32Add(Node* node) {
  VisitRRR(this, kPPC_AddDouble | MiscField::encode(1), node);
}


void InstructionSelector::VisitFloat64Add(Node* node) {
  // TODO(mbrandy): detect multiply-add
  VisitRRR(this, kPPC_AddDouble, node);
}


void InstructionSelector::VisitFloat32Sub(Node* node) {
  VisitRRR(this, kPPC_SubDouble | MiscField::encode(1), node);
}

void InstructionSelector::VisitFloat64Sub(Node* node) {
  // TODO(mbrandy): detect multiply-subtract
  VisitRRR(this, kPPC_SubDouble, node);
}

void InstructionSelector::VisitFloat32Mul(Node* node) {
  VisitRRR(this, kPPC_MulDouble | MiscField::encode(1), node);
}


void InstructionSelector::VisitFloat64Mul(Node* node) {
  // TODO(mbrandy): detect negate
  VisitRRR(this, kPPC_MulDouble, node);
}


void InstructionSelector::VisitFloat32Div(Node* node) {
  VisitRRR(this, kPPC_DivDouble | MiscField::encode(1), node);
}


void InstructionSelector::VisitFloat64Div(Node* node) {
  VisitRRR(this, kPPC_DivDouble, node);
}
1340 
1341 
// Float64 modulus has no PPC instruction; it is lowered to a call
// (presumably the C fmod — confirm in the code generator), so the operands
// are pinned to the floating-point argument registers d1/d2.
void InstructionSelector::VisitFloat64Mod(Node* node) {
  PPCOperandGenerator g(this);
  Emit(kPPC_ModDouble, g.DefineAsFixed(node, d1),
       g.UseFixed(node->InputAt(0), d1),
       g.UseFixed(node->InputAt(1), d2))->MarkAsCall();
}
1348 
// Min/max, NaN-silencing, absolute value and square root. As above,
// MiscField::encode(1) marks the float32 variant of a double opcode.
void InstructionSelector::VisitFloat32Max(Node* node) {
  VisitRRR(this, kPPC_MaxDouble | MiscField::encode(1), node);
}

void InstructionSelector::VisitFloat64Max(Node* node) {
  VisitRRR(this, kPPC_MaxDouble, node);
}


void InstructionSelector::VisitFloat64SilenceNaN(Node* node) {
  VisitRR(this, kPPC_Float64SilenceNaN, node);
}

void InstructionSelector::VisitFloat32Min(Node* node) {
  VisitRRR(this, kPPC_MinDouble | MiscField::encode(1), node);
}

void InstructionSelector::VisitFloat64Min(Node* node) {
  VisitRRR(this, kPPC_MinDouble, node);
}


void InstructionSelector::VisitFloat32Abs(Node* node) {
  VisitRR(this, kPPC_AbsDouble | MiscField::encode(1), node);
}


void InstructionSelector::VisitFloat64Abs(Node* node) {
  VisitRR(this, kPPC_AbsDouble, node);
}

void InstructionSelector::VisitFloat32Sqrt(Node* node) {
  VisitRR(this, kPPC_SqrtDouble | MiscField::encode(1), node);
}
1383 
// IEEE-754 math operations (sin, cos, pow, ...) are lowered to calls, with
// arguments and result pinned to the FP argument/return registers d1/d2.
void InstructionSelector::VisitFloat64Ieee754Unop(Node* node,
                                                  InstructionCode opcode) {
  PPCOperandGenerator g(this);
  Emit(opcode, g.DefineAsFixed(node, d1), g.UseFixed(node->InputAt(0), d1))
       ->MarkAsCall();
}

void InstructionSelector::VisitFloat64Ieee754Binop(Node* node,
                                                  InstructionCode opcode) {
  PPCOperandGenerator g(this);
  Emit(opcode, g.DefineAsFixed(node, d1),
       g.UseFixed(node->InputAt(0), d1),
       g.UseFixed(node->InputAt(1), d2))->MarkAsCall();
}
1398 
// Square root, rounding and negation. MiscField::encode(1) again marks the
// float32 variant; round-to-nearest-ties-even has no lowering on PPC.
void InstructionSelector::VisitFloat64Sqrt(Node* node) {
  VisitRR(this, kPPC_SqrtDouble, node);
}


void InstructionSelector::VisitFloat32RoundDown(Node* node) {
  VisitRR(this, kPPC_FloorDouble | MiscField::encode(1), node);
}


void InstructionSelector::VisitFloat64RoundDown(Node* node) {
  VisitRR(this, kPPC_FloorDouble, node);
}


void InstructionSelector::VisitFloat32RoundUp(Node* node) {
  VisitRR(this, kPPC_CeilDouble | MiscField::encode(1), node);
}


void InstructionSelector::VisitFloat64RoundUp(Node* node) {
  VisitRR(this, kPPC_CeilDouble, node);
}


void InstructionSelector::VisitFloat32RoundTruncate(Node* node) {
  VisitRR(this, kPPC_TruncateDouble | MiscField::encode(1), node);
}


void InstructionSelector::VisitFloat64RoundTruncate(Node* node) {
  VisitRR(this, kPPC_TruncateDouble, node);
}


void InstructionSelector::VisitFloat64RoundTiesAway(Node* node) {
  VisitRR(this, kPPC_RoundDouble, node);
}


// Ties-even rounding is not selected on PPC; reaching here is a bug.
void InstructionSelector::VisitFloat32RoundTiesEven(Node* node) {
  UNREACHABLE();
}


void InstructionSelector::VisitFloat64RoundTiesEven(Node* node) {
  UNREACHABLE();
}

void InstructionSelector::VisitFloat32Neg(Node* node) {
  VisitRR(this, kPPC_NegDouble, node);
}

void InstructionSelector::VisitFloat64Neg(Node* node) {
  VisitRR(this, kPPC_NegDouble, node);
}
1455 
// 32-bit add with overflow check. If the overflow projection is used, the
// continuation sets it from the kOverflow condition; otherwise a default
// (no-op) continuation is passed so only the sum is produced.
void InstructionSelector::VisitInt32AddWithOverflow(Node* node) {
  if (Node* ovf = NodeProperties::FindProjection(node, 1)) {
    FlagsContinuation cont = FlagsContinuation::ForSet(kOverflow, ovf);
    return VisitBinop<Int32BinopMatcher>(this, node, kPPC_AddWithOverflow32,
                                         kInt16Imm, &cont);
  }
  FlagsContinuation cont;
  VisitBinop<Int32BinopMatcher>(this, node, kPPC_AddWithOverflow32, kInt16Imm,
                                &cont);
}
1466 
1467 
// 32-bit subtract with overflow check; mirrors VisitInt32AddWithOverflow.
void InstructionSelector::VisitInt32SubWithOverflow(Node* node) {
  if (Node* ovf = NodeProperties::FindProjection(node, 1)) {
    FlagsContinuation cont = FlagsContinuation::ForSet(kOverflow, ovf);
    return VisitBinop<Int32BinopMatcher>(this, node, kPPC_SubWithOverflow32,
                                         kInt16Imm_Negate, &cont);
  }
  FlagsContinuation cont;
  VisitBinop<Int32BinopMatcher>(this, node, kPPC_SubWithOverflow32,
                                kInt16Imm_Negate, &cont);
}
1478 
1479 
1480 #if V8_TARGET_ARCH_PPC64
// 64-bit add with overflow check; uses the plain add opcode, with the
// continuation reading the overflow condition.
void InstructionSelector::VisitInt64AddWithOverflow(Node* node) {
  if (Node* ovf = NodeProperties::FindProjection(node, 1)) {
    FlagsContinuation cont = FlagsContinuation::ForSet(kOverflow, ovf);
    return VisitBinop<Int64BinopMatcher>(this, node, kPPC_Add, kInt16Imm,
                                         &cont);
  }
  FlagsContinuation cont;
  VisitBinop<Int64BinopMatcher>(this, node, kPPC_Add, kInt16Imm, &cont);
}
1490 
1491 
// 64-bit subtract with overflow check; mirrors VisitInt64AddWithOverflow.
void InstructionSelector::VisitInt64SubWithOverflow(Node* node) {
  if (Node* ovf = NodeProperties::FindProjection(node, 1)) {
    FlagsContinuation cont = FlagsContinuation::ForSet(kOverflow, ovf);
    return VisitBinop<Int64BinopMatcher>(this, node, kPPC_Sub, kInt16Imm_Negate,
                                         &cont);
  }
  FlagsContinuation cont;
  VisitBinop<Int64BinopMatcher>(this, node, kPPC_Sub, kInt16Imm_Negate, &cont);
}
1501 #endif
1502 
1503 
CompareLogical(FlagsContinuation * cont)1504 static bool CompareLogical(FlagsContinuation* cont) {
1505   switch (cont->condition()) {
1506     case kUnsignedLessThan:
1507     case kUnsignedGreaterThanOrEqual:
1508     case kUnsignedLessThanOrEqual:
1509     case kUnsignedGreaterThan:
1510       return true;
1511     default:
1512       return false;
1513   }
1514   UNREACHABLE();
1515   return false;
1516 }
1517 
1518 
1519 namespace {
1520 
1521 // Shared routine for multiple compare operations.
// Shared routine for multiple compare operations.
// Emits the compare and consumes the flags according to the continuation:
// a two-way branch, a deoptimization, or materializing a boolean result.
void VisitCompare(InstructionSelector* selector, InstructionCode opcode,
                  InstructionOperand left, InstructionOperand right,
                  FlagsContinuation* cont) {
  PPCOperandGenerator g(selector);
  // Fold the continuation's condition into the instruction code.
  opcode = cont->Encode(opcode);
  if (cont->IsBranch()) {
    selector->Emit(opcode, g.NoOutput(), left, right,
                   g.Label(cont->true_block()), g.Label(cont->false_block()));
  } else if (cont->IsDeoptimize()) {
    selector->EmitDeoptimize(opcode, g.NoOutput(), left, right, cont->reason(),
                             cont->frame_state());
  } else {
    DCHECK(cont->IsSet());
    selector->Emit(opcode, g.DefineAsRegister(cont->result()), left, right);
  }
}
1538 
1539 
1540 // Shared routine for multiple word compare operations.
// Shared routine for multiple word compare operations.
// Tries to place a constant operand in the instruction's immediate field;
// if the constant is on the left, the operands are swapped and the condition
// commuted (unless the comparison is inherently commutative).
void VisitWordCompare(InstructionSelector* selector, Node* node,
                      InstructionCode opcode, FlagsContinuation* cont,
                      bool commutative, ImmediateMode immediate_mode) {
  PPCOperandGenerator g(selector);
  Node* left = node->InputAt(0);
  Node* right = node->InputAt(1);

  // Match immediates on left or right side of comparison.
  if (g.CanBeImmediate(right, immediate_mode)) {
    VisitCompare(selector, opcode, g.UseRegister(left), g.UseImmediate(right),
                 cont);
  } else if (g.CanBeImmediate(left, immediate_mode)) {
    if (!commutative) cont->Commute();
    VisitCompare(selector, opcode, g.UseRegister(right), g.UseImmediate(left),
                 cont);
  } else {
    VisitCompare(selector, opcode, g.UseRegister(left), g.UseRegister(right),
                 cont);
  }
}
1561 
1562 
VisitWord32Compare(InstructionSelector * selector,Node * node,FlagsContinuation * cont)1563 void VisitWord32Compare(InstructionSelector* selector, Node* node,
1564                         FlagsContinuation* cont) {
1565   ImmediateMode mode = (CompareLogical(cont) ? kInt16Imm_Unsigned : kInt16Imm);
1566   VisitWordCompare(selector, node, kPPC_Cmp32, cont, false, mode);
1567 }
1568 
1569 
1570 #if V8_TARGET_ARCH_PPC64
VisitWord64Compare(InstructionSelector * selector,Node * node,FlagsContinuation * cont)1571 void VisitWord64Compare(InstructionSelector* selector, Node* node,
1572                         FlagsContinuation* cont) {
1573   ImmediateMode mode = (CompareLogical(cont) ? kInt16Imm_Unsigned : kInt16Imm);
1574   VisitWordCompare(selector, node, kPPC_Cmp64, cont, false, mode);
1575 }
1576 #endif
1577 
1578 
1579 // Shared routine for multiple float32 compare operations.
VisitFloat32Compare(InstructionSelector * selector,Node * node,FlagsContinuation * cont)1580 void VisitFloat32Compare(InstructionSelector* selector, Node* node,
1581                          FlagsContinuation* cont) {
1582   PPCOperandGenerator g(selector);
1583   Node* left = node->InputAt(0);
1584   Node* right = node->InputAt(1);
1585   VisitCompare(selector, kPPC_CmpDouble, g.UseRegister(left),
1586                g.UseRegister(right), cont);
1587 }
1588 
1589 
1590 // Shared routine for multiple float64 compare operations.
VisitFloat64Compare(InstructionSelector * selector,Node * node,FlagsContinuation * cont)1591 void VisitFloat64Compare(InstructionSelector* selector, Node* node,
1592                          FlagsContinuation* cont) {
1593   PPCOperandGenerator g(selector);
1594   Node* left = node->InputAt(0);
1595   Node* right = node->InputAt(1);
1596   VisitCompare(selector, kPPC_CmpDouble, g.UseRegister(left),
1597                g.UseRegister(right), cont);
1598 }
1599 
1600 
// Shared routine for word comparisons against zero.
//
// |user| is the node consuming the test (branch/deopt/set), |value| is the
// word being compared against zero, and |opcode| is the compare instruction
// to emit if the test cannot be fused into the producer of |value|.
void VisitWordCompareZero(InstructionSelector* selector, Node* user,
                          Node* value, InstructionCode opcode,
                          FlagsContinuation* cont) {
  // Try to combine with comparisons against 0 by simply inverting the branch.
  // Each stripped (x == 0) layer negates the continuation, so the final
  // condition tested is unchanged overall.
  while (value->opcode() == IrOpcode::kWord32Equal &&
         selector->CanCover(user, value)) {
    Int32BinopMatcher m(value);
    if (!m.right().Is(0)) break;

    user = value;
    value = m.left().node();
    cont->Negate();
  }

  // If |value| can be covered by this use, fuse the test into the producing
  // comparison/operation and let the continuation select the condition code.
  if (selector->CanCover(user, value)) {
    switch (value->opcode()) {
      case IrOpcode::kWord32Equal:
        cont->OverwriteAndNegateIfEqual(kEqual);
        return VisitWord32Compare(selector, value, cont);
      case IrOpcode::kInt32LessThan:
        cont->OverwriteAndNegateIfEqual(kSignedLessThan);
        return VisitWord32Compare(selector, value, cont);
      case IrOpcode::kInt32LessThanOrEqual:
        cont->OverwriteAndNegateIfEqual(kSignedLessThanOrEqual);
        return VisitWord32Compare(selector, value, cont);
      case IrOpcode::kUint32LessThan:
        cont->OverwriteAndNegateIfEqual(kUnsignedLessThan);
        return VisitWord32Compare(selector, value, cont);
      case IrOpcode::kUint32LessThanOrEqual:
        cont->OverwriteAndNegateIfEqual(kUnsignedLessThanOrEqual);
        return VisitWord32Compare(selector, value, cont);
#if V8_TARGET_ARCH_PPC64
      case IrOpcode::kWord64Equal:
        cont->OverwriteAndNegateIfEqual(kEqual);
        return VisitWord64Compare(selector, value, cont);
      case IrOpcode::kInt64LessThan:
        cont->OverwriteAndNegateIfEqual(kSignedLessThan);
        return VisitWord64Compare(selector, value, cont);
      case IrOpcode::kInt64LessThanOrEqual:
        cont->OverwriteAndNegateIfEqual(kSignedLessThanOrEqual);
        return VisitWord64Compare(selector, value, cont);
      case IrOpcode::kUint64LessThan:
        cont->OverwriteAndNegateIfEqual(kUnsignedLessThan);
        return VisitWord64Compare(selector, value, cont);
      case IrOpcode::kUint64LessThanOrEqual:
        cont->OverwriteAndNegateIfEqual(kUnsignedLessThanOrEqual);
        return VisitWord64Compare(selector, value, cont);
#endif
      case IrOpcode::kFloat32Equal:
        cont->OverwriteAndNegateIfEqual(kEqual);
        return VisitFloat32Compare(selector, value, cont);
      case IrOpcode::kFloat32LessThan:
        // Float less-than maps to the unsigned condition codes.
        cont->OverwriteAndNegateIfEqual(kUnsignedLessThan);
        return VisitFloat32Compare(selector, value, cont);
      case IrOpcode::kFloat32LessThanOrEqual:
        cont->OverwriteAndNegateIfEqual(kUnsignedLessThanOrEqual);
        return VisitFloat32Compare(selector, value, cont);
      case IrOpcode::kFloat64Equal:
        cont->OverwriteAndNegateIfEqual(kEqual);
        return VisitFloat64Compare(selector, value, cont);
      case IrOpcode::kFloat64LessThan:
        cont->OverwriteAndNegateIfEqual(kUnsignedLessThan);
        return VisitFloat64Compare(selector, value, cont);
      case IrOpcode::kFloat64LessThanOrEqual:
        cont->OverwriteAndNegateIfEqual(kUnsignedLessThanOrEqual);
        return VisitFloat64Compare(selector, value, cont);
      case IrOpcode::kProjection:
        // Check if this is the overflow output projection of an
        // <Operation>WithOverflow node.
        if (ProjectionIndexOf(value->op()) == 1u) {
          // We cannot combine the <Operation>WithOverflow with this branch
          // unless the 0th projection (the use of the actual value of the
          // <Operation> is either nullptr, which means there's no use of the
          // actual value, or was already defined, which means it is scheduled
          // *AFTER* this branch).
          Node* const node = value->InputAt(0);
          Node* const result = NodeProperties::FindProjection(node, 0);
          if (result == nullptr || selector->IsDefined(result)) {
            switch (node->opcode()) {
              case IrOpcode::kInt32AddWithOverflow:
                cont->OverwriteAndNegateIfEqual(kOverflow);
                return VisitBinop<Int32BinopMatcher>(
                    selector, node, kPPC_AddWithOverflow32, kInt16Imm, cont);
              case IrOpcode::kInt32SubWithOverflow:
                cont->OverwriteAndNegateIfEqual(kOverflow);
                return VisitBinop<Int32BinopMatcher>(selector, node,
                                                     kPPC_SubWithOverflow32,
                                                     kInt16Imm_Negate, cont);
              case IrOpcode::kInt32MulWithOverflow:
                // Mul overflow is signaled via a non-equal flag, not kOverflow.
                cont->OverwriteAndNegateIfEqual(kNotEqual);
                return EmitInt32MulWithOverflow(selector, node, cont);
#if V8_TARGET_ARCH_PPC64
              case IrOpcode::kInt64AddWithOverflow:
                cont->OverwriteAndNegateIfEqual(kOverflow);
                return VisitBinop<Int64BinopMatcher>(selector, node, kPPC_Add,
                                                     kInt16Imm, cont);
              case IrOpcode::kInt64SubWithOverflow:
                cont->OverwriteAndNegateIfEqual(kOverflow);
                return VisitBinop<Int64BinopMatcher>(selector, node, kPPC_Sub,
                                                     kInt16Imm_Negate, cont);
#endif
              default:
                break;
            }
          }
        }
        break;
      case IrOpcode::kInt32Sub:
        return VisitWord32Compare(selector, value, cont);
      case IrOpcode::kWord32And:
        // TODO(mbandy): opportunity for rlwinm?
        return VisitWordCompare(selector, value, kPPC_Tst32, cont, true,
                                kInt16Imm_Unsigned);
// TODO(mbrandy): Handle?
// case IrOpcode::kInt32Add:
// case IrOpcode::kWord32Or:
// case IrOpcode::kWord32Xor:
// case IrOpcode::kWord32Sar:
// case IrOpcode::kWord32Shl:
// case IrOpcode::kWord32Shr:
// case IrOpcode::kWord32Ror:
#if V8_TARGET_ARCH_PPC64
      case IrOpcode::kInt64Sub:
        return VisitWord64Compare(selector, value, cont);
      case IrOpcode::kWord64And:
        // TODO(mbandy): opportunity for rldic?
        return VisitWordCompare(selector, value, kPPC_Tst64, cont, true,
                                kInt16Imm_Unsigned);
// TODO(mbrandy): Handle?
// case IrOpcode::kInt64Add:
// case IrOpcode::kWord64Or:
// case IrOpcode::kWord64Xor:
// case IrOpcode::kWord64Sar:
// case IrOpcode::kWord64Shl:
// case IrOpcode::kWord64Shr:
// case IrOpcode::kWord64Ror:
#endif
      default:
        break;
    }
  }

  // Branch could not be combined with a compare, emit compare against 0.
  PPCOperandGenerator g(selector);
  VisitCompare(selector, opcode, g.UseRegister(value), g.TempImmediate(0),
               cont);
}
1749 
1750 
// 32-bit specialization: tests |value| against zero with kPPC_Cmp32.
void VisitWord32CompareZero(InstructionSelector* selector, Node* user,
                            Node* value, FlagsContinuation* cont) {
  VisitWordCompareZero(selector, user, value, kPPC_Cmp32, cont);
}
1755 
1756 
#if V8_TARGET_ARCH_PPC64
// 64-bit specialization: tests |value| against zero with kPPC_Cmp64.
void VisitWord64CompareZero(InstructionSelector* selector, Node* user,
                            Node* value, FlagsContinuation* cont) {
  VisitWordCompareZero(selector, user, value, kPPC_Cmp64, cont);
}
#endif
1763 
1764 }  // namespace
1765 
1766 
VisitBranch(Node * branch,BasicBlock * tbranch,BasicBlock * fbranch)1767 void InstructionSelector::VisitBranch(Node* branch, BasicBlock* tbranch,
1768                                       BasicBlock* fbranch) {
1769   FlagsContinuation cont(kNotEqual, tbranch, fbranch);
1770   VisitWord32CompareZero(this, branch, branch->InputAt(0), &cont);
1771 }
1772 
VisitDeoptimizeIf(Node * node)1773 void InstructionSelector::VisitDeoptimizeIf(Node* node) {
1774   FlagsContinuation cont = FlagsContinuation::ForDeoptimize(
1775       kNotEqual, DeoptimizeReasonOf(node->op()), node->InputAt(1));
1776   VisitWord32CompareZero(this, node, node->InputAt(0), &cont);
1777 }
1778 
VisitDeoptimizeUnless(Node * node)1779 void InstructionSelector::VisitDeoptimizeUnless(Node* node) {
1780   FlagsContinuation cont = FlagsContinuation::ForDeoptimize(
1781       kEqual, DeoptimizeReasonOf(node->op()), node->InputAt(1));
1782   VisitWord32CompareZero(this, node, node->InputAt(0), &cont);
1783 }
1784 
VisitSwitch(Node * node,const SwitchInfo & sw)1785 void InstructionSelector::VisitSwitch(Node* node, const SwitchInfo& sw) {
1786   PPCOperandGenerator g(this);
1787   InstructionOperand value_operand = g.UseRegister(node->InputAt(0));
1788 
1789   // Emit either ArchTableSwitch or ArchLookupSwitch.
1790   size_t table_space_cost = 4 + sw.value_range;
1791   size_t table_time_cost = 3;
1792   size_t lookup_space_cost = 3 + 2 * sw.case_count;
1793   size_t lookup_time_cost = sw.case_count;
1794   if (sw.case_count > 0 &&
1795       table_space_cost + 3 * table_time_cost <=
1796           lookup_space_cost + 3 * lookup_time_cost &&
1797       sw.min_value > std::numeric_limits<int32_t>::min()) {
1798     InstructionOperand index_operand = value_operand;
1799     if (sw.min_value) {
1800       index_operand = g.TempRegister();
1801       Emit(kPPC_Sub, index_operand, value_operand,
1802            g.TempImmediate(sw.min_value));
1803     }
1804     // Generate a table lookup.
1805     return EmitTableSwitch(sw, index_operand);
1806   }
1807 
1808   // Generate a sequence of conditional jumps.
1809   return EmitLookupSwitch(sw, value_operand);
1810 }
1811 
1812 
VisitWord32Equal(Node * const node)1813 void InstructionSelector::VisitWord32Equal(Node* const node) {
1814   FlagsContinuation cont = FlagsContinuation::ForSet(kEqual, node);
1815   Int32BinopMatcher m(node);
1816   if (m.right().Is(0)) {
1817     return VisitWord32CompareZero(this, m.node(), m.left().node(), &cont);
1818   }
1819   VisitWord32Compare(this, node, &cont);
1820 }
1821 
1822 
VisitInt32LessThan(Node * node)1823 void InstructionSelector::VisitInt32LessThan(Node* node) {
1824   FlagsContinuation cont = FlagsContinuation::ForSet(kSignedLessThan, node);
1825   VisitWord32Compare(this, node, &cont);
1826 }
1827 
1828 
VisitInt32LessThanOrEqual(Node * node)1829 void InstructionSelector::VisitInt32LessThanOrEqual(Node* node) {
1830   FlagsContinuation cont =
1831       FlagsContinuation::ForSet(kSignedLessThanOrEqual, node);
1832   VisitWord32Compare(this, node, &cont);
1833 }
1834 
1835 
VisitUint32LessThan(Node * node)1836 void InstructionSelector::VisitUint32LessThan(Node* node) {
1837   FlagsContinuation cont = FlagsContinuation::ForSet(kUnsignedLessThan, node);
1838   VisitWord32Compare(this, node, &cont);
1839 }
1840 
1841 
VisitUint32LessThanOrEqual(Node * node)1842 void InstructionSelector::VisitUint32LessThanOrEqual(Node* node) {
1843   FlagsContinuation cont =
1844       FlagsContinuation::ForSet(kUnsignedLessThanOrEqual, node);
1845   VisitWord32Compare(this, node, &cont);
1846 }
1847 
1848 
1849 #if V8_TARGET_ARCH_PPC64
VisitWord64Equal(Node * const node)1850 void InstructionSelector::VisitWord64Equal(Node* const node) {
1851   FlagsContinuation cont = FlagsContinuation::ForSet(kEqual, node);
1852   Int64BinopMatcher m(node);
1853   if (m.right().Is(0)) {
1854     return VisitWord64CompareZero(this, m.node(), m.left().node(), &cont);
1855   }
1856   VisitWord64Compare(this, node, &cont);
1857 }
1858 
1859 
VisitInt64LessThan(Node * node)1860 void InstructionSelector::VisitInt64LessThan(Node* node) {
1861   FlagsContinuation cont = FlagsContinuation::ForSet(kSignedLessThan, node);
1862   VisitWord64Compare(this, node, &cont);
1863 }
1864 
1865 
VisitInt64LessThanOrEqual(Node * node)1866 void InstructionSelector::VisitInt64LessThanOrEqual(Node* node) {
1867   FlagsContinuation cont =
1868       FlagsContinuation::ForSet(kSignedLessThanOrEqual, node);
1869   VisitWord64Compare(this, node, &cont);
1870 }
1871 
1872 
VisitUint64LessThan(Node * node)1873 void InstructionSelector::VisitUint64LessThan(Node* node) {
1874   FlagsContinuation cont = FlagsContinuation::ForSet(kUnsignedLessThan, node);
1875   VisitWord64Compare(this, node, &cont);
1876 }
1877 
1878 
VisitUint64LessThanOrEqual(Node * node)1879 void InstructionSelector::VisitUint64LessThanOrEqual(Node* node) {
1880   FlagsContinuation cont =
1881       FlagsContinuation::ForSet(kUnsignedLessThanOrEqual, node);
1882   VisitWord64Compare(this, node, &cont);
1883 }
1884 #endif
1885 
VisitInt32MulWithOverflow(Node * node)1886 void InstructionSelector::VisitInt32MulWithOverflow(Node* node) {
1887   if (Node* ovf = NodeProperties::FindProjection(node, 1)) {
1888     FlagsContinuation cont = FlagsContinuation::ForSet(kNotEqual, ovf);
1889     return EmitInt32MulWithOverflow(this, node, &cont);
1890   }
1891   FlagsContinuation cont;
1892   EmitInt32MulWithOverflow(this, node, &cont);
1893 }
1894 
1895 
VisitFloat32Equal(Node * node)1896 void InstructionSelector::VisitFloat32Equal(Node* node) {
1897   FlagsContinuation cont = FlagsContinuation::ForSet(kEqual, node);
1898   VisitFloat32Compare(this, node, &cont);
1899 }
1900 
1901 
VisitFloat32LessThan(Node * node)1902 void InstructionSelector::VisitFloat32LessThan(Node* node) {
1903   FlagsContinuation cont = FlagsContinuation::ForSet(kUnsignedLessThan, node);
1904   VisitFloat32Compare(this, node, &cont);
1905 }
1906 
1907 
VisitFloat32LessThanOrEqual(Node * node)1908 void InstructionSelector::VisitFloat32LessThanOrEqual(Node* node) {
1909   FlagsContinuation cont =
1910       FlagsContinuation::ForSet(kUnsignedLessThanOrEqual, node);
1911   VisitFloat32Compare(this, node, &cont);
1912 }
1913 
1914 
VisitFloat64Equal(Node * node)1915 void InstructionSelector::VisitFloat64Equal(Node* node) {
1916   FlagsContinuation cont = FlagsContinuation::ForSet(kEqual, node);
1917   VisitFloat64Compare(this, node, &cont);
1918 }
1919 
1920 
VisitFloat64LessThan(Node * node)1921 void InstructionSelector::VisitFloat64LessThan(Node* node) {
1922   FlagsContinuation cont = FlagsContinuation::ForSet(kUnsignedLessThan, node);
1923   VisitFloat64Compare(this, node, &cont);
1924 }
1925 
1926 
VisitFloat64LessThanOrEqual(Node * node)1927 void InstructionSelector::VisitFloat64LessThanOrEqual(Node* node) {
1928   FlagsContinuation cont =
1929       FlagsContinuation::ForSet(kUnsignedLessThanOrEqual, node);
1930   VisitFloat64Compare(this, node, &cont);
1931 }
1932 
1933 
EmitPrepareArguments(ZoneVector<PushParameter> * arguments,const CallDescriptor * descriptor,Node * node)1934 void InstructionSelector::EmitPrepareArguments(
1935     ZoneVector<PushParameter>* arguments, const CallDescriptor* descriptor,
1936     Node* node) {
1937   PPCOperandGenerator g(this);
1938 
1939   // Prepare for C function call.
1940   if (descriptor->IsCFunctionCall()) {
1941     Emit(kArchPrepareCallCFunction |
1942              MiscField::encode(static_cast<int>(descriptor->ParameterCount())),
1943          0, nullptr, 0, nullptr);
1944 
1945     // Poke any stack arguments.
1946     int slot = kStackFrameExtraParamSlot;
1947     for (PushParameter input : (*arguments)) {
1948       Emit(kPPC_StoreToStackSlot, g.NoOutput(), g.UseRegister(input.node()),
1949            g.TempImmediate(slot));
1950       ++slot;
1951     }
1952   } else {
1953     // Push any stack arguments.
1954     int num_slots = static_cast<int>(descriptor->StackParameterCount());
1955     int slot = 0;
1956     for (PushParameter input : (*arguments)) {
1957       if (slot == 0) {
1958         DCHECK(input.node());
1959         Emit(kPPC_PushFrame, g.NoOutput(), g.UseRegister(input.node()),
1960              g.TempImmediate(num_slots));
1961       } else {
1962         // Skip any alignment holes in pushed nodes.
1963         if (input.node()) {
1964           Emit(kPPC_StoreToStackSlot, g.NoOutput(), g.UseRegister(input.node()),
1965                g.TempImmediate(slot));
1966         }
1967       }
1968       ++slot;
1969     }
1970   }
1971 }
1972 
1973 
// Tail-call target addresses are not encoded as immediates on PPC.
bool InstructionSelector::IsTailCallAddressImmediate() { return false; }
1975 
// Tail calls from a JSFunction reserve three temporary registers.
int InstructionSelector::GetTempsCountForTailCallFromJSFunction() { return 3; }
1977 
VisitFloat64ExtractLowWord32(Node * node)1978 void InstructionSelector::VisitFloat64ExtractLowWord32(Node* node) {
1979   PPCOperandGenerator g(this);
1980   Emit(kPPC_DoubleExtractLowWord32, g.DefineAsRegister(node),
1981        g.UseRegister(node->InputAt(0)));
1982 }
1983 
1984 
VisitFloat64ExtractHighWord32(Node * node)1985 void InstructionSelector::VisitFloat64ExtractHighWord32(Node* node) {
1986   PPCOperandGenerator g(this);
1987   Emit(kPPC_DoubleExtractHighWord32, g.DefineAsRegister(node),
1988        g.UseRegister(node->InputAt(0)));
1989 }
1990 
1991 
VisitFloat64InsertLowWord32(Node * node)1992 void InstructionSelector::VisitFloat64InsertLowWord32(Node* node) {
1993   PPCOperandGenerator g(this);
1994   Node* left = node->InputAt(0);
1995   Node* right = node->InputAt(1);
1996   if (left->opcode() == IrOpcode::kFloat64InsertHighWord32 &&
1997       CanCover(node, left)) {
1998     left = left->InputAt(1);
1999     Emit(kPPC_DoubleConstruct, g.DefineAsRegister(node), g.UseRegister(left),
2000          g.UseRegister(right));
2001     return;
2002   }
2003   Emit(kPPC_DoubleInsertLowWord32, g.DefineSameAsFirst(node),
2004        g.UseRegister(left), g.UseRegister(right));
2005 }
2006 
2007 
VisitFloat64InsertHighWord32(Node * node)2008 void InstructionSelector::VisitFloat64InsertHighWord32(Node* node) {
2009   PPCOperandGenerator g(this);
2010   Node* left = node->InputAt(0);
2011   Node* right = node->InputAt(1);
2012   if (left->opcode() == IrOpcode::kFloat64InsertLowWord32 &&
2013       CanCover(node, left)) {
2014     left = left->InputAt(1);
2015     Emit(kPPC_DoubleConstruct, g.DefineAsRegister(node), g.UseRegister(right),
2016          g.UseRegister(left));
2017     return;
2018   }
2019   Emit(kPPC_DoubleInsertHighWord32, g.DefineSameAsFirst(node),
2020        g.UseRegister(left), g.UseRegister(right));
2021 }
2022 
VisitAtomicLoad(Node * node)2023 void InstructionSelector::VisitAtomicLoad(Node* node) {
2024   LoadRepresentation load_rep = LoadRepresentationOf(node->op());
2025   PPCOperandGenerator g(this);
2026   Node* base = node->InputAt(0);
2027   Node* index = node->InputAt(1);
2028   ArchOpcode opcode = kArchNop;
2029   switch (load_rep.representation()) {
2030     case MachineRepresentation::kWord8:
2031       opcode = load_rep.IsSigned() ? kAtomicLoadInt8 : kAtomicLoadUint8;
2032       break;
2033     case MachineRepresentation::kWord16:
2034       opcode = load_rep.IsSigned() ? kAtomicLoadInt16 : kAtomicLoadUint16;
2035       break;
2036     case MachineRepresentation::kWord32:
2037       opcode = kAtomicLoadWord32;
2038       break;
2039     default:
2040       UNREACHABLE();
2041       return;
2042   }
2043   Emit(opcode | AddressingModeField::encode(kMode_MRR),
2044       g.DefineAsRegister(node), g.UseRegister(base), g.UseRegister(index));
2045 }
2046 
VisitAtomicStore(Node * node)2047 void InstructionSelector::VisitAtomicStore(Node* node) {
2048   MachineRepresentation rep = AtomicStoreRepresentationOf(node->op());
2049   PPCOperandGenerator g(this);
2050   Node* base = node->InputAt(0);
2051   Node* index = node->InputAt(1);
2052   Node* value = node->InputAt(2);
2053   ArchOpcode opcode = kArchNop;
2054   switch (rep) {
2055     case MachineRepresentation::kWord8:
2056       opcode = kAtomicStoreWord8;
2057       break;
2058     case MachineRepresentation::kWord16:
2059       opcode = kAtomicStoreWord16;
2060       break;
2061     case MachineRepresentation::kWord32:
2062       opcode = kAtomicStoreWord32;
2063       break;
2064     default:
2065       UNREACHABLE();
2066       return;
2067   }
2068 
2069   InstructionOperand inputs[4];
2070   size_t input_count = 0;
2071   inputs[input_count++] = g.UseUniqueRegister(base);
2072   inputs[input_count++] = g.UseUniqueRegister(index);
2073   inputs[input_count++] = g.UseUniqueRegister(value);
2074   Emit(opcode | AddressingModeField::encode(kMode_MRR),
2075       0, nullptr, input_count, inputs);
2076 }
2077 
2078 // static
2079 MachineOperatorBuilder::Flags
SupportedMachineOperatorFlags()2080 InstructionSelector::SupportedMachineOperatorFlags() {
2081   return MachineOperatorBuilder::kFloat32RoundDown |
2082          MachineOperatorBuilder::kFloat64RoundDown |
2083          MachineOperatorBuilder::kFloat32RoundUp |
2084          MachineOperatorBuilder::kFloat64RoundUp |
2085          MachineOperatorBuilder::kFloat32RoundTruncate |
2086          MachineOperatorBuilder::kFloat64RoundTruncate |
2087          MachineOperatorBuilder::kFloat64RoundTiesAway |
2088          MachineOperatorBuilder::kWord32Popcnt |
2089          MachineOperatorBuilder::kWord64Popcnt;
2090   // We omit kWord32ShiftIsSafe as s[rl]w use 0x3f as a mask rather than 0x1f.
2091 }
2092 
2093 // static
2094 MachineOperatorBuilder::AlignmentRequirements
AlignmentRequirements()2095 InstructionSelector::AlignmentRequirements() {
2096   return MachineOperatorBuilder::AlignmentRequirements::
2097       FullUnalignedAccessSupport();
2098 }
2099 
2100 }  // namespace compiler
2101 }  // namespace internal
2102 }  // namespace v8
2103