// Copyright 2014 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/base/adapters.h"
#include "src/compiler/instruction-selector-impl.h"
#include "src/compiler/node-matchers.h"
#include "src/compiler/node-properties.h"

namespace v8 {
namespace internal {
namespace compiler {

// Adds X87-specific methods for generating operands.
class X87OperandGenerator final : public OperandGenerator {
 public:
  explicit X87OperandGenerator(InstructionSelector* selector)
      : OperandGenerator(selector) {}

  InstructionOperand UseByteRegister(Node* node) {
    // TODO(titzer): encode byte register use constraints.
    return UseFixed(node, edx);
  }

  InstructionOperand DefineAsByteRegister(Node* node) {
    // TODO(titzer): encode byte register def constraints.
    return DefineAsRegister(node);
  }

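  // Returns true if the load |input| can be folded into |node| as a memory
  // operand of |opcode|: it must be covered by |node|, sit at the same effect
  // level, and its load representation must match the operand width of
  // |opcode|.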
  bool CanBeMemoryOperand(InstructionCode opcode, Node* node, Node* input,
                          int effect_level) {
    if (input->opcode() != IrOpcode::kLoad ||
        !selector()->CanCover(node, input)) {
      return false;
    }
    if (effect_level != selector()->GetEffectLevel(input)) {
      return false;
    }
    MachineRepresentation rep =
        LoadRepresentationOf(input->op()).representation();
    switch (opcode) {
      case kX87Cmp:
      case kX87Test:
        return rep == MachineRepresentation::kWord32 ||
               rep == MachineRepresentation::kTagged;
      case kX87Cmp16:
      case kX87Test16:
        return rep == MachineRepresentation::kWord16;
      case kX87Cmp8:
      case kX87Test8:
        return rep == MachineRepresentation::kWord8;
      default:
        break;
    }
    return false;
  }

  InstructionOperand CreateImmediate(int imm) {
    return sequence()->AddImmediate(Constant(imm));
  }

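  // Returns true if |node| is a constant that can be encoded as an immediate
  // operand.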
  bool CanBeImmediate(Node* node) {
    switch (node->opcode()) {
      case IrOpcode::kInt32Constant:
      case IrOpcode::kNumberConstant:
      case IrOpcode::kExternalConstant:
      case IrOpcode::kRelocatableInt32Constant:
      case IrOpcode::kRelocatableInt64Constant:
        return true;
      case IrOpcode::kHeapConstant: {
// TODO(bmeurer): We must not dereference handles concurrently. If we
// really have to do this here, then we need to find a way to put this
// information on the HeapConstant node already.
#if 0
        // Constants in new space cannot be used as immediates in V8 because
        // the GC does not scan code objects when collecting the new generation.
        Handle<HeapObject> value = OpParameter<Handle<HeapObject>>(node);
        Isolate* isolate = value->GetIsolate();
        return !isolate->heap()->InNewSpace(*value);
#endif
      }
      default:
        return false;
    }
  }

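  // Emits the operand inputs for a [base + index * scale + displacement]
  // memory access and returns the matching addressing mode. A constant base
  // is folded into the displacement.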
  AddressingMode GenerateMemoryOperandInputs(Node* index, int scale, Node* base,
                                             Node* displacement_node,
                                             InstructionOperand inputs[],
                                             size_t* input_count) {
    AddressingMode mode = kMode_MRI;
    int32_t displacement = (displacement_node == nullptr)
                               ? 0
                               : OpParameter<int32_t>(displacement_node);
    if (base != nullptr) {
      if (base->opcode() == IrOpcode::kInt32Constant) {
        displacement += OpParameter<int32_t>(base);
        base = nullptr;
      }
    }
    if (base != nullptr) {
      inputs[(*input_count)++] = UseRegister(base);
      if (index != nullptr) {
        DCHECK(scale >= 0 && scale <= 3);
        inputs[(*input_count)++] = UseRegister(index);
        if (displacement != 0) {
          inputs[(*input_count)++] = TempImmediate(displacement);
          static const AddressingMode kMRnI_modes[] = {kMode_MR1I, kMode_MR2I,
                                                       kMode_MR4I, kMode_MR8I};
          mode = kMRnI_modes[scale];
        } else {
          static const AddressingMode kMRn_modes[] = {kMode_MR1, kMode_MR2,
                                                      kMode_MR4, kMode_MR8};
          mode = kMRn_modes[scale];
        }
      } else {
        if (displacement == 0) {
          mode = kMode_MR;
        } else {
          inputs[(*input_count)++] = TempImmediate(displacement);
          mode = kMode_MRI;
        }
      }
    } else {
      DCHECK(scale >= 0 && scale <= 3);
      if (index != nullptr) {
        inputs[(*input_count)++] = UseRegister(index);
        if (displacement != 0) {
          inputs[(*input_count)++] = TempImmediate(displacement);
          static const AddressingMode kMnI_modes[] = {kMode_MRI, kMode_M2I,
                                                      kMode_M4I, kMode_M8I};
          mode = kMnI_modes[scale];
        } else {
          static const AddressingMode kMn_modes[] = {kMode_MR, kMode_M2,
                                                     kMode_M4, kMode_M8};
          mode = kMn_modes[scale];
        }
      } else {
        inputs[(*input_count)++] = TempImmediate(displacement);
        return kMode_MI;
      }
    }
    return mode;
  }

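  // Fills in the operand inputs for the memory access computed by |node|,
  // using the matched base/index/displacement when the displacement can be
  // encoded as an immediate, and falling back to a plain [base + index]
  // (MR1) form otherwise.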
  AddressingMode GetEffectiveAddressMemoryOperand(Node* node,
                                                  InstructionOperand inputs[],
                                                  size_t* input_count) {
    BaseWithIndexAndDisplacement32Matcher m(node, true);
    DCHECK(m.matches());
    if ((m.displacement() == nullptr || CanBeImmediate(m.displacement()))) {
      return GenerateMemoryOperandInputs(m.index(), m.scale(), m.base(),
                                         m.displacement(), inputs, input_count);
    } else {
      inputs[(*input_count)++] = UseRegister(node->InputAt(0));
      inputs[(*input_count)++] = UseRegister(node->InputAt(1));
      return kMode_MR1;
    }
  }

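  // An operand that is no longer live after this node can be clobbered and is
  // therefore a better choice for the left input of a two-address instruction.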
  bool CanBeBetterLeftOperand(Node* node) const {
    return !selector()->IsLive(node);
  }
};


void InstructionSelector::VisitLoad(Node* node) {
  LoadRepresentation load_rep = LoadRepresentationOf(node->op());

  ArchOpcode opcode = kArchNop;
  switch (load_rep.representation()) {
    case MachineRepresentation::kFloat32:
      opcode = kX87Movss;
      break;
    case MachineRepresentation::kFloat64:
      opcode = kX87Movsd;
      break;
    case MachineRepresentation::kBit:  // Fall through.
    case MachineRepresentation::kWord8:
      opcode = load_rep.IsSigned() ? kX87Movsxbl : kX87Movzxbl;
      break;
    case MachineRepresentation::kWord16:
      opcode = load_rep.IsSigned() ? kX87Movsxwl : kX87Movzxwl;
      break;
    case MachineRepresentation::kTagged:  // Fall through.
    case MachineRepresentation::kWord32:
      opcode = kX87Movl;
      break;
    case MachineRepresentation::kWord64:   // Fall through.
    case MachineRepresentation::kSimd128:  // Fall through.
    case MachineRepresentation::kNone:
      UNREACHABLE();
      return;
  }

  X87OperandGenerator g(this);
  InstructionOperand outputs[1];
  outputs[0] = g.DefineAsRegister(node);
  InstructionOperand inputs[3];
  size_t input_count = 0;
  AddressingMode mode =
      g.GetEffectiveAddressMemoryOperand(node, inputs, &input_count);
  InstructionCode code = opcode | AddressingModeField::encode(mode);
  Emit(code, 1, outputs, input_count, inputs);
}


void InstructionSelector::VisitStore(Node* node) {
  X87OperandGenerator g(this);
  Node* base = node->InputAt(0);
  Node* index = node->InputAt(1);
  Node* value = node->InputAt(2);

  StoreRepresentation store_rep = StoreRepresentationOf(node->op());
  WriteBarrierKind write_barrier_kind = store_rep.write_barrier_kind();
  MachineRepresentation rep = store_rep.representation();

  if (write_barrier_kind != kNoWriteBarrier) {
    DCHECK_EQ(MachineRepresentation::kTagged, rep);
    AddressingMode addressing_mode;
    InstructionOperand inputs[3];
    size_t input_count = 0;
    inputs[input_count++] = g.UseUniqueRegister(base);
    if (g.CanBeImmediate(index)) {
      inputs[input_count++] = g.UseImmediate(index);
      addressing_mode = kMode_MRI;
    } else {
      inputs[input_count++] = g.UseUniqueRegister(index);
      addressing_mode = kMode_MR1;
    }
    inputs[input_count++] = g.UseUniqueRegister(value);
    RecordWriteMode record_write_mode = RecordWriteMode::kValueIsAny;
    switch (write_barrier_kind) {
      case kNoWriteBarrier:
        UNREACHABLE();
        break;
      case kMapWriteBarrier:
        record_write_mode = RecordWriteMode::kValueIsMap;
        break;
      case kPointerWriteBarrier:
        record_write_mode = RecordWriteMode::kValueIsPointer;
        break;
      case kFullWriteBarrier:
        record_write_mode = RecordWriteMode::kValueIsAny;
        break;
    }
    InstructionOperand temps[] = {g.TempRegister(), g.TempRegister()};
    size_t const temp_count = arraysize(temps);
    InstructionCode code = kArchStoreWithWriteBarrier;
    code |= AddressingModeField::encode(addressing_mode);
    code |= MiscField::encode(static_cast<int>(record_write_mode));
    Emit(code, 0, nullptr, input_count, inputs, temp_count, temps);
  } else {
    ArchOpcode opcode = kArchNop;
    switch (rep) {
      case MachineRepresentation::kFloat32:
        opcode = kX87Movss;
        break;
      case MachineRepresentation::kFloat64:
        opcode = kX87Movsd;
        break;
      case MachineRepresentation::kBit:  // Fall through.
      case MachineRepresentation::kWord8:
        opcode = kX87Movb;
        break;
      case MachineRepresentation::kWord16:
        opcode = kX87Movw;
        break;
      case MachineRepresentation::kTagged:  // Fall through.
      case MachineRepresentation::kWord32:
        opcode = kX87Movl;
        break;
      case MachineRepresentation::kWord64:   // Fall through.
      case MachineRepresentation::kSimd128:  // Fall through.
      case MachineRepresentation::kNone:
        UNREACHABLE();
        return;
    }

    InstructionOperand val;
    if (g.CanBeImmediate(value)) {
      val = g.UseImmediate(value);
    } else if (rep == MachineRepresentation::kWord8 ||
               rep == MachineRepresentation::kBit) {
      val = g.UseByteRegister(value);
    } else {
      val = g.UseRegister(value);
    }

    InstructionOperand inputs[4];
    size_t input_count = 0;
    AddressingMode addressing_mode =
        g.GetEffectiveAddressMemoryOperand(node, inputs, &input_count);
    InstructionCode code =
        opcode | AddressingModeField::encode(addressing_mode);
    inputs[input_count++] = val;
    Emit(code, 0, static_cast<InstructionOperand*>(nullptr), input_count,
         inputs);
  }
}


void InstructionSelector::VisitCheckedLoad(Node* node) {
  CheckedLoadRepresentation load_rep = CheckedLoadRepresentationOf(node->op());
  X87OperandGenerator g(this);
  Node* const buffer = node->InputAt(0);
  Node* const offset = node->InputAt(1);
  Node* const length = node->InputAt(2);
  ArchOpcode opcode = kArchNop;
  switch (load_rep.representation()) {
    case MachineRepresentation::kWord8:
      opcode = load_rep.IsSigned() ? kCheckedLoadInt8 : kCheckedLoadUint8;
      break;
    case MachineRepresentation::kWord16:
      opcode = load_rep.IsSigned() ? kCheckedLoadInt16 : kCheckedLoadUint16;
      break;
    case MachineRepresentation::kWord32:
      opcode = kCheckedLoadWord32;
      break;
    case MachineRepresentation::kFloat32:
      opcode = kCheckedLoadFloat32;
      break;
    case MachineRepresentation::kFloat64:
      opcode = kCheckedLoadFloat64;
      break;
    case MachineRepresentation::kBit:      // Fall through.
    case MachineRepresentation::kTagged:   // Fall through.
    case MachineRepresentation::kWord64:   // Fall through.
    case MachineRepresentation::kSimd128:  // Fall through.
    case MachineRepresentation::kNone:
      UNREACHABLE();
      return;
  }
  InstructionOperand offset_operand = g.UseRegister(offset);
  InstructionOperand length_operand =
      g.CanBeImmediate(length) ? g.UseImmediate(length) : g.UseRegister(length);
  if (g.CanBeImmediate(buffer)) {
    Emit(opcode | AddressingModeField::encode(kMode_MRI),
         g.DefineAsRegister(node), offset_operand, length_operand,
         offset_operand, g.UseImmediate(buffer));
  } else {
    Emit(opcode | AddressingModeField::encode(kMode_MR1),
         g.DefineAsRegister(node), offset_operand, length_operand,
         g.UseRegister(buffer), offset_operand);
  }
}


void InstructionSelector::VisitCheckedStore(Node* node) {
  MachineRepresentation rep = CheckedStoreRepresentationOf(node->op());
  X87OperandGenerator g(this);
  Node* const buffer = node->InputAt(0);
  Node* const offset = node->InputAt(1);
  Node* const length = node->InputAt(2);
  Node* const value = node->InputAt(3);
  ArchOpcode opcode = kArchNop;
  switch (rep) {
    case MachineRepresentation::kWord8:
      opcode = kCheckedStoreWord8;
      break;
    case MachineRepresentation::kWord16:
      opcode = kCheckedStoreWord16;
      break;
    case MachineRepresentation::kWord32:
      opcode = kCheckedStoreWord32;
      break;
    case MachineRepresentation::kFloat32:
      opcode = kCheckedStoreFloat32;
      break;
    case MachineRepresentation::kFloat64:
      opcode = kCheckedStoreFloat64;
      break;
    case MachineRepresentation::kBit:      // Fall through.
    case MachineRepresentation::kTagged:   // Fall through.
    case MachineRepresentation::kWord64:   // Fall through.
    case MachineRepresentation::kSimd128:  // Fall through.
    case MachineRepresentation::kNone:
      UNREACHABLE();
      return;
  }
  InstructionOperand value_operand =
      g.CanBeImmediate(value) ? g.UseImmediate(value)
                              : ((rep == MachineRepresentation::kWord8 ||
                                  rep == MachineRepresentation::kBit)
                                     ? g.UseByteRegister(value)
                                     : g.UseRegister(value));
  InstructionOperand offset_operand = g.UseRegister(offset);
  InstructionOperand length_operand =
      g.CanBeImmediate(length) ? g.UseImmediate(length) : g.UseRegister(length);
  if (g.CanBeImmediate(buffer)) {
    Emit(opcode | AddressingModeField::encode(kMode_MRI), g.NoOutput(),
         offset_operand, length_operand, value_operand, offset_operand,
         g.UseImmediate(buffer));
  } else {
    Emit(opcode | AddressingModeField::encode(kMode_MR1), g.NoOutput(),
         offset_operand, length_operand, value_operand, g.UseRegister(buffer),
         offset_operand);
  }
}

namespace {

// Shared routine for multiple binary operations.
void VisitBinop(InstructionSelector* selector, Node* node,
                InstructionCode opcode, FlagsContinuation* cont) {
  X87OperandGenerator g(selector);
  Int32BinopMatcher m(node);
  Node* left = m.left().node();
  Node* right = m.right().node();
  InstructionOperand inputs[4];
  size_t input_count = 0;
  InstructionOperand outputs[2];
  size_t output_count = 0;

  // TODO(turbofan): match complex addressing modes.
  if (left == right) {
    // If both inputs refer to the same operand, enforce allocating a register
    // for both of them to ensure that we don't end up generating code like
    // this:
    //
    //   mov eax, [ebp-0x10]
    //   add eax, [ebp-0x10]
    //   jo label
    InstructionOperand const input = g.UseRegister(left);
    inputs[input_count++] = input;
    inputs[input_count++] = input;
  } else if (g.CanBeImmediate(right)) {
    inputs[input_count++] = g.UseRegister(left);
    inputs[input_count++] = g.UseImmediate(right);
  } else {
    if (node->op()->HasProperty(Operator::kCommutative) &&
        g.CanBeBetterLeftOperand(right)) {
      std::swap(left, right);
    }
    inputs[input_count++] = g.UseRegister(left);
    inputs[input_count++] = g.Use(right);
  }

  if (cont->IsBranch()) {
    inputs[input_count++] = g.Label(cont->true_block());
    inputs[input_count++] = g.Label(cont->false_block());
  }

  outputs[output_count++] = g.DefineSameAsFirst(node);
  if (cont->IsSet()) {
    outputs[output_count++] = g.DefineAsRegister(cont->result());
  }

  DCHECK_NE(0u, input_count);
  DCHECK_NE(0u, output_count);
  DCHECK_GE(arraysize(inputs), input_count);
  DCHECK_GE(arraysize(outputs), output_count);

  opcode = cont->Encode(opcode);
  if (cont->IsDeoptimize()) {
    selector->EmitDeoptimize(opcode, output_count, outputs, input_count, inputs,
                             cont->frame_state());
  } else {
    selector->Emit(opcode, output_count, outputs, input_count, inputs);
  }
}


// Shared routine for multiple binary operations.
void VisitBinop(InstructionSelector* selector, Node* node,
                InstructionCode opcode) {
  FlagsContinuation cont;
  VisitBinop(selector, node, opcode, &cont);
}

}  // namespace

void InstructionSelector::VisitWord32And(Node* node) {
  VisitBinop(this, node, kX87And);
}


void InstructionSelector::VisitWord32Or(Node* node) {
  VisitBinop(this, node, kX87Or);
}


void InstructionSelector::VisitWord32Xor(Node* node) {
  X87OperandGenerator g(this);
  Int32BinopMatcher m(node);
  if (m.right().Is(-1)) {
    Emit(kX87Not, g.DefineSameAsFirst(node), g.UseRegister(m.left().node()));
  } else {
    VisitBinop(this, node, kX87Xor);
  }
}


// Shared routine for multiple shift operations.
static inline void VisitShift(InstructionSelector* selector, Node* node,
                              ArchOpcode opcode) {
  X87OperandGenerator g(selector);
  Node* left = node->InputAt(0);
  Node* right = node->InputAt(1);

  if (g.CanBeImmediate(right)) {
    selector->Emit(opcode, g.DefineSameAsFirst(node), g.UseRegister(left),
                   g.UseImmediate(right));
  } else {
    selector->Emit(opcode, g.DefineSameAsFirst(node), g.UseRegister(left),
                   g.UseFixed(right, ecx));
  }
}


namespace {

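// Shared routine for multiply-high: the first input is fixed to eax, the high
// half of the product is defined in edx, and eax is clobbered as a temporary.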
void VisitMulHigh(InstructionSelector* selector, Node* node,
                  ArchOpcode opcode) {
  X87OperandGenerator g(selector);
  InstructionOperand temps[] = {g.TempRegister(eax)};
  selector->Emit(
      opcode, g.DefineAsFixed(node, edx), g.UseFixed(node->InputAt(0), eax),
      g.UseUniqueRegister(node->InputAt(1)), arraysize(temps), temps);
}


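// Shared routine for division: the dividend is fixed to eax, the quotient is
// defined in eax, and edx is clobbered as a temporary.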
void VisitDiv(InstructionSelector* selector, Node* node, ArchOpcode opcode) {
  X87OperandGenerator g(selector);
  InstructionOperand temps[] = {g.TempRegister(edx)};
  selector->Emit(opcode, g.DefineAsFixed(node, eax),
                 g.UseFixed(node->InputAt(0), eax),
                 g.UseUnique(node->InputAt(1)), arraysize(temps), temps);
}


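// Shared routine for modulus: the dividend is fixed to eax, the remainder is
// defined in edx, and eax is clobbered as a temporary.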
void VisitMod(InstructionSelector* selector, Node* node, ArchOpcode opcode) {
  X87OperandGenerator g(selector);
  InstructionOperand temps[] = {g.TempRegister(eax)};
  selector->Emit(opcode, g.DefineAsFixed(node, edx),
                 g.UseFixed(node->InputAt(0), eax),
                 g.UseUnique(node->InputAt(1)), arraysize(temps), temps);
}

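// Emits an lea that computes index * scale + base + displacement into
// |result|.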
void EmitLea(InstructionSelector* selector, Node* result, Node* index,
             int scale, Node* base, Node* displacement) {
  X87OperandGenerator g(selector);
  InstructionOperand inputs[4];
  size_t input_count = 0;
  AddressingMode mode = g.GenerateMemoryOperandInputs(
      index, scale, base, displacement, inputs, &input_count);

  DCHECK_NE(0u, input_count);
  DCHECK_GE(arraysize(inputs), input_count);

  InstructionOperand outputs[1];
  outputs[0] = g.DefineAsRegister(result);

  InstructionCode opcode = AddressingModeField::encode(mode) | kX87Lea;

  selector->Emit(opcode, 1, outputs, input_count, inputs);
}

}  // namespace


void InstructionSelector::VisitWord32Shl(Node* node) {
  Int32ScaleMatcher m(node, true);
  if (m.matches()) {
    Node* index = node->InputAt(0);
    Node* base = m.power_of_two_plus_one() ? index : nullptr;
    EmitLea(this, node, index, m.scale(), base, nullptr);
    return;
  }
  VisitShift(this, node, kX87Shl);
}


void InstructionSelector::VisitWord32Shr(Node* node) {
  VisitShift(this, node, kX87Shr);
}


void InstructionSelector::VisitWord32Sar(Node* node) {
  VisitShift(this, node, kX87Sar);
}

void InstructionSelector::VisitInt32PairAdd(Node* node) {
  X87OperandGenerator g(this);

  // We use UseUniqueRegister here to avoid register sharing with the temp
  // register.
  InstructionOperand inputs[] = {
      g.UseRegister(node->InputAt(0)), g.UseUniqueRegister(node->InputAt(1)),
      g.UseRegister(node->InputAt(2)), g.UseUniqueRegister(node->InputAt(3))};

  InstructionOperand outputs[] = {
      g.DefineSameAsFirst(node),
      g.DefineAsRegister(NodeProperties::FindProjection(node, 1))};

  InstructionOperand temps[] = {g.TempRegister()};

  Emit(kX87AddPair, 2, outputs, 4, inputs, 1, temps);
}

void InstructionSelector::VisitInt32PairSub(Node* node) {
  X87OperandGenerator g(this);

  // We use UseUniqueRegister here to avoid register sharing with the temp
  // register.
  InstructionOperand inputs[] = {
      g.UseRegister(node->InputAt(0)), g.UseUniqueRegister(node->InputAt(1)),
      g.UseRegister(node->InputAt(2)), g.UseUniqueRegister(node->InputAt(3))};

  InstructionOperand outputs[] = {
      g.DefineSameAsFirst(node),
      g.DefineAsRegister(NodeProperties::FindProjection(node, 1))};

  InstructionOperand temps[] = {g.TempRegister()};

  Emit(kX87SubPair, 2, outputs, 4, inputs, 1, temps);
}

void InstructionSelector::VisitInt32PairMul(Node* node) {
  X87OperandGenerator g(this);

  // InputAt(3) explicitly shares ecx with OutputRegister(1) to save one
  // register and one mov instruction.
  InstructionOperand inputs[] = {
      g.UseUnique(node->InputAt(0)), g.UseUnique(node->InputAt(1)),
      g.UseUniqueRegister(node->InputAt(2)), g.UseFixed(node->InputAt(3), ecx)};

  InstructionOperand outputs[] = {
      g.DefineAsFixed(node, eax),
      g.DefineAsFixed(NodeProperties::FindProjection(node, 1), ecx)};

  InstructionOperand temps[] = {g.TempRegister(edx)};

  Emit(kX87MulPair, 2, outputs, 4, inputs, 1, temps);
}

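// Shared routine for the paired 32-bit shifts: the low and high input words
// are fixed to eax and edx, and the shift count is either an immediate or ecx.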
void VisitWord32PairShift(InstructionSelector* selector, InstructionCode opcode,
                          Node* node) {
  X87OperandGenerator g(selector);

  Node* shift = node->InputAt(2);
  InstructionOperand shift_operand;
  if (g.CanBeImmediate(shift)) {
    shift_operand = g.UseImmediate(shift);
  } else {
    shift_operand = g.UseFixed(shift, ecx);
  }
  InstructionOperand inputs[] = {g.UseFixed(node->InputAt(0), eax),
                                 g.UseFixed(node->InputAt(1), edx),
                                 shift_operand};

  InstructionOperand outputs[] = {
      g.DefineAsFixed(node, eax),
      g.DefineAsFixed(NodeProperties::FindProjection(node, 1), edx)};

  selector->Emit(opcode, 2, outputs, 3, inputs);
}

void InstructionSelector::VisitWord32PairShl(Node* node) {
  VisitWord32PairShift(this, kX87ShlPair, node);
}

void InstructionSelector::VisitWord32PairShr(Node* node) {
  VisitWord32PairShift(this, kX87ShrPair, node);
}

void InstructionSelector::VisitWord32PairSar(Node* node) {
  VisitWord32PairShift(this, kX87SarPair, node);
}

void InstructionSelector::VisitWord32Ror(Node* node) {
  VisitShift(this, node, kX87Ror);
}


void InstructionSelector::VisitWord32Clz(Node* node) {
  X87OperandGenerator g(this);
  Emit(kX87Lzcnt, g.DefineAsRegister(node), g.Use(node->InputAt(0)));
}


void InstructionSelector::VisitWord32Ctz(Node* node) { UNREACHABLE(); }


void InstructionSelector::VisitWord32ReverseBits(Node* node) { UNREACHABLE(); }


void InstructionSelector::VisitWord32Popcnt(Node* node) {
  X87OperandGenerator g(this);
  Emit(kX87Popcnt, g.DefineAsRegister(node), g.Use(node->InputAt(0)));
}


void InstructionSelector::VisitInt32Add(Node* node) {
  X87OperandGenerator g(this);

  // Try to match the Add to a lea pattern
  BaseWithIndexAndDisplacement32Matcher m(node);
  if (m.matches() &&
      (m.displacement() == nullptr || g.CanBeImmediate(m.displacement()))) {
    InstructionOperand inputs[4];
    size_t input_count = 0;
    AddressingMode mode = g.GenerateMemoryOperandInputs(
        m.index(), m.scale(), m.base(), m.displacement(), inputs, &input_count);

    DCHECK_NE(0u, input_count);
    DCHECK_GE(arraysize(inputs), input_count);

    InstructionOperand outputs[1];
    outputs[0] = g.DefineAsRegister(node);

    InstructionCode opcode = AddressingModeField::encode(mode) | kX87Lea;
    Emit(opcode, 1, outputs, input_count, inputs);
    return;
  }

  // No lea pattern match, use add
  VisitBinop(this, node, kX87Add);
}


void InstructionSelector::VisitInt32Sub(Node* node) {
  X87OperandGenerator g(this);
  Int32BinopMatcher m(node);
  if (m.left().Is(0)) {
    Emit(kX87Neg, g.DefineSameAsFirst(node), g.Use(m.right().node()));
  } else {
    VisitBinop(this, node, kX87Sub);
  }
}


void InstructionSelector::VisitInt32Mul(Node* node) {
  Int32ScaleMatcher m(node, true);
  if (m.matches()) {
    Node* index = node->InputAt(0);
    Node* base = m.power_of_two_plus_one() ? index : nullptr;
    EmitLea(this, node, index, m.scale(), base, nullptr);
    return;
  }
  X87OperandGenerator g(this);
  Node* left = node->InputAt(0);
  Node* right = node->InputAt(1);
  if (g.CanBeImmediate(right)) {
    Emit(kX87Imul, g.DefineAsRegister(node), g.Use(left),
         g.UseImmediate(right));
  } else {
    if (g.CanBeBetterLeftOperand(right)) {
      std::swap(left, right);
    }
    Emit(kX87Imul, g.DefineSameAsFirst(node), g.UseRegister(left),
         g.Use(right));
  }
}


void InstructionSelector::VisitInt32MulHigh(Node* node) {
  VisitMulHigh(this, node, kX87ImulHigh);
}


void InstructionSelector::VisitUint32MulHigh(Node* node) {
  VisitMulHigh(this, node, kX87UmulHigh);
}


void InstructionSelector::VisitInt32Div(Node* node) {
  VisitDiv(this, node, kX87Idiv);
}


void InstructionSelector::VisitUint32Div(Node* node) {
  VisitDiv(this, node, kX87Udiv);
}


void InstructionSelector::VisitInt32Mod(Node* node) {
  VisitMod(this, node, kX87Idiv);
}


void InstructionSelector::VisitUint32Mod(Node* node) {
  VisitMod(this, node, kX87Udiv);
}


void InstructionSelector::VisitChangeFloat32ToFloat64(Node* node) {
  X87OperandGenerator g(this);
  Emit(kX87Float32ToFloat64, g.DefineAsFixed(node, stX_0),
       g.Use(node->InputAt(0)));
}


void InstructionSelector::VisitRoundInt32ToFloat32(Node* node) {
  X87OperandGenerator g(this);
  Emit(kX87Int32ToFloat32, g.DefineAsFixed(node, stX_0),
       g.Use(node->InputAt(0)));
}


void InstructionSelector::VisitRoundUint32ToFloat32(Node* node) {
  X87OperandGenerator g(this);
  Emit(kX87Uint32ToFloat32, g.DefineAsFixed(node, stX_0),
       g.Use(node->InputAt(0)));
}


void InstructionSelector::VisitChangeInt32ToFloat64(Node* node) {
  X87OperandGenerator g(this);
  Emit(kX87Int32ToFloat64, g.DefineAsFixed(node, stX_0),
       g.Use(node->InputAt(0)));
}


void InstructionSelector::VisitChangeUint32ToFloat64(Node* node) {
  X87OperandGenerator g(this);
  Emit(kX87Uint32ToFloat64, g.DefineAsFixed(node, stX_0),
       g.UseRegister(node->InputAt(0)));
}


void InstructionSelector::VisitTruncateFloat32ToInt32(Node* node) {
  X87OperandGenerator g(this);
  Emit(kX87Float32ToInt32, g.DefineAsRegister(node), g.Use(node->InputAt(0)));
}


void InstructionSelector::VisitTruncateFloat32ToUint32(Node* node) {
  X87OperandGenerator g(this);
  Emit(kX87Float32ToUint32, g.DefineAsRegister(node), g.Use(node->InputAt(0)));
}


void InstructionSelector::VisitChangeFloat64ToInt32(Node* node) {
  X87OperandGenerator g(this);
  Emit(kX87Float64ToInt32, g.DefineAsRegister(node), g.Use(node->InputAt(0)));
}


void InstructionSelector::VisitChangeFloat64ToUint32(Node* node) {
  X87OperandGenerator g(this);
  Emit(kX87Float64ToUint32, g.DefineAsRegister(node), g.Use(node->InputAt(0)));
}

void InstructionSelector::VisitTruncateFloat64ToUint32(Node* node) {
  X87OperandGenerator g(this);
  Emit(kX87Float64ToUint32, g.DefineAsRegister(node), g.Use(node->InputAt(0)));
}

void InstructionSelector::VisitTruncateFloat64ToFloat32(Node* node) {
  X87OperandGenerator g(this);
  Emit(kX87Float64ToFloat32, g.DefineAsFixed(node, stX_0),
       g.Use(node->InputAt(0)));
}

void InstructionSelector::VisitTruncateFloat64ToWord32(Node* node) {
  X87OperandGenerator g(this);
  Emit(kArchTruncateDoubleToI, g.DefineAsRegister(node),
       g.Use(node->InputAt(0)));
}

void InstructionSelector::VisitRoundFloat64ToInt32(Node* node) {
  X87OperandGenerator g(this);
  Emit(kX87Float64ToInt32, g.DefineAsRegister(node), g.Use(node->InputAt(0)));
}


void InstructionSelector::VisitBitcastFloat32ToInt32(Node* node) {
  X87OperandGenerator g(this);
  Emit(kX87PushFloat32, g.NoOutput(), g.Use(node->InputAt(0)));
  Emit(kX87BitcastFI, g.DefineAsRegister(node), 0, nullptr);
}


void InstructionSelector::VisitBitcastInt32ToFloat32(Node* node) {
  X87OperandGenerator g(this);
  Emit(kX87BitcastIF, g.DefineAsFixed(node, stX_0), g.Use(node->InputAt(0)));
}


void InstructionSelector::VisitFloat32Add(Node* node) {
  X87OperandGenerator g(this);
  Emit(kX87PushFloat32, g.NoOutput(), g.Use(node->InputAt(0)));
  Emit(kX87PushFloat32, g.NoOutput(), g.Use(node->InputAt(1)));
  Emit(kX87Float32Add, g.DefineAsFixed(node, stX_0), 0, nullptr);
}


void InstructionSelector::VisitFloat64Add(Node* node) {
  X87OperandGenerator g(this);
  Emit(kX87PushFloat64, g.NoOutput(), g.Use(node->InputAt(0)));
  Emit(kX87PushFloat64, g.NoOutput(), g.Use(node->InputAt(1)));
  Emit(kX87Float64Add, g.DefineAsFixed(node, stX_0), 0, nullptr);
}


void InstructionSelector::VisitFloat32Sub(Node* node) {
  X87OperandGenerator g(this);
  Emit(kX87PushFloat32, g.NoOutput(), g.Use(node->InputAt(0)));
  Emit(kX87PushFloat32, g.NoOutput(), g.Use(node->InputAt(1)));
  Emit(kX87Float32Sub, g.DefineAsFixed(node, stX_0), 0, nullptr);
}

void InstructionSelector::VisitFloat32SubPreserveNan(Node* node) {
  X87OperandGenerator g(this);
  Emit(kX87PushFloat32, g.NoOutput(), g.Use(node->InputAt(0)));
  Emit(kX87PushFloat32, g.NoOutput(), g.Use(node->InputAt(1)));
  Emit(kX87Float32Sub, g.DefineAsFixed(node, stX_0), 0, nullptr);
}

void InstructionSelector::VisitFloat64Sub(Node* node) {
  X87OperandGenerator g(this);
  Emit(kX87PushFloat64, g.NoOutput(), g.Use(node->InputAt(0)));
  Emit(kX87PushFloat64, g.NoOutput(), g.Use(node->InputAt(1)));
  Emit(kX87Float64Sub, g.DefineAsFixed(node, stX_0), 0, nullptr);
}

void InstructionSelector::VisitFloat64SubPreserveNan(Node* node) {
  X87OperandGenerator g(this);
  Emit(kX87PushFloat64, g.NoOutput(), g.Use(node->InputAt(0)));
  Emit(kX87PushFloat64, g.NoOutput(), g.Use(node->InputAt(1)));
  Emit(kX87Float64Sub, g.DefineAsFixed(node, stX_0), 0, nullptr);
}


void InstructionSelector::VisitFloat32Mul(Node* node) {
  X87OperandGenerator g(this);
  Emit(kX87PushFloat32, g.NoOutput(), g.Use(node->InputAt(0)));
  Emit(kX87PushFloat32, g.NoOutput(), g.Use(node->InputAt(1)));
  Emit(kX87Float32Mul, g.DefineAsFixed(node, stX_0), 0, nullptr);
}


void InstructionSelector::VisitFloat64Mul(Node* node) {
  X87OperandGenerator g(this);
  Emit(kX87PushFloat64, g.NoOutput(), g.Use(node->InputAt(0)));
  Emit(kX87PushFloat64, g.NoOutput(), g.Use(node->InputAt(1)));
  Emit(kX87Float64Mul, g.DefineAsFixed(node, stX_0), 0, nullptr);
}


void InstructionSelector::VisitFloat32Div(Node* node) {
  X87OperandGenerator g(this);
  Emit(kX87PushFloat32, g.NoOutput(), g.Use(node->InputAt(0)));
  Emit(kX87PushFloat32, g.NoOutput(), g.Use(node->InputAt(1)));
  Emit(kX87Float32Div, g.DefineAsFixed(node, stX_0), 0, nullptr);
}


void InstructionSelector::VisitFloat64Div(Node* node) {
  X87OperandGenerator g(this);
  Emit(kX87PushFloat64, g.NoOutput(), g.Use(node->InputAt(0)));
  Emit(kX87PushFloat64, g.NoOutput(), g.Use(node->InputAt(1)));
  Emit(kX87Float64Div, g.DefineAsFixed(node, stX_0), 0, nullptr);
}


void InstructionSelector::VisitFloat64Mod(Node* node) {
  X87OperandGenerator g(this);
  InstructionOperand temps[] = {g.TempRegister(eax)};
  Emit(kX87PushFloat64, g.NoOutput(), g.Use(node->InputAt(0)));
  Emit(kX87PushFloat64, g.NoOutput(), g.Use(node->InputAt(1)));
  Emit(kX87Float64Mod, g.DefineAsFixed(node, stX_0), 1, temps)->MarkAsCall();
}


void InstructionSelector::VisitFloat32Max(Node* node) {
  X87OperandGenerator g(this);
  Emit(kX87PushFloat32, g.NoOutput(), g.Use(node->InputAt(0)));
  Emit(kX87PushFloat32, g.NoOutput(), g.Use(node->InputAt(1)));
  Emit(kX87Float32Max, g.DefineAsFixed(node, stX_0), 0, nullptr);
}


void InstructionSelector::VisitFloat64Max(Node* node) {
  X87OperandGenerator g(this);
  Emit(kX87PushFloat64, g.NoOutput(), g.Use(node->InputAt(0)));
  Emit(kX87PushFloat64, g.NoOutput(), g.Use(node->InputAt(1)));
  Emit(kX87Float64Max, g.DefineAsFixed(node, stX_0), 0, nullptr);
}


void InstructionSelector::VisitFloat32Min(Node* node) {
  X87OperandGenerator g(this);
  Emit(kX87PushFloat32, g.NoOutput(), g.Use(node->InputAt(0)));
  Emit(kX87PushFloat32, g.NoOutput(), g.Use(node->InputAt(1)));
  Emit(kX87Float32Min, g.DefineAsFixed(node, stX_0), 0, nullptr);
}


void InstructionSelector::VisitFloat64Min(Node* node) {
  X87OperandGenerator g(this);
  Emit(kX87PushFloat64, g.NoOutput(), g.Use(node->InputAt(0)));
  Emit(kX87PushFloat64, g.NoOutput(), g.Use(node->InputAt(1)));
  Emit(kX87Float64Min, g.DefineAsFixed(node, stX_0), 0, nullptr);
}


void InstructionSelector::VisitFloat32Abs(Node* node) {
  X87OperandGenerator g(this);
  Emit(kX87PushFloat32, g.NoOutput(), g.Use(node->InputAt(0)));
  Emit(kX87Float32Abs, g.DefineAsFixed(node, stX_0), 0, nullptr);
}


void InstructionSelector::VisitFloat64Abs(Node* node) {
  X87OperandGenerator g(this);
  Emit(kX87PushFloat64, g.NoOutput(), g.Use(node->InputAt(0)));
  Emit(kX87Float64Abs, g.DefineAsFixed(node, stX_0), 0, nullptr);
}

void InstructionSelector::VisitFloat32Sqrt(Node* node) {
  X87OperandGenerator g(this);
  Emit(kX87PushFloat32, g.NoOutput(), g.Use(node->InputAt(0)));
  Emit(kX87Float32Sqrt, g.DefineAsFixed(node, stX_0), 0, nullptr);
}


void InstructionSelector::VisitFloat64Sqrt(Node* node) {
  X87OperandGenerator g(this);
  Emit(kX87PushFloat64, g.NoOutput(), g.Use(node->InputAt(0)));
  Emit(kX87Float64Sqrt, g.DefineAsFixed(node, stX_0), 0, nullptr);
}


void InstructionSelector::VisitFloat32RoundDown(Node* node) {
  X87OperandGenerator g(this);
  Emit(kX87Float32Round | MiscField::encode(kRoundDown),
       g.UseFixed(node, stX_0), g.Use(node->InputAt(0)));
}


void InstructionSelector::VisitFloat64RoundDown(Node* node) {
  X87OperandGenerator g(this);
  Emit(kX87Float64Round | MiscField::encode(kRoundDown),
       g.UseFixed(node, stX_0), g.Use(node->InputAt(0)));
}


void InstructionSelector::VisitFloat32RoundUp(Node* node) {
  X87OperandGenerator g(this);
  Emit(kX87Float32Round | MiscField::encode(kRoundUp), g.UseFixed(node, stX_0),
       g.Use(node->InputAt(0)));
}


void InstructionSelector::VisitFloat64RoundUp(Node* node) {
  X87OperandGenerator g(this);
  Emit(kX87Float64Round | MiscField::encode(kRoundUp), g.UseFixed(node, stX_0),
       g.Use(node->InputAt(0)));
}


void InstructionSelector::VisitFloat32RoundTruncate(Node* node) {
  X87OperandGenerator g(this);
  Emit(kX87Float32Round | MiscField::encode(kRoundToZero),
       g.UseFixed(node, stX_0), g.Use(node->InputAt(0)));
}


void InstructionSelector::VisitFloat64RoundTruncate(Node* node) {
  X87OperandGenerator g(this);
  Emit(kX87Float64Round | MiscField::encode(kRoundToZero),
       g.UseFixed(node, stX_0), g.Use(node->InputAt(0)));
}


void InstructionSelector::VisitFloat64RoundTiesAway(Node* node) {
  UNREACHABLE();
}


void InstructionSelector::VisitFloat32RoundTiesEven(Node* node) {
  X87OperandGenerator g(this);
  Emit(kX87Float32Round | MiscField::encode(kRoundToNearest),
       g.UseFixed(node, stX_0), g.Use(node->InputAt(0)));
}


void InstructionSelector::VisitFloat64RoundTiesEven(Node* node) {
  X87OperandGenerator g(this);
  Emit(kX87Float64Round | MiscField::encode(kRoundToNearest),
       g.UseFixed(node, stX_0), g.Use(node->InputAt(0)));
}

void InstructionSelector::VisitFloat32Neg(Node* node) { UNREACHABLE(); }

void InstructionSelector::VisitFloat64Neg(Node* node) { UNREACHABLE(); }

void InstructionSelector::VisitFloat64Ieee754Binop(Node* node,
                                                   InstructionCode opcode) {
  X87OperandGenerator g(this);
  Emit(kX87PushFloat64, g.NoOutput(), g.Use(node->InputAt(0)));
  Emit(kX87PushFloat64, g.NoOutput(), g.Use(node->InputAt(1)));
  Emit(opcode, g.DefineAsFixed(node, stX_0), 0, nullptr)->MarkAsCall();
}

void InstructionSelector::VisitFloat64Ieee754Unop(Node* node,
                                                  InstructionCode opcode) {
  X87OperandGenerator g(this);
  Emit(kX87PushFloat64, g.NoOutput(), g.Use(node->InputAt(0)));
  Emit(opcode, g.DefineAsFixed(node, stX_0), 0, nullptr)->MarkAsCall();
}

void InstructionSelector::EmitPrepareArguments(
    ZoneVector<PushParameter>* arguments, const CallDescriptor* descriptor,
    Node* node) {
  X87OperandGenerator g(this);

  // Prepare for C function call.
  if (descriptor->IsCFunctionCall()) {
    InstructionOperand temps[] = {g.TempRegister()};
    size_t const temp_count = arraysize(temps);
    Emit(kArchPrepareCallCFunction |
             MiscField::encode(static_cast<int>(descriptor->CParameterCount())),
         0, nullptr, 0, nullptr, temp_count, temps);

    // Poke any stack arguments.
    for (size_t n = 0; n < arguments->size(); ++n) {
      PushParameter input = (*arguments)[n];
      if (input.node()) {
        int const slot = static_cast<int>(n);
        InstructionOperand value = g.CanBeImmediate(input.node())
                                       ? g.UseImmediate(input.node())
                                       : g.UseRegister(input.node());
        Emit(kX87Poke | MiscField::encode(slot), g.NoOutput(), value);
      }
    }
  } else {
    // Push any stack arguments.
    for (PushParameter input : base::Reversed(*arguments)) {
      // TODO(titzer): handle pushing double parameters.
      if (input.node() == nullptr) continue;
      InstructionOperand value =
          g.CanBeImmediate(input.node())
              ? g.UseImmediate(input.node())
              : IsSupported(ATOM) ||
                        sequence()->IsFP(GetVirtualRegister(input.node()))
                    ? g.UseRegister(input.node())
                    : g.Use(input.node());
      Emit(kX87Push, g.NoOutput(), value);
    }
  }
}


bool InstructionSelector::IsTailCallAddressImmediate() { return true; }

int InstructionSelector::GetTempsCountForTailCallFromJSFunction() { return 0; }

namespace {

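// Shared routine for compare operations whose left operand is a load that is
// folded into a memory operand.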
void VisitCompareWithMemoryOperand(InstructionSelector* selector,
                                   InstructionCode opcode, Node* left,
                                   InstructionOperand right,
                                   FlagsContinuation* cont) {
  DCHECK(left->opcode() == IrOpcode::kLoad);
  X87OperandGenerator g(selector);
  size_t input_count = 0;
  InstructionOperand inputs[6];
  AddressingMode addressing_mode =
      g.GetEffectiveAddressMemoryOperand(left, inputs, &input_count);
  opcode |= AddressingModeField::encode(addressing_mode);
  opcode = cont->Encode(opcode);
  inputs[input_count++] = right;

  if (cont->IsBranch()) {
    inputs[input_count++] = g.Label(cont->true_block());
    inputs[input_count++] = g.Label(cont->false_block());
    selector->Emit(opcode, 0, nullptr, input_count, inputs);
  } else if (cont->IsDeoptimize()) {
    selector->EmitDeoptimize(opcode, 0, nullptr, input_count, inputs,
                             cont->frame_state());
  } else {
    DCHECK(cont->IsSet());
    InstructionOperand output = g.DefineAsRegister(cont->result());
    selector->Emit(opcode, 1, &output, input_count, inputs);
  }
}

// Shared routine for multiple compare operations.
void VisitCompare(InstructionSelector* selector, InstructionCode opcode,
                  InstructionOperand left, InstructionOperand right,
                  FlagsContinuation* cont) {
  X87OperandGenerator g(selector);
  opcode = cont->Encode(opcode);
  if (cont->IsBranch()) {
    selector->Emit(opcode, g.NoOutput(), left, right,
                   g.Label(cont->true_block()), g.Label(cont->false_block()));
  } else if (cont->IsDeoptimize()) {
    selector->EmitDeoptimize(opcode, g.NoOutput(), left, right,
                             cont->frame_state());
  } else {
    DCHECK(cont->IsSet());
    selector->Emit(opcode, g.DefineAsByteRegister(cont->result()), left, right);
  }
}


// Shared routine for multiple compare operations.
void VisitCompare(InstructionSelector* selector, InstructionCode opcode,
                  Node* left, Node* right, FlagsContinuation* cont,
                  bool commutative) {
  X87OperandGenerator g(selector);
  if (commutative && g.CanBeBetterLeftOperand(right)) {
    std::swap(left, right);
  }
  VisitCompare(selector, opcode, g.UseRegister(left), g.Use(right), cont);
}

// Tries to match the size of the given opcode to that of the operands, if
// possible.
InstructionCode TryNarrowOpcodeSize(InstructionCode opcode, Node* left,
                                    Node* right) {
  if (opcode != kX87Cmp && opcode != kX87Test) {
    return opcode;
  }
  // Currently, if one of the two operands is not a Load, we don't know what its
  // machine representation is, so we bail out.
  // TODO(epertoso): we can probably get some size information out of immediates
  // and phi nodes.
  if (left->opcode() != IrOpcode::kLoad || right->opcode() != IrOpcode::kLoad) {
    return opcode;
  }
  // If the load representations don't match, both operands will be
  // zero/sign-extended to 32bit.
  LoadRepresentation left_representation = LoadRepresentationOf(left->op());
  if (left_representation != LoadRepresentationOf(right->op())) {
    return opcode;
  }
  switch (left_representation.representation()) {
    case MachineRepresentation::kBit:
    case MachineRepresentation::kWord8:
      return opcode == kX87Cmp ? kX87Cmp8 : kX87Test8;
    case MachineRepresentation::kWord16:
      return opcode == kX87Cmp ? kX87Cmp16 : kX87Test16;
    default:
      return opcode;
  }
}

// Shared routine for multiple float32 compare operations (inputs commuted).
void VisitFloat32Compare(InstructionSelector* selector, Node* node,
                         FlagsContinuation* cont) {
  X87OperandGenerator g(selector);
  selector->Emit(kX87PushFloat32, g.NoOutput(), g.Use(node->InputAt(0)));
  selector->Emit(kX87PushFloat32, g.NoOutput(), g.Use(node->InputAt(1)));
  if (cont->IsBranch()) {
    selector->Emit(cont->Encode(kX87Float32Cmp), g.NoOutput(),
                   g.Label(cont->true_block()), g.Label(cont->false_block()));
  } else if (cont->IsDeoptimize()) {
    selector->EmitDeoptimize(cont->Encode(kX87Float32Cmp), g.NoOutput(),
                             g.Use(node->InputAt(0)), g.Use(node->InputAt(1)),
                             cont->frame_state());
  } else {
    DCHECK(cont->IsSet());
    selector->Emit(cont->Encode(kX87Float32Cmp),
                   g.DefineAsByteRegister(cont->result()));
  }
}


// Shared routine for multiple float64 compare operations (inputs commuted).
void VisitFloat64Compare(InstructionSelector* selector, Node* node,
                         FlagsContinuation* cont) {
  X87OperandGenerator g(selector);
  selector->Emit(kX87PushFloat64, g.NoOutput(), g.Use(node->InputAt(0)));
  selector->Emit(kX87PushFloat64, g.NoOutput(), g.Use(node->InputAt(1)));
  if (cont->IsBranch()) {
    selector->Emit(cont->Encode(kX87Float64Cmp), g.NoOutput(),
                   g.Label(cont->true_block()), g.Label(cont->false_block()));
  } else if (cont->IsDeoptimize()) {
    selector->EmitDeoptimize(cont->Encode(kX87Float64Cmp), g.NoOutput(),
                             g.Use(node->InputAt(0)), g.Use(node->InputAt(1)),
                             cont->frame_state());
  } else {
    DCHECK(cont->IsSet());
    selector->Emit(cont->Encode(kX87Float64Cmp),
                   g.DefineAsByteRegister(cont->result()));
  }
}

// Shared routine for multiple word compare operations.
void VisitWordCompare(InstructionSelector* selector, Node* node,
                      InstructionCode opcode, FlagsContinuation* cont) {
  X87OperandGenerator g(selector);
  Node* left = node->InputAt(0);
  Node* right = node->InputAt(1);

  InstructionCode narrowed_opcode = TryNarrowOpcodeSize(opcode, left, right);

  int effect_level = selector->GetEffectLevel(node);
  if (cont->IsBranch()) {
    effect_level = selector->GetEffectLevel(
        cont->true_block()->PredecessorAt(0)->control_input());
  }

  // If one of the two inputs is an immediate, make sure it's on the right, or
  // if one of the two inputs is a memory operand, make sure it's on the left.
  if ((!g.CanBeImmediate(right) && g.CanBeImmediate(left)) ||
      (g.CanBeMemoryOperand(narrowed_opcode, node, right, effect_level) &&
       !g.CanBeMemoryOperand(narrowed_opcode, node, left, effect_level))) {
    if (!node->op()->HasProperty(Operator::kCommutative)) cont->Commute();
    std::swap(left, right);
  }

  // Match immediates on right side of comparison.
  if (g.CanBeImmediate(right)) {
    if (g.CanBeMemoryOperand(opcode, node, left, effect_level)) {
      // TODO(epertoso): we should use `narrowed_opcode' here once we match
      // immediates too.
      return VisitCompareWithMemoryOperand(selector, opcode, left,
                                           g.UseImmediate(right), cont);
    }
    return VisitCompare(selector, opcode, g.Use(left), g.UseImmediate(right),
                        cont);
  }

  // Match memory operands on left side of comparison.
  if (g.CanBeMemoryOperand(narrowed_opcode, node, left, effect_level)) {
    bool needs_byte_register =
        narrowed_opcode == kX87Test8 || narrowed_opcode == kX87Cmp8;
    return VisitCompareWithMemoryOperand(
        selector, narrowed_opcode, left,
        needs_byte_register ? g.UseByteRegister(right) : g.UseRegister(right),
        cont);
  }

  if (g.CanBeBetterLeftOperand(right)) {
    if (!node->op()->HasProperty(Operator::kCommutative)) cont->Commute();
    std::swap(left, right);
  }

  return VisitCompare(selector, opcode, left, right, cont,
                      node->op()->HasProperty(Operator::kCommutative));
}

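// Shared routine for word comparisons; emits a dedicated stack check for the
// Compare(Load(js_stack_limit), LoadStackPointer) pattern and falls back to
// kX87Cmp otherwise.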
VisitWordCompare(InstructionSelector * selector,Node * node,FlagsContinuation * cont)1338 void VisitWordCompare(InstructionSelector* selector, Node* node,
1339                       FlagsContinuation* cont) {
1340   X87OperandGenerator g(selector);
1341   Int32BinopMatcher m(node);
1342   if (m.left().IsLoad() && m.right().IsLoadStackPointer()) {
1343     LoadMatcher<ExternalReferenceMatcher> mleft(m.left().node());
1344     ExternalReference js_stack_limit =
1345         ExternalReference::address_of_stack_limit(selector->isolate());
1346     if (mleft.object().Is(js_stack_limit) && mleft.index().Is(0)) {
1347       // Compare(Load(js_stack_limit), LoadStackPointer)
1348       if (!node->op()->HasProperty(Operator::kCommutative)) cont->Commute();
1349       InstructionCode opcode = cont->Encode(kX87StackCheck);
1350       if (cont->IsBranch()) {
1351         selector->Emit(opcode, g.NoOutput(), g.Label(cont->true_block()),
1352                        g.Label(cont->false_block()));
1353       } else if (cont->IsDeoptimize()) {
1354         selector->EmitDeoptimize(opcode, 0, nullptr, 0, nullptr,
1355                                  cont->frame_state());
1356       } else {
1357         DCHECK(cont->IsSet());
1358         selector->Emit(opcode, g.DefineAsRegister(cont->result()));
1359       }
1360       return;
1361     }
1362   }
1363   VisitWordCompare(selector, node, kX87Cmp, cont);
1364 }
1365 
1366 
1367 // Shared routine for word comparison with zero.
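// This routine walks the value being tested against zero for as long as the
// selector can cover it, negating the continuation for comparisons against 0
// and re-dispatching to the specific compare visitors, so that the
// flag-setting instruction is fused with the branch, deoptimization or
// materialization instead of producing a separate boolean value.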
1368 void VisitWordCompareZero(InstructionSelector* selector, Node* user,
1369                           Node* value, FlagsContinuation* cont) {
1370   // Try to combine the branch with a comparison.
1371   while (selector->CanCover(user, value)) {
1372     switch (value->opcode()) {
1373       case IrOpcode::kWord32Equal: {
1374         // Try to combine with comparisons against 0 by simply inverting the
1375         // continuation.
1376         Int32BinopMatcher m(value);
1377         if (m.right().Is(0)) {
1378           user = value;
1379           value = m.left().node();
1380           cont->Negate();
1381           continue;
1382         }
1383         cont->OverwriteAndNegateIfEqual(kEqual);
1384         return VisitWordCompare(selector, value, cont);
1385       }
1386       case IrOpcode::kInt32LessThan:
1387         cont->OverwriteAndNegateIfEqual(kSignedLessThan);
1388         return VisitWordCompare(selector, value, cont);
1389       case IrOpcode::kInt32LessThanOrEqual:
1390         cont->OverwriteAndNegateIfEqual(kSignedLessThanOrEqual);
1391         return VisitWordCompare(selector, value, cont);
1392       case IrOpcode::kUint32LessThan:
1393         cont->OverwriteAndNegateIfEqual(kUnsignedLessThan);
1394         return VisitWordCompare(selector, value, cont);
1395       case IrOpcode::kUint32LessThanOrEqual:
1396         cont->OverwriteAndNegateIfEqual(kUnsignedLessThanOrEqual);
1397         return VisitWordCompare(selector, value, cont);
1398       case IrOpcode::kFloat32Equal:
1399         cont->OverwriteAndNegateIfEqual(kUnorderedEqual);
1400         return VisitFloat32Compare(selector, value, cont);
1401       case IrOpcode::kFloat32LessThan:
1402         cont->OverwriteAndNegateIfEqual(kUnsignedGreaterThan);
1403         return VisitFloat32Compare(selector, value, cont);
1404       case IrOpcode::kFloat32LessThanOrEqual:
1405         cont->OverwriteAndNegateIfEqual(kUnsignedGreaterThanOrEqual);
1406         return VisitFloat32Compare(selector, value, cont);
1407       case IrOpcode::kFloat64Equal:
1408         cont->OverwriteAndNegateIfEqual(kUnorderedEqual);
1409         return VisitFloat64Compare(selector, value, cont);
1410       case IrOpcode::kFloat64LessThan:
1411         cont->OverwriteAndNegateIfEqual(kUnsignedGreaterThan);
1412         return VisitFloat64Compare(selector, value, cont);
1413       case IrOpcode::kFloat64LessThanOrEqual:
1414         cont->OverwriteAndNegateIfEqual(kUnsignedGreaterThanOrEqual);
1415         return VisitFloat64Compare(selector, value, cont);
1416       case IrOpcode::kProjection:
1417         // Check if this is the overflow output projection of an
1418         // <Operation>WithOverflow node.
1419         if (ProjectionIndexOf(value->op()) == 1u) {
1420           // We cannot combine the <Operation>WithOverflow with this branch
1421           // unless the 0th projection (the use of the actual value of the
1422           // <Operation>) is either nullptr, which means there's no use of
1423           // the actual value, or was already defined, which means it is
1424           // scheduled *AFTER* this branch.
1425           Node* const node = value->InputAt(0);
1426           Node* const result = NodeProperties::FindProjection(node, 0);
1427           if (result == nullptr || selector->IsDefined(result)) {
1428             switch (node->opcode()) {
1429               case IrOpcode::kInt32AddWithOverflow:
1430                 cont->OverwriteAndNegateIfEqual(kOverflow);
1431                 return VisitBinop(selector, node, kX87Add, cont);
1432               case IrOpcode::kInt32SubWithOverflow:
1433                 cont->OverwriteAndNegateIfEqual(kOverflow);
1434                 return VisitBinop(selector, node, kX87Sub, cont);
1435               default:
1436                 break;
1437             }
1438           }
1439         }
1440         break;
1441       case IrOpcode::kInt32Sub:
1442         return VisitWordCompare(selector, value, cont);
1443       case IrOpcode::kWord32And:
1444         return VisitWordCompare(selector, value, kX87Test, cont);
1445       default:
1446         break;
1447     }
1448     break;
1449   }
1450 
1451   // Continuation could not be combined with a compare; emit compare against 0.
1452   X87OperandGenerator g(selector);
1453   VisitCompare(selector, kX87Cmp, g.Use(value), g.TempImmediate(0), cont);
1454 }
1455 
1456 }  // namespace
1457 
1458 
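// A branch tests its input for "not equal to zero", so the continuation
// starts out as kNotEqual; VisitWordCompareZero may then combine it with a
// covered comparison and adjust or negate the condition.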
1459 void InstructionSelector::VisitBranch(Node* branch, BasicBlock* tbranch,
1460                                       BasicBlock* fbranch) {
1461   FlagsContinuation cont(kNotEqual, tbranch, fbranch);
1462   VisitWordCompareZero(this, branch, branch->InputAt(0), &cont);
1463 }
1464 
1465 void InstructionSelector::VisitDeoptimizeIf(Node* node) {
1466   FlagsContinuation cont =
1467       FlagsContinuation::ForDeoptimize(kNotEqual, node->InputAt(1));
1468   VisitWordCompareZero(this, node, node->InputAt(0), &cont);
1469 }
1470 
1471 void InstructionSelector::VisitDeoptimizeUnless(Node* node) {
1472   FlagsContinuation cont =
1473       FlagsContinuation::ForDeoptimize(kEqual, node->InputAt(1));
1474   VisitWordCompareZero(this, node, node->InputAt(0), &cont);
1475 }
1476 
1477 void InstructionSelector::VisitSwitch(Node* node, const SwitchInfo& sw) {
1478   X87OperandGenerator g(this);
1479   InstructionOperand value_operand = g.UseRegister(node->InputAt(0));
1480 
1481   // Emit either ArchTableSwitch or ArchLookupSwitch.
1482   size_t table_space_cost = 4 + sw.value_range;
1483   size_t table_time_cost = 3;
1484   size_t lookup_space_cost = 3 + 2 * sw.case_count;
1485   size_t lookup_time_cost = sw.case_count;
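  // Illustrative example: with case_count == 10 and value_range == 12, the
  // table variant costs (4 + 12) + 3 * 3 == 25 versus (3 + 2 * 10) + 3 * 10
  // == 53 for the lookup variant, so the jump table is chosen below (given
  // case_count > 4 and min_value > INT32_MIN).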
1486   if (sw.case_count > 4 &&
1487       table_space_cost + 3 * table_time_cost <=
1488           lookup_space_cost + 3 * lookup_time_cost &&
1489       sw.min_value > std::numeric_limits<int32_t>::min()) {
1490     InstructionOperand index_operand = value_operand;
1491     if (sw.min_value) {
1492       index_operand = g.TempRegister();
1493       Emit(kX87Lea | AddressingModeField::encode(kMode_MRI), index_operand,
1494            value_operand, g.TempImmediate(-sw.min_value));
1495     }
1496     // Generate a table lookup.
1497     return EmitTableSwitch(sw, index_operand);
1498   }
1499 
1500   // Generate a sequence of conditional jumps.
1501   return EmitLookupSwitch(sw, value_operand);
1502 }
1503 
1504 
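// An equality test against the constant 0 is routed through
// VisitWordCompareZero so that it can be combined with a preceding
// flag-setting operation (e.g. a subtraction or a bitwise test) instead of
// emitting a separate compare instruction.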
1505 void InstructionSelector::VisitWord32Equal(Node* const node) {
1506   FlagsContinuation cont = FlagsContinuation::ForSet(kEqual, node);
1507   Int32BinopMatcher m(node);
1508   if (m.right().Is(0)) {
1509     return VisitWordCompareZero(this, m.node(), m.left().node(), &cont);
1510   }
1511   VisitWordCompare(this, node, &cont);
1512 }
1513 
1514 
1515 void InstructionSelector::VisitInt32LessThan(Node* node) {
1516   FlagsContinuation cont = FlagsContinuation::ForSet(kSignedLessThan, node);
1517   VisitWordCompare(this, node, &cont);
1518 }
1519 
1520 
1521 void InstructionSelector::VisitInt32LessThanOrEqual(Node* node) {
1522   FlagsContinuation cont =
1523       FlagsContinuation::ForSet(kSignedLessThanOrEqual, node);
1524   VisitWordCompare(this, node, &cont);
1525 }
1526 
1527 
1528 void InstructionSelector::VisitUint32LessThan(Node* node) {
1529   FlagsContinuation cont = FlagsContinuation::ForSet(kUnsignedLessThan, node);
1530   VisitWordCompare(this, node, &cont);
1531 }
1532 
1533 
1534 void InstructionSelector::VisitUint32LessThanOrEqual(Node* node) {
1535   FlagsContinuation cont =
1536       FlagsContinuation::ForSet(kUnsignedLessThanOrEqual, node);
1537   VisitWordCompare(this, node, &cont);
1538 }
1539 
1540 
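// For the *WithOverflow operations below: if the overflow projection
// (projection 1) is used, the binop is emitted with a kOverflow flags
// continuation that materializes the overflow bit into that projection;
// otherwise a plain add/sub with an empty continuation suffices.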
1541 void InstructionSelector::VisitInt32AddWithOverflow(Node* node) {
1542   if (Node* ovf = NodeProperties::FindProjection(node, 1)) {
1543     FlagsContinuation cont = FlagsContinuation::ForSet(kOverflow, ovf);
1544     return VisitBinop(this, node, kX87Add, &cont);
1545   }
1546   FlagsContinuation cont;
1547   VisitBinop(this, node, kX87Add, &cont);
1548 }
1549 
1550 
1551 void InstructionSelector::VisitInt32SubWithOverflow(Node* node) {
1552   if (Node* ovf = NodeProperties::FindProjection(node, 1)) {
1553     FlagsContinuation cont = FlagsContinuation::ForSet(kOverflow, ovf);
1554     return VisitBinop(this, node, kX87Sub, &cont);
1555   }
1556   FlagsContinuation cont;
1557   VisitBinop(this, node, kX87Sub, &cont);
1558 }
1559 
1560 
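// The floating-point comparisons below are mapped to unordered/unsigned
// conditions (kUnorderedEqual, kUnsignedGreaterThan, ...): on x87 the compare
// result ends up in CF/ZF/PF (presumably via FUCOMI or FNSTSW/SAHF in the
// code generator), which behave like the flags of an unsigned integer
// compare, and NaN operands yield an "unordered" result.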
1561 void InstructionSelector::VisitFloat32Equal(Node* node) {
1562   FlagsContinuation cont = FlagsContinuation::ForSet(kUnorderedEqual, node);
1563   VisitFloat32Compare(this, node, &cont);
1564 }
1565 
1566 
1567 void InstructionSelector::VisitFloat32LessThan(Node* node) {
1568   FlagsContinuation cont =
1569       FlagsContinuation::ForSet(kUnsignedGreaterThan, node);
1570   VisitFloat32Compare(this, node, &cont);
1571 }
1572 
1573 
1574 void InstructionSelector::VisitFloat32LessThanOrEqual(Node* node) {
1575   FlagsContinuation cont =
1576       FlagsContinuation::ForSet(kUnsignedGreaterThanOrEqual, node);
1577   VisitFloat32Compare(this, node, &cont);
1578 }
1579 
1580 
1581 void InstructionSelector::VisitFloat64Equal(Node* node) {
1582   FlagsContinuation cont = FlagsContinuation::ForSet(kUnorderedEqual, node);
1583   VisitFloat64Compare(this, node, &cont);
1584 }
1585 
1586 
1587 void InstructionSelector::VisitFloat64LessThan(Node* node) {
1588   FlagsContinuation cont =
1589       FlagsContinuation::ForSet(kUnsignedGreaterThan, node);
1590   VisitFloat64Compare(this, node, &cont);
1591 }
1592 
1593 
1594 void InstructionSelector::VisitFloat64LessThanOrEqual(Node* node) {
1595   FlagsContinuation cont =
1596       FlagsContinuation::ForSet(kUnsignedGreaterThanOrEqual, node);
1597   VisitFloat64Compare(this, node, &cont);
1598 }
1599 
1600 
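// The extract/insert operations below move the low and high 32-bit halves of
// a double between general-purpose registers and the FPU; the insert variants
// fix their double result to stX_0 because x87 results are produced on the
// FPU register stack.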
1601 void InstructionSelector::VisitFloat64ExtractLowWord32(Node* node) {
1602   X87OperandGenerator g(this);
1603   Emit(kX87Float64ExtractLowWord32, g.DefineAsRegister(node),
1604        g.Use(node->InputAt(0)));
1605 }
1606 
1607 
1608 void InstructionSelector::VisitFloat64ExtractHighWord32(Node* node) {
1609   X87OperandGenerator g(this);
1610   Emit(kX87Float64ExtractHighWord32, g.DefineAsRegister(node),
1611        g.Use(node->InputAt(0)));
1612 }
1613 
1614 
1615 void InstructionSelector::VisitFloat64InsertLowWord32(Node* node) {
1616   X87OperandGenerator g(this);
1617   Node* left = node->InputAt(0);
1618   Node* right = node->InputAt(1);
1619   Emit(kX87Float64InsertLowWord32, g.UseFixed(node, stX_0), g.UseRegister(left),
1620        g.UseRegister(right));
1621 }
1622 
1623 
1624 void InstructionSelector::VisitFloat64InsertHighWord32(Node* node) {
1625   X87OperandGenerator g(this);
1626   Node* left = node->InputAt(0);
1627   Node* right = node->InputAt(1);
1628   Emit(kX87Float64InsertHighWord32, g.UseFixed(node, stX_0),
1629        g.UseRegister(left), g.UseRegister(right));
1630 }
1631 
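// SilenceNaN turns a (possibly signaling) NaN into a quiet NaN; the input is
// pushed onto the FPU stack and the result is produced in stX_0.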
1632 void InstructionSelector::VisitFloat64SilenceNaN(Node* node) {
1633   X87OperandGenerator g(this);
1634   Emit(kX87PushFloat64, g.NoOutput(), g.Use(node->InputAt(0)));
1635   Emit(kX87Float64SilenceNaN, g.DefineAsFixed(node, stX_0), 0, nullptr);
1636 }
1637 
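// On ia32, naturally aligned loads of at most word size are already atomic,
// so an atomic load is selected exactly like an ordinary load; only the
// representation is DCHECKed here.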
1638 void InstructionSelector::VisitAtomicLoad(Node* node) {
1639   LoadRepresentation load_rep = LoadRepresentationOf(node->op());
1640   DCHECK(load_rep.representation() == MachineRepresentation::kWord8 ||
1641          load_rep.representation() == MachineRepresentation::kWord16 ||
1642          load_rep.representation() == MachineRepresentation::kWord32);
1643   USE(load_rep);
1644   VisitLoad(node);
1645 }
1646 
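// An atomic store is selected as an exchange (kX87Xchg*); on ia32 an xchg
// with a memory operand is implicitly locked, so the store also acts as a
// full memory barrier. The inputs use unique registers, presumably because
// the exchange also writes the previous memory value back into the value
// register.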
1647 void InstructionSelector::VisitAtomicStore(Node* node) {
1648   X87OperandGenerator g(this);
1649   Node* base = node->InputAt(0);
1650   Node* index = node->InputAt(1);
1651   Node* value = node->InputAt(2);
1652 
1653   MachineRepresentation rep = AtomicStoreRepresentationOf(node->op());
1654   ArchOpcode opcode = kArchNop;
1655   switch (rep) {
1656     case MachineRepresentation::kWord8:
1657       opcode = kX87Xchgb;
1658       break;
1659     case MachineRepresentation::kWord16:
1660       opcode = kX87Xchgw;
1661       break;
1662     case MachineRepresentation::kWord32:
1663       opcode = kX87Xchgl;
1664       break;
1665     default:
1666       UNREACHABLE();
1667       break;
1668   }
1669   AddressingMode addressing_mode;
1670   InstructionOperand inputs[4];
1671   size_t input_count = 0;
1672   inputs[input_count++] = g.UseUniqueRegister(base);
1673   if (g.CanBeImmediate(index)) {
1674     inputs[input_count++] = g.UseImmediate(index);
1675     addressing_mode = kMode_MRI;
1676   } else {
1677     inputs[input_count++] = g.UseUniqueRegister(index);
1678     addressing_mode = kMode_MR1;
1679   }
1680   inputs[input_count++] = g.UseUniqueRegister(value);
1681   InstructionCode code = opcode | AddressingModeField::encode(addressing_mode);
1682   Emit(code, 0, nullptr, input_count, inputs);
1683 }
1684 
1685 // static
1686 MachineOperatorBuilder::Flags
1687 InstructionSelector::SupportedMachineOperatorFlags() {
1688   MachineOperatorBuilder::Flags flags =
1689       MachineOperatorBuilder::kFloat32Max |
1690       MachineOperatorBuilder::kFloat32Min |
1691       MachineOperatorBuilder::kFloat64Max |
1692       MachineOperatorBuilder::kFloat64Min |
1693       MachineOperatorBuilder::kWord32ShiftIsSafe;
1694   if (CpuFeatures::IsSupported(POPCNT)) {
1695     flags |= MachineOperatorBuilder::kWord32Popcnt;
1696   }
1697 
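  // All four rounding modes are advertised unconditionally: the x87 FPU can
  // round in any IEEE mode by temporarily changing the rounding-control bits
  // of the FPU control word, which is presumably how the code generator
  // implements the kFloat*Round* instructions.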
1698   flags |= MachineOperatorBuilder::kFloat32RoundDown |
1699            MachineOperatorBuilder::kFloat64RoundDown |
1700            MachineOperatorBuilder::kFloat32RoundUp |
1701            MachineOperatorBuilder::kFloat64RoundUp |
1702            MachineOperatorBuilder::kFloat32RoundTruncate |
1703            MachineOperatorBuilder::kFloat64RoundTruncate |
1704            MachineOperatorBuilder::kFloat32RoundTiesEven |
1705            MachineOperatorBuilder::kFloat64RoundTiesEven;
1706   return flags;
1707 }
1708 
1709 // static
1710 MachineOperatorBuilder::AlignmentRequirements
1711 InstructionSelector::AlignmentRequirements() {
1712   return MachineOperatorBuilder::AlignmentRequirements::
1713       FullUnalignedAccessSupport();
1714 }
1715 
1716 }  // namespace compiler
1717 }  // namespace internal
1718 }  // namespace v8
1719