// Copyright 2014 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/compiler/instruction-selector.h"

#include <limits>

#include "src/base/adapters.h"
#include "src/compiler/instruction-selector-impl.h"
#include "src/compiler/node-matchers.h"
#include "src/compiler/pipeline.h"
#include "src/compiler/schedule.h"
#include "src/compiler/state-values-utils.h"
#include "src/deoptimizer.h"

namespace v8 {
namespace internal {
namespace compiler {

InstructionSelector::InstructionSelector(
    Zone* zone, size_t node_count, Linkage* linkage,
    InstructionSequence* sequence, Schedule* schedule,
    SourcePositionTable* source_positions, Frame* frame,
    SourcePositionMode source_position_mode, Features features)
    : zone_(zone),
      linkage_(linkage),
      sequence_(sequence),
      source_positions_(source_positions),
      source_position_mode_(source_position_mode),
      features_(features),
      schedule_(schedule),
      current_block_(nullptr),
      instructions_(zone),
      defined_(node_count, false, zone),
      used_(node_count, false, zone),
      effect_level_(node_count, 0, zone),
      virtual_registers_(node_count,
                         InstructionOperand::kInvalidVirtualRegister, zone),
      scheduler_(nullptr),
      frame_(frame) {
  instructions_.reserve(node_count);
}


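// Selects instructions for the whole graph: inputs of loop-header phis are
// marked as used up front, blocks are visited in reverse rpo_order, and the
// per-block instructions (which are collected in reverse) are then replayed
// into the instruction scheduler or directly into the InstructionSequence.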
void InstructionSelector::SelectInstructions() {
  // Mark the inputs of all phis in loop headers as used.
  BasicBlockVector* blocks = schedule()->rpo_order();
  for (auto const block : *blocks) {
    if (!block->IsLoopHeader()) continue;
    DCHECK_LE(2u, block->PredecessorCount());
    for (Node* const phi : *block) {
      if (phi->opcode() != IrOpcode::kPhi) continue;

      // Mark all inputs as used.
      for (Node* const input : phi->inputs()) {
        MarkAsUsed(input);
      }
    }
  }

  // Visit each basic block in post order.
  for (auto i = blocks->rbegin(); i != blocks->rend(); ++i) {
    VisitBlock(*i);
  }

  // Schedule the selected instructions.
  if (FLAG_turbo_instruction_scheduling &&
      InstructionScheduler::SchedulerSupported()) {
    scheduler_ = new (zone()) InstructionScheduler(zone(), sequence());
  }

  for (auto const block : *blocks) {
    InstructionBlock* instruction_block =
        sequence()->InstructionBlockAt(RpoNumber::FromInt(block->rpo_number()));
    size_t end = instruction_block->code_end();
    size_t start = instruction_block->code_start();
    DCHECK_LE(end, start);
    StartBlock(RpoNumber::FromInt(block->rpo_number()));
    while (start-- > end) {
      AddInstruction(instructions_[start]);
    }
    EndBlock(RpoNumber::FromInt(block->rpo_number()));
  }
#if DEBUG
  sequence()->ValidateSSA();
#endif
}

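// StartBlock, EndBlock and AddInstruction forward to the instruction
// scheduler when --turbo-instruction-scheduling is enabled and supported by
// the target, and to the InstructionSequence directly otherwise.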
void InstructionSelector::StartBlock(RpoNumber rpo) {
  if (FLAG_turbo_instruction_scheduling &&
      InstructionScheduler::SchedulerSupported()) {
    DCHECK_NOT_NULL(scheduler_);
    scheduler_->StartBlock(rpo);
  } else {
    sequence()->StartBlock(rpo);
  }
}


void InstructionSelector::EndBlock(RpoNumber rpo) {
  if (FLAG_turbo_instruction_scheduling &&
      InstructionScheduler::SchedulerSupported()) {
    DCHECK_NOT_NULL(scheduler_);
    scheduler_->EndBlock(rpo);
  } else {
    sequence()->EndBlock(rpo);
  }
}


void InstructionSelector::AddInstruction(Instruction* instr) {
  if (FLAG_turbo_instruction_scheduling &&
      InstructionScheduler::SchedulerSupported()) {
    DCHECK_NOT_NULL(scheduler_);
    scheduler_->AddInstruction(instr);
  } else {
    sequence()->AddInstruction(instr);
  }
}


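// The Emit() overloads below are convenience wrappers that pack a fixed
// number of output/input operands and delegate to the general Emit() further
// down.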
Instruction* InstructionSelector::Emit(InstructionCode opcode,
                                       InstructionOperand output,
                                       size_t temp_count,
                                       InstructionOperand* temps) {
  size_t output_count = output.IsInvalid() ? 0 : 1;
  return Emit(opcode, output_count, &output, 0, nullptr, temp_count, temps);
}


Instruction* InstructionSelector::Emit(InstructionCode opcode,
                                       InstructionOperand output,
                                       InstructionOperand a, size_t temp_count,
                                       InstructionOperand* temps) {
  size_t output_count = output.IsInvalid() ? 0 : 1;
  return Emit(opcode, output_count, &output, 1, &a, temp_count, temps);
}


Instruction* InstructionSelector::Emit(InstructionCode opcode,
                                       InstructionOperand output,
                                       InstructionOperand a,
                                       InstructionOperand b, size_t temp_count,
                                       InstructionOperand* temps) {
  size_t output_count = output.IsInvalid() ? 0 : 1;
  InstructionOperand inputs[] = {a, b};
  size_t input_count = arraysize(inputs);
  return Emit(opcode, output_count, &output, input_count, inputs, temp_count,
              temps);
}


Instruction* InstructionSelector::Emit(InstructionCode opcode,
                                       InstructionOperand output,
                                       InstructionOperand a,
                                       InstructionOperand b,
                                       InstructionOperand c, size_t temp_count,
                                       InstructionOperand* temps) {
  size_t output_count = output.IsInvalid() ? 0 : 1;
  InstructionOperand inputs[] = {a, b, c};
  size_t input_count = arraysize(inputs);
  return Emit(opcode, output_count, &output, input_count, inputs, temp_count,
              temps);
}


Instruction* InstructionSelector::Emit(
    InstructionCode opcode, InstructionOperand output, InstructionOperand a,
    InstructionOperand b, InstructionOperand c, InstructionOperand d,
    size_t temp_count, InstructionOperand* temps) {
  size_t output_count = output.IsInvalid() ? 0 : 1;
  InstructionOperand inputs[] = {a, b, c, d};
  size_t input_count = arraysize(inputs);
  return Emit(opcode, output_count, &output, input_count, inputs, temp_count,
              temps);
}


Instruction* InstructionSelector::Emit(
    InstructionCode opcode, InstructionOperand output, InstructionOperand a,
    InstructionOperand b, InstructionOperand c, InstructionOperand d,
    InstructionOperand e, size_t temp_count, InstructionOperand* temps) {
  size_t output_count = output.IsInvalid() ? 0 : 1;
  InstructionOperand inputs[] = {a, b, c, d, e};
  size_t input_count = arraysize(inputs);
  return Emit(opcode, output_count, &output, input_count, inputs, temp_count,
              temps);
}


Instruction* InstructionSelector::Emit(
    InstructionCode opcode, InstructionOperand output, InstructionOperand a,
    InstructionOperand b, InstructionOperand c, InstructionOperand d,
    InstructionOperand e, InstructionOperand f, size_t temp_count,
    InstructionOperand* temps) {
  size_t output_count = output.IsInvalid() ? 0 : 1;
  InstructionOperand inputs[] = {a, b, c, d, e, f};
  size_t input_count = arraysize(inputs);
  return Emit(opcode, output_count, &output, input_count, inputs, temp_count,
              temps);
}


Instruction* InstructionSelector::Emit(
    InstructionCode opcode, size_t output_count, InstructionOperand* outputs,
    size_t input_count, InstructionOperand* inputs, size_t temp_count,
    InstructionOperand* temps) {
  Instruction* instr =
      Instruction::New(instruction_zone(), opcode, output_count, outputs,
                       input_count, inputs, temp_count, temps);
  return Emit(instr);
}


Instruction* InstructionSelector::Emit(Instruction* instr) {
  instructions_.push_back(instr);
  return instr;
}


bool InstructionSelector::CanCover(Node* user, Node* node) const {
  // 1. Both {user} and {node} must be in the same basic block.
  if (schedule()->block(node) != schedule()->block(user)) {
    return false;
  }
  // 2. Pure {node}s must be owned by the {user}.
  if (node->op()->HasProperty(Operator::kPure)) {
    return node->OwnedBy(user);
  }
  // 3. Impure {node}s must match the effect level of {user}.
  if (GetEffectLevel(node) != GetEffectLevel(user)) {
    return false;
  }
  // 4. Only {user} may use {node} as a value input.
  for (Edge const edge : node->use_edges()) {
    if (edge.from() != user && NodeProperties::IsValueEdge(edge)) {
      return false;
    }
  }
  return true;
}

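// Returns the virtual register for {node}, allocating a fresh one from the
// sequence on first request and caching it in {virtual_registers_}.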
int InstructionSelector::GetVirtualRegister(const Node* node) {
  DCHECK_NOT_NULL(node);
  size_t const id = node->id();
  DCHECK_LT(id, virtual_registers_.size());
  int virtual_register = virtual_registers_[id];
  if (virtual_register == InstructionOperand::kInvalidVirtualRegister) {
    virtual_register = sequence()->NextVirtualRegister();
    virtual_registers_[id] = virtual_register;
  }
  return virtual_register;
}


const std::map<NodeId, int> InstructionSelector::GetVirtualRegistersForTesting()
    const {
  std::map<NodeId, int> virtual_registers;
  for (size_t n = 0; n < virtual_registers_.size(); ++n) {
    if (virtual_registers_[n] != InstructionOperand::kInvalidVirtualRegister) {
      NodeId const id = static_cast<NodeId>(n);
      virtual_registers.insert(std::make_pair(id, virtual_registers_[n]));
    }
  }
  return virtual_registers;
}


bool InstructionSelector::IsDefined(Node* node) const {
  DCHECK_NOT_NULL(node);
  size_t const id = node->id();
  DCHECK_LT(id, defined_.size());
  return defined_[id];
}


void InstructionSelector::MarkAsDefined(Node* node) {
  DCHECK_NOT_NULL(node);
  size_t const id = node->id();
  DCHECK_LT(id, defined_.size());
  defined_[id] = true;
}


bool InstructionSelector::IsUsed(Node* node) const {
  DCHECK_NOT_NULL(node);
  if (!node->op()->HasProperty(Operator::kEliminatable)) return true;
  size_t const id = node->id();
  DCHECK_LT(id, used_.size());
  return used_[id];
}


void InstructionSelector::MarkAsUsed(Node* node) {
  DCHECK_NOT_NULL(node);
  size_t const id = node->id();
  DCHECK_LT(id, used_.size());
  used_[id] = true;
}

int InstructionSelector::GetEffectLevel(Node* node) const {
  DCHECK_NOT_NULL(node);
  size_t const id = node->id();
  DCHECK_LT(id, effect_level_.size());
  return effect_level_[id];
}

void InstructionSelector::SetEffectLevel(Node* node, int effect_level) {
  DCHECK_NOT_NULL(node);
  size_t const id = node->id();
  DCHECK_LT(id, effect_level_.size());
  effect_level_[id] = effect_level;
}

void InstructionSelector::MarkAsRepresentation(MachineRepresentation rep,
                                               const InstructionOperand& op) {
  UnallocatedOperand unalloc = UnallocatedOperand::cast(op);
  sequence()->MarkAsRepresentation(rep, unalloc.virtual_register());
}


void InstructionSelector::MarkAsRepresentation(MachineRepresentation rep,
                                               Node* node) {
  sequence()->MarkAsRepresentation(rep, GetVirtualRegister(node));
}


namespace {

enum class FrameStateInputKind { kAny, kStackSlot };


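// Returns the operand to use for a frame state input: constants are encoded
// as immediates, everything else is either pinned to a unique stack slot or
// allowed to be in any location, depending on {kind}.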
InstructionOperand OperandForDeopt(OperandGenerator* g, Node* input,
                                   FrameStateInputKind kind) {
  switch (input->opcode()) {
    case IrOpcode::kInt32Constant:
    case IrOpcode::kNumberConstant:
    case IrOpcode::kFloat32Constant:
    case IrOpcode::kFloat64Constant:
    case IrOpcode::kHeapConstant:
      return g->UseImmediate(input);
    case IrOpcode::kObjectState:
      UNREACHABLE();
      break;
    default:
      switch (kind) {
        case FrameStateInputKind::kStackSlot:
          return g->UseUniqueSlot(input);
        case FrameStateInputKind::kAny:
          return g->UseAny(input);
      }
  }
  UNREACHABLE();
  return InstructionOperand();
}


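// Remembers the ObjectState nodes that have already been serialized into the
// frame state, so repeated captured objects can be encoded as duplicates.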
class StateObjectDeduplicator {
 public:
  explicit StateObjectDeduplicator(Zone* zone) : objects_(zone) {}
  static const size_t kNotDuplicated = SIZE_MAX;

  size_t GetObjectId(Node* node) {
    for (size_t i = 0; i < objects_.size(); ++i) {
      if (objects_[i] == node) {
        return i;
      }
    }
    return kNotDuplicated;
  }

  size_t InsertObject(Node* node) {
    size_t id = objects_.size();
    objects_.push_back(node);
    return id;
  }

 private:
  ZoneVector<Node*> objects_;
};


// Returns the number of instruction operands added to inputs.
size_t AddOperandToStateValueDescriptor(StateValueDescriptor* descriptor,
                                        InstructionOperandVector* inputs,
                                        OperandGenerator* g,
                                        StateObjectDeduplicator* deduplicator,
                                        Node* input, MachineType type,
                                        FrameStateInputKind kind, Zone* zone) {
  switch (input->opcode()) {
    case IrOpcode::kObjectState: {
      size_t id = deduplicator->GetObjectId(input);
      if (id == StateObjectDeduplicator::kNotDuplicated) {
        size_t entries = 0;
        id = deduplicator->InsertObject(input);
        descriptor->fields().push_back(
            StateValueDescriptor::Recursive(zone, id));
        StateValueDescriptor* new_desc = &descriptor->fields().back();
        for (Edge edge : input->input_edges()) {
          entries += AddOperandToStateValueDescriptor(
              new_desc, inputs, g, deduplicator, edge.to(),
              MachineType::AnyTagged(), kind, zone);
        }
        return entries;
      } else {
        // Crankshaft counts duplicate objects for the running id, so we have
        // to push the input again.
        deduplicator->InsertObject(input);
        descriptor->fields().push_back(
            StateValueDescriptor::Duplicate(zone, id));
        return 0;
      }
      break;
    }
    default: {
      inputs->push_back(OperandForDeopt(g, input, kind));
      descriptor->fields().push_back(StateValueDescriptor::Plain(zone, type));
      return 1;
    }
  }
}


// Returns the number of instruction operands added to inputs.
size_t AddInputsToFrameStateDescriptor(FrameStateDescriptor* descriptor,
                                       Node* state, OperandGenerator* g,
                                       StateObjectDeduplicator* deduplicator,
                                       InstructionOperandVector* inputs,
                                       FrameStateInputKind kind, Zone* zone) {
  DCHECK_EQ(IrOpcode::kFrameState, state->op()->opcode());

  size_t entries = 0;
  size_t initial_size = inputs->size();
  USE(initial_size);  // initial_size is only used for debug.

  if (descriptor->outer_state()) {
    entries += AddInputsToFrameStateDescriptor(
        descriptor->outer_state(), state->InputAt(kFrameStateOuterStateInput),
        g, deduplicator, inputs, kind, zone);
  }

  Node* parameters = state->InputAt(kFrameStateParametersInput);
  Node* locals = state->InputAt(kFrameStateLocalsInput);
  Node* stack = state->InputAt(kFrameStateStackInput);
  Node* context = state->InputAt(kFrameStateContextInput);
  Node* function = state->InputAt(kFrameStateFunctionInput);

  DCHECK_EQ(descriptor->parameters_count(),
            StateValuesAccess(parameters).size());
  DCHECK_EQ(descriptor->locals_count(), StateValuesAccess(locals).size());
  DCHECK_EQ(descriptor->stack_count(), StateValuesAccess(stack).size());

  StateValueDescriptor* values_descriptor =
      descriptor->GetStateValueDescriptor();
  entries += AddOperandToStateValueDescriptor(
      values_descriptor, inputs, g, deduplicator, function,
      MachineType::AnyTagged(), FrameStateInputKind::kStackSlot, zone);
  for (StateValuesAccess::TypedNode input_node :
       StateValuesAccess(parameters)) {
    entries += AddOperandToStateValueDescriptor(values_descriptor, inputs, g,
                                                deduplicator, input_node.node,
                                                input_node.type, kind, zone);
  }
  if (descriptor->HasContext()) {
    entries += AddOperandToStateValueDescriptor(
        values_descriptor, inputs, g, deduplicator, context,
        MachineType::AnyTagged(), FrameStateInputKind::kStackSlot, zone);
  }
  for (StateValuesAccess::TypedNode input_node : StateValuesAccess(locals)) {
    entries += AddOperandToStateValueDescriptor(values_descriptor, inputs, g,
                                                deduplicator, input_node.node,
                                                input_node.type, kind, zone);
  }
  for (StateValuesAccess::TypedNode input_node : StateValuesAccess(stack)) {
    entries += AddOperandToStateValueDescriptor(values_descriptor, inputs, g,
                                                deduplicator, input_node.node,
                                                input_node.type, kind, zone);
  }
  DCHECK_EQ(initial_size + entries, inputs->size());
  return entries;
}

}  // namespace


// An internal helper class for generating the operands to calls.
// TODO(bmeurer): Get rid of the CallBuffer business and make
// InstructionSelector::VisitCall platform independent instead.
struct CallBuffer {
  CallBuffer(Zone* zone, const CallDescriptor* descriptor,
             FrameStateDescriptor* frame_state)
      : descriptor(descriptor),
        frame_state_descriptor(frame_state),
        output_nodes(zone),
        outputs(zone),
        instruction_args(zone),
        pushed_nodes(zone) {
    output_nodes.reserve(descriptor->ReturnCount());
    outputs.reserve(descriptor->ReturnCount());
    pushed_nodes.reserve(input_count());
    instruction_args.reserve(input_count() + frame_state_value_count());
  }


  const CallDescriptor* descriptor;
  FrameStateDescriptor* frame_state_descriptor;
  NodeVector output_nodes;
  InstructionOperandVector outputs;
  InstructionOperandVector instruction_args;
  ZoneVector<PushParameter> pushed_nodes;

  size_t input_count() const { return descriptor->InputCount(); }

  size_t frame_state_count() const { return descriptor->FrameStateCount(); }

  size_t frame_state_value_count() const {
    return (frame_state_descriptor == nullptr)
               ? 0
               : (frame_state_descriptor->GetTotalSize() +
                  1);  // Include deopt id.
  }
};


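// Fills {buffer} for {call}: live return values become outputs, the callee
// and (optional) frame state are appended to instruction_args, and, for
// non-tail calls, arguments with a fixed stack slot are collected in
// pushed_nodes rather than passed as instruction operands.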
// TODO(bmeurer): Get rid of the CallBuffer business and make
// InstructionSelector::VisitCall platform independent instead.
void InstructionSelector::InitializeCallBuffer(Node* call, CallBuffer* buffer,
                                               CallBufferFlags flags,
                                               int stack_param_delta) {
  OperandGenerator g(this);
  DCHECK_LE(call->op()->ValueOutputCount(),
            static_cast<int>(buffer->descriptor->ReturnCount()));
  DCHECK_EQ(
      call->op()->ValueInputCount(),
      static_cast<int>(buffer->input_count() + buffer->frame_state_count()));

  if (buffer->descriptor->ReturnCount() > 0) {
    // Collect the projections that represent multiple outputs from this call.
    if (buffer->descriptor->ReturnCount() == 1) {
      buffer->output_nodes.push_back(call);
    } else {
      buffer->output_nodes.resize(buffer->descriptor->ReturnCount(), nullptr);
      for (auto use : call->uses()) {
        if (use->opcode() != IrOpcode::kProjection) continue;
        size_t const index = ProjectionIndexOf(use->op());
        DCHECK_LT(index, buffer->output_nodes.size());
        DCHECK(!buffer->output_nodes[index]);
        buffer->output_nodes[index] = use;
      }
    }

    // Filter out the outputs that aren't live because no projection uses them.
    size_t outputs_needed_by_framestate =
        buffer->frame_state_descriptor == nullptr
            ? 0
            : buffer->frame_state_descriptor->state_combine()
                  .ConsumedOutputCount();
    for (size_t i = 0; i < buffer->output_nodes.size(); i++) {
      bool output_is_live = buffer->output_nodes[i] != nullptr ||
                            i < outputs_needed_by_framestate;
      if (output_is_live) {
        MachineType type =
            buffer->descriptor->GetReturnType(static_cast<int>(i));
        LinkageLocation location =
            buffer->descriptor->GetReturnLocation(static_cast<int>(i));

        Node* output = buffer->output_nodes[i];
        InstructionOperand op =
            output == nullptr
                ? g.TempLocation(location, type.representation())
                : g.DefineAsLocation(output, location, type.representation());
        MarkAsRepresentation(type.representation(), op);

        buffer->outputs.push_back(op);
      }
    }
  }

  // The first argument is always the callee code.
  Node* callee = call->InputAt(0);
  bool call_code_immediate = (flags & kCallCodeImmediate) != 0;
  bool call_address_immediate = (flags & kCallAddressImmediate) != 0;
  switch (buffer->descriptor->kind()) {
    case CallDescriptor::kCallCodeObject:
      buffer->instruction_args.push_back(
          (call_code_immediate && callee->opcode() == IrOpcode::kHeapConstant)
              ? g.UseImmediate(callee)
              : g.UseRegister(callee));
      break;
    case CallDescriptor::kCallAddress:
      buffer->instruction_args.push_back(
          (call_address_immediate &&
           callee->opcode() == IrOpcode::kExternalConstant)
              ? g.UseImmediate(callee)
              : g.UseRegister(callee));
      break;
    case CallDescriptor::kCallJSFunction:
      buffer->instruction_args.push_back(
          g.UseLocation(callee, buffer->descriptor->GetInputLocation(0),
                        buffer->descriptor->GetInputType(0).representation()));
      break;
  }
  DCHECK_EQ(1u, buffer->instruction_args.size());

  // If the call needs a frame state, we insert the state information as
  // follows (n is the number of value inputs to the frame state):
  // arg 1               : deoptimization id.
  // arg 2 - arg (n + 1) : value inputs to the frame state.
  size_t frame_state_entries = 0;
  USE(frame_state_entries);  // frame_state_entries is only used for debug.
  if (buffer->frame_state_descriptor != nullptr) {
    Node* frame_state =
        call->InputAt(static_cast<int>(buffer->descriptor->InputCount()));

    // If it was a syntactic tail call we need to drop the current frame and
    // all the frames on top of it that are either an arguments adaptor frame
    // or a tail caller frame.
    if (buffer->descriptor->SupportsTailCalls()) {
      frame_state = NodeProperties::GetFrameStateInput(frame_state, 0);
      buffer->frame_state_descriptor =
          buffer->frame_state_descriptor->outer_state();
      while (buffer->frame_state_descriptor != nullptr &&
             (buffer->frame_state_descriptor->type() ==
                  FrameStateType::kArgumentsAdaptor ||
              buffer->frame_state_descriptor->type() ==
                  FrameStateType::kTailCallerFunction)) {
        frame_state = NodeProperties::GetFrameStateInput(frame_state, 0);
        buffer->frame_state_descriptor =
            buffer->frame_state_descriptor->outer_state();
      }
    }

    InstructionSequence::StateId state_id =
        sequence()->AddFrameStateDescriptor(buffer->frame_state_descriptor);
    buffer->instruction_args.push_back(g.TempImmediate(state_id.ToInt()));

    StateObjectDeduplicator deduplicator(instruction_zone());

    frame_state_entries =
        1 + AddInputsToFrameStateDescriptor(
                buffer->frame_state_descriptor, frame_state, &g, &deduplicator,
                &buffer->instruction_args, FrameStateInputKind::kStackSlot,
                instruction_zone());

    DCHECK_EQ(1 + frame_state_entries, buffer->instruction_args.size());
  }

  size_t input_count = static_cast<size_t>(buffer->input_count());

  // Split the arguments into pushed_nodes and instruction_args. Pushed
  // arguments require an explicit push instruction before the call and do
  // not appear as arguments to the call. Everything else ends up
  // as an InstructionOperand argument to the call.
  auto iter(call->inputs().begin());
  size_t pushed_count = 0;
  bool call_tail = (flags & kCallTail) != 0;
  for (size_t index = 0; index < input_count; ++iter, ++index) {
    DCHECK(iter != call->inputs().end());
    DCHECK((*iter)->op()->opcode() != IrOpcode::kFrameState);
    if (index == 0) continue;  // The first argument (callee) is already done.

    LinkageLocation location = buffer->descriptor->GetInputLocation(index);
    if (call_tail) {
      location = LinkageLocation::ConvertToTailCallerLocation(
          location, stack_param_delta);
    }
    InstructionOperand op =
        g.UseLocation(*iter, location,
                      buffer->descriptor->GetInputType(index).representation());
    if (UnallocatedOperand::cast(op).HasFixedSlotPolicy() && !call_tail) {
      int stack_index = -UnallocatedOperand::cast(op).fixed_slot_index() - 1;
      if (static_cast<size_t>(stack_index) >= buffer->pushed_nodes.size()) {
        buffer->pushed_nodes.resize(stack_index + 1);
      }
      PushParameter parameter(*iter, buffer->descriptor->GetInputType(index));
      buffer->pushed_nodes[stack_index] = parameter;
      pushed_count++;
    } else {
      buffer->instruction_args.push_back(op);
    }
  }
  DCHECK_EQ(input_count, buffer->instruction_args.size() + pushed_count -
                             frame_state_entries);
  if (V8_TARGET_ARCH_STORES_RETURN_ADDRESS_ON_STACK && call_tail &&
      stack_param_delta != 0) {
    // For tail calls that change the size of their parameter list and keep
    // their return address on the stack, move the return address to just above
    // the parameters.
    LinkageLocation saved_return_location =
        LinkageLocation::ForSavedCallerReturnAddress();
    InstructionOperand return_address =
        g.UsePointerLocation(LinkageLocation::ConvertToTailCallerLocation(
                                 saved_return_location, stack_param_delta),
                             saved_return_location);
    buffer->instruction_args.push_back(return_address);
  }
}


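// Emits code for a single basic block. The control node is visited first and
// each node's instructions are generated "top down" but recorded in reverse,
// so the block's instruction range ends up stored backwards and is unwound
// again in SelectInstructions().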
void InstructionSelector::VisitBlock(BasicBlock* block) {
  DCHECK(!current_block_);
  current_block_ = block;
  int current_block_end = static_cast<int>(instructions_.size());

  int effect_level = 0;
  for (Node* const node : *block) {
    if (node->opcode() == IrOpcode::kStore ||
        node->opcode() == IrOpcode::kCheckedStore ||
        node->opcode() == IrOpcode::kCall) {
      ++effect_level;
    }
    SetEffectLevel(node, effect_level);
  }

  // We visit the control first, then the nodes in the block, so the block's
  // control input should be on the same effect level as the last node.
  if (block->control_input() != nullptr) {
    SetEffectLevel(block->control_input(), effect_level);
  }

  // Generate code for the block control "top down", but schedule the code
  // "bottom up".
  VisitControl(block);
  std::reverse(instructions_.begin() + current_block_end, instructions_.end());

  // Visit code in reverse control flow order, because architecture-specific
  // matching may cover more than one node at a time.
  for (auto node : base::Reversed(*block)) {
    // Skip nodes that are unused or already defined.
    if (!IsUsed(node) || IsDefined(node)) continue;
    // Generate code for this node "top down", but schedule the code "bottom
    // up".
    size_t current_node_end = instructions_.size();
    VisitNode(node);
    std::reverse(instructions_.begin() + current_node_end, instructions_.end());
    if (instructions_.size() == current_node_end) continue;
    // Mark source position on first instruction emitted.
    SourcePosition source_position = source_positions_->GetSourcePosition(node);
    if (source_position.IsKnown() &&
        (source_position_mode_ == kAllSourcePositions ||
         node->opcode() == IrOpcode::kCall)) {
      sequence()->SetSourcePosition(instructions_[current_node_end],
                                    source_position);
    }
  }

  // We're done with the block.
  InstructionBlock* instruction_block =
      sequence()->InstructionBlockAt(RpoNumber::FromInt(block->rpo_number()));
  instruction_block->set_code_start(static_cast<int>(instructions_.size()));
  instruction_block->set_code_end(current_block_end);

  current_block_ = nullptr;
}


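// Dispatches on the block's control: gotos, branches, switches, calls,
// returns, deoptimization and throws are lowered via the corresponding
// Visit* methods.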
void InstructionSelector::VisitControl(BasicBlock* block) {
#ifdef DEBUG
  // SSA deconstruction requires targets of branches not to have phis.
  // Edge split form guarantees this property, but is more strict.
  if (block->SuccessorCount() > 1) {
    for (BasicBlock* const successor : block->successors()) {
      for (Node* const node : *successor) {
        CHECK(!IrOpcode::IsPhiOpcode(node->opcode()));
      }
    }
  }
#endif

  Node* input = block->control_input();
  switch (block->control()) {
    case BasicBlock::kGoto:
      return VisitGoto(block->SuccessorAt(0));
    case BasicBlock::kCall: {
      DCHECK_EQ(IrOpcode::kCall, input->opcode());
      BasicBlock* success = block->SuccessorAt(0);
      BasicBlock* exception = block->SuccessorAt(1);
      return VisitCall(input, exception), VisitGoto(success);
    }
    case BasicBlock::kTailCall: {
      DCHECK_EQ(IrOpcode::kTailCall, input->opcode());
      return VisitTailCall(input);
    }
    case BasicBlock::kBranch: {
      DCHECK_EQ(IrOpcode::kBranch, input->opcode());
      BasicBlock* tbranch = block->SuccessorAt(0);
      BasicBlock* fbranch = block->SuccessorAt(1);
      if (tbranch == fbranch) return VisitGoto(tbranch);
      return VisitBranch(input, tbranch, fbranch);
    }
    case BasicBlock::kSwitch: {
      DCHECK_EQ(IrOpcode::kSwitch, input->opcode());
      SwitchInfo sw;
      // Last successor must be Default.
      sw.default_branch = block->successors().back();
      DCHECK_EQ(IrOpcode::kIfDefault, sw.default_branch->front()->opcode());
      // All other successors must be cases.
      sw.case_count = block->SuccessorCount() - 1;
      sw.case_branches = &block->successors().front();
      // Determine case values and their min/max.
      sw.case_values = zone()->NewArray<int32_t>(sw.case_count);
      sw.min_value = std::numeric_limits<int32_t>::max();
      sw.max_value = std::numeric_limits<int32_t>::min();
      for (size_t index = 0; index < sw.case_count; ++index) {
        BasicBlock* branch = sw.case_branches[index];
        int32_t value = OpParameter<int32_t>(branch->front()->op());
        sw.case_values[index] = value;
        if (sw.min_value > value) sw.min_value = value;
        if (sw.max_value < value) sw.max_value = value;
      }
      DCHECK_LE(sw.min_value, sw.max_value);
      // Note that {value_range} can be 0 if {min_value} is -2^31 and
      // {max_value} is 2^31-1, so don't assume that it's non-zero below.
      sw.value_range = 1u + bit_cast<uint32_t>(sw.max_value) -
                       bit_cast<uint32_t>(sw.min_value);
      return VisitSwitch(input, sw);
    }
    case BasicBlock::kReturn: {
      DCHECK_EQ(IrOpcode::kReturn, input->opcode());
      return VisitReturn(input);
    }
    case BasicBlock::kDeoptimize: {
      DeoptimizeKind kind = DeoptimizeKindOf(input->op());
      Node* value = input->InputAt(0);
      return VisitDeoptimize(kind, value);
    }
    case BasicBlock::kThrow:
      DCHECK_EQ(IrOpcode::kThrow, input->opcode());
      return VisitThrow(input->InputAt(0));
    case BasicBlock::kNone: {
      // Exit block doesn't have control.
      DCHECK_NULL(input);
      break;
    }
    default:
      UNREACHABLE();
      break;
  }
}


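// Dispatches on the node's opcode: the result representation is recorded via
// MarkAs*() where needed before delegating to the (mostly architecture-
// specific) Visit* method for the operation.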
void InstructionSelector::VisitNode(Node* node) {
  DCHECK_NOT_NULL(schedule()->block(node));  // should only use scheduled nodes.
  switch (node->opcode()) {
    case IrOpcode::kStart:
    case IrOpcode::kLoop:
    case IrOpcode::kEnd:
    case IrOpcode::kBranch:
    case IrOpcode::kIfTrue:
    case IrOpcode::kIfFalse:
    case IrOpcode::kIfSuccess:
    case IrOpcode::kSwitch:
    case IrOpcode::kIfValue:
    case IrOpcode::kIfDefault:
    case IrOpcode::kEffectPhi:
    case IrOpcode::kMerge:
    case IrOpcode::kTerminate:
    case IrOpcode::kBeginRegion:
      // No code needed for these graph artifacts.
      return;
    case IrOpcode::kIfException:
      return MarkAsReference(node), VisitIfException(node);
    case IrOpcode::kFinishRegion:
      return MarkAsReference(node), VisitFinishRegion(node);
    case IrOpcode::kParameter: {
      MachineType type =
          linkage()->GetParameterType(ParameterIndexOf(node->op()));
      MarkAsRepresentation(type.representation(), node);
      return VisitParameter(node);
    }
    case IrOpcode::kOsrValue:
      return MarkAsReference(node), VisitOsrValue(node);
    case IrOpcode::kPhi: {
      MachineRepresentation rep = PhiRepresentationOf(node->op());
      MarkAsRepresentation(rep, node);
      return VisitPhi(node);
    }
    case IrOpcode::kProjection:
      return VisitProjection(node);
    case IrOpcode::kInt32Constant:
    case IrOpcode::kInt64Constant:
    case IrOpcode::kExternalConstant:
    case IrOpcode::kRelocatableInt32Constant:
    case IrOpcode::kRelocatableInt64Constant:
      return VisitConstant(node);
    case IrOpcode::kFloat32Constant:
      return MarkAsFloat32(node), VisitConstant(node);
    case IrOpcode::kFloat64Constant:
      return MarkAsFloat64(node), VisitConstant(node);
    case IrOpcode::kHeapConstant:
      return MarkAsReference(node), VisitConstant(node);
    case IrOpcode::kNumberConstant: {
      double value = OpParameter<double>(node);
      if (!IsSmiDouble(value)) MarkAsReference(node);
      return VisitConstant(node);
    }
    case IrOpcode::kCall:
      return VisitCall(node);
    case IrOpcode::kDeoptimizeIf:
      return VisitDeoptimizeIf(node);
    case IrOpcode::kDeoptimizeUnless:
      return VisitDeoptimizeUnless(node);
    case IrOpcode::kFrameState:
    case IrOpcode::kStateValues:
    case IrOpcode::kObjectState:
      return;
    case IrOpcode::kDebugBreak:
      VisitDebugBreak(node);
      return;
    case IrOpcode::kComment:
      VisitComment(node);
      return;
    case IrOpcode::kLoad: {
      LoadRepresentation type = LoadRepresentationOf(node->op());
      MarkAsRepresentation(type.representation(), node);
      return VisitLoad(node);
    }
    case IrOpcode::kStore:
      return VisitStore(node);
    case IrOpcode::kWord32And:
      return MarkAsWord32(node), VisitWord32And(node);
    case IrOpcode::kWord32Or:
      return MarkAsWord32(node), VisitWord32Or(node);
    case IrOpcode::kWord32Xor:
      return MarkAsWord32(node), VisitWord32Xor(node);
    case IrOpcode::kWord32Shl:
      return MarkAsWord32(node), VisitWord32Shl(node);
    case IrOpcode::kWord32Shr:
      return MarkAsWord32(node), VisitWord32Shr(node);
    case IrOpcode::kWord32Sar:
      return MarkAsWord32(node), VisitWord32Sar(node);
    case IrOpcode::kWord32Ror:
      return MarkAsWord32(node), VisitWord32Ror(node);
    case IrOpcode::kWord32Equal:
      return VisitWord32Equal(node);
    case IrOpcode::kWord32Clz:
      return MarkAsWord32(node), VisitWord32Clz(node);
    case IrOpcode::kWord32Ctz:
      return MarkAsWord32(node), VisitWord32Ctz(node);
    case IrOpcode::kWord32ReverseBits:
      return MarkAsWord32(node), VisitWord32ReverseBits(node);
    case IrOpcode::kWord32Popcnt:
      return MarkAsWord32(node), VisitWord32Popcnt(node);
    case IrOpcode::kWord64Popcnt:
      return MarkAsWord32(node), VisitWord64Popcnt(node);
    case IrOpcode::kWord64And:
      return MarkAsWord64(node), VisitWord64And(node);
    case IrOpcode::kWord64Or:
      return MarkAsWord64(node), VisitWord64Or(node);
    case IrOpcode::kWord64Xor:
      return MarkAsWord64(node), VisitWord64Xor(node);
    case IrOpcode::kWord64Shl:
      return MarkAsWord64(node), VisitWord64Shl(node);
    case IrOpcode::kWord64Shr:
      return MarkAsWord64(node), VisitWord64Shr(node);
    case IrOpcode::kWord64Sar:
      return MarkAsWord64(node), VisitWord64Sar(node);
    case IrOpcode::kWord64Ror:
      return MarkAsWord64(node), VisitWord64Ror(node);
    case IrOpcode::kWord64Clz:
      return MarkAsWord64(node), VisitWord64Clz(node);
    case IrOpcode::kWord64Ctz:
      return MarkAsWord64(node), VisitWord64Ctz(node);
    case IrOpcode::kWord64ReverseBits:
      return MarkAsWord64(node), VisitWord64ReverseBits(node);
    case IrOpcode::kWord64Equal:
      return VisitWord64Equal(node);
    case IrOpcode::kInt32Add:
      return MarkAsWord32(node), VisitInt32Add(node);
    case IrOpcode::kInt32AddWithOverflow:
      return MarkAsWord32(node), VisitInt32AddWithOverflow(node);
    case IrOpcode::kInt32Sub:
      return MarkAsWord32(node), VisitInt32Sub(node);
    case IrOpcode::kInt32SubWithOverflow:
      return VisitInt32SubWithOverflow(node);
    case IrOpcode::kInt32Mul:
      return MarkAsWord32(node), VisitInt32Mul(node);
    case IrOpcode::kInt32MulHigh:
      return VisitInt32MulHigh(node);
    case IrOpcode::kInt32Div:
      return MarkAsWord32(node), VisitInt32Div(node);
    case IrOpcode::kInt32Mod:
      return MarkAsWord32(node), VisitInt32Mod(node);
    case IrOpcode::kInt32LessThan:
      return VisitInt32LessThan(node);
    case IrOpcode::kInt32LessThanOrEqual:
      return VisitInt32LessThanOrEqual(node);
    case IrOpcode::kUint32Div:
      return MarkAsWord32(node), VisitUint32Div(node);
    case IrOpcode::kUint32LessThan:
      return VisitUint32LessThan(node);
    case IrOpcode::kUint32LessThanOrEqual:
      return VisitUint32LessThanOrEqual(node);
    case IrOpcode::kUint32Mod:
      return MarkAsWord32(node), VisitUint32Mod(node);
    case IrOpcode::kUint32MulHigh:
      return VisitUint32MulHigh(node);
    case IrOpcode::kInt64Add:
      return MarkAsWord64(node), VisitInt64Add(node);
    case IrOpcode::kInt64AddWithOverflow:
      return MarkAsWord64(node), VisitInt64AddWithOverflow(node);
    case IrOpcode::kInt64Sub:
      return MarkAsWord64(node), VisitInt64Sub(node);
    case IrOpcode::kInt64SubWithOverflow:
      return MarkAsWord64(node), VisitInt64SubWithOverflow(node);
    case IrOpcode::kInt64Mul:
      return MarkAsWord64(node), VisitInt64Mul(node);
    case IrOpcode::kInt64Div:
      return MarkAsWord64(node), VisitInt64Div(node);
    case IrOpcode::kInt64Mod:
      return MarkAsWord64(node), VisitInt64Mod(node);
    case IrOpcode::kInt64LessThan:
      return VisitInt64LessThan(node);
    case IrOpcode::kInt64LessThanOrEqual:
      return VisitInt64LessThanOrEqual(node);
    case IrOpcode::kUint64Div:
      return MarkAsWord64(node), VisitUint64Div(node);
    case IrOpcode::kUint64LessThan:
      return VisitUint64LessThan(node);
    case IrOpcode::kUint64LessThanOrEqual:
      return VisitUint64LessThanOrEqual(node);
    case IrOpcode::kUint64Mod:
      return MarkAsWord64(node), VisitUint64Mod(node);
    case IrOpcode::kBitcastWordToTagged:
      return MarkAsReference(node), VisitBitcastWordToTagged(node);
    case IrOpcode::kChangeFloat32ToFloat64:
      return MarkAsFloat64(node), VisitChangeFloat32ToFloat64(node);
    case IrOpcode::kChangeInt32ToFloat64:
      return MarkAsFloat64(node), VisitChangeInt32ToFloat64(node);
    case IrOpcode::kChangeUint32ToFloat64:
      return MarkAsFloat64(node), VisitChangeUint32ToFloat64(node);
    case IrOpcode::kChangeFloat64ToInt32:
      return MarkAsWord32(node), VisitChangeFloat64ToInt32(node);
    case IrOpcode::kChangeFloat64ToUint32:
      return MarkAsWord32(node), VisitChangeFloat64ToUint32(node);
    case IrOpcode::kFloat64SilenceNaN:
      MarkAsFloat64(node);
      if (CanProduceSignalingNaN(node->InputAt(0))) {
        return VisitFloat64SilenceNaN(node);
      } else {
        return EmitIdentity(node);
      }
    case IrOpcode::kTruncateFloat64ToUint32:
      return MarkAsWord32(node), VisitTruncateFloat64ToUint32(node);
    case IrOpcode::kTruncateFloat32ToInt32:
      return MarkAsWord32(node), VisitTruncateFloat32ToInt32(node);
    case IrOpcode::kTruncateFloat32ToUint32:
      return MarkAsWord32(node), VisitTruncateFloat32ToUint32(node);
    case IrOpcode::kTryTruncateFloat32ToInt64:
      return MarkAsWord64(node), VisitTryTruncateFloat32ToInt64(node);
    case IrOpcode::kTryTruncateFloat64ToInt64:
      return MarkAsWord64(node), VisitTryTruncateFloat64ToInt64(node);
    case IrOpcode::kTryTruncateFloat32ToUint64:
      return MarkAsWord64(node), VisitTryTruncateFloat32ToUint64(node);
    case IrOpcode::kTryTruncateFloat64ToUint64:
      return MarkAsWord64(node), VisitTryTruncateFloat64ToUint64(node);
    case IrOpcode::kChangeInt32ToInt64:
      return MarkAsWord64(node), VisitChangeInt32ToInt64(node);
    case IrOpcode::kChangeUint32ToUint64:
      return MarkAsWord64(node), VisitChangeUint32ToUint64(node);
    case IrOpcode::kTruncateFloat64ToFloat32:
      return MarkAsFloat32(node), VisitTruncateFloat64ToFloat32(node);
    case IrOpcode::kTruncateFloat64ToWord32:
      return MarkAsWord32(node), VisitTruncateFloat64ToWord32(node);
    case IrOpcode::kTruncateInt64ToInt32:
      return MarkAsWord32(node), VisitTruncateInt64ToInt32(node);
    case IrOpcode::kRoundFloat64ToInt32:
      return MarkAsWord32(node), VisitRoundFloat64ToInt32(node);
    case IrOpcode::kRoundInt64ToFloat32:
      return MarkAsFloat32(node), VisitRoundInt64ToFloat32(node);
    case IrOpcode::kRoundInt32ToFloat32:
      return MarkAsFloat32(node), VisitRoundInt32ToFloat32(node);
    case IrOpcode::kRoundInt64ToFloat64:
      return MarkAsFloat64(node), VisitRoundInt64ToFloat64(node);
    case IrOpcode::kBitcastFloat32ToInt32:
      return MarkAsWord32(node), VisitBitcastFloat32ToInt32(node);
    case IrOpcode::kRoundUint32ToFloat32:
      return MarkAsFloat32(node), VisitRoundUint32ToFloat32(node);
    case IrOpcode::kRoundUint64ToFloat32:
      return MarkAsFloat32(node), VisitRoundUint64ToFloat32(node);
    case IrOpcode::kRoundUint64ToFloat64:
      return MarkAsFloat64(node), VisitRoundUint64ToFloat64(node);
    case IrOpcode::kBitcastFloat64ToInt64:
      return MarkAsWord64(node), VisitBitcastFloat64ToInt64(node);
    case IrOpcode::kBitcastInt32ToFloat32:
      return MarkAsFloat32(node), VisitBitcastInt32ToFloat32(node);
    case IrOpcode::kBitcastInt64ToFloat64:
      return MarkAsFloat64(node), VisitBitcastInt64ToFloat64(node);
    case IrOpcode::kFloat32Add:
      return MarkAsFloat32(node), VisitFloat32Add(node);
    case IrOpcode::kFloat32Sub:
      return MarkAsFloat32(node), VisitFloat32Sub(node);
    case IrOpcode::kFloat32SubPreserveNan:
      return MarkAsFloat32(node), VisitFloat32SubPreserveNan(node);
    case IrOpcode::kFloat32Neg:
      return MarkAsFloat32(node), VisitFloat32Neg(node);
    case IrOpcode::kFloat32Mul:
      return MarkAsFloat32(node), VisitFloat32Mul(node);
    case IrOpcode::kFloat32Div:
      return MarkAsFloat32(node), VisitFloat32Div(node);
    case IrOpcode::kFloat32Min:
      return MarkAsFloat32(node), VisitFloat32Min(node);
    case IrOpcode::kFloat32Max:
      return MarkAsFloat32(node), VisitFloat32Max(node);
    case IrOpcode::kFloat32Abs:
      return MarkAsFloat32(node), VisitFloat32Abs(node);
    case IrOpcode::kFloat32Sqrt:
      return MarkAsFloat32(node), VisitFloat32Sqrt(node);
    case IrOpcode::kFloat32Equal:
      return VisitFloat32Equal(node);
    case IrOpcode::kFloat32LessThan:
      return VisitFloat32LessThan(node);
    case IrOpcode::kFloat32LessThanOrEqual:
      return VisitFloat32LessThanOrEqual(node);
    case IrOpcode::kFloat64Add:
      return MarkAsFloat64(node), VisitFloat64Add(node);
    case IrOpcode::kFloat64Sub:
      return MarkAsFloat64(node), VisitFloat64Sub(node);
    case IrOpcode::kFloat64SubPreserveNan:
      return MarkAsFloat64(node), VisitFloat64SubPreserveNan(node);
    case IrOpcode::kFloat64Neg:
      return MarkAsFloat64(node), VisitFloat64Neg(node);
    case IrOpcode::kFloat64Mul:
      return MarkAsFloat64(node), VisitFloat64Mul(node);
    case IrOpcode::kFloat64Div:
      return MarkAsFloat64(node), VisitFloat64Div(node);
    case IrOpcode::kFloat64Mod:
      return MarkAsFloat64(node), VisitFloat64Mod(node);
    case IrOpcode::kFloat64Min:
      return MarkAsFloat64(node), VisitFloat64Min(node);
    case IrOpcode::kFloat64Max:
      return MarkAsFloat64(node), VisitFloat64Max(node);
    case IrOpcode::kFloat64Abs:
      return MarkAsFloat64(node), VisitFloat64Abs(node);
    case IrOpcode::kFloat64Atan:
      return MarkAsFloat64(node), VisitFloat64Atan(node);
    case IrOpcode::kFloat64Atan2:
      return MarkAsFloat64(node), VisitFloat64Atan2(node);
    case IrOpcode::kFloat64Atanh:
      return MarkAsFloat64(node), VisitFloat64Atanh(node);
    case IrOpcode::kFloat64Cbrt:
      return MarkAsFloat64(node), VisitFloat64Cbrt(node);
    case IrOpcode::kFloat64Cos:
      return MarkAsFloat64(node), VisitFloat64Cos(node);
    case IrOpcode::kFloat64Exp:
      return MarkAsFloat64(node), VisitFloat64Exp(node);
    case IrOpcode::kFloat64Expm1:
      return MarkAsFloat64(node), VisitFloat64Expm1(node);
    case IrOpcode::kFloat64Log:
      return MarkAsFloat64(node), VisitFloat64Log(node);
    case IrOpcode::kFloat64Log1p:
      return MarkAsFloat64(node), VisitFloat64Log1p(node);
    case IrOpcode::kFloat64Log10:
      return MarkAsFloat64(node), VisitFloat64Log10(node);
    case IrOpcode::kFloat64Log2:
      return MarkAsFloat64(node), VisitFloat64Log2(node);
    case IrOpcode::kFloat64Sin:
      return MarkAsFloat64(node), VisitFloat64Sin(node);
    case IrOpcode::kFloat64Sqrt:
      return MarkAsFloat64(node), VisitFloat64Sqrt(node);
    case IrOpcode::kFloat64Tan:
      return MarkAsFloat64(node), VisitFloat64Tan(node);
    case IrOpcode::kFloat64Equal:
      return VisitFloat64Equal(node);
    case IrOpcode::kFloat64LessThan:
      return VisitFloat64LessThan(node);
    case IrOpcode::kFloat64LessThanOrEqual:
      return VisitFloat64LessThanOrEqual(node);
    case IrOpcode::kFloat32RoundDown:
      return MarkAsFloat32(node), VisitFloat32RoundDown(node);
    case IrOpcode::kFloat64RoundDown:
      return MarkAsFloat64(node), VisitFloat64RoundDown(node);
    case IrOpcode::kFloat32RoundUp:
      return MarkAsFloat32(node), VisitFloat32RoundUp(node);
    case IrOpcode::kFloat64RoundUp:
      return MarkAsFloat64(node), VisitFloat64RoundUp(node);
    case IrOpcode::kFloat32RoundTruncate:
      return MarkAsFloat32(node), VisitFloat32RoundTruncate(node);
    case IrOpcode::kFloat64RoundTruncate:
      return MarkAsFloat64(node), VisitFloat64RoundTruncate(node);
    case IrOpcode::kFloat64RoundTiesAway:
      return MarkAsFloat64(node), VisitFloat64RoundTiesAway(node);
    case IrOpcode::kFloat32RoundTiesEven:
      return MarkAsFloat32(node), VisitFloat32RoundTiesEven(node);
    case IrOpcode::kFloat64RoundTiesEven:
      return MarkAsFloat64(node), VisitFloat64RoundTiesEven(node);
    case IrOpcode::kFloat64ExtractLowWord32:
      return MarkAsWord32(node), VisitFloat64ExtractLowWord32(node);
    case IrOpcode::kFloat64ExtractHighWord32:
      return MarkAsWord32(node), VisitFloat64ExtractHighWord32(node);
    case IrOpcode::kFloat64InsertLowWord32:
      return MarkAsFloat64(node), VisitFloat64InsertLowWord32(node);
    case IrOpcode::kFloat64InsertHighWord32:
      return MarkAsFloat64(node), VisitFloat64InsertHighWord32(node);
    case IrOpcode::kStackSlot:
      return VisitStackSlot(node);
    case IrOpcode::kLoadStackPointer:
      return VisitLoadStackPointer(node);
    case IrOpcode::kLoadFramePointer:
      return VisitLoadFramePointer(node);
    case IrOpcode::kLoadParentFramePointer:
      return VisitLoadParentFramePointer(node);
    case IrOpcode::kCheckedLoad: {
      MachineRepresentation rep =
          CheckedLoadRepresentationOf(node->op()).representation();
      MarkAsRepresentation(rep, node);
      return VisitCheckedLoad(node);
    }
    case IrOpcode::kCheckedStore:
      return VisitCheckedStore(node);
    case IrOpcode::kInt32PairAdd:
      MarkAsWord32(NodeProperties::FindProjection(node, 0));
      MarkAsWord32(NodeProperties::FindProjection(node, 1));
      return VisitInt32PairAdd(node);
    case IrOpcode::kInt32PairSub:
      MarkAsWord32(NodeProperties::FindProjection(node, 0));
      MarkAsWord32(NodeProperties::FindProjection(node, 1));
      return VisitInt32PairSub(node);
    case IrOpcode::kInt32PairMul:
      MarkAsWord32(NodeProperties::FindProjection(node, 0));
      MarkAsWord32(NodeProperties::FindProjection(node, 1));
      return VisitInt32PairMul(node);
    case IrOpcode::kWord32PairShl:
      MarkAsWord32(NodeProperties::FindProjection(node, 0));
      MarkAsWord32(NodeProperties::FindProjection(node, 1));
      return VisitWord32PairShl(node);
    case IrOpcode::kWord32PairShr:
      MarkAsWord32(NodeProperties::FindProjection(node, 0));
      MarkAsWord32(NodeProperties::FindProjection(node, 1));
      return VisitWord32PairShr(node);
    case IrOpcode::kWord32PairSar:
      MarkAsWord32(NodeProperties::FindProjection(node, 0));
      MarkAsWord32(NodeProperties::FindProjection(node, 1));
      return VisitWord32PairSar(node);
    case IrOpcode::kAtomicLoad: {
      LoadRepresentation type = LoadRepresentationOf(node->op());
      MarkAsRepresentation(type.representation(), node);
      return VisitAtomicLoad(node);
    }
    case IrOpcode::kAtomicStore:
      return VisitAtomicStore(node);
    default:
      V8_Fatal(__FILE__, __LINE__, "Unexpected operator #%d:%s @ node #%d",
               node->opcode(), node->op()->mnemonic(), node->id());
      break;
  }
}

1251 
1252 void InstructionSelector::VisitLoadStackPointer(Node* node) {
1253   OperandGenerator g(this);
1254   Emit(kArchStackPointer, g.DefineAsRegister(node));
1255 }
1256 
1257 
1258 void InstructionSelector::VisitLoadFramePointer(Node* node) {
1259   OperandGenerator g(this);
1260   Emit(kArchFramePointer, g.DefineAsRegister(node));
1261 }
1262 
1263 void InstructionSelector::VisitLoadParentFramePointer(Node* node) {
1264   OperandGenerator g(this);
1265   Emit(kArchParentFramePointer, g.DefineAsRegister(node));
1266 }
1267 
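// The IEEE 754 math operators below are not lowered individually here; each
// forwards to the architecture-specific VisitFloat64Ieee754Unop/Binop helper
// with the matching kIeee754Float64* opcode, and the backend decides how that
// opcode is ultimately emitted (typically as a call to a runtime math routine).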
1268 void InstructionSelector::VisitFloat64Atan(Node* node) {
1269   VisitFloat64Ieee754Unop(node, kIeee754Float64Atan);
1270 }
1271 
1272 void InstructionSelector::VisitFloat64Atan2(Node* node) {
1273   VisitFloat64Ieee754Binop(node, kIeee754Float64Atan2);
1274 }
1275 
1276 void InstructionSelector::VisitFloat64Atanh(Node* node) {
1277   VisitFloat64Ieee754Unop(node, kIeee754Float64Atanh);
1278 }
1279 
1280 void InstructionSelector::VisitFloat64Cbrt(Node* node) {
1281   VisitFloat64Ieee754Unop(node, kIeee754Float64Cbrt);
1282 }
1283 
1284 void InstructionSelector::VisitFloat64Cos(Node* node) {
1285   VisitFloat64Ieee754Unop(node, kIeee754Float64Cos);
1286 }
1287 
1288 void InstructionSelector::VisitFloat64Exp(Node* node) {
1289   VisitFloat64Ieee754Unop(node, kIeee754Float64Exp);
1290 }
1291 
1292 void InstructionSelector::VisitFloat64Expm1(Node* node) {
1293   VisitFloat64Ieee754Unop(node, kIeee754Float64Expm1);
1294 }
1295 
1296 void InstructionSelector::VisitFloat64Log(Node* node) {
1297   VisitFloat64Ieee754Unop(node, kIeee754Float64Log);
1298 }
1299 
1300 void InstructionSelector::VisitFloat64Log1p(Node* node) {
1301   VisitFloat64Ieee754Unop(node, kIeee754Float64Log1p);
1302 }
1303 
1304 void InstructionSelector::VisitFloat64Log2(Node* node) {
1305   VisitFloat64Ieee754Unop(node, kIeee754Float64Log2);
1306 }
1307 
1308 void InstructionSelector::VisitFloat64Log10(Node* node) {
1309   VisitFloat64Ieee754Unop(node, kIeee754Float64Log10);
1310 }
1311 
1312 void InstructionSelector::VisitFloat64Sin(Node* node) {
1313   VisitFloat64Ieee754Unop(node, kIeee754Float64Sin);
1314 }
1315 
1316 void InstructionSelector::VisitFloat64Tan(Node* node) {
1317   VisitFloat64Ieee754Unop(node, kIeee754Float64Tan);
1318 }
1319 
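// Emits a single kArchTableSwitch instruction. Its inputs are laid out as:
// [0] the index operand, [1] the default label, and [2 .. value_range + 1]
// one label per value starting at min_value; values without an explicit case
// keep the default label.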
1320 void InstructionSelector::EmitTableSwitch(const SwitchInfo& sw,
1321                                           InstructionOperand& index_operand) {
1322   OperandGenerator g(this);
1323   size_t input_count = 2 + sw.value_range;
1324   auto* inputs = zone()->NewArray<InstructionOperand>(input_count);
1325   inputs[0] = index_operand;
1326   InstructionOperand default_operand = g.Label(sw.default_branch);
1327   std::fill(&inputs[1], &inputs[input_count], default_operand);
1328   for (size_t index = 0; index < sw.case_count; ++index) {
1329     size_t value = sw.case_values[index] - sw.min_value;
1330     BasicBlock* branch = sw.case_branches[index];
1331     DCHECK_LE(0u, value);
1332     DCHECK_LT(value + 2, input_count);
1333     inputs[value + 2] = g.Label(branch);
1334   }
1335   Emit(kArchTableSwitch, 0, nullptr, input_count, inputs, 0, nullptr);
1336 }
1337 
1338 
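// Emits a single kArchLookupSwitch instruction. Its inputs are laid out as:
// [0] the value operand, [1] the default label, followed by one
// (case value, case label) immediate/label pair per case.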
1339 void InstructionSelector::EmitLookupSwitch(const SwitchInfo& sw,
1340                                            InstructionOperand& value_operand) {
1341   OperandGenerator g(this);
1342   size_t input_count = 2 + sw.case_count * 2;
1343   auto* inputs = zone()->NewArray<InstructionOperand>(input_count);
1344   inputs[0] = value_operand;
1345   inputs[1] = g.Label(sw.default_branch);
1346   for (size_t index = 0; index < sw.case_count; ++index) {
1347     int32_t value = sw.case_values[index];
1348     BasicBlock* branch = sw.case_branches[index];
1349     inputs[index * 2 + 2 + 0] = g.TempImmediate(value);
1350     inputs[index * 2 + 2 + 1] = g.Label(branch);
1351   }
1352   Emit(kArchLookupSwitch, 0, nullptr, input_count, inputs, 0, nullptr);
1353 }
1354 
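// A stack slot node reserves a spill slot of the requested size in the frame.
// The emitted kArchStackSlot instruction defines the node as a register and
// carries the allocated slot index as an immediate.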
1355 void InstructionSelector::VisitStackSlot(Node* node) {
1356   int size = 1 << ElementSizeLog2Of(StackSlotRepresentationOf(node->op()));
1357   int slot = frame_->AllocateSpillSlot(size);
1358   OperandGenerator g(this);
1359 
1360   Emit(kArchStackSlot, g.DefineAsRegister(node),
1361        sequence()->AddImmediate(Constant(slot)), 0, nullptr);
1362 }
1363 
1364 void InstructionSelector::VisitBitcastWordToTagged(Node* node) {
1365   EmitIdentity(node);
1366 }
1367 
1368 // 32 bit targets do not implement the following instructions.
1369 #if V8_TARGET_ARCH_32_BIT
1370 
1371 void InstructionSelector::VisitWord64And(Node* node) { UNIMPLEMENTED(); }
1372 
1373 
1374 void InstructionSelector::VisitWord64Or(Node* node) { UNIMPLEMENTED(); }
1375 
1376 
1377 void InstructionSelector::VisitWord64Xor(Node* node) { UNIMPLEMENTED(); }
1378 
1379 
1380 void InstructionSelector::VisitWord64Shl(Node* node) { UNIMPLEMENTED(); }
1381 
1382 
1383 void InstructionSelector::VisitWord64Shr(Node* node) { UNIMPLEMENTED(); }
1384 
1385 
1386 void InstructionSelector::VisitWord64Sar(Node* node) { UNIMPLEMENTED(); }
1387 
1388 
1389 void InstructionSelector::VisitWord64Ror(Node* node) { UNIMPLEMENTED(); }
1390 
1391 
1392 void InstructionSelector::VisitWord64Clz(Node* node) { UNIMPLEMENTED(); }
1393 
1394 
1395 void InstructionSelector::VisitWord64Ctz(Node* node) { UNIMPLEMENTED(); }
1396 
1397 
1398 void InstructionSelector::VisitWord64ReverseBits(Node* node) {
1399   UNIMPLEMENTED();
1400 }
1401 
1402 
1403 void InstructionSelector::VisitWord64Popcnt(Node* node) { UNIMPLEMENTED(); }
1404 
1405 
1406 void InstructionSelector::VisitWord64Equal(Node* node) { UNIMPLEMENTED(); }
1407 
1408 
1409 void InstructionSelector::VisitInt64Add(Node* node) { UNIMPLEMENTED(); }
1410 
1411 
1412 void InstructionSelector::VisitInt64AddWithOverflow(Node* node) {
1413   UNIMPLEMENTED();
1414 }
1415 
1416 
1417 void InstructionSelector::VisitInt64Sub(Node* node) { UNIMPLEMENTED(); }
1418 
1419 
1420 void InstructionSelector::VisitInt64SubWithOverflow(Node* node) {
1421   UNIMPLEMENTED();
1422 }
1423 
1424 
1425 void InstructionSelector::VisitInt64Mul(Node* node) { UNIMPLEMENTED(); }
1426 
1427 
1428 void InstructionSelector::VisitInt64Div(Node* node) { UNIMPLEMENTED(); }
1429 
1430 
1431 void InstructionSelector::VisitInt64LessThan(Node* node) { UNIMPLEMENTED(); }
1432 
1433 
1434 void InstructionSelector::VisitInt64LessThanOrEqual(Node* node) {
1435   UNIMPLEMENTED();
1436 }
1437 
1438 
1439 void InstructionSelector::VisitUint64Div(Node* node) { UNIMPLEMENTED(); }
1440 
1441 
1442 void InstructionSelector::VisitInt64Mod(Node* node) { UNIMPLEMENTED(); }
1443 
1444 
1445 void InstructionSelector::VisitUint64LessThan(Node* node) { UNIMPLEMENTED(); }
1446 
1447 
1448 void InstructionSelector::VisitUint64LessThanOrEqual(Node* node) {
1449   UNIMPLEMENTED();
1450 }
1451 
1452 
1453 void InstructionSelector::VisitUint64Mod(Node* node) { UNIMPLEMENTED(); }
1454 
1455 
1456 void InstructionSelector::VisitChangeInt32ToInt64(Node* node) {
1457   UNIMPLEMENTED();
1458 }
1459 
1460 
1461 void InstructionSelector::VisitChangeUint32ToUint64(Node* node) {
1462   UNIMPLEMENTED();
1463 }
1464 
1465 
1466 void InstructionSelector::VisitTryTruncateFloat32ToInt64(Node* node) {
1467   UNIMPLEMENTED();
1468 }
1469 
1470 
1471 void InstructionSelector::VisitTryTruncateFloat64ToInt64(Node* node) {
1472   UNIMPLEMENTED();
1473 }
1474 
1475 
1476 void InstructionSelector::VisitTryTruncateFloat32ToUint64(Node* node) {
1477   UNIMPLEMENTED();
1478 }
1479 
1480 
1481 void InstructionSelector::VisitTryTruncateFloat64ToUint64(Node* node) {
1482   UNIMPLEMENTED();
1483 }
1484 
1485 
1486 void InstructionSelector::VisitTruncateInt64ToInt32(Node* node) {
1487   UNIMPLEMENTED();
1488 }
1489 
1490 
1491 void InstructionSelector::VisitRoundInt64ToFloat32(Node* node) {
1492   UNIMPLEMENTED();
1493 }
1494 
1495 
1496 void InstructionSelector::VisitRoundInt64ToFloat64(Node* node) {
1497   UNIMPLEMENTED();
1498 }
1499 
1500 
1501 void InstructionSelector::VisitRoundUint64ToFloat32(Node* node) {
1502   UNIMPLEMENTED();
1503 }
1504 
1505 
1506 void InstructionSelector::VisitRoundUint64ToFloat64(Node* node) {
1507   UNIMPLEMENTED();
1508 }
1509 
1510 
1511 void InstructionSelector::VisitBitcastFloat64ToInt64(Node* node) {
1512   UNIMPLEMENTED();
1513 }
1514 
1515 
1516 void InstructionSelector::VisitBitcastInt64ToFloat64(Node* node) {
1517   UNIMPLEMENTED();
1518 }
1519 
1520 #endif  // V8_TARGET_ARCH_32_BIT
1521 
1522 // 64 bit targets do not implement the following instructions.
1523 #if V8_TARGET_ARCH_64_BIT
1524 void InstructionSelector::VisitInt32PairAdd(Node* node) { UNIMPLEMENTED(); }
1525 
1526 void InstructionSelector::VisitInt32PairSub(Node* node) { UNIMPLEMENTED(); }
1527 
1528 void InstructionSelector::VisitInt32PairMul(Node* node) { UNIMPLEMENTED(); }
1529 
1530 void InstructionSelector::VisitWord32PairShl(Node* node) { UNIMPLEMENTED(); }
1531 
1532 void InstructionSelector::VisitWord32PairShr(Node* node) { UNIMPLEMENTED(); }
1533 
1534 void InstructionSelector::VisitWord32PairSar(Node* node) { UNIMPLEMENTED(); }
1535 #endif  // V8_TARGET_ARCH_64_BIT
1536 
1537 void InstructionSelector::VisitFinishRegion(Node* node) { EmitIdentity(node); }
1538 
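// Parameters do not turn into real instructions; a kArchNop is emitted whose
// output is constrained to the location (or primary/secondary location pair)
// that the linkage assigns to this parameter index.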
1539 void InstructionSelector::VisitParameter(Node* node) {
1540   OperandGenerator g(this);
1541   int index = ParameterIndexOf(node->op());
1542   InstructionOperand op =
1543       linkage()->ParameterHasSecondaryLocation(index)
1544           ? g.DefineAsDualLocation(
1545                 node, linkage()->GetParameterLocation(index),
1546                 linkage()->GetParameterSecondaryLocation(index))
1547           : g.DefineAsLocation(
1548                 node, linkage()->GetParameterLocation(index),
1549                 linkage()->GetParameterType(index).representation());
1550 
1551   Emit(kArchNop, op);
1552 }
1553 
1554 
1555 void InstructionSelector::VisitIfException(Node* node) {
1556   OperandGenerator g(this);
1557   Node* call = node->InputAt(1);
1558   DCHECK_EQ(IrOpcode::kCall, call->opcode());
1559   const CallDescriptor* descriptor = CallDescriptorOf(call->op());
1560   Emit(kArchNop,
1561        g.DefineAsLocation(node, descriptor->GetReturnLocation(0),
1562                           descriptor->GetReturnType(0).representation()));
1563 }
1564 
1565 
1566 void InstructionSelector::VisitOsrValue(Node* node) {
1567   OperandGenerator g(this);
1568   int index = OpParameter<int>(node);
1569   Emit(kArchNop, g.DefineAsLocation(node, linkage()->GetOsrValueLocation(index),
1570                                     MachineRepresentation::kTagged));
1571 }
1572 
1573 
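// Phis become PhiInstructions attached to the current instruction block; each
// input is marked as used and recorded by its virtual register, so no actual
// instruction is emitted for the phi itself.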
1574 void InstructionSelector::VisitPhi(Node* node) {
1575   const int input_count = node->op()->ValueInputCount();
1576   PhiInstruction* phi = new (instruction_zone())
1577       PhiInstruction(instruction_zone(), GetVirtualRegister(node),
1578                      static_cast<size_t>(input_count));
1579   sequence()
1580       ->InstructionBlockAt(RpoNumber::FromInt(current_block_->rpo_number()))
1581       ->AddPhi(phi);
1582   for (int i = 0; i < input_count; ++i) {
1583     Node* const input = node->InputAt(i);
1584     MarkAsUsed(input);
1585     phi->SetInput(static_cast<size_t>(i), GetVirtualRegister(input));
1586   }
1587 }
1588 
1589 
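// For nodes with multiple results (overflow arithmetic, the TryTruncate*
// conversions and the word32 pair operations), projection 0 gets its own
// defining kArchNop tied to the producing node's first output, while
// projection 1 is only marked as used; the instruction selected for the
// producing node is responsible for defining that second output.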
1590 void InstructionSelector::VisitProjection(Node* node) {
1591   OperandGenerator g(this);
1592   Node* value = node->InputAt(0);
1593   switch (value->opcode()) {
1594     case IrOpcode::kInt32AddWithOverflow:
1595     case IrOpcode::kInt32SubWithOverflow:
1596     case IrOpcode::kInt64AddWithOverflow:
1597     case IrOpcode::kInt64SubWithOverflow:
1598     case IrOpcode::kTryTruncateFloat32ToInt64:
1599     case IrOpcode::kTryTruncateFloat64ToInt64:
1600     case IrOpcode::kTryTruncateFloat32ToUint64:
1601     case IrOpcode::kTryTruncateFloat64ToUint64:
1602     case IrOpcode::kInt32PairAdd:
1603     case IrOpcode::kInt32PairSub:
1604     case IrOpcode::kInt32PairMul:
1605     case IrOpcode::kWord32PairShl:
1606     case IrOpcode::kWord32PairShr:
1607     case IrOpcode::kWord32PairSar:
1608       if (ProjectionIndexOf(node->op()) == 0u) {
1609         Emit(kArchNop, g.DefineSameAsFirst(node), g.Use(value));
1610       } else {
1611         DCHECK(ProjectionIndexOf(node->op()) == 1u);
1612         MarkAsUsed(value);
1613       }
1614       break;
1615     default:
1616       break;
1617   }
1618 }
1619 
1620 
1621 void InstructionSelector::VisitConstant(Node* node) {
1622   // We must emit a NOP here because every live range needs a defining
1623   // instruction in the register allocator.
1624   OperandGenerator g(this);
1625   Emit(kArchNop, g.DefineAsConstant(node));
1626 }
1627 
1628 
1629 void InstructionSelector::VisitCall(Node* node, BasicBlock* handler) {
1630   OperandGenerator g(this);
1631   const CallDescriptor* descriptor = CallDescriptorOf(node->op());
1632 
1633   FrameStateDescriptor* frame_state_descriptor = nullptr;
1634   if (descriptor->NeedsFrameState()) {
1635     frame_state_descriptor = GetFrameStateDescriptor(
1636         node->InputAt(static_cast<int>(descriptor->InputCount())));
1637   }
1638 
1639   CallBuffer buffer(zone(), descriptor, frame_state_descriptor);
1640 
1641   // Compute InstructionOperands for inputs and outputs.
1642   // TODO(turbofan): on some architectures it's probably better to use
1643   // the code object in a register if there are multiple uses of it.
1644   // Improve constant pool and the heuristics in the register allocator
1645   // for where to emit constants.
1646   CallBufferFlags call_buffer_flags(kCallCodeImmediate | kCallAddressImmediate);
1647   InitializeCallBuffer(node, &buffer, call_buffer_flags);
1648 
1649   EmitPrepareArguments(&(buffer.pushed_nodes), descriptor, node);
1650 
1651   // Pass label of exception handler block.
1652   CallDescriptor::Flags flags = descriptor->flags();
1653   if (handler) {
1654     DCHECK_EQ(IrOpcode::kIfException, handler->front()->opcode());
1655     IfExceptionHint hint = OpParameter<IfExceptionHint>(handler->front());
1656     if (hint == IfExceptionHint::kLocallyCaught) {
1657       flags |= CallDescriptor::kHasLocalCatchHandler;
1658     }
1659     flags |= CallDescriptor::kHasExceptionHandler;
1660     buffer.instruction_args.push_back(g.Label(handler));
1661   }
1662 
1663   bool from_native_stack = linkage()->GetIncomingDescriptor()->UseNativeStack();
1664   bool to_native_stack = descriptor->UseNativeStack();
1665   if (from_native_stack != to_native_stack) {
1666     // (arm64 only) Mismatch in the use of stack pointers. One or the other
1667     // has to be restored manually by the code generator.
1668     flags |= to_native_stack ? CallDescriptor::kRestoreJSSP
1669                              : CallDescriptor::kRestoreCSP;
1670   }
1671 
1672   // Select the appropriate opcode based on the call type.
1673   InstructionCode opcode = kArchNop;
1674   switch (descriptor->kind()) {
1675     case CallDescriptor::kCallAddress:
1676       opcode =
1677           kArchCallCFunction |
1678           MiscField::encode(static_cast<int>(descriptor->CParameterCount()));
1679       break;
1680     case CallDescriptor::kCallCodeObject:
1681       opcode = kArchCallCodeObject | MiscField::encode(flags);
1682       break;
1683     case CallDescriptor::kCallJSFunction:
1684       opcode = kArchCallJSFunction | MiscField::encode(flags);
1685       break;
1686   }
1687 
1688   // Emit the call instruction.
1689   size_t const output_count = buffer.outputs.size();
1690   auto* outputs = output_count ? &buffer.outputs.front() : nullptr;
1691   Emit(opcode, output_count, outputs, buffer.instruction_args.size(),
1692        &buffer.instruction_args.front())
1693       ->MarkAsCall();
1694 }
1695 
1696 
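// Tail calls take one of two forms. If the incoming call descriptor allows a
// genuine tail call, a kArchPrepareTailCall is emitted followed by the
// appropriate kArchTailCall* instruction, with the stack parameter delta
// passed as an immediate. Otherwise the tail call is lowered to an ordinary
// call (marked as a call) immediately followed by a kArchRet of its results.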
1697 void InstructionSelector::VisitTailCall(Node* node) {
1698   OperandGenerator g(this);
1699   CallDescriptor const* descriptor = CallDescriptorOf(node->op());
1700   DCHECK_NE(0, descriptor->flags() & CallDescriptor::kSupportsTailCalls);
1701 
1702   // TODO(turbofan): Relax restriction for stack parameters.
1703 
1704   int stack_param_delta = 0;
1705   if (linkage()->GetIncomingDescriptor()->CanTailCall(node,
1706                                                       &stack_param_delta)) {
1707     CallBuffer buffer(zone(), descriptor, nullptr);
1708 
1709     // Compute InstructionOperands for inputs and outputs.
1710     CallBufferFlags flags(kCallCodeImmediate | kCallTail);
1711     if (IsTailCallAddressImmediate()) {
1712       flags |= kCallAddressImmediate;
1713     }
1714     InitializeCallBuffer(node, &buffer, flags, stack_param_delta);
1715 
1716     // Select the appropriate opcode based on the call type.
1717     InstructionCode opcode;
1718     InstructionOperandVector temps(zone());
1719     if (linkage()->GetIncomingDescriptor()->IsJSFunctionCall()) {
1720       switch (descriptor->kind()) {
1721         case CallDescriptor::kCallCodeObject:
1722           opcode = kArchTailCallCodeObjectFromJSFunction;
1723           break;
1724         case CallDescriptor::kCallJSFunction:
1725           opcode = kArchTailCallJSFunctionFromJSFunction;
1726           break;
1727         default:
1728           UNREACHABLE();
1729           return;
1730       }
1731       int temps_count = GetTempsCountForTailCallFromJSFunction();
1732       for (int i = 0; i < temps_count; i++) {
1733         temps.push_back(g.TempRegister());
1734       }
1735     } else {
1736       switch (descriptor->kind()) {
1737         case CallDescriptor::kCallCodeObject:
1738           opcode = kArchTailCallCodeObject;
1739           break;
1740         case CallDescriptor::kCallJSFunction:
1741           opcode = kArchTailCallJSFunction;
1742           break;
1743         case CallDescriptor::kCallAddress:
1744           opcode = kArchTailCallAddress;
1745           break;
1746         default:
1747           UNREACHABLE();
1748           return;
1749       }
1750     }
1751     opcode |= MiscField::encode(descriptor->flags());
1752 
1753     buffer.instruction_args.push_back(g.TempImmediate(stack_param_delta));
1754 
1755     Emit(kArchPrepareTailCall, g.NoOutput(),
1756          g.TempImmediate(stack_param_delta));
1757 
1758     // Emit the tailcall instruction.
1759     Emit(opcode, 0, nullptr, buffer.instruction_args.size(),
1760          &buffer.instruction_args.front(), temps.size(),
1761          temps.empty() ? nullptr : &temps.front());
1762   } else {
1763     FrameStateDescriptor* frame_state_descriptor =
1764         descriptor->NeedsFrameState()
1765             ? GetFrameStateDescriptor(
1766                   node->InputAt(static_cast<int>(descriptor->InputCount())))
1767             : nullptr;
1768 
1769     CallBuffer buffer(zone(), descriptor, frame_state_descriptor);
1770 
1771     // Compute InstructionOperands for inputs and outputs.
1772     CallBufferFlags flags = kCallCodeImmediate;
1773     if (IsTailCallAddressImmediate()) {
1774       flags |= kCallAddressImmediate;
1775     }
1776     InitializeCallBuffer(node, &buffer, flags);
1777 
1778     EmitPrepareArguments(&(buffer.pushed_nodes), descriptor, node);
1779 
1780     // Select the appropriate opcode based on the call type.
1781     InstructionCode opcode;
1782     switch (descriptor->kind()) {
1783       case CallDescriptor::kCallCodeObject:
1784         opcode = kArchCallCodeObject;
1785         break;
1786       case CallDescriptor::kCallJSFunction:
1787         opcode = kArchCallJSFunction;
1788         break;
1789       default:
1790         UNREACHABLE();
1791         return;
1792     }
1793     opcode |= MiscField::encode(descriptor->flags());
1794 
1795     // Emit the call instruction.
1796     size_t output_count = buffer.outputs.size();
1797     auto* outputs = &buffer.outputs.front();
1798     Emit(opcode, output_count, outputs, buffer.instruction_args.size(),
1799          &buffer.instruction_args.front())
1800         ->MarkAsCall();
1801     Emit(kArchRet, 0, nullptr, output_count, outputs);
1802   }
1803 }
1804 
1805 
1806 void InstructionSelector::VisitGoto(BasicBlock* target) {
1807   // jump to the next block.
1808   OperandGenerator g(this);
1809   Emit(kArchJmp, g.NoOutput(), g.Label(target));
1810 }
1811 
1812 
1813 void InstructionSelector::VisitReturn(Node* ret) {
1814   OperandGenerator g(this);
1815   if (linkage()->GetIncomingDescriptor()->ReturnCount() == 0) {
1816     Emit(kArchRet, g.NoOutput());
1817   } else {
1818     const int ret_count = ret->op()->ValueInputCount();
1819     auto value_locations = zone()->NewArray<InstructionOperand>(ret_count);
1820     for (int i = 0; i < ret_count; ++i) {
1821       value_locations[i] =
1822           g.UseLocation(ret->InputAt(i), linkage()->GetReturnLocation(i),
1823                         linkage()->GetReturnType(i).representation());
1824     }
1825     Emit(kArchRet, 0, nullptr, ret_count, value_locations);
1826   }
1827 }
1828 
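// The argument list of a deoptimizing instruction is assembled as: the
// explicit inputs (their count is encoded into the MiscField of the opcode),
// then an immediate holding the frame state descriptor's id, then the
// flattened frame state inputs themselves.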
1829 Instruction* InstructionSelector::EmitDeoptimize(InstructionCode opcode,
1830                                                  InstructionOperand output,
1831                                                  InstructionOperand a,
1832                                                  InstructionOperand b,
1833                                                  Node* frame_state) {
1834   size_t output_count = output.IsInvalid() ? 0 : 1;
1835   InstructionOperand inputs[] = {a, b};
1836   size_t input_count = arraysize(inputs);
1837   return EmitDeoptimize(opcode, output_count, &output, input_count, inputs,
1838                         frame_state);
1839 }
1840 
1841 Instruction* InstructionSelector::EmitDeoptimize(
1842     InstructionCode opcode, size_t output_count, InstructionOperand* outputs,
1843     size_t input_count, InstructionOperand* inputs, Node* frame_state) {
1844   OperandGenerator g(this);
1845   FrameStateDescriptor* const descriptor = GetFrameStateDescriptor(frame_state);
1846   InstructionOperandVector args(instruction_zone());
1847   args.reserve(input_count + 1 + descriptor->GetTotalSize());
1848   for (size_t i = 0; i < input_count; ++i) {
1849     args.push_back(inputs[i]);
1850   }
1851   opcode |= MiscField::encode(static_cast<int>(input_count));
1852   InstructionSequence::StateId const state_id =
1853       sequence()->AddFrameStateDescriptor(descriptor);
1854   args.push_back(g.TempImmediate(state_id.ToInt()));
1855   StateObjectDeduplicator deduplicator(instruction_zone());
1856   AddInputsToFrameStateDescriptor(descriptor, frame_state, &g, &deduplicator,
1857                                   &args, FrameStateInputKind::kAny,
1858                                   instruction_zone());
1859   return Emit(opcode, output_count, outputs, args.size(), &args.front(), 0,
1860               nullptr);
1861 }
1862 
1863 void InstructionSelector::EmitIdentity(Node* node) {
1864   OperandGenerator g(this);
1865   Node* value = node->InputAt(0);
1866   Emit(kArchNop, g.DefineSameAsFirst(node), g.Use(value));
1867 }
1868 
1869 void InstructionSelector::VisitDeoptimize(DeoptimizeKind kind, Node* value) {
1870   InstructionCode opcode = kArchDeoptimize;
1871   switch (kind) {
1872     case DeoptimizeKind::kEager:
1873       opcode |= MiscField::encode(Deoptimizer::EAGER);
1874       break;
1875     case DeoptimizeKind::kSoft:
1876       opcode |= MiscField::encode(Deoptimizer::SOFT);
1877       break;
1878   }
1879   EmitDeoptimize(opcode, 0, nullptr, 0, nullptr, value);
1880 }
1881 
1882 
1883 void InstructionSelector::VisitThrow(Node* value) {
1884   OperandGenerator g(this);
1885   Emit(kArchThrowTerminator, g.NoOutput());
1886 }
1887 
1888 void InstructionSelector::VisitDebugBreak(Node* node) {
1889   OperandGenerator g(this);
1890   Emit(kArchDebugBreak, g.NoOutput());
1891 }
1892 
1893 void InstructionSelector::VisitComment(Node* node) {
1894   OperandGenerator g(this);
1895   InstructionOperand operand(g.UseImmediate(node));
1896   Emit(kArchComment, 0, nullptr, 1, &operand);
1897 }
1898 
1899 bool InstructionSelector::CanProduceSignalingNaN(Node* node) {
1900   // TODO(jarin) Improve the heuristic here.
1901   if (node->opcode() == IrOpcode::kFloat64Add ||
1902       node->opcode() == IrOpcode::kFloat64Sub ||
1903       node->opcode() == IrOpcode::kFloat64Mul) {
1904     return false;
1905   }
1906   return true;
1907 }
1908 
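// Builds a FrameStateDescriptor for a kFrameState node, counting the
// parameter, local and stack inputs via StateValuesAccess and recursing into
// the outer frame state, if any, so that inlined frames chain together.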
1909 FrameStateDescriptor* InstructionSelector::GetFrameStateDescriptor(
1910     Node* state) {
1911   DCHECK(state->opcode() == IrOpcode::kFrameState);
1912   DCHECK_EQ(kFrameStateInputCount, state->InputCount());
1913   FrameStateInfo state_info = OpParameter<FrameStateInfo>(state);
1914 
1915   int parameters = static_cast<int>(
1916       StateValuesAccess(state->InputAt(kFrameStateParametersInput)).size());
1917   int locals = static_cast<int>(
1918       StateValuesAccess(state->InputAt(kFrameStateLocalsInput)).size());
1919   int stack = static_cast<int>(
1920       StateValuesAccess(state->InputAt(kFrameStateStackInput)).size());
1921 
1922   DCHECK_EQ(parameters, state_info.parameter_count());
1923   DCHECK_EQ(locals, state_info.local_count());
1924 
1925   FrameStateDescriptor* outer_state = nullptr;
1926   Node* outer_node = state->InputAt(kFrameStateOuterStateInput);
1927   if (outer_node->opcode() == IrOpcode::kFrameState) {
1928     outer_state = GetFrameStateDescriptor(outer_node);
1929   }
1930 
1931   return new (instruction_zone()) FrameStateDescriptor(
1932       instruction_zone(), state_info.type(), state_info.bailout_id(),
1933       state_info.state_combine(), parameters, locals, stack,
1934       state_info.shared_info(), outer_state);
1935 }
1936 
1937 
1938 }  // namespace compiler
1939 }  // namespace internal
1940 }  // namespace v8
1941