1 // Copyright 2014 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4
5 #include "src/compiler/raw-machine-assembler.h"
6
7 #include "src/base/small-vector.h"
8 #include "src/compiler/compiler-source-position-table.h"
9 #include "src/compiler/node-properties.h"
10 #include "src/compiler/pipeline.h"
11 #include "src/compiler/scheduler.h"
12 #include "src/heap/factory-inl.h"
13
14 namespace v8 {
15 namespace internal {
16 namespace compiler {
17
RawMachineAssembler::RawMachineAssembler(
    Isolate* isolate, Graph* graph, CallDescriptor* call_descriptor,
    MachineRepresentation word, MachineOperatorBuilder::Flags flags,
    MachineOperatorBuilder::AlignmentRequirements alignment_requirements,
    PoisoningMitigationLevel poisoning_level)
    : isolate_(isolate),
      graph_(graph),
      schedule_(zone()->New<Schedule>(zone())),
      source_positions_(zone()->New<SourcePositionTable>(graph)),
      machine_(zone(), word, flags, alignment_requirements),
      common_(zone()),
      simplified_(zone()),
      call_descriptor_(call_descriptor),
      target_parameter_(nullptr),
      parameters_(parameter_count(), zone()),
      current_block_(schedule()->start()),
      poisoning_level_(poisoning_level) {
  int param_count = static_cast<int>(parameter_count());
  // Add an extra input for the JSFunction parameter to the start node.
  graph->SetStart(graph->NewNode(common_.Start(param_count + 1)));
  // For JS calls, expose the closure as a dedicated parameter node so that
  // TargetParameter() can hand it out later.
  if (call_descriptor->IsJSFunctionCall()) {
    target_parameter_ = AddNode(
        common()->Parameter(Linkage::kJSCallClosureParamIndex), graph->start());
  }
  // Eagerly materialize one Parameter node per declared parameter; Parameter()
  // simply indexes into this cache.
  for (size_t i = 0; i < parameter_count(); ++i) {
    parameters_[i] =
        AddNode(common()->Parameter(static_cast<int>(i)), graph->start());
  }
  // The end node starts with zero inputs; terminating control nodes are
  // merged into it as the graph is built.
  graph->SetEnd(graph->NewNode(common_.End(0)));
  // Decorate newly created nodes with the current source position.
  source_positions_->AddDecorator();
}
49
SetCurrentExternalSourcePosition(FileAndLine file_and_line)50 void RawMachineAssembler::SetCurrentExternalSourcePosition(
51 FileAndLine file_and_line) {
52 int file_id =
53 isolate()->LookupOrAddExternallyCompiledFilename(file_and_line.first);
54 SourcePosition p = SourcePosition::External(file_and_line.second, file_id);
55 DCHECK(p.ExternalLine() == file_and_line.second);
56 source_positions()->SetCurrentPosition(p);
57 }
58
GetCurrentExternalSourcePosition() const59 FileAndLine RawMachineAssembler::GetCurrentExternalSourcePosition() const {
60 SourcePosition p = source_positions_->GetCurrentPosition();
61 if (!p.IsKnown()) return {nullptr, -1};
62 int file_id = p.ExternalFileId();
63 const char* file_name = isolate()->GetExternallyCompiledFilename(file_id);
64 int line = p.ExternalLine();
65 return {file_name, line};
66 }
67
NullConstant()68 Node* RawMachineAssembler::NullConstant() {
69 return HeapConstant(isolate()->factory()->null_value());
70 }
71
UndefinedConstant()72 Node* RawMachineAssembler::UndefinedConstant() {
73 return HeapConstant(isolate()->factory()->undefined_value());
74 }
75
RelocatableIntPtrConstant(intptr_t value,RelocInfo::Mode rmode)76 Node* RawMachineAssembler::RelocatableIntPtrConstant(intptr_t value,
77 RelocInfo::Mode rmode) {
78 return kSystemPointerSize == 8
79 ? RelocatableInt64Constant(value, rmode)
80 : RelocatableInt32Constant(static_cast<int>(value), rmode);
81 }
82
OptimizedAllocate(Node * size,AllocationType allocation,AllowLargeObjects allow_large_objects)83 Node* RawMachineAssembler::OptimizedAllocate(
84 Node* size, AllocationType allocation,
85 AllowLargeObjects allow_large_objects) {
86 return AddNode(
87 simplified()->AllocateRaw(Type::Any(), allocation, allow_large_objects),
88 size);
89 }
90
Schedule* RawMachineAssembler::ExportForTest() {
  // Compute the correct codegen order.
  DCHECK(schedule_->rpo_order()->empty());
  if (FLAG_trace_turbo_scheduler) {
    PrintF("--- RAW SCHEDULE -------------------------------------------\n");
    StdoutStream{} << *schedule_;
  }
  schedule_->EnsureCFGWellFormedness();
  Scheduler::ComputeSpecialRPO(zone(), schedule_);
  schedule_->PropagateDeferredMark();
  if (FLAG_trace_turbo_scheduler) {
    PrintF("--- EDGE SPLIT AND PROPAGATED DEFERRED SCHEDULE ------------\n");
    StdoutStream{} << *schedule_;
  }
  // Invalidate RawMachineAssembler: the decorator is removed and the schedule
  // handed off, so no further nodes may be emitted through this assembler.
  source_positions_->RemoveDecorator();
  Schedule* schedule = schedule_;
  schedule_ = nullptr;
  return schedule;
}
111
Graph* RawMachineAssembler::ExportForOptimization() {
  // Compute the correct codegen order.
  DCHECK(schedule_->rpo_order()->empty());
  if (FLAG_trace_turbo_scheduler) {
    PrintF("--- RAW SCHEDULE -------------------------------------------\n");
    StdoutStream{} << *schedule_;
  }
  schedule_->EnsureCFGWellFormedness();
  // Simplify the CFG first so MakeReschedulable() sees combined blocks.
  OptimizeControlFlow(schedule_, graph(), common());
  Scheduler::ComputeSpecialRPO(zone(), schedule_);
  if (FLAG_trace_turbo_scheduler) {
    PrintF("--- SCHEDULE BEFORE GRAPH CREATION -------------------------\n");
    StdoutStream{} << *schedule_;
  }
  // Re-attach effect/control edges to the nodes so a later scheduler can
  // compute a schedule from the graph alone.
  MakeReschedulable();
  // Invalidate RawMachineAssembler.
  schedule_ = nullptr;
  return graph();
}
131
// Applies two local CFG simplifications until a fixed point is reached:
// (1) fold a goto into its sole successor, and (2) clone branch blocks that
// consist only of a phi plus a branch on that phi.
void RawMachineAssembler::OptimizeControlFlow(Schedule* schedule, Graph* graph,
                                              CommonOperatorBuilder* common) {
  for (bool changed = true; changed;) {
    changed = false;
    for (size_t i = 0; i < schedule->all_blocks()->size(); ++i) {
      BasicBlock* block = (*schedule->all_blocks())[i];
      // ClearBlockById() below leaves null entries behind; skip them.
      if (block == nullptr) continue;

      // Short-circuit a goto if the succeeding block is not a control-flow
      // merge. This is not really useful on its own since graph construction
      // has the same effect, but combining blocks improves the pattern-match on
      // their structure below.
      if (block->control() == BasicBlock::kGoto) {
        DCHECK_EQ(block->SuccessorCount(), 1);
        BasicBlock* successor = block->SuccessorAt(0);
        if (successor->PredecessorCount() == 1) {
          DCHECK_EQ(successor->PredecessorAt(0), block);
          // Move all of the successor's nodes into |block|.
          for (Node* node : *successor) {
            schedule->SetBlockForNode(nullptr, node);
            schedule->AddNode(block, node);
          }
          // Take over the successor's terminator and outgoing edges.
          block->set_control(successor->control());
          Node* control_input = successor->control_input();
          block->set_control_input(control_input);
          if (control_input) {
            schedule->SetBlockForNode(block, control_input);
          }
          if (successor->deferred()) block->set_deferred(true);
          block->ClearSuccessors();
          schedule->MoveSuccessors(successor, block);
          schedule->ClearBlockById(successor->id());
          changed = true;
          // Re-visit this block: it may now be combinable with its new
          // successor.
          --i;
          continue;
        }
      }
      // Block-cloning in the simple case where a block consists only of a phi
      // node and a branch on that phi. This just duplicates the branch block
      // for each predecessor, replacing the phi node with the corresponding phi
      // input.
      if (block->control() == BasicBlock::kBranch && block->NodeCount() == 1) {
        Node* phi = block->NodeAt(0);
        if (phi->opcode() != IrOpcode::kPhi) continue;
        Node* branch = block->control_input();
        DCHECK_EQ(branch->opcode(), IrOpcode::kBranch);
        // Only applicable when the branch condition is exactly this phi and
        // the phi has no other uses that would keep it alive.
        if (NodeProperties::GetValueInput(branch, 0) != phi) continue;
        if (phi->UseCount() != 1) continue;
        DCHECK_EQ(phi->op()->ValueInputCount(), block->PredecessorCount());

        // Turn projection blocks into normal blocks.
        DCHECK_EQ(block->SuccessorCount(), 2);
        BasicBlock* true_block = block->SuccessorAt(0);
        BasicBlock* false_block = block->SuccessorAt(1);
        DCHECK_EQ(true_block->NodeAt(0)->opcode(), IrOpcode::kIfTrue);
        DCHECK_EQ(false_block->NodeAt(0)->opcode(), IrOpcode::kIfFalse);
        (*true_block->begin())->Kill();
        true_block->RemoveNode(true_block->begin());
        (*false_block->begin())->Kill();
        false_block->RemoveNode(false_block->begin());
        true_block->ClearPredecessors();
        false_block->ClearPredecessors();

        // Give each predecessor its own clone of the branch, conditioned on
        // the corresponding phi input, with fresh projection blocks.
        size_t arity = block->PredecessorCount();
        for (size_t i = 0; i < arity; ++i) {
          BasicBlock* predecessor = block->PredecessorAt(i);
          predecessor->ClearSuccessors();
          if (block->deferred()) predecessor->set_deferred(true);
          Node* branch_clone = graph->CloneNode(branch);
          int phi_input = static_cast<int>(i);
          NodeProperties::ReplaceValueInput(
              branch_clone, NodeProperties::GetValueInput(phi, phi_input), 0);
          BasicBlock* new_true_block = schedule->NewBasicBlock();
          BasicBlock* new_false_block = schedule->NewBasicBlock();
          new_true_block->AddNode(
              graph->NewNode(common->IfTrue(), branch_clone));
          new_false_block->AddNode(
              graph->NewNode(common->IfFalse(), branch_clone));
          schedule->AddGoto(new_true_block, true_block);
          schedule->AddGoto(new_false_block, false_block);
          DCHECK_EQ(predecessor->control(), BasicBlock::kGoto);
          predecessor->set_control(BasicBlock::kNone);
          schedule->AddBranch(predecessor, branch_clone, new_true_block,
                              new_false_block);
        }
        // The original branch and its block are now fully replaced.
        branch->Kill();
        schedule->ClearBlockById(block->id());
        changed = true;
        continue;
      }
    }
  }
}
224
// Rewires the graph's effect and control edges from the block structure of
// |schedule_|, so that the resulting graph can be scheduled from scratch by a
// later pass. Walks the blocks in RPO, threading a current effect/control
// through each block, then fixes up loop headers to have exactly two control
// predecessors (entry and backedge).
void RawMachineAssembler::MakeReschedulable() {
  // Per-block sidetables, indexed by BasicBlock id: the last control and
  // effect node produced in that block.
  std::vector<Node*> block_final_control(schedule_->all_blocks_.size());
  std::vector<Node*> block_final_effect(schedule_->all_blocks_.size());

  struct LoopHeader {
    BasicBlock* block;
    Node* loop_node;
    Node* effect_phi;
  };
  std::vector<LoopHeader> loop_headers;

  // These are hoisted outside of the loop to avoid re-allocation.
  std::vector<Node*> merge_inputs;
  std::vector<Node*> effect_phi_inputs;

  for (BasicBlock* block : *schedule_->rpo_order()) {
    Node* current_control;
    Node* current_effect;
    if (block == schedule_->start()) {
      current_control = current_effect = graph()->start();
    } else if (block == schedule_->end()) {
      // NOTE(review): this arm leaves current_control/current_effect unset,
      // and the sidetable stores at the bottom of the loop then record
      // indeterminate values for the end block; this appears to rely on no
      // block reading the end block's entries — confirm.
      for (size_t i = 0; i < block->PredecessorCount(); ++i) {
        NodeProperties::MergeControlToEnd(
            graph(), common(), block->PredecessorAt(i)->control_input());
      }
    } else if (block->IsLoopHeader()) {
      // The graph()->start() inputs are just placeholders until we computed the
      // real back-edges and re-structure the control flow so the loop has
      // exactly two predecessors.
      current_control = graph()->NewNode(common()->Loop(2), graph()->start(),
                                         graph()->start());
      current_effect =
          graph()->NewNode(common()->EffectPhi(2), graph()->start(),
                           graph()->start(), current_control);

      // Terminate keeps the loop alive even if it has no other exit to end.
      Node* terminate = graph()->NewNode(common()->Terminate(), current_effect,
                                         current_control);
      NodeProperties::MergeControlToEnd(graph(), common(), terminate);
      // Remember the header so the placeholder inputs can be patched below.
      loop_headers.push_back(
          LoopHeader{block, current_control, current_effect});
    } else if (block->PredecessorCount() == 1) {
      // Straight-line flow: inherit effect/control from the sole predecessor.
      BasicBlock* predecessor = block->PredecessorAt(0);
      DCHECK_LT(predecessor->rpo_number(), block->rpo_number());
      current_effect = block_final_effect[predecessor->id().ToSize()];
      current_control = block_final_control[predecessor->id().ToSize()];
    } else {
      // Create control merge nodes and effect phis for all predecessor blocks.
      merge_inputs.clear();
      effect_phi_inputs.clear();
      int predecessor_count = static_cast<int>(block->PredecessorCount());
      for (int i = 0; i < predecessor_count; ++i) {
        BasicBlock* predecessor = block->PredecessorAt(i);
        DCHECK_LT(predecessor->rpo_number(), block->rpo_number());
        merge_inputs.push_back(block_final_control[predecessor->id().ToSize()]);
        effect_phi_inputs.push_back(
            block_final_effect[predecessor->id().ToSize()]);
      }
      current_control = graph()->NewNode(common()->Merge(predecessor_count),
                                         static_cast<int>(merge_inputs.size()),
                                         merge_inputs.data());
      // The effect phi's last input is its controlling merge.
      effect_phi_inputs.push_back(current_control);
      current_effect = graph()->NewNode(
          common()->EffectPhi(predecessor_count),
          static_cast<int>(effect_phi_inputs.size()), effect_phi_inputs.data());
    }

    // Threads |node| onto the running effect/control chains: existing
    // effect/control inputs (on projections and phis) are replaced, missing
    // ones are appended, and the chains advance past nodes that produce
    // effect or control.
    auto update_current_control_and_effect = [&](Node* node) {
      bool existing_effect_and_control =
          IrOpcode::IsIfProjectionOpcode(node->opcode()) ||
          IrOpcode::IsPhiOpcode(node->opcode());
      if (node->op()->EffectInputCount() > 0) {
        DCHECK_EQ(1, node->op()->EffectInputCount());
        if (existing_effect_and_control) {
          NodeProperties::ReplaceEffectInput(node, current_effect);
        } else {
          node->AppendInput(graph()->zone(), current_effect);
        }
      }
      if (node->op()->ControlInputCount() > 0) {
        DCHECK_EQ(1, node->op()->ControlInputCount());
        if (existing_effect_and_control) {
          NodeProperties::ReplaceControlInput(node, current_control);
        } else {
          node->AppendInput(graph()->zone(), current_control);
        }
      }
      if (node->op()->EffectOutputCount() > 0) {
        DCHECK_EQ(1, node->op()->EffectOutputCount());
        current_effect = node;
      }
      if (node->op()->ControlOutputCount() > 0) {
        current_control = node;
      }
    };

    for (Node* node : *block) {
      update_current_control_and_effect(node);
    }
    if (block->deferred()) MarkControlDeferred(current_control);

    // The block's terminator (branch, return, ...) is threaded last.
    if (Node* block_terminator = block->control_input()) {
      update_current_control_and_effect(block_terminator);
    }

    block_final_effect[block->id().ToSize()] = current_effect;
    block_final_control[block->id().ToSize()] = current_control;
  }

  // Fix-up loop backedges and re-structure control flow so that loop nodes have
  // exactly two control predecessors.
  for (const LoopHeader& loop_header : loop_headers) {
    BasicBlock* block = loop_header.block;
    std::vector<BasicBlock*> loop_entries;
    std::vector<BasicBlock*> loop_backedges;
    for (size_t i = 0; i < block->PredecessorCount(); ++i) {
      BasicBlock* predecessor = block->PredecessorAt(i);
      if (block->LoopContains(predecessor)) {
        loop_backedges.push_back(predecessor);
      } else {
        // All entries precede all backedges in the predecessor list.
        DCHECK(loop_backedges.empty());
        loop_entries.push_back(predecessor);
      }
    }
    DCHECK(!loop_entries.empty());
    DCHECK(!loop_backedges.empty());

    // Collapse multiple entries (resp. backedges) into a single merge/effect
    // phi so the loop node ends up with exactly two inputs.
    int entrance_count = static_cast<int>(loop_entries.size());
    int backedge_count = static_cast<int>(loop_backedges.size());
    Node* control_loop_entry = CreateNodeFromPredecessors(
        loop_entries, block_final_control, common()->Merge(entrance_count), {});
    Node* control_backedge =
        CreateNodeFromPredecessors(loop_backedges, block_final_control,
                                   common()->Merge(backedge_count), {});
    Node* effect_loop_entry = CreateNodeFromPredecessors(
        loop_entries, block_final_effect, common()->EffectPhi(entrance_count),
        {control_loop_entry});
    Node* effect_backedge = CreateNodeFromPredecessors(
        loop_backedges, block_final_effect, common()->EffectPhi(backedge_count),
        {control_backedge});

    // Patch the placeholder inputs created when the header was visited.
    loop_header.loop_node->ReplaceInput(0, control_loop_entry);
    loop_header.loop_node->ReplaceInput(1, control_backedge);
    loop_header.effect_phi->ReplaceInput(0, effect_loop_entry);
    loop_header.effect_phi->ReplaceInput(1, effect_backedge);

    // Value phis in the header must also be reduced to two inputs.
    for (Node* node : *block) {
      if (node->opcode() == IrOpcode::kPhi) {
        MakePhiBinary(node, static_cast<int>(loop_entries.size()),
                      control_loop_entry, control_backedge);
      }
    }
  }
}
378
CreateNodeFromPredecessors(const std::vector<BasicBlock * > & predecessors,const std::vector<Node * > & sidetable,const Operator * op,const std::vector<Node * > & additional_inputs)379 Node* RawMachineAssembler::CreateNodeFromPredecessors(
380 const std::vector<BasicBlock*>& predecessors,
381 const std::vector<Node*>& sidetable, const Operator* op,
382 const std::vector<Node*>& additional_inputs) {
383 if (predecessors.size() == 1) {
384 return sidetable[predecessors.front()->id().ToSize()];
385 }
386 std::vector<Node*> inputs;
387 for (BasicBlock* predecessor : predecessors) {
388 inputs.push_back(sidetable[predecessor->id().ToSize()]);
389 }
390 for (Node* additional_input : additional_inputs) {
391 inputs.push_back(additional_input);
392 }
393 return graph()->NewNode(op, static_cast<int>(inputs.size()), inputs.data());
394 }
395
// Rewrites an n-input |phi| into a 2-input phi whose first input merges the
// values flowing in through the first |split_point| predecessors (the loop
// entries, controlled by |left_control|) and whose second input merges the
// remaining values (the backedges, controlled by |right_control|).
void RawMachineAssembler::MakePhiBinary(Node* phi, int split_point,
                                        Node* left_control,
                                        Node* right_control) {
  int value_count = phi->op()->ValueInputCount();
  // Already binary: nothing to do.
  if (value_count == 2) return;
  DCHECK_LT(split_point, value_count);
  DCHECK_GT(split_point, 0);

  MachineRepresentation rep = PhiRepresentationOf(phi->op());
  int left_input_count = split_point;
  int right_input_count = value_count - split_point;

  // A single value needs no intermediate phi; use it directly.
  Node* left_input;
  if (left_input_count == 1) {
    left_input = NodeProperties::GetValueInput(phi, 0);
  } else {
    std::vector<Node*> inputs;
    for (int i = 0; i < left_input_count; ++i) {
      inputs.push_back(NodeProperties::GetValueInput(phi, i));
    }
    inputs.push_back(left_control);
    left_input =
        graph()->NewNode(common()->Phi(rep, static_cast<int>(left_input_count)),
                         static_cast<int>(inputs.size()), inputs.data());
  }

  Node* right_input;
  if (right_input_count == 1) {
    right_input = NodeProperties::GetValueInput(phi, split_point);
  } else {
    std::vector<Node*> inputs;
    for (int i = split_point; i < value_count; ++i) {
      inputs.push_back(NodeProperties::GetValueInput(phi, i));
    }
    inputs.push_back(right_control);
    right_input = graph()->NewNode(
        common()->Phi(rep, static_cast<int>(right_input_count)),
        static_cast<int>(inputs.size()), inputs.data());
  }

  // Shrink the original phi to (left, right, control) and retag its operator.
  Node* control = NodeProperties::GetControlInput(phi);
  phi->TrimInputCount(3);
  phi->ReplaceInput(0, left_input);
  phi->ReplaceInput(1, right_input);
  phi->ReplaceInput(2, control);
  NodeProperties::ChangeOp(phi, common()->Phi(rep, 2));
}
443
// Marks the control path ending in |control_node| as deferred (unlikely) by
// walking up the control chain to the responsible branch-like node and
// adjusting its hint so the scheduler places the path out of line.
void RawMachineAssembler::MarkControlDeferred(Node* control_node) {
  BranchHint new_branch_hint;
  Node* responsible_branch = nullptr;
  while (responsible_branch == nullptr) {
    switch (control_node->opcode()) {
      case IrOpcode::kIfException:
        // IfException projections are deferred by default.
        return;
      case IrOpcode::kIfSuccess:
        // The hint lives further up; keep walking.
        control_node = NodeProperties::GetControlInput(control_node);
        continue;
      case IrOpcode::kIfValue: {
        // Switch case: mark this projection itself as unlikely.
        IfValueParameters parameters = IfValueParametersOf(control_node->op());
        if (parameters.hint() != BranchHint::kFalse) {
          NodeProperties::ChangeOp(
              control_node, common()->IfValue(parameters.value(),
                                              parameters.comparison_order(),
                                              BranchHint::kFalse));
        }
        return;
      }
      case IrOpcode::kIfDefault:
        if (BranchHintOf(control_node->op()) != BranchHint::kFalse) {
          NodeProperties::ChangeOp(control_node,
                                   common()->IfDefault(BranchHint::kFalse));
        }
        return;
      case IrOpcode::kIfTrue: {
        Node* branch = NodeProperties::GetControlInput(control_node);
        BranchHint hint = BranchOperatorInfoOf(branch->op()).hint;
        if (hint == BranchHint::kTrue) {
          // The other possibility is also deferred, so the responsible branch
          // has to be before.
          control_node = NodeProperties::GetControlInput(branch);
          continue;
        }
        // Deferring the true side means hinting the branch false.
        new_branch_hint = BranchHint::kFalse;
        responsible_branch = branch;
        break;
      }
      case IrOpcode::kIfFalse: {
        Node* branch = NodeProperties::GetControlInput(control_node);
        BranchHint hint = BranchOperatorInfoOf(branch->op()).hint;
        if (hint == BranchHint::kFalse) {
          // The other possibility is also deferred, so the responsible branch
          // has to be before.
          control_node = NodeProperties::GetControlInput(branch);
          continue;
        }
        // Deferring the false side means hinting the branch true.
        new_branch_hint = BranchHint::kTrue;
        responsible_branch = branch;
        break;
      }
      case IrOpcode::kMerge:
        // All merged paths are deferred; recurse into each of them.
        for (int i = 0; i < control_node->op()->ControlInputCount(); ++i) {
          MarkControlDeferred(NodeProperties::GetControlInput(control_node, i));
        }
        return;
      case IrOpcode::kLoop:
        // Follow the loop's entry edge only; the backedge is inside the loop.
        control_node = NodeProperties::GetControlInput(control_node, 0);
        continue;
      case IrOpcode::kBranch:
      case IrOpcode::kSwitch:
        // Branches and switches are only reached via their projections above.
        UNREACHABLE();
      case IrOpcode::kStart:
        return;
      default:
        DCHECK_EQ(1, control_node->op()->ControlInputCount());
        control_node = NodeProperties::GetControlInput(control_node);
        continue;
    }
  }

  BranchOperatorInfo info = BranchOperatorInfoOf(responsible_branch->op());
  if (info.hint == new_branch_hint) return;
  NodeProperties::ChangeOp(
      responsible_branch,
      common()->Branch(new_branch_hint, info.is_safety_check));
}
523
// Returns the closure parameter node. Only valid for JS-call descriptors,
// where the constructor created target_parameter_.
Node* RawMachineAssembler::TargetParameter() {
  DCHECK_NOT_NULL(target_parameter_);
  return target_parameter_;
}
528
// Returns the cached Parameter node for |index|, created in the constructor.
Node* RawMachineAssembler::Parameter(size_t index) {
  DCHECK_LT(index, parameter_count());
  return parameters_[index];
}
533
534
Goto(RawMachineLabel * label)535 void RawMachineAssembler::Goto(RawMachineLabel* label) {
536 DCHECK(current_block_ != schedule()->end());
537 schedule()->AddGoto(CurrentBlock(), Use(label));
538 current_block_ = nullptr;
539 }
540
541
Branch(Node * condition,RawMachineLabel * true_val,RawMachineLabel * false_val)542 void RawMachineAssembler::Branch(Node* condition, RawMachineLabel* true_val,
543 RawMachineLabel* false_val) {
544 DCHECK(current_block_ != schedule()->end());
545 Node* branch = MakeNode(
546 common()->Branch(BranchHint::kNone, IsSafetyCheck::kNoSafetyCheck), 1,
547 &condition);
548 BasicBlock* true_block = schedule()->NewBasicBlock();
549 BasicBlock* false_block = schedule()->NewBasicBlock();
550 schedule()->AddBranch(CurrentBlock(), branch, true_block, false_block);
551
552 true_block->AddNode(MakeNode(common()->IfTrue(), 1, &branch));
553 schedule()->AddGoto(true_block, Use(true_val));
554
555 false_block->AddNode(MakeNode(common()->IfFalse(), 1, &branch));
556 schedule()->AddGoto(false_block, Use(false_val));
557
558 current_block_ = nullptr;
559 }
560
Continuations(Node * call,RawMachineLabel * if_success,RawMachineLabel * if_exception)561 void RawMachineAssembler::Continuations(Node* call, RawMachineLabel* if_success,
562 RawMachineLabel* if_exception) {
563 DCHECK_NOT_NULL(schedule_);
564 DCHECK_NOT_NULL(current_block_);
565 schedule()->AddCall(CurrentBlock(), call, Use(if_success), Use(if_exception));
566 current_block_ = nullptr;
567 }
568
Switch(Node * index,RawMachineLabel * default_label,const int32_t * case_values,RawMachineLabel ** case_labels,size_t case_count)569 void RawMachineAssembler::Switch(Node* index, RawMachineLabel* default_label,
570 const int32_t* case_values,
571 RawMachineLabel** case_labels,
572 size_t case_count) {
573 DCHECK_NE(schedule()->end(), current_block_);
574 size_t succ_count = case_count + 1;
575 Node* switch_node = MakeNode(common()->Switch(succ_count), 1, &index);
576 BasicBlock** succ_blocks = zone()->NewArray<BasicBlock*>(succ_count);
577 for (size_t index = 0; index < case_count; ++index) {
578 int32_t case_value = case_values[index];
579 BasicBlock* case_block = schedule()->NewBasicBlock();
580 Node* case_node =
581 graph()->NewNode(common()->IfValue(case_value), switch_node);
582 schedule()->AddNode(case_block, case_node);
583 schedule()->AddGoto(case_block, Use(case_labels[index]));
584 succ_blocks[index] = case_block;
585 }
586 BasicBlock* default_block = schedule()->NewBasicBlock();
587 Node* default_node = graph()->NewNode(common()->IfDefault(), switch_node);
588 schedule()->AddNode(default_block, default_node);
589 schedule()->AddGoto(default_block, Use(default_label));
590 succ_blocks[case_count] = default_block;
591 schedule()->AddSwitch(CurrentBlock(), switch_node, succ_blocks, succ_count);
592 current_block_ = nullptr;
593 }
594
Return(Node * value)595 void RawMachineAssembler::Return(Node* value) {
596 Node* values[] = {Int32Constant(0), value};
597 Node* ret = MakeNode(common()->Return(1), 2, values);
598 schedule()->AddReturn(CurrentBlock(), ret);
599 current_block_ = nullptr;
600 }
601
Return(Node * v1,Node * v2)602 void RawMachineAssembler::Return(Node* v1, Node* v2) {
603 Node* values[] = {Int32Constant(0), v1, v2};
604 Node* ret = MakeNode(common()->Return(2), 3, values);
605 schedule()->AddReturn(CurrentBlock(), ret);
606 current_block_ = nullptr;
607 }
608
Return(Node * v1,Node * v2,Node * v3)609 void RawMachineAssembler::Return(Node* v1, Node* v2, Node* v3) {
610 Node* values[] = {Int32Constant(0), v1, v2, v3};
611 Node* ret = MakeNode(common()->Return(3), 4, values);
612 schedule()->AddReturn(CurrentBlock(), ret);
613 current_block_ = nullptr;
614 }
615
Return(Node * v1,Node * v2,Node * v3,Node * v4)616 void RawMachineAssembler::Return(Node* v1, Node* v2, Node* v3, Node* v4) {
617 Node* values[] = {Int32Constant(0), v1, v2, v3, v4};
618 Node* ret = MakeNode(common()->Return(4), 5, values);
619 schedule()->AddReturn(CurrentBlock(), ret);
620 current_block_ = nullptr;
621 }
622
Return(int count,Node * vs[])623 void RawMachineAssembler::Return(int count, Node* vs[]) {
624 using Node_ptr = Node*;
625 Node** values = new Node_ptr[count + 1];
626 values[0] = Int32Constant(0);
627 for (int i = 0; i < count; ++i) values[i + 1] = vs[i];
628 Node* ret = MakeNode(common()->Return(count), count + 1, values);
629 schedule()->AddReturn(CurrentBlock(), ret);
630 current_block_ = nullptr;
631 delete[] values;
632 }
633
PopAndReturn(Node * pop,Node * value)634 void RawMachineAssembler::PopAndReturn(Node* pop, Node* value) {
635 // PopAndReturn is supposed to be using ONLY in CSA/Torque builtins for
636 // dropping ALL JS arguments that are currently located on the stack.
637 // The check below ensures that there are no directly accessible stack
638 // parameters from current builtin, which implies that the builtin with
639 // JS calling convention (TFJ) was created with kDontAdaptArgumentsSentinel.
640 // This simplifies semantics of this instruction because in case of presence
641 // of directly accessible stack parameters it's impossible to distinguish
642 // the following cases:
643 // 1) stack parameter is included in JS arguments (and therefore it will be
644 // dropped as a part of 'pop' number of arguments),
645 // 2) stack parameter is NOT included in JS arguments (and therefore it should
646 // be dropped in ADDITION to the 'pop' number of arguments).
647 // Additionally, in order to simplify assembly code, PopAndReturn is also
648 // not allowed in builtins with stub linkage and parameters on stack.
649 CHECK_EQ(call_descriptor()->StackParameterCount(), 0);
650 Node* values[] = {pop, value};
651 Node* ret = MakeNode(common()->Return(1), 2, values);
652 schedule()->AddReturn(CurrentBlock(), ret);
653 current_block_ = nullptr;
654 }
655
PopAndReturn(Node * pop,Node * v1,Node * v2)656 void RawMachineAssembler::PopAndReturn(Node* pop, Node* v1, Node* v2) {
657 Node* values[] = {pop, v1, v2};
658 Node* ret = MakeNode(common()->Return(2), 3, values);
659 schedule()->AddReturn(CurrentBlock(), ret);
660 current_block_ = nullptr;
661 }
662
PopAndReturn(Node * pop,Node * v1,Node * v2,Node * v3)663 void RawMachineAssembler::PopAndReturn(Node* pop, Node* v1, Node* v2,
664 Node* v3) {
665 Node* values[] = {pop, v1, v2, v3};
666 Node* ret = MakeNode(common()->Return(3), 4, values);
667 schedule()->AddReturn(CurrentBlock(), ret);
668 current_block_ = nullptr;
669 }
670
PopAndReturn(Node * pop,Node * v1,Node * v2,Node * v3,Node * v4)671 void RawMachineAssembler::PopAndReturn(Node* pop, Node* v1, Node* v2, Node* v3,
672 Node* v4) {
673 Node* values[] = {pop, v1, v2, v3, v4};
674 Node* ret = MakeNode(common()->Return(4), 5, values);
675 schedule()->AddReturn(CurrentBlock(), ret);
676 current_block_ = nullptr;
677 }
678
// Emits a node that aborts execution with |message| when a CSA assert fails.
void RawMachineAssembler::AbortCSAAssert(Node* message) {
  AddNode(machine()->AbortCSAAssert(), message);
}
682
// Emits a machine-level debug-break instruction.
void RawMachineAssembler::DebugBreak() { AddNode(machine()->DebugBreak()); }
684
// Terminates the current block as unreachable, modeled as a Throw so the
// scheduler treats it as an end of control flow.
void RawMachineAssembler::Unreachable() {
  Node* ret = MakeNode(common()->Throw(), 0, nullptr);
  schedule()->AddThrow(CurrentBlock(), ret);
  current_block_ = nullptr;
}
690
Comment(const std::string & msg)691 void RawMachineAssembler::Comment(const std::string& msg) {
692 size_t length = msg.length() + 1;
693 char* zone_buffer = zone()->NewArray<char>(length);
694 MemCopy(zone_buffer, msg.c_str(), length);
695 AddNode(machine()->Comment(zone_buffer));
696 }
697
// Emits a StaticAssert node checking |value|; |source| identifies the
// asserting code for diagnostics.
void RawMachineAssembler::StaticAssert(Node* value, const char* source) {
  AddNode(common()->StaticAssert(source), value);
}
701
// Emits a call without frame state. |inputs| holds the call target followed
// by the parameters.
Node* RawMachineAssembler::CallN(CallDescriptor* call_descriptor,
                                 int input_count, Node* const* inputs) {
  DCHECK(!call_descriptor->NeedsFrameState());
  // +1 is for target.
  DCHECK_EQ(input_count, call_descriptor->ParameterCount() + 1);
  return AddNode(common()->Call(call_descriptor), input_count, inputs);
}
709
// Emits a call that carries a frame state. |inputs| holds the call target,
// the parameters, and finally the frame state.
Node* RawMachineAssembler::CallNWithFrameState(CallDescriptor* call_descriptor,
                                               int input_count,
                                               Node* const* inputs) {
  DCHECK(call_descriptor->NeedsFrameState());
  // +2 is for target and frame state.
  DCHECK_EQ(input_count, call_descriptor->ParameterCount() + 2);
  return AddNode(common()->Call(call_descriptor), input_count, inputs);
}
718
// Emits a tail call, which terminates the current block. |inputs| holds the
// call target followed by the parameters.
void RawMachineAssembler::TailCallN(CallDescriptor* call_descriptor,
                                    int input_count, Node* const* inputs) {
  // +1 is for target.
  DCHECK_EQ(input_count, call_descriptor->ParameterCount() + 1);
  Node* tail_call =
      MakeNode(common()->TailCall(call_descriptor), input_count, inputs);
  schedule()->AddTailCall(CurrentBlock(), tail_call);
  current_block_ = nullptr;
}
728
729 namespace {
730
731 enum FunctionDescriptorMode { kHasFunctionDescriptor, kNoFunctionDescriptor };
732
CallCFunctionImpl(RawMachineAssembler * rasm,Node * function,MachineType return_type,std::initializer_list<RawMachineAssembler::CFunctionArg> args,bool caller_saved_regs,SaveFPRegsMode mode,FunctionDescriptorMode no_function_descriptor)733 Node* CallCFunctionImpl(
734 RawMachineAssembler* rasm, Node* function, MachineType return_type,
735 std::initializer_list<RawMachineAssembler::CFunctionArg> args,
736 bool caller_saved_regs, SaveFPRegsMode mode,
737 FunctionDescriptorMode no_function_descriptor) {
738 static constexpr std::size_t kNumCArgs = 10;
739
740 MachineSignature::Builder builder(rasm->zone(), 1, args.size());
741 builder.AddReturn(return_type);
742 for (const auto& arg : args) builder.AddParam(arg.first);
743
744 bool caller_saved_fp_regs = caller_saved_regs && (mode == kSaveFPRegs);
745 CallDescriptor::Flags flags = CallDescriptor::kNoFlags;
746 if (caller_saved_regs) flags |= CallDescriptor::kCallerSavedRegisters;
747 if (caller_saved_fp_regs) flags |= CallDescriptor::kCallerSavedFPRegisters;
748 if (no_function_descriptor) flags |= CallDescriptor::kNoFunctionDescriptor;
749 auto call_descriptor =
750 Linkage::GetSimplifiedCDescriptor(rasm->zone(), builder.Build(), flags);
751
752 base::SmallVector<Node*, kNumCArgs> nodes(args.size() + 1);
753 nodes[0] = function;
754 std::transform(
755 args.begin(), args.end(), std::next(nodes.begin()),
756 [](const RawMachineAssembler::CFunctionArg& arg) { return arg.second; });
757
758 auto common = rasm->common();
759 return rasm->AddNode(common->Call(call_descriptor),
760 static_cast<int>(nodes.size()), nodes.begin());
761 }
762
763 } // namespace
764
CallCFunction(Node * function,MachineType return_type,std::initializer_list<RawMachineAssembler::CFunctionArg> args)765 Node* RawMachineAssembler::CallCFunction(
766 Node* function, MachineType return_type,
767 std::initializer_list<RawMachineAssembler::CFunctionArg> args) {
768 return CallCFunctionImpl(this, function, return_type, args, false,
769 kDontSaveFPRegs, kHasFunctionDescriptor);
770 }
771
CallCFunctionWithoutFunctionDescriptor(Node * function,MachineType return_type,std::initializer_list<RawMachineAssembler::CFunctionArg> args)772 Node* RawMachineAssembler::CallCFunctionWithoutFunctionDescriptor(
773 Node* function, MachineType return_type,
774 std::initializer_list<RawMachineAssembler::CFunctionArg> args) {
775 return CallCFunctionImpl(this, function, return_type, args, false,
776 kDontSaveFPRegs, kNoFunctionDescriptor);
777 }
778
CallCFunctionWithCallerSavedRegisters(Node * function,MachineType return_type,SaveFPRegsMode mode,std::initializer_list<RawMachineAssembler::CFunctionArg> args)779 Node* RawMachineAssembler::CallCFunctionWithCallerSavedRegisters(
780 Node* function, MachineType return_type, SaveFPRegsMode mode,
781 std::initializer_list<RawMachineAssembler::CFunctionArg> args) {
782 return CallCFunctionImpl(this, function, return_type, args, true, mode,
783 kHasFunctionDescriptor);
784 }
785
Use(RawMachineLabel * label)786 BasicBlock* RawMachineAssembler::Use(RawMachineLabel* label) {
787 label->used_ = true;
788 return EnsureBlock(label);
789 }
790
EnsureBlock(RawMachineLabel * label)791 BasicBlock* RawMachineAssembler::EnsureBlock(RawMachineLabel* label) {
792 if (label->block_ == nullptr) {
793 label->block_ = schedule()->NewBasicBlock();
794 }
795 return label->block_;
796 }
797
Bind(RawMachineLabel * label)798 void RawMachineAssembler::Bind(RawMachineLabel* label) {
799 DCHECK_NULL(current_block_);
800 DCHECK(!label->bound_);
801 label->bound_ = true;
802 current_block_ = EnsureBlock(label);
803 current_block_->set_deferred(label->deferred_);
804 }
805
806 #if DEBUG
Bind(RawMachineLabel * label,AssemblerDebugInfo info)807 void RawMachineAssembler::Bind(RawMachineLabel* label,
808 AssemblerDebugInfo info) {
809 if (current_block_ != nullptr) {
810 std::stringstream str;
811 str << "Binding label without closing previous block:"
812 << "\n# label: " << info
813 << "\n# previous block: " << *current_block_;
814 FATAL("%s", str.str().c_str());
815 }
816 Bind(label);
817 current_block_->set_debug_info(info);
818 }
819
PrintCurrentBlock(std::ostream & os)820 void RawMachineAssembler::PrintCurrentBlock(std::ostream& os) {
821 os << CurrentBlock();
822 }
823
SetInitialDebugInformation(AssemblerDebugInfo debug_info)824 void RawMachineAssembler::SetInitialDebugInformation(
825 AssemblerDebugInfo debug_info) {
826 CurrentBlock()->set_debug_info(debug_info);
827 }
828 #endif // DEBUG
829
InsideBlock()830 bool RawMachineAssembler::InsideBlock() { return current_block_ != nullptr; }
831
CurrentBlock()832 BasicBlock* RawMachineAssembler::CurrentBlock() {
833 DCHECK(current_block_);
834 return current_block_;
835 }
836
Phi(MachineRepresentation rep,int input_count,Node * const * inputs)837 Node* RawMachineAssembler::Phi(MachineRepresentation rep, int input_count,
838 Node* const* inputs) {
839 Node** buffer = zone()->NewArray<Node*>(input_count + 1);
840 std::copy(inputs, inputs + input_count, buffer);
841 buffer[input_count] = graph()->start();
842 return AddNode(common()->Phi(rep, input_count), input_count + 1, buffer);
843 }
844
AppendPhiInput(Node * phi,Node * new_input)845 void RawMachineAssembler::AppendPhiInput(Node* phi, Node* new_input) {
846 const Operator* op = phi->op();
847 const Operator* new_op = common()->ResizeMergeOrPhi(op, phi->InputCount());
848 phi->InsertInput(zone(), phi->InputCount() - 1, new_input);
849 NodeProperties::ChangeOp(phi, new_op);
850 }
851
AddNode(const Operator * op,int input_count,Node * const * inputs)852 Node* RawMachineAssembler::AddNode(const Operator* op, int input_count,
853 Node* const* inputs) {
854 DCHECK_NOT_NULL(schedule_);
855 DCHECK_NOT_NULL(current_block_);
856 Node* node = MakeNode(op, input_count, inputs);
857 schedule()->AddNode(CurrentBlock(), node);
858 return node;
859 }
860
MakeNode(const Operator * op,int input_count,Node * const * inputs)861 Node* RawMachineAssembler::MakeNode(const Operator* op, int input_count,
862 Node* const* inputs) {
863 // The raw machine assembler nodes do not have effect and control inputs,
864 // so we disable checking input counts here.
865 return graph()->NewNodeUnchecked(op, input_count, inputs);
866 }
867
~RawMachineLabel()868 RawMachineLabel::~RawMachineLabel() {
869 #if DEBUG
870 if (bound_ == used_) return;
871 std::stringstream str;
872 if (bound_) {
873 str << "A label has been bound but it's not used."
874 << "\n# label: " << *block_;
875 } else {
876 str << "A label has been used but it's not bound.";
877 }
878 FATAL("%s", str.str().c_str());
879 #endif // DEBUG
880 }
881
882 } // namespace compiler
883 } // namespace internal
884 } // namespace v8
885