1 // Copyright 2014 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4
5 #include "src/compiler/backend/instruction.h"
6
7 #include <cstddef>
8 #include <iomanip>
9
10 #include "src/codegen/interface-descriptors.h"
11 #include "src/codegen/register-configuration.h"
12 #include "src/codegen/source-position.h"
13 #include "src/compiler/common-operator.h"
14 #include "src/compiler/graph.h"
15 #include "src/compiler/node.h"
16 #include "src/compiler/schedule.h"
17 #include "src/execution/frames.h"
18 #include "src/utils/ostreams.h"
19
20 namespace v8 {
21 namespace internal {
22 namespace compiler {
23
// Indirection through a function pointer so tests can substitute an
// alternative RegisterConfiguration; defaults to the build's standard one.
const RegisterConfiguration* (*GetRegConfig)() = RegisterConfiguration::Default;
25
CommuteFlagsCondition(FlagsCondition condition)26 FlagsCondition CommuteFlagsCondition(FlagsCondition condition) {
27 switch (condition) {
28 case kSignedLessThan:
29 return kSignedGreaterThan;
30 case kSignedGreaterThanOrEqual:
31 return kSignedLessThanOrEqual;
32 case kSignedLessThanOrEqual:
33 return kSignedGreaterThanOrEqual;
34 case kSignedGreaterThan:
35 return kSignedLessThan;
36 case kUnsignedLessThan:
37 return kUnsignedGreaterThan;
38 case kUnsignedGreaterThanOrEqual:
39 return kUnsignedLessThanOrEqual;
40 case kUnsignedLessThanOrEqual:
41 return kUnsignedGreaterThanOrEqual;
42 case kUnsignedGreaterThan:
43 return kUnsignedLessThan;
44 case kFloatLessThanOrUnordered:
45 return kFloatGreaterThanOrUnordered;
46 case kFloatGreaterThanOrEqual:
47 return kFloatLessThanOrEqual;
48 case kFloatLessThanOrEqual:
49 return kFloatGreaterThanOrEqual;
50 case kFloatGreaterThanOrUnordered:
51 return kFloatLessThanOrUnordered;
52 case kFloatLessThan:
53 return kFloatGreaterThan;
54 case kFloatGreaterThanOrEqualOrUnordered:
55 return kFloatLessThanOrEqualOrUnordered;
56 case kFloatLessThanOrEqualOrUnordered:
57 return kFloatGreaterThanOrEqualOrUnordered;
58 case kFloatGreaterThan:
59 return kFloatLessThan;
60 case kPositiveOrZero:
61 case kNegative:
62 UNREACHABLE();
63 case kEqual:
64 case kNotEqual:
65 case kOverflow:
66 case kNotOverflow:
67 case kUnorderedEqual:
68 case kUnorderedNotEqual:
69 return condition;
70 }
71 UNREACHABLE();
72 }
73
InterferesWith(const InstructionOperand & other) const74 bool InstructionOperand::InterferesWith(const InstructionOperand& other) const {
75 if (kSimpleFPAliasing || !this->IsFPLocationOperand() ||
76 !other.IsFPLocationOperand())
77 return EqualsCanonicalized(other);
78 // Aliasing is complex and both operands are fp locations.
79 const LocationOperand& loc = *LocationOperand::cast(this);
80 const LocationOperand& other_loc = LocationOperand::cast(other);
81 LocationOperand::LocationKind kind = loc.location_kind();
82 LocationOperand::LocationKind other_kind = other_loc.location_kind();
83 if (kind != other_kind) return false;
84 MachineRepresentation rep = loc.representation();
85 MachineRepresentation other_rep = other_loc.representation();
86 if (rep == other_rep) return EqualsCanonicalized(other);
87 if (kind == LocationOperand::REGISTER) {
88 // FP register-register interference.
89 return GetRegConfig()->AreAliases(rep, loc.register_code(), other_rep,
90 other_loc.register_code());
91 } else {
92 // FP slot-slot interference. Slots of different FP reps can alias because
93 // the gap resolver may break a move into 2 or 4 equivalent smaller moves.
94 DCHECK_EQ(LocationOperand::STACK_SLOT, kind);
95 int index_hi = loc.index();
96 int index_lo =
97 index_hi - (1 << ElementSizeLog2Of(rep)) / kSystemPointerSize + 1;
98 int other_index_hi = other_loc.index();
99 int other_index_lo =
100 other_index_hi -
101 (1 << ElementSizeLog2Of(other_rep)) / kSystemPointerSize + 1;
102 return other_index_hi >= index_lo && index_hi >= other_index_lo;
103 }
104 return false;
105 }
106
IsCompatible(LocationOperand * op)107 bool LocationOperand::IsCompatible(LocationOperand* op) {
108 if (IsRegister() || IsStackSlot()) {
109 return op->IsRegister() || op->IsStackSlot();
110 } else if (kSimpleFPAliasing) {
111 // A backend may choose to generate the same instruction sequence regardless
112 // of the FP representation. As a result, we can relax the compatibility and
113 // allow a Double to be moved in a Float for example. However, this is only
114 // allowed if registers do not overlap.
115 return (IsFPRegister() || IsFPStackSlot()) &&
116 (op->IsFPRegister() || op->IsFPStackSlot());
117 } else if (IsFloatRegister() || IsFloatStackSlot()) {
118 return op->IsFloatRegister() || op->IsFloatStackSlot();
119 } else if (IsDoubleRegister() || IsDoubleStackSlot()) {
120 return op->IsDoubleRegister() || op->IsDoubleStackSlot();
121 } else {
122 return (IsSimd128Register() || IsSimd128StackSlot()) &&
123 (op->IsSimd128Register() || op->IsSimd128StackSlot());
124 }
125 }
126
Print() const127 void InstructionOperand::Print() const { StdoutStream{} << *this << std::endl; }
128
// Pretty-prints an operand for tracing/debug output. The textual forms are:
//   vN(...)        unallocated operand with its allocation policy
//   [constant:N]   constant operand (by virtual register)
//   #N / [immediate:N]  inline vs. indexed immediate
//   [pending: ...] pending operand chain
//   [loc|R|rep]    allocated register or stack slot with representation tag
std::ostream& operator<<(std::ostream& os, const InstructionOperand& op) {
  switch (op.kind()) {
    case InstructionOperand::UNALLOCATED: {
      const UnallocatedOperand* unalloc = UnallocatedOperand::cast(&op);
      os << "v" << unalloc->virtual_register();
      if (unalloc->basic_policy() == UnallocatedOperand::FIXED_SLOT) {
        return os << "(=" << unalloc->fixed_slot_index() << "S)";
      }
      // Every extended policy returns below, so control cannot fall out of
      // this case.
      switch (unalloc->extended_policy()) {
        case UnallocatedOperand::NONE:
          return os;
        case UnallocatedOperand::FIXED_REGISTER:
          return os << "(="
                    << Register::from_code(unalloc->fixed_register_index())
                    << ")";
        case UnallocatedOperand::FIXED_FP_REGISTER:
          return os << "(="
                    << DoubleRegister::from_code(
                           unalloc->fixed_register_index())
                    << ")";
        case UnallocatedOperand::MUST_HAVE_REGISTER:
          return os << "(R)";
        case UnallocatedOperand::MUST_HAVE_SLOT:
          return os << "(S)";
        case UnallocatedOperand::SAME_AS_FIRST_INPUT:
          return os << "(1)";
        case UnallocatedOperand::REGISTER_OR_SLOT:
          return os << "(-)";
        case UnallocatedOperand::REGISTER_OR_SLOT_OR_CONSTANT:
          return os << "(*)";
      }
    }
    case InstructionOperand::CONSTANT:
      return os << "[constant:" << ConstantOperand::cast(op).virtual_register()
                << "]";
    case InstructionOperand::IMMEDIATE: {
      ImmediateOperand imm = ImmediateOperand::cast(op);
      switch (imm.type()) {
        case ImmediateOperand::INLINE:
          return os << "#" << imm.inline_value();
        case ImmediateOperand::INDEXED:
          return os << "[immediate:" << imm.indexed_value() << "]";
      }
    }
    case InstructionOperand::PENDING:
      return os << "[pending: " << PendingOperand::cast(op).next() << "]";
    case InstructionOperand::ALLOCATED: {
      LocationOperand allocated = LocationOperand::cast(op);
      // First emit the location (slot index or register name)...
      if (op.IsStackSlot()) {
        os << "[stack:" << allocated.index();
      } else if (op.IsFPStackSlot()) {
        os << "[fp_stack:" << allocated.index();
      } else if (op.IsRegister()) {
        const char* name =
            allocated.register_code() < Register::kNumRegisters
                ? RegisterName(Register::from_code(allocated.register_code()))
                : Register::GetSpecialRegisterName(allocated.register_code());
        os << "[" << name << "|R";
      } else if (op.IsDoubleRegister()) {
        os << "[" << DoubleRegister::from_code(allocated.register_code())
           << "|R";
      } else if (op.IsFloatRegister()) {
        os << "[" << FloatRegister::from_code(allocated.register_code())
           << "|R";
      } else {
        DCHECK(op.IsSimd128Register());
        os << "[" << Simd128Register::from_code(allocated.register_code())
           << "|R";
      }
      // ...then a short tag for the machine representation.
      switch (allocated.representation()) {
        case MachineRepresentation::kNone:
          os << "|-";
          break;
        case MachineRepresentation::kBit:
          os << "|b";
          break;
        case MachineRepresentation::kWord8:
          os << "|w8";
          break;
        case MachineRepresentation::kWord16:
          os << "|w16";
          break;
        case MachineRepresentation::kWord32:
          os << "|w32";
          break;
        case MachineRepresentation::kWord64:
          os << "|w64";
          break;
        case MachineRepresentation::kFloat32:
          os << "|f32";
          break;
        case MachineRepresentation::kFloat64:
          os << "|f64";
          break;
        case MachineRepresentation::kSimd128:
          os << "|s128";
          break;
        case MachineRepresentation::kTaggedSigned:
          os << "|ts";
          break;
        case MachineRepresentation::kTaggedPointer:
          os << "|tp";
          break;
        case MachineRepresentation::kTagged:
          os << "|t";
          break;
        case MachineRepresentation::kCompressedPointer:
          os << "|cp";
          break;
        case MachineRepresentation::kCompressed:
          os << "|c";
          break;
      }
      return os << "]";
    }
    case InstructionOperand::INVALID:
      return os << "(x)";
  }
  UNREACHABLE();
}
249
Print() const250 void MoveOperands::Print() const {
251 StdoutStream{} << destination() << " = " << source() << std::endl;
252 }
253
operator <<(std::ostream & os,const MoveOperands & mo)254 std::ostream& operator<<(std::ostream& os, const MoveOperands& mo) {
255 os << mo.destination();
256 if (!mo.source().Equals(mo.destination())) {
257 os << " = " << mo.source();
258 }
259 return os << ";";
260 }
261
IsRedundant() const262 bool ParallelMove::IsRedundant() const {
263 for (MoveOperands* move : *this) {
264 if (!move->IsRedundant()) return false;
265 }
266 return true;
267 }
268
// Prepares |move| for insertion after this ParallelMove: rewrites its source
// if some existing move defines that source, and collects into |to_eliminate|
// the moves whose destinations |move| will overwrite.
void ParallelMove::PrepareInsertAfter(
    MoveOperands* move, ZoneVector<MoveOperands*>* to_eliminate) const {
  // Without FP aliasing, at most one replacement and one eliminated move can
  // exist, so we may stop scanning once both are found.
  bool no_aliasing =
      kSimpleFPAliasing || !move->destination().IsFPLocationOperand();
  MoveOperands* replacement = nullptr;
  MoveOperands* eliminated = nullptr;
  for (MoveOperands* curr : *this) {
    if (curr->IsEliminated()) continue;
    if (curr->destination().EqualsCanonicalized(move->source())) {
      // We must replace move's source with curr's destination in order to
      // insert it into this ParallelMove.
      DCHECK(!replacement);
      replacement = curr;
      if (no_aliasing && eliminated != nullptr) break;
    } else if (curr->destination().InterferesWith(move->destination())) {
      // We can eliminate curr, since move overwrites at least a part of its
      // destination, implying its value is no longer live.
      eliminated = curr;
      to_eliminate->push_back(curr);
      if (no_aliasing && replacement != nullptr) break;
    }
  }
  // Forward the value through the chain: move now reads the original source.
  if (replacement != nullptr) move->set_source(replacement->source());
}
293
// Creates an instruction with the given opcode and no outputs, inputs or
// temps; gap moves start out empty.
Instruction::Instruction(InstructionCode opcode)
    : opcode_(opcode),
      bit_field_(OutputCountField::encode(0) | InputCountField::encode(0) |
                 TempCountField::encode(0) | IsCallField::encode(false)),
      reference_map_(nullptr),
      block_(nullptr) {
  parallel_moves_[0] = nullptr;
  parallel_moves_[1] = nullptr;

  // PendingOperands are required to be 8 byte aligned.
  STATIC_ASSERT(offsetof(Instruction, operands_) % 8 == 0);
}
306
Instruction(InstructionCode opcode,size_t output_count,InstructionOperand * outputs,size_t input_count,InstructionOperand * inputs,size_t temp_count,InstructionOperand * temps)307 Instruction::Instruction(InstructionCode opcode, size_t output_count,
308 InstructionOperand* outputs, size_t input_count,
309 InstructionOperand* inputs, size_t temp_count,
310 InstructionOperand* temps)
311 : opcode_(opcode),
312 bit_field_(OutputCountField::encode(output_count) |
313 InputCountField::encode(input_count) |
314 TempCountField::encode(temp_count) |
315 IsCallField::encode(false)),
316 reference_map_(nullptr),
317 block_(nullptr) {
318 parallel_moves_[0] = nullptr;
319 parallel_moves_[1] = nullptr;
320 size_t offset = 0;
321 for (size_t i = 0; i < output_count; ++i) {
322 DCHECK(!outputs[i].IsInvalid());
323 operands_[offset++] = outputs[i];
324 }
325 for (size_t i = 0; i < input_count; ++i) {
326 DCHECK(!inputs[i].IsInvalid());
327 operands_[offset++] = inputs[i];
328 }
329 for (size_t i = 0; i < temp_count; ++i) {
330 DCHECK(!temps[i].IsInvalid());
331 operands_[offset++] = temps[i];
332 }
333 }
334
AreMovesRedundant() const335 bool Instruction::AreMovesRedundant() const {
336 for (int i = Instruction::FIRST_GAP_POSITION;
337 i <= Instruction::LAST_GAP_POSITION; i++) {
338 if (parallel_moves_[i] != nullptr && !parallel_moves_[i]->IsRedundant()) {
339 return false;
340 }
341 }
342 return true;
343 }
344
Print() const345 void Instruction::Print() const { StdoutStream{} << *this << std::endl; }
346
operator <<(std::ostream & os,const ParallelMove & pm)347 std::ostream& operator<<(std::ostream& os, const ParallelMove& pm) {
348 const char* space = "";
349 for (MoveOperands* move : pm) {
350 if (move->IsEliminated()) continue;
351 os << space << *move;
352 space = " ";
353 }
354 return os;
355 }
356
RecordReference(const AllocatedOperand & op)357 void ReferenceMap::RecordReference(const AllocatedOperand& op) {
358 // Do not record arguments as pointers.
359 if (op.IsStackSlot() && LocationOperand::cast(op).index() < 0) return;
360 DCHECK(!op.IsFPRegister() && !op.IsFPStackSlot());
361 reference_operands_.push_back(op);
362 }
363
operator <<(std::ostream & os,const ReferenceMap & pm)364 std::ostream& operator<<(std::ostream& os, const ReferenceMap& pm) {
365 os << "{";
366 const char* separator = "";
367 for (const InstructionOperand& op : pm.reference_operands_) {
368 os << separator << op;
369 separator = ";";
370 }
371 return os << "}";
372 }
373
// Prints the symbolic name of an architecture opcode. The cases are generated
// from ARCH_OPCODE_LIST, so the switch is exhaustive by construction.
std::ostream& operator<<(std::ostream& os, const ArchOpcode& ao) {
  switch (ao) {
#define CASE(Name) \
  case k##Name:    \
    return os << #Name;
    ARCH_OPCODE_LIST(CASE)
#undef CASE
  }
  UNREACHABLE();
}
384
// Prints the symbolic name of an addressing mode; kMode_None prints nothing.
// The remaining cases are generated from TARGET_ADDRESSING_MODE_LIST.
std::ostream& operator<<(std::ostream& os, const AddressingMode& am) {
  switch (am) {
    case kMode_None:
      return os;
#define CASE(Name)   \
  case kMode_##Name: \
    return os << #Name;
    TARGET_ADDRESSING_MODE_LIST(CASE)
#undef CASE
  }
  UNREACHABLE();
}
397
// Prints a human-readable name for a flags mode; kFlags_none prints nothing.
std::ostream& operator<<(std::ostream& os, const FlagsMode& fm) {
  switch (fm) {
    case kFlags_none:
      return os;
    case kFlags_branch:
      return os << "branch";
    case kFlags_branch_and_poison:
      return os << "branch_and_poison";
    case kFlags_deoptimize:
      return os << "deoptimize";
    case kFlags_deoptimize_and_poison:
      return os << "deoptimize_and_poison";
    case kFlags_set:
      return os << "set";
    case kFlags_trap:
      return os << "trap";
  }
  UNREACHABLE();
}
417
// Prints a human-readable description of a flags condition.
std::ostream& operator<<(std::ostream& os, const FlagsCondition& fc) {
  switch (fc) {
    case kEqual:
      return os << "equal";
    case kNotEqual:
      return os << "not equal";
    case kSignedLessThan:
      return os << "signed less than";
    case kSignedGreaterThanOrEqual:
      return os << "signed greater than or equal";
    case kSignedLessThanOrEqual:
      return os << "signed less than or equal";
    case kSignedGreaterThan:
      return os << "signed greater than";
    case kUnsignedLessThan:
      return os << "unsigned less than";
    case kUnsignedGreaterThanOrEqual:
      return os << "unsigned greater than or equal";
    case kUnsignedLessThanOrEqual:
      return os << "unsigned less than or equal";
    case kUnsignedGreaterThan:
      return os << "unsigned greater than";
    case kFloatLessThanOrUnordered:
      return os << "less than or unordered (FP)";
    case kFloatGreaterThanOrEqual:
      return os << "greater than or equal (FP)";
    case kFloatLessThanOrEqual:
      return os << "less than or equal (FP)";
    case kFloatGreaterThanOrUnordered:
      return os << "greater than or unordered (FP)";
    case kFloatLessThan:
      return os << "less than (FP)";
    case kFloatGreaterThanOrEqualOrUnordered:
      return os << "greater than, equal or unordered (FP)";
    case kFloatLessThanOrEqualOrUnordered:
      return os << "less than, equal or unordered (FP)";
    case kFloatGreaterThan:
      return os << "greater than (FP)";
    case kUnorderedEqual:
      return os << "unordered equal";
    case kUnorderedNotEqual:
      return os << "unordered not equal";
    case kOverflow:
      return os << "overflow";
    case kNotOverflow:
      return os << "not overflow";
    case kPositiveOrZero:
      return os << "positive or zero";
    case kNegative:
      return os << "negative";
  }
  UNREACHABLE();
}
471
operator <<(std::ostream & os,const Instruction & instr)472 std::ostream& operator<<(std::ostream& os, const Instruction& instr) {
473 os << "gap ";
474 for (int i = Instruction::FIRST_GAP_POSITION;
475 i <= Instruction::LAST_GAP_POSITION; i++) {
476 os << "(";
477 if (instr.parallel_moves()[i] != nullptr) {
478 os << *instr.parallel_moves()[i];
479 }
480 os << ") ";
481 }
482 os << "\n ";
483
484 if (instr.OutputCount() == 1) {
485 os << *instr.OutputAt(0) << " = ";
486 } else if (instr.OutputCount() > 1) {
487 os << "(" << *instr.OutputAt(0);
488 for (size_t i = 1; i < instr.OutputCount(); i++) {
489 os << ", " << *instr.OutputAt(i);
490 }
491 os << ") = ";
492 }
493
494 os << ArchOpcodeField::decode(instr.opcode());
495 AddressingMode am = AddressingModeField::decode(instr.opcode());
496 if (am != kMode_None) {
497 os << " : " << AddressingModeField::decode(instr.opcode());
498 }
499 FlagsMode fm = FlagsModeField::decode(instr.opcode());
500 if (fm != kFlags_none) {
501 os << " && " << fm << " if " << FlagsConditionField::decode(instr.opcode());
502 }
503 for (size_t i = 0; i < instr.InputCount(); i++) {
504 os << " " << *instr.InputAt(i);
505 }
506 return os;
507 }
508
// Wraps a 32-bit integer constant.
Constant::Constant(int32_t v) : type_(kInt32), value_(v) {}
510
Constant(RelocatablePtrConstantInfo info)511 Constant::Constant(RelocatablePtrConstantInfo info) {
512 if (info.type() == RelocatablePtrConstantInfo::kInt32) {
513 type_ = kInt32;
514 } else if (info.type() == RelocatablePtrConstantInfo::kInt64) {
515 type_ = kInt64;
516 } else {
517 UNREACHABLE();
518 }
519 value_ = info.value();
520 rmode_ = info.rmode();
521 }
522
// Reinterprets the stored 64-bit payload as a Handle location.
// Valid only for (compressed) heap-object constants.
Handle<HeapObject> Constant::ToHeapObject() const {
  DCHECK(kHeapObject == type() || kCompressedHeapObject == type());
  Handle<HeapObject> value(
      reinterpret_cast<Address*>(static_cast<intptr_t>(value_)));
  return value;
}
529
// Reinterprets the stored payload as a Code handle; the constant must be a
// (non-compressed) heap object.
Handle<Code> Constant::ToCode() const {
  DCHECK_EQ(kHeapObject, type());
  Handle<Code> value(reinterpret_cast<Address*>(static_cast<intptr_t>(value_)));
  return value;
}
535
// Reinterprets the stored payload as a StringConstantBase pointer; valid only
// for delayed string constants.
const StringConstantBase* Constant::ToDelayedStringConstant() const {
  DCHECK_EQ(kDelayedStringConstant, type());
  const StringConstantBase* value =
      bit_cast<StringConstantBase*>(static_cast<intptr_t>(value_));
  return value;
}
542
// Prints a constant according to its type tag (suffix "l" marks 64-bit
// integers, "f" marks 32-bit floats).
std::ostream& operator<<(std::ostream& os, const Constant& constant) {
  switch (constant.type()) {
    case Constant::kInt32:
      return os << constant.ToInt32();
    case Constant::kInt64:
      return os << constant.ToInt64() << "l";
    case Constant::kFloat32:
      return os << constant.ToFloat32() << "f";
    case Constant::kFloat64:
      return os << constant.ToFloat64().value();
    case Constant::kExternalReference:
      return os << constant.ToExternalReference().address();
    case Constant::kHeapObject:  // Fall through.
    case Constant::kCompressedHeapObject:
      return os << Brief(*constant.ToHeapObject());
    case Constant::kRpoNumber:
      return os << "RPO" << constant.ToRpoNumber().ToInt();
    case Constant::kDelayedStringConstant:
      return os << "DelayedStringConstant: "
                << constant.ToDelayedStringConstant();
  }
  UNREACHABLE();
}
566
// Creates a phi for |virtual_register| with |input_count| inputs, all
// initially marked invalid until SetInput() fills them in.
PhiInstruction::PhiInstruction(Zone* zone, int virtual_register,
                               size_t input_count)
    : virtual_register_(virtual_register),
      output_(UnallocatedOperand(UnallocatedOperand::NONE, virtual_register)),
      operands_(input_count, InstructionOperand::kInvalidVirtualRegister,
                zone) {}
573
// Sets the phi input at |offset|; the slot must not have been set before.
void PhiInstruction::SetInput(size_t offset, int virtual_register) {
  DCHECK_EQ(InstructionOperand::kInvalidVirtualRegister, operands_[offset]);
  operands_[offset] = virtual_register;
}
578
// Replaces an already-set phi input at |offset| with a new virtual register.
void PhiInstruction::RenameInput(size_t offset, int virtual_register) {
  DCHECK_NE(InstructionOperand::kInvalidVirtualRegister, operands_[offset]);
  operands_[offset] = virtual_register;
}
583
// Creates an instruction block; successors/predecessors/phis start empty and
// the assembly-order number is assigned later by ComputeAssemblyOrder().
InstructionBlock::InstructionBlock(Zone* zone, RpoNumber rpo_number,
                                   RpoNumber loop_header, RpoNumber loop_end,
                                   RpoNumber dominator, bool deferred,
                                   bool handler)
    : successors_(zone),
      predecessors_(zone),
      phis_(zone),
      ao_number_(RpoNumber::Invalid()),
      rpo_number_(rpo_number),
      loop_header_(loop_header),
      loop_end_(loop_end),
      dominator_(dominator),
      deferred_(deferred),
      handler_(handler) {}
598
PredecessorIndexOf(RpoNumber rpo_number) const599 size_t InstructionBlock::PredecessorIndexOf(RpoNumber rpo_number) const {
600 size_t j = 0;
601 for (InstructionBlock::Predecessors::const_iterator i = predecessors_.begin();
602 i != predecessors_.end(); ++i, ++j) {
603 if (*i == rpo_number) break;
604 }
605 return j;
606 }
607
GetRpo(const BasicBlock * block)608 static RpoNumber GetRpo(const BasicBlock* block) {
609 if (block == nullptr) return RpoNumber::Invalid();
610 return RpoNumber::FromInt(block->rpo_number());
611 }
612
GetLoopEndRpo(const BasicBlock * block)613 static RpoNumber GetLoopEndRpo(const BasicBlock* block) {
614 if (!block->IsLoopHeader()) return RpoNumber::Invalid();
615 return RpoNumber::FromInt(block->loop_end()->rpo_number());
616 }
617
// Builds an InstructionBlock mirroring a schedule BasicBlock: RPO numbers,
// loop info, deferredness, handler-ness, and the successor/predecessor lists.
static InstructionBlock* InstructionBlockFor(Zone* zone,
                                             const BasicBlock* block) {
  // A block starting with IfException is an exception handler entry.
  bool is_handler =
      !block->empty() && block->front()->opcode() == IrOpcode::kIfException;
  InstructionBlock* instr_block = zone->New<InstructionBlock>(
      zone, GetRpo(block), GetRpo(block->loop_header()), GetLoopEndRpo(block),
      GetRpo(block->dominator()), block->deferred(), is_handler);
  // Map successors and precessors
  instr_block->successors().reserve(block->SuccessorCount());
  for (BasicBlock* successor : block->successors()) {
    instr_block->successors().push_back(GetRpo(successor));
  }
  instr_block->predecessors().reserve(block->PredecessorCount());
  for (BasicBlock* predecessor : block->predecessors()) {
    instr_block->predecessors().push_back(GetRpo(predecessor));
  }
  // Mark blocks reached directly from a switch so they can be aligned.
  if (block->PredecessorCount() == 1 &&
      block->predecessors()[0]->control() == BasicBlock::Control::kSwitch) {
    instr_block->set_switch_target(true);
  }
  return instr_block;
}
640
// Prints a block header (RPO/AO numbers, frame flags, loop info), its
// predecessors, phis, all instructions in its index range, and successors.
std::ostream& operator<<(std::ostream& os,
                         const PrintableInstructionBlock& printable_block) {
  const InstructionBlock* block = printable_block.block_;
  const InstructionSequence* code = printable_block.code_;

  os << "B" << block->rpo_number();
  if (block->ao_number().IsValid()) {
    os << ": AO#" << block->ao_number();
  } else {
    // Assembly order not yet computed for this block.
    os << ": AO#?";
  }
  if (block->IsDeferred()) os << " (deferred)";
  if (!block->needs_frame()) os << " (no frame)";
  if (block->must_construct_frame()) os << " (construct frame)";
  if (block->must_deconstruct_frame()) os << " (deconstruct frame)";
  if (block->IsLoopHeader()) {
    os << " loop blocks: [" << block->rpo_number() << ", " << block->loop_end()
       << ")";
  }
  os << " instructions: [" << block->code_start() << ", " << block->code_end()
     << ")" << std::endl
     << " predecessors:";

  for (RpoNumber pred : block->predecessors()) {
    os << " B" << pred.ToInt();
  }
  os << std::endl;

  for (const PhiInstruction* phi : block->phis()) {
    os << "     phi: " << phi->output() << " =";
    for (int input : phi->operands()) {
      os << " v" << input;
    }
    os << std::endl;
  }

  // Instructions, one per line, right-aligned index.
  for (int j = block->first_instruction_index();
       j <= block->last_instruction_index(); j++) {
    os << "   " << std::setw(5) << j << ": " << *code->InstructionAt(j)
       << std::endl;
  }

  os << " successors:";
  for (RpoNumber succ : block->successors()) {
    os << " B" << succ.ToInt();
  }
  os << std::endl;
  return os;
}
690
// Creates the InstructionBlocks vector, one entry per schedule block in RPO
// order. The vector object itself lives in zone memory (raw allocation plus
// placement new), so it is never explicitly destroyed.
InstructionBlocks* InstructionSequence::InstructionBlocksFor(
    Zone* zone, const Schedule* schedule) {
  InstructionBlocks* blocks = zone->NewArray<InstructionBlocks>(1);
  new (blocks) InstructionBlocks(
      static_cast<int>(schedule->rpo_order()->size()), nullptr, zone);
  size_t rpo_number = 0;
  for (BasicBlockVector::const_iterator it = schedule->rpo_order()->begin();
       it != schedule->rpo_order()->end(); ++it, ++rpo_number) {
    // Each slot is filled exactly once, and the schedule's RPO numbering must
    // agree with our index.
    DCHECK(!(*blocks)[rpo_number]);
    DCHECK(GetRpo(*it).ToSize() == rpo_number);
    (*blocks)[rpo_number] = InstructionBlockFor(zone, *it);
  }
  return blocks;
}
705
ValidateEdgeSplitForm() const706 void InstructionSequence::ValidateEdgeSplitForm() const {
707 // Validate blocks are in edge-split form: no block with multiple successors
708 // has an edge to a block (== a successor) with more than one predecessors.
709 for (const InstructionBlock* block : instruction_blocks()) {
710 if (block->SuccessorCount() > 1) {
711 for (const RpoNumber& successor_id : block->successors()) {
712 const InstructionBlock* successor = InstructionBlockAt(successor_id);
713 // Expect precisely one predecessor: "block".
714 CHECK(successor->PredecessorCount() == 1 &&
715 successor->predecessors()[0] == block->rpo_number());
716 }
717 }
718 }
719 }
720
ValidateDeferredBlockExitPaths() const721 void InstructionSequence::ValidateDeferredBlockExitPaths() const {
722 // A deferred block with more than one successor must have all its successors
723 // deferred.
724 for (const InstructionBlock* block : instruction_blocks()) {
725 if (!block->IsDeferred() || block->SuccessorCount() <= 1) continue;
726 for (RpoNumber successor_id : block->successors()) {
727 CHECK(InstructionBlockAt(successor_id)->IsDeferred());
728 }
729 }
730 }
731
ValidateDeferredBlockEntryPaths() const732 void InstructionSequence::ValidateDeferredBlockEntryPaths() const {
733 // If a deferred block has multiple predecessors, they have to
734 // all be deferred. Otherwise, we can run into a situation where a range
735 // that spills only in deferred blocks inserts its spill in the block, but
736 // other ranges need moves inserted by ResolveControlFlow in the predecessors,
737 // which may clobber the register of this range.
738 for (const InstructionBlock* block : instruction_blocks()) {
739 if (!block->IsDeferred() || block->PredecessorCount() <= 1) continue;
740 for (RpoNumber predecessor_id : block->predecessors()) {
741 CHECK(InstructionBlockAt(predecessor_id)->IsDeferred());
742 }
743 }
744 }
745
// Checks the single-assignment property: every virtual register is defined by
// at most one instruction output across the whole sequence.
void InstructionSequence::ValidateSSA() const {
  // TODO(mtrofin): We could use a local zone here instead.
  BitVector definitions(VirtualRegisterCount(), zone());
  for (const Instruction* instruction : *this) {
    for (size_t i = 0; i < instruction->OutputCount(); ++i) {
      const InstructionOperand* output = instruction->OutputAt(i);
      // Outputs are either constants or unallocated operands; both carry a
      // virtual register number.
      int vreg = (output->IsConstant())
                     ? ConstantOperand::cast(output)->virtual_register()
                     : UnallocatedOperand::cast(output)->virtual_register();
      CHECK(!definitions.Contains(vreg));
      definitions.Add(vreg);
    }
  }
}
760
// Computes the order in which blocks are emitted: non-deferred blocks first
// (optionally rotating loops so a back-edge block precedes its header), then
// all deferred blocks. Assigns each block an ao_number.
void InstructionSequence::ComputeAssemblyOrder() {
  int ao = 0;
  RpoNumber invalid = RpoNumber::Invalid();

  // ao_blocks_ lives in zone memory: raw allocation plus placement new.
  ao_blocks_ = zone()->NewArray<InstructionBlocks>(1);
  new (ao_blocks_) InstructionBlocks(zone());
  ao_blocks_->reserve(instruction_blocks_->size());

  // Place non-deferred blocks.
  for (InstructionBlock* const block : *instruction_blocks_) {
    DCHECK_NOT_NULL(block);
    if (block->IsDeferred()) continue;            // skip deferred blocks.
    if (block->ao_number() != invalid) continue;  // loop rotated.
    if (block->IsLoopHeader()) {
      bool header_align = true;
      if (FLAG_turbo_loop_rotation) {
        // Perform loop rotation for non-deferred loops.
        InstructionBlock* loop_end =
            instruction_blocks_->at(block->loop_end().ToSize() - 1);
        if (loop_end->SuccessorCount() == 1 && /* ends with goto */
            loop_end != block /* not a degenerate infinite loop */) {
          // If the last block has an unconditional jump back to the header,
          // then move it to be in front of the header in the assembly order.
          DCHECK_EQ(block->rpo_number(), loop_end->successors()[0]);
          loop_end->set_ao_number(RpoNumber::FromInt(ao++));
          ao_blocks_->push_back(loop_end);
          // This block will be the new machine-level loop header, so align
          // this block instead of the loop header block.
          loop_end->set_alignment(true);
          header_align = false;
        }
      }
      block->set_alignment(header_align);
    }
    // Switch targets inside loops get aligned for jump-table dispatch.
    if (block->loop_header().IsValid() && block->IsSwitchTarget()) {
      block->set_alignment(true);
    }
    block->set_ao_number(RpoNumber::FromInt(ao++));
    ao_blocks_->push_back(block);
  }
  // Add all leftover (deferred) blocks.
  for (InstructionBlock* const block : *instruction_blocks_) {
    if (block->ao_number() == invalid) {
      block->set_ao_number(RpoNumber::FromInt(ao++));
      ao_blocks_->push_back(block);
    }
  }
  DCHECK_EQ(instruction_blocks_->size(), ao);
}
810
RecomputeAssemblyOrderForTesting()811 void InstructionSequence::RecomputeAssemblyOrderForTesting() {
812 RpoNumber invalid = RpoNumber::Invalid();
813 for (InstructionBlock* block : *instruction_blocks_) {
814 block->set_ao_number(invalid);
815 }
816 ComputeAssemblyOrder();
817 }
818
// Creates an instruction sequence over the given (already constructed)
// instruction blocks; all containers are zone-allocated. Computes the
// assembly order eagerly.
InstructionSequence::InstructionSequence(Isolate* isolate,
                                         Zone* instruction_zone,
                                         InstructionBlocks* instruction_blocks)
    : isolate_(isolate),
      zone_(instruction_zone),
      instruction_blocks_(instruction_blocks),
      ao_blocks_(nullptr),
      source_positions_(zone()),
      constants_(ConstantMap::key_compare(),
                 ConstantMap::allocator_type(zone())),
      immediates_(zone()),
      instructions_(zone()),
      next_virtual_register_(0),
      reference_maps_(zone()),
      representations_(zone()),
      representation_mask_(0),
      deoptimization_entries_(zone()),
      current_block_(nullptr) {
  ComputeAssemblyOrder();
}
839
NextVirtualRegister()840 int InstructionSequence::NextVirtualRegister() {
841 int virtual_register = next_virtual_register_++;
842 CHECK_NE(virtual_register, InstructionOperand::kInvalidVirtualRegister);
843 return virtual_register;
844 }
845
GetBlockStart(RpoNumber rpo) const846 Instruction* InstructionSequence::GetBlockStart(RpoNumber rpo) const {
847 const InstructionBlock* block = InstructionBlockAt(rpo);
848 return InstructionAt(block->code_start());
849 }
850
StartBlock(RpoNumber rpo)851 void InstructionSequence::StartBlock(RpoNumber rpo) {
852 DCHECK_NULL(current_block_);
853 current_block_ = InstructionBlockAt(rpo);
854 int code_start = static_cast<int>(instructions_.size());
855 current_block_->set_code_start(code_start);
856 }
857
EndBlock(RpoNumber rpo)858 void InstructionSequence::EndBlock(RpoNumber rpo) {
859 int end = static_cast<int>(instructions_.size());
860 DCHECK_EQ(current_block_->rpo_number(), rpo);
861 CHECK(current_block_->code_start() >= 0 &&
862 current_block_->code_start() < end);
863 current_block_->set_code_end(end);
864 current_block_ = nullptr;
865 }
866
AddInstruction(Instruction * instr)867 int InstructionSequence::AddInstruction(Instruction* instr) {
868 DCHECK_NOT_NULL(current_block_);
869 int index = static_cast<int>(instructions_.size());
870 instr->set_block(current_block_);
871 instructions_.push_back(instr);
872 if (instr->NeedsReferenceMap()) {
873 DCHECK_NULL(instr->reference_map());
874 ReferenceMap* reference_map = zone()->New<ReferenceMap>(zone());
875 reference_map->set_instruction_position(index);
876 instr->set_reference_map(reference_map);
877 reference_maps_.push_back(reference_map);
878 }
879 return index;
880 }
881
GetInstructionBlock(int instruction_index) const882 InstructionBlock* InstructionSequence::GetInstructionBlock(
883 int instruction_index) const {
884 return instructions()[instruction_index]->block();
885 }
886
FilterRepresentation(MachineRepresentation rep)887 static MachineRepresentation FilterRepresentation(MachineRepresentation rep) {
888 switch (rep) {
889 case MachineRepresentation::kBit:
890 case MachineRepresentation::kWord8:
891 case MachineRepresentation::kWord16:
892 return InstructionSequence::DefaultRepresentation();
893 case MachineRepresentation::kWord32:
894 case MachineRepresentation::kWord64:
895 case MachineRepresentation::kTaggedSigned:
896 case MachineRepresentation::kTaggedPointer:
897 case MachineRepresentation::kTagged:
898 case MachineRepresentation::kFloat32:
899 case MachineRepresentation::kFloat64:
900 case MachineRepresentation::kSimd128:
901 case MachineRepresentation::kCompressedPointer:
902 case MachineRepresentation::kCompressed:
903 return rep;
904 case MachineRepresentation::kNone:
905 break;
906 }
907
908 UNREACHABLE();
909 }
910
GetRepresentation(int virtual_register) const911 MachineRepresentation InstructionSequence::GetRepresentation(
912 int virtual_register) const {
913 DCHECK_LE(0, virtual_register);
914 DCHECK_LT(virtual_register, VirtualRegisterCount());
915 if (virtual_register >= static_cast<int>(representations_.size())) {
916 return DefaultRepresentation();
917 }
918 return representations_[virtual_register];
919 }
920
MarkAsRepresentation(MachineRepresentation rep,int virtual_register)921 void InstructionSequence::MarkAsRepresentation(MachineRepresentation rep,
922 int virtual_register) {
923 DCHECK_LE(0, virtual_register);
924 DCHECK_LT(virtual_register, VirtualRegisterCount());
925 if (virtual_register >= static_cast<int>(representations_.size())) {
926 representations_.resize(VirtualRegisterCount(), DefaultRepresentation());
927 }
928 rep = FilterRepresentation(rep);
929 DCHECK_IMPLIES(representations_[virtual_register] != rep,
930 representations_[virtual_register] == DefaultRepresentation());
931 representations_[virtual_register] = rep;
932 representation_mask_ |= RepresentationBit(rep);
933 }
934
AddDeoptimizationEntry(FrameStateDescriptor * descriptor,DeoptimizeKind kind,DeoptimizeReason reason,FeedbackSource const & feedback)935 int InstructionSequence::AddDeoptimizationEntry(
936 FrameStateDescriptor* descriptor, DeoptimizeKind kind,
937 DeoptimizeReason reason, FeedbackSource const& feedback) {
938 int deoptimization_id = static_cast<int>(deoptimization_entries_.size());
939 deoptimization_entries_.push_back(
940 DeoptimizationEntry(descriptor, kind, reason, feedback));
941 return deoptimization_id;
942 }
943
// Returns the entry previously registered under |state_id| by
// AddDeoptimizationEntry. |state_id| must be a valid id; no bounds check
// is performed here.
DeoptimizationEntry const& InstructionSequence::GetDeoptimizationEntry(
    int state_id) {
  return deoptimization_entries_[state_id];
}
948
InputRpo(Instruction * instr,size_t index)949 RpoNumber InstructionSequence::InputRpo(Instruction* instr, size_t index) {
950 InstructionOperand* operand = instr->InputAt(index);
951 Constant constant =
952 operand->IsImmediate()
953 ? GetImmediate(ImmediateOperand::cast(operand))
954 : GetConstant(ConstantOperand::cast(operand)->virtual_register());
955 return constant.ToRpoNumber();
956 }
957
GetSourcePosition(const Instruction * instr,SourcePosition * result) const958 bool InstructionSequence::GetSourcePosition(const Instruction* instr,
959 SourcePosition* result) const {
960 auto it = source_positions_.find(instr);
961 if (it == source_positions_.end()) return false;
962 *result = it->second;
963 return true;
964 }
965
SetSourcePosition(const Instruction * instr,SourcePosition value)966 void InstructionSequence::SetSourcePosition(const Instruction* instr,
967 SourcePosition value) {
968 source_positions_.insert(std::make_pair(instr, value));
969 }
970
Print() const971 void InstructionSequence::Print() const {
972 StdoutStream{} << *this << std::endl;
973 }
974
PrintBlock(int block_id) const975 void InstructionSequence::PrintBlock(int block_id) const {
976 RpoNumber rpo = RpoNumber::FromInt(block_id);
977 const InstructionBlock* block = InstructionBlockAt(rpo);
978 CHECK(block->rpo_number() == rpo);
979 StdoutStream{} << PrintableInstructionBlock{block, this} << std::endl;
980 }
981
// Backing storage for the test-only register configuration override.
// Stays null until SetRegisterConfigurationForTesting installs one.
const RegisterConfiguration*
    InstructionSequence::registerConfigurationForTesting_ = nullptr;
984
// Returns the test-only register configuration. A test must have
// installed one via SetRegisterConfigurationForTesting beforehand.
const RegisterConfiguration*
InstructionSequence::RegisterConfigurationForTesting() {
  DCHECK_NOT_NULL(registerConfigurationForTesting_);
  return registerConfigurationForTesting_;
}
990
// Installs |regConfig| as the test-only register configuration and
// redirects the file-global GetRegConfig hook to it, so subsequent
// register-configuration lookups in this component use the override.
void InstructionSequence::SetRegisterConfigurationForTesting(
    const RegisterConfiguration* regConfig) {
  registerConfigurationForTesting_ = regConfig;
  GetRegConfig = InstructionSequence::RegisterConfigurationForTesting;
}
996
997 namespace {
998
GetConservativeFrameSizeInBytes(FrameStateType type,size_t parameters_count,size_t locals_count,BailoutId bailout_id)999 size_t GetConservativeFrameSizeInBytes(FrameStateType type,
1000 size_t parameters_count,
1001 size_t locals_count,
1002 BailoutId bailout_id) {
1003 switch (type) {
1004 case FrameStateType::kInterpretedFunction: {
1005 auto info = InterpretedFrameInfo::Conservative(
1006 static_cast<int>(parameters_count), static_cast<int>(locals_count));
1007 return info.frame_size_in_bytes();
1008 }
1009 case FrameStateType::kArgumentsAdaptor: {
1010 auto info = ArgumentsAdaptorFrameInfo::Conservative(
1011 static_cast<int>(parameters_count));
1012 return info.frame_size_in_bytes();
1013 }
1014 case FrameStateType::kConstructStub: {
1015 auto info = ConstructStubFrameInfo::Conservative(
1016 static_cast<int>(parameters_count));
1017 return info.frame_size_in_bytes();
1018 }
1019 case FrameStateType::kBuiltinContinuation:
1020 case FrameStateType::kJavaScriptBuiltinContinuation:
1021 case FrameStateType::kJavaScriptBuiltinContinuationWithCatch: {
1022 const RegisterConfiguration* config = RegisterConfiguration::Default();
1023 auto info = BuiltinContinuationFrameInfo::Conservative(
1024 static_cast<int>(parameters_count),
1025 Builtins::CallInterfaceDescriptorFor(
1026 Builtins::GetBuiltinFromBailoutId(bailout_id)),
1027 config);
1028 return info.frame_size_in_bytes();
1029 }
1030 }
1031 UNREACHABLE();
1032 }
1033
GetTotalConservativeFrameSizeInBytes(FrameStateType type,size_t parameters_count,size_t locals_count,BailoutId bailout_id,FrameStateDescriptor * outer_state)1034 size_t GetTotalConservativeFrameSizeInBytes(FrameStateType type,
1035 size_t parameters_count,
1036 size_t locals_count,
1037 BailoutId bailout_id,
1038 FrameStateDescriptor* outer_state) {
1039 size_t outer_total_conservative_frame_size_in_bytes =
1040 (outer_state == nullptr)
1041 ? 0
1042 : outer_state->total_conservative_frame_size_in_bytes();
1043 return GetConservativeFrameSizeInBytes(type, parameters_count, locals_count,
1044 bailout_id) +
1045 outer_total_conservative_frame_size_in_bytes;
1046 }
1047
1048 } // namespace
1049
// Describes one (possibly inlined) frame for deoptimization purposes.
// The conservative frame size is precomputed here and already includes
// the sizes of all outer frames, so readers need not walk the
// outer_state_ chain.
FrameStateDescriptor::FrameStateDescriptor(
    Zone* zone, FrameStateType type, BailoutId bailout_id,
    OutputFrameStateCombine state_combine, size_t parameters_count,
    size_t locals_count, size_t stack_count,
    MaybeHandle<SharedFunctionInfo> shared_info,
    FrameStateDescriptor* outer_state)
    : type_(type),
      bailout_id_(bailout_id),
      frame_state_combine_(state_combine),
      parameters_count_(parameters_count),
      locals_count_(locals_count),
      stack_count_(stack_count),
      total_conservative_frame_size_in_bytes_(
          GetTotalConservativeFrameSizeInBytes(
              type, parameters_count, locals_count, bailout_id, outer_state)),
      values_(zone),
      shared_info_(shared_info),
      outer_state_(outer_state) {}
1068
// Returns this frame's "height" — the count of variable-size slots it
// contributes — which depends on the frame type's calling convention.
size_t FrameStateDescriptor::GetHeight() const {
  switch (type()) {
    case FrameStateType::kInterpretedFunction:
      return locals_count();  // The accumulator is *not* included.
    case FrameStateType::kBuiltinContinuation:
      // Custom, non-JS calling convention (that does not have a notion of
      // a receiver or context).
      return parameters_count();
    case FrameStateType::kArgumentsAdaptor:
    case FrameStateType::kConstructStub:
    case FrameStateType::kJavaScriptBuiltinContinuation:
    case FrameStateType::kJavaScriptBuiltinContinuationWithCatch:
      // JS linkage. The parameters count
      // - includes the receiver (input 1 in CreateArtificialFrameState, and
      //   passed as part of stack parameters to
      //   CreateJavaScriptBuiltinContinuationFrameState), and
      // - does *not* include the context.
      return parameters_count();
  }
  UNREACHABLE();
}
1090
GetSize() const1091 size_t FrameStateDescriptor::GetSize() const {
1092 return 1 + parameters_count() + locals_count() + stack_count() +
1093 (HasContext() ? 1 : 0);
1094 }
1095
GetTotalSize() const1096 size_t FrameStateDescriptor::GetTotalSize() const {
1097 size_t total_size = 0;
1098 for (const FrameStateDescriptor* iter = this; iter != nullptr;
1099 iter = iter->outer_state_) {
1100 total_size += iter->GetSize();
1101 }
1102 return total_size;
1103 }
1104
GetFrameCount() const1105 size_t FrameStateDescriptor::GetFrameCount() const {
1106 size_t count = 0;
1107 for (const FrameStateDescriptor* iter = this; iter != nullptr;
1108 iter = iter->outer_state_) {
1109 ++count;
1110 }
1111 return count;
1112 }
1113
GetJSFrameCount() const1114 size_t FrameStateDescriptor::GetJSFrameCount() const {
1115 size_t count = 0;
1116 for (const FrameStateDescriptor* iter = this; iter != nullptr;
1117 iter = iter->outer_state_) {
1118 if (FrameStateFunctionInfo::IsJSFunctionType(iter->type_)) {
1119 ++count;
1120 }
1121 }
1122 return count;
1123 }
1124
operator <<(std::ostream & os,const RpoNumber & rpo)1125 std::ostream& operator<<(std::ostream& os, const RpoNumber& rpo) {
1126 return os << rpo.ToSize();
1127 }
1128
operator <<(std::ostream & os,const InstructionSequence & code)1129 std::ostream& operator<<(std::ostream& os, const InstructionSequence& code) {
1130 for (size_t i = 0; i < code.immediates_.size(); ++i) {
1131 Constant constant = code.immediates_[i];
1132 os << "IMM#" << i << ": " << constant << "\n";
1133 }
1134 int i = 0;
1135 for (ConstantMap::const_iterator it = code.constants_.begin();
1136 it != code.constants_.end(); ++i, ++it) {
1137 os << "CST#" << i << ": v" << it->first << " = " << it->second << "\n";
1138 }
1139 for (int i = 0; i < code.InstructionBlockCount(); i++) {
1140 auto* block = code.InstructionBlockAt(RpoNumber::FromInt(i));
1141 os << PrintableInstructionBlock{block, &code};
1142 }
1143 return os;
1144 }
1145
1146 } // namespace compiler
1147 } // namespace internal
1148 } // namespace v8
1149