• Home
  • Line#
  • Scopes#
  • Navigate#
  • Raw
  • Download
1 // Copyright 2014 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4 
5 #include "src/compiler/backend/instruction.h"
6 
#include <algorithm>
#include <cstddef>
#include <iomanip>
9 
10 #include "src/codegen/aligned-slot-allocator.h"
11 #include "src/codegen/interface-descriptors.h"
12 #include "src/codegen/machine-type.h"
13 #include "src/codegen/register-configuration.h"
14 #include "src/codegen/source-position.h"
15 #include "src/compiler/common-operator.h"
16 #include "src/compiler/graph.h"
17 #include "src/compiler/node.h"
18 #include "src/compiler/schedule.h"
19 #include "src/deoptimizer/deoptimizer.h"
20 #include "src/execution/frames.h"
21 #include "src/execution/isolate-utils-inl.h"
22 #include "src/objects/instance-type-inl.h"
23 #include "src/utils/ostreams.h"
24 
25 #if V8_ENABLE_WEBASSEMBLY
26 #include "src/wasm/value-type.h"
27 #endif  // V8_ENABLE_WEBASSEMBLY
28 
29 namespace v8 {
30 namespace internal {
31 namespace compiler {
32 
// File-scope hook returning the active register configuration; initialized to
// the platform default (RegisterConfiguration::Default).
const RegisterConfiguration* (*GetRegConfig)() = RegisterConfiguration::Default;
34 
// Returns the condition to use after the operands of a comparison have been
// swapped, e.g. (a < b) is equivalent to (b > a). Symmetric conditions
// (equality, overflow, unordered-equality) map to themselves; the sign tests
// kPositiveOrZero/kNegative have no commuted form and must not reach here.
FlagsCondition CommuteFlagsCondition(FlagsCondition condition) {
  switch (condition) {
    case kSignedLessThan:
      return kSignedGreaterThan;
    case kSignedGreaterThanOrEqual:
      return kSignedLessThanOrEqual;
    case kSignedLessThanOrEqual:
      return kSignedGreaterThanOrEqual;
    case kSignedGreaterThan:
      return kSignedLessThan;
    case kUnsignedLessThan:
      return kUnsignedGreaterThan;
    case kUnsignedGreaterThanOrEqual:
      return kUnsignedLessThanOrEqual;
    case kUnsignedLessThanOrEqual:
      return kUnsignedGreaterThanOrEqual;
    case kUnsignedGreaterThan:
      return kUnsignedLessThan;
    case kFloatLessThanOrUnordered:
      return kFloatGreaterThanOrUnordered;
    case kFloatGreaterThanOrEqual:
      return kFloatLessThanOrEqual;
    case kFloatLessThanOrEqual:
      return kFloatGreaterThanOrEqual;
    case kFloatGreaterThanOrUnordered:
      return kFloatLessThanOrUnordered;
    case kFloatLessThan:
      return kFloatGreaterThan;
    case kFloatGreaterThanOrEqualOrUnordered:
      return kFloatLessThanOrEqualOrUnordered;
    case kFloatLessThanOrEqualOrUnordered:
      return kFloatGreaterThanOrEqualOrUnordered;
    case kFloatGreaterThan:
      return kFloatLessThan;
    case kPositiveOrZero:
    case kNegative:
      // Sign tests on a single value cannot be commuted.
      UNREACHABLE();
    case kEqual:
    case kNotEqual:
    case kOverflow:
    case kNotOverflow:
    case kUnorderedEqual:
    case kUnorderedNotEqual:
      // Symmetric conditions are their own commuted form.
      return condition;
  }
  UNREACHABLE();
}
82 
// Returns true if this operand and |other| may occupy overlapping storage.
// In the simple case (no combined FP aliasing, no Simd128 stack slots
// involved) this degenerates to canonicalized equality. Otherwise register
// aliasing and multi-slot stack overlap must be considered.
bool InstructionOperand::InterferesWith(const InstructionOperand& other) const {
  const bool kCombineFPAliasing = kFPAliasing == AliasingKind::kCombine &&
                                  this->IsFPLocationOperand() &&
                                  other.IsFPLocationOperand();
  const bool kComplexS128SlotAliasing =
      (this->IsSimd128StackSlot() && other.IsAnyStackSlot()) ||
      (other.IsSimd128StackSlot() && this->IsAnyStackSlot());
  if (!kCombineFPAliasing && !kComplexS128SlotAliasing) {
    // Fast path: exact (canonicalized) equality is the only interference.
    return EqualsCanonicalized(other);
  }
  const LocationOperand& loc = *LocationOperand::cast(this);
  const LocationOperand& other_loc = LocationOperand::cast(other);
  LocationOperand::LocationKind kind = loc.location_kind();
  LocationOperand::LocationKind other_kind = other_loc.location_kind();
  // A register never interferes with a stack slot.
  if (kind != other_kind) return false;
  MachineRepresentation rep = loc.representation();
  MachineRepresentation other_rep = other_loc.representation();

  if (kCombineFPAliasing && !kComplexS128SlotAliasing) {
    if (rep == other_rep) return EqualsCanonicalized(other);
    if (kind == LocationOperand::REGISTER) {
      // FP register-register interference.
      return GetRegConfig()->AreAliases(rep, loc.register_code(), other_rep,
                                        other_loc.register_code());
    }
  }

  // Complex multi-slot operand interference:
  // - slots of different FP reps can alias because the gap resolver may break a
  // move into 2 or 4 equivalent smaller moves,
  // - stack layout can be rearranged for tail calls
  // Interference holds when the two [index_lo, index_hi] slot ranges overlap.
  DCHECK_EQ(LocationOperand::STACK_SLOT, kind);
  int index_hi = loc.index();
  int index_lo =
      index_hi -
      AlignedSlotAllocator::NumSlotsForWidth(ElementSizeInBytes(rep)) + 1;
  int other_index_hi = other_loc.index();
  int other_index_lo =
      other_index_hi -
      AlignedSlotAllocator::NumSlotsForWidth(ElementSizeInBytes(other_rep)) + 1;
  return other_index_hi >= index_lo && index_hi >= other_index_lo;
}
125 
// Returns true if a value in this location could instead live in |op|'s
// location class, i.e. the two locations are interchangeable for a move.
bool LocationOperand::IsCompatible(LocationOperand* op) {
  if (IsRegister() || IsStackSlot()) {
    // General-purpose locations are compatible with each other.
    return op->IsRegister() || op->IsStackSlot();
  } else if (kFPAliasing != AliasingKind::kCombine) {
    // A backend may choose to generate the same instruction sequence regardless
    // of the FP representation. As a result, we can relax the compatibility and
    // allow a Double to be moved in a Float for example. However, this is only
    // allowed if registers do not overlap.
    return (IsFPRegister() || IsFPStackSlot()) &&
           (op->IsFPRegister() || op->IsFPStackSlot());
  } else if (IsFloatRegister() || IsFloatStackSlot()) {
    // With combined aliasing, FP locations must match representation exactly.
    return op->IsFloatRegister() || op->IsFloatStackSlot();
  } else if (IsDoubleRegister() || IsDoubleStackSlot()) {
    return op->IsDoubleRegister() || op->IsDoubleStackSlot();
  } else {
    return (IsSimd128Register() || IsSimd128StackSlot()) &&
           (op->IsSimd128Register() || op->IsSimd128StackSlot());
  }
}
145 
Print() const146 void InstructionOperand::Print() const { StdoutStream{} << *this << std::endl; }
147 
// Prints a human-readable rendering of an operand, used by the various
// instruction/block printers. The format depends on the operand kind:
// unallocated operands show their virtual register and allocation policy,
// allocated operands show their location and machine representation.
std::ostream& operator<<(std::ostream& os, const InstructionOperand& op) {
  switch (op.kind()) {
    case InstructionOperand::UNALLOCATED: {
      const UnallocatedOperand* unalloc = UnallocatedOperand::cast(&op);
      os << "v" << unalloc->virtual_register();
      if (unalloc->basic_policy() == UnallocatedOperand::FIXED_SLOT) {
        return os << "(=" << unalloc->fixed_slot_index() << "S)";
      }
      switch (unalloc->extended_policy()) {
        case UnallocatedOperand::NONE:
          return os;
        case UnallocatedOperand::FIXED_REGISTER:
          return os << "(="
                    << Register::from_code(unalloc->fixed_register_index())
                    << ")";
        case UnallocatedOperand::FIXED_FP_REGISTER:
          return os << "(="
                    << (unalloc->IsSimd128Register()
                            ? i::RegisterName((Simd128Register::from_code(
                                  unalloc->fixed_register_index())))
                            : i::RegisterName(DoubleRegister::from_code(
                                  unalloc->fixed_register_index())))
                    << ")";
        case UnallocatedOperand::MUST_HAVE_REGISTER:
          return os << "(R)";
        case UnallocatedOperand::MUST_HAVE_SLOT:
          return os << "(S)";
        case UnallocatedOperand::SAME_AS_INPUT:
          return os << "(" << unalloc->input_index() << ")";
        case UnallocatedOperand::REGISTER_OR_SLOT:
          return os << "(-)";
        case UnallocatedOperand::REGISTER_OR_SLOT_OR_CONSTANT:
          return os << "(*)";
      }
    }
    case InstructionOperand::CONSTANT:
      return os << "[constant:v" << ConstantOperand::cast(op).virtual_register()
                << "]";
    case InstructionOperand::IMMEDIATE: {
      ImmediateOperand imm = ImmediateOperand::cast(op);
      switch (imm.type()) {
        case ImmediateOperand::INLINE_INT32:
          return os << "#" << imm.inline_int32_value();
        case ImmediateOperand::INLINE_INT64:
          return os << "#" << imm.inline_int64_value();
        case ImmediateOperand::INDEXED_RPO:
          return os << "[rpo_immediate:" << imm.indexed_value() << "]";
        case ImmediateOperand::INDEXED_IMM:
          return os << "[immediate:" << imm.indexed_value() << "]";
      }
    }
    case InstructionOperand::PENDING:
      return os << "[pending: " << PendingOperand::cast(op).next() << "]";
    case InstructionOperand::ALLOCATED: {
      LocationOperand allocated = LocationOperand::cast(op);
      if (op.IsStackSlot()) {
        os << "[stack:" << allocated.index();
      } else if (op.IsFPStackSlot()) {
        os << "[fp_stack:" << allocated.index();
      } else if (op.IsRegister()) {
        // Register codes past kNumRegisters denote special registers.
        const char* name =
            allocated.register_code() < Register::kNumRegisters
                ? RegisterName(Register::from_code(allocated.register_code()))
                : Register::GetSpecialRegisterName(allocated.register_code());
        os << "[" << name << "|R";
      } else if (op.IsDoubleRegister()) {
        os << "[" << DoubleRegister::from_code(allocated.register_code())
           << "|R";
      } else if (op.IsFloatRegister()) {
        os << "[" << FloatRegister::from_code(allocated.register_code())
           << "|R";
      } else {
        DCHECK(op.IsSimd128Register());
        os << "[" << Simd128Register::from_code(allocated.register_code())
           << "|R";
      }
      // Append a short suffix identifying the machine representation.
      switch (allocated.representation()) {
        case MachineRepresentation::kNone:
          os << "|-";
          break;
        case MachineRepresentation::kBit:
          os << "|b";
          break;
        case MachineRepresentation::kWord8:
          os << "|w8";
          break;
        case MachineRepresentation::kWord16:
          os << "|w16";
          break;
        case MachineRepresentation::kWord32:
          os << "|w32";
          break;
        case MachineRepresentation::kWord64:
          os << "|w64";
          break;
        case MachineRepresentation::kFloat32:
          os << "|f32";
          break;
        case MachineRepresentation::kFloat64:
          os << "|f64";
          break;
        case MachineRepresentation::kSimd128:
          os << "|s128";
          break;
        case MachineRepresentation::kTaggedSigned:
          os << "|ts";
          break;
        case MachineRepresentation::kTaggedPointer:
          os << "|tp";
          break;
        case MachineRepresentation::kTagged:
          os << "|t";
          break;
        case MachineRepresentation::kCompressedPointer:
          os << "|cp";
          break;
        case MachineRepresentation::kCompressed:
          os << "|c";
          break;
        case MachineRepresentation::kSandboxedPointer:
          os << "|sb";
          break;
        case MachineRepresentation::kMapWord:
          UNREACHABLE();
      }
      return os << "]";
    }
    case InstructionOperand::INVALID:
      return os << "(x)";
  }
  UNREACHABLE();
}
280 
Print() const281 void MoveOperands::Print() const {
282   StdoutStream{} << destination() << " = " << source() << std::endl;
283 }
284 
operator <<(std::ostream & os,const MoveOperands & mo)285 std::ostream& operator<<(std::ostream& os, const MoveOperands& mo) {
286   os << mo.destination();
287   if (!mo.source().Equals(mo.destination())) {
288     os << " = " << mo.source();
289   }
290   return os;
291 }
292 
IsRedundant() const293 bool ParallelMove::IsRedundant() const {
294   for (MoveOperands* move : *this) {
295     if (!move->IsRedundant()) return false;
296   }
297   return true;
298 }
299 
// Prepares |move| for insertion into this ParallelMove: rewrites its source
// through any existing move that defines it, and collects into |to_eliminate|
// any existing moves whose destination |move| will overwrite.
void ParallelMove::PrepareInsertAfter(
    MoveOperands* move, ZoneVector<MoveOperands*>* to_eliminate) const {
  // With no FP aliasing there is at most one replacement and one eliminated
  // move, so the scan can stop early once both have been found.
  bool no_aliasing = kFPAliasing != AliasingKind::kCombine ||
                     !move->destination().IsFPLocationOperand();
  MoveOperands* replacement = nullptr;
  MoveOperands* eliminated = nullptr;
  for (MoveOperands* curr : *this) {
    if (curr->IsEliminated()) continue;
    if (curr->destination().EqualsCanonicalized(move->source())) {
      // We must replace move's source with curr's destination in order to
      // insert it into this ParallelMove.
      DCHECK(!replacement);
      replacement = curr;
      if (no_aliasing && eliminated != nullptr) break;
    } else if (curr->destination().InterferesWith(move->destination())) {
      // We can eliminate curr, since move overwrites at least a part of its
      // destination, implying its value is no longer live.
      eliminated = curr;
      to_eliminate->push_back(curr);
      if (no_aliasing && replacement != nullptr) break;
    }
  }
  if (replacement != nullptr) move->set_source(replacement->source());
}
324 
// Constructs an instruction with no outputs, inputs, or temps.
Instruction::Instruction(InstructionCode opcode)
    : opcode_(opcode),
      bit_field_(OutputCountField::encode(0) | InputCountField::encode(0) |
                 TempCountField::encode(0) | IsCallField::encode(false)),
      reference_map_(nullptr),
      block_(nullptr) {
  parallel_moves_[0] = nullptr;
  parallel_moves_[1] = nullptr;

  // PendingOperands are required to be 8 byte aligned.
  STATIC_ASSERT(offsetof(Instruction, operands_) % 8 == 0);
}
337 
// Constructs an instruction and copies its operands into the trailing
// operands_ array, packed in the fixed order: outputs, then inputs, then
// temps. The counts are encoded into bit_field_ so the accessors can locate
// each section.
Instruction::Instruction(InstructionCode opcode, size_t output_count,
                         InstructionOperand* outputs, size_t input_count,
                         InstructionOperand* inputs, size_t temp_count,
                         InstructionOperand* temps)
    : opcode_(opcode),
      bit_field_(OutputCountField::encode(output_count) |
                 InputCountField::encode(input_count) |
                 TempCountField::encode(temp_count) |
                 IsCallField::encode(false)),
      reference_map_(nullptr),
      block_(nullptr) {
  parallel_moves_[0] = nullptr;
  parallel_moves_[1] = nullptr;
  size_t offset = 0;
  for (size_t i = 0; i < output_count; ++i) {
    DCHECK(!outputs[i].IsInvalid());
    operands_[offset++] = outputs[i];
  }
  for (size_t i = 0; i < input_count; ++i) {
    DCHECK(!inputs[i].IsInvalid());
    operands_[offset++] = inputs[i];
  }
  for (size_t i = 0; i < temp_count; ++i) {
    DCHECK(!temps[i].IsInvalid());
    operands_[offset++] = temps[i];
  }
}
365 
AreMovesRedundant() const366 bool Instruction::AreMovesRedundant() const {
367   for (int i = Instruction::FIRST_GAP_POSITION;
368        i <= Instruction::LAST_GAP_POSITION; i++) {
369     if (parallel_moves_[i] != nullptr && !parallel_moves_[i]->IsRedundant()) {
370       return false;
371     }
372   }
373   return true;
374 }
375 
Print() const376 void Instruction::Print() const { StdoutStream{} << *this << std::endl; }
377 
operator <<(std::ostream & os,const ParallelMove & pm)378 std::ostream& operator<<(std::ostream& os, const ParallelMove& pm) {
379   const char* delimiter = "";
380   for (MoveOperands* move : pm) {
381     if (move->IsEliminated()) continue;
382     os << delimiter << *move;
383     delimiter = "; ";
384   }
385   return os;
386 }
387 
// Records |op| as holding a tagged pointer at this map's position.
void ReferenceMap::RecordReference(const AllocatedOperand& op) {
  // Do not record arguments as pointers (negative stack slot indices).
  if (op.IsStackSlot() && LocationOperand::cast(op).index() < 0) return;
  // FP locations never hold tagged pointers.
  DCHECK(!op.IsFPRegister() && !op.IsFPStackSlot());
  reference_operands_.push_back(op);
}
394 
operator <<(std::ostream & os,const ReferenceMap & pm)395 std::ostream& operator<<(std::ostream& os, const ReferenceMap& pm) {
396   os << "{";
397   const char* separator = "";
398   for (const InstructionOperand& op : pm.reference_operands_) {
399     os << separator << op;
400     separator = ";";
401   }
402   return os << "}";
403 }
404 
// Prints the symbolic name of an architecture opcode; the cases are generated
// from ARCH_OPCODE_LIST.
std::ostream& operator<<(std::ostream& os, const ArchOpcode& ao) {
  switch (ao) {
#define CASE(Name) \
  case k##Name:    \
    return os << #Name;
    ARCH_OPCODE_LIST(CASE)
#undef CASE
  }
  UNREACHABLE();
}
415 
// Prints the symbolic name of an addressing mode (nothing for kMode_None);
// the cases are generated from TARGET_ADDRESSING_MODE_LIST.
std::ostream& operator<<(std::ostream& os, const AddressingMode& am) {
  switch (am) {
    case kMode_None:
      return os;
#define CASE(Name)   \
  case kMode_##Name: \
    return os << #Name;
      TARGET_ADDRESSING_MODE_LIST(CASE)
#undef CASE
  }
  UNREACHABLE();
}
428 
// Prints a short name for a flags mode (nothing for kFlags_none).
std::ostream& operator<<(std::ostream& os, const FlagsMode& fm) {
  switch (fm) {
    case kFlags_none:
      return os;
    case kFlags_branch:
      return os << "branch";
    case kFlags_deoptimize:
      return os << "deoptimize";
    case kFlags_set:
      return os << "set";
    case kFlags_trap:
      return os << "trap";
    case kFlags_select:
      return os << "select";
  }
  UNREACHABLE();
}
446 
// Prints a human-readable description of a flags condition.
std::ostream& operator<<(std::ostream& os, const FlagsCondition& fc) {
  switch (fc) {
    case kEqual:
      return os << "equal";
    case kNotEqual:
      return os << "not equal";
    case kSignedLessThan:
      return os << "signed less than";
    case kSignedGreaterThanOrEqual:
      return os << "signed greater than or equal";
    case kSignedLessThanOrEqual:
      return os << "signed less than or equal";
    case kSignedGreaterThan:
      return os << "signed greater than";
    case kUnsignedLessThan:
      return os << "unsigned less than";
    case kUnsignedGreaterThanOrEqual:
      return os << "unsigned greater than or equal";
    case kUnsignedLessThanOrEqual:
      return os << "unsigned less than or equal";
    case kUnsignedGreaterThan:
      return os << "unsigned greater than";
    case kFloatLessThanOrUnordered:
      return os << "less than or unordered (FP)";
    case kFloatGreaterThanOrEqual:
      return os << "greater than or equal (FP)";
    case kFloatLessThanOrEqual:
      return os << "less than or equal (FP)";
    case kFloatGreaterThanOrUnordered:
      return os << "greater than or unordered (FP)";
    case kFloatLessThan:
      return os << "less than (FP)";
    case kFloatGreaterThanOrEqualOrUnordered:
      return os << "greater than, equal or unordered (FP)";
    case kFloatLessThanOrEqualOrUnordered:
      return os << "less than, equal or unordered (FP)";
    case kFloatGreaterThan:
      return os << "greater than (FP)";
    case kUnorderedEqual:
      return os << "unordered equal";
    case kUnorderedNotEqual:
      return os << "unordered not equal";
    case kOverflow:
      return os << "overflow";
    case kNotOverflow:
      return os << "not overflow";
    case kPositiveOrZero:
      return os << "positive or zero";
    case kNegative:
      return os << "negative";
  }
  UNREACHABLE();
}
500 
operator <<(std::ostream & os,const Instruction & instr)501 std::ostream& operator<<(std::ostream& os, const Instruction& instr) {
502   os << "gap ";
503   for (int i = Instruction::FIRST_GAP_POSITION;
504        i <= Instruction::LAST_GAP_POSITION; i++) {
505     os << "(";
506     if (instr.parallel_moves()[i] != nullptr) {
507       os << *instr.parallel_moves()[i];
508     }
509     os << ") ";
510   }
511   os << "\n          ";
512 
513   if (instr.OutputCount() == 1) {
514     os << *instr.OutputAt(0) << " = ";
515   } else if (instr.OutputCount() > 1) {
516     os << "(" << *instr.OutputAt(0);
517     for (size_t i = 1; i < instr.OutputCount(); i++) {
518       os << ", " << *instr.OutputAt(i);
519     }
520     os << ") = ";
521   }
522 
523   os << ArchOpcodeField::decode(instr.opcode());
524   AddressingMode am = AddressingModeField::decode(instr.opcode());
525   if (am != kMode_None) {
526     os << " : " << AddressingModeField::decode(instr.opcode());
527   }
528   FlagsMode fm = FlagsModeField::decode(instr.opcode());
529   if (fm != kFlags_none) {
530     os << " && " << fm << " if " << FlagsConditionField::decode(instr.opcode());
531   }
532   for (size_t i = 0; i < instr.InputCount(); i++) {
533     os << " " << *instr.InputAt(i);
534   }
535   return os;
536 }
537 
// Creates a 32-bit integer constant.
Constant::Constant(int32_t v) : type_(kInt32), value_(v) {}
539 
Constant(RelocatablePtrConstantInfo info)540 Constant::Constant(RelocatablePtrConstantInfo info) {
541   if (info.type() == RelocatablePtrConstantInfo::kInt32) {
542     type_ = kInt32;
543   } else if (info.type() == RelocatablePtrConstantInfo::kInt64) {
544     type_ = kInt64;
545   } else {
546     UNREACHABLE();
547   }
548   value_ = info.value();
549   rmode_ = info.rmode();
550 }
551 
// Reinterprets the stored value_ bits as a Handle location for a heap object
// constant (plain or compressed).
Handle<HeapObject> Constant::ToHeapObject() const {
  DCHECK(kHeapObject == type() || kCompressedHeapObject == type());
  Handle<HeapObject> value(
      reinterpret_cast<Address*>(static_cast<intptr_t>(value_)));
  return value;
}
558 
// Reinterprets the stored value_ bits as a Handle location and checks the
// referenced object really is code.
Handle<CodeT> Constant::ToCode() const {
  DCHECK_EQ(kHeapObject, type());
  Handle<CodeT> value(
      reinterpret_cast<Address*>(static_cast<intptr_t>(value_)));
  DCHECK(value->IsCodeT(GetPtrComprCageBaseSlow(*value)));
  return value;
}
566 
// Reinterprets the stored value_ bits as a StringConstantBase pointer.
const StringConstantBase* Constant::ToDelayedStringConstant() const {
  DCHECK_EQ(kDelayedStringConstant, type());
  const StringConstantBase* value =
      bit_cast<StringConstantBase*>(static_cast<intptr_t>(value_));
  return value;
}
573 
// Prints a constant, with a type-dependent suffix/prefix so the kind is
// visible in dumps (e.g. "l" for int64, "f" for float32).
std::ostream& operator<<(std::ostream& os, const Constant& constant) {
  switch (constant.type()) {
    case Constant::kInt32:
      return os << constant.ToInt32();
    case Constant::kInt64:
      return os << constant.ToInt64() << "l";
    case Constant::kFloat32:
      return os << constant.ToFloat32() << "f";
    case Constant::kFloat64:
      return os << constant.ToFloat64().value();
    case Constant::kExternalReference:
      return os << constant.ToExternalReference().address();
    case Constant::kHeapObject:  // Fall through.
    case Constant::kCompressedHeapObject:
      return os << Brief(*constant.ToHeapObject());
    case Constant::kRpoNumber:
      return os << "RPO" << constant.ToRpoNumber().ToInt();
    case Constant::kDelayedStringConstant:
      return os << "DelayedStringConstant: "
                << constant.ToDelayedStringConstant();
  }
  UNREACHABLE();
}
597 
// Creates a phi for |virtual_register| with |input_count| operand slots, all
// initialized to the invalid virtual register until SetInput() fills them.
PhiInstruction::PhiInstruction(Zone* zone, int virtual_register,
                               size_t input_count)
    : virtual_register_(virtual_register),
      output_(UnallocatedOperand(UnallocatedOperand::NONE, virtual_register)),
      operands_(input_count, InstructionOperand::kInvalidVirtualRegister,
                zone) {}
604 
// Sets a phi input for the first time; the slot must still be unset.
void PhiInstruction::SetInput(size_t offset, int virtual_register) {
  DCHECK_EQ(InstructionOperand::kInvalidVirtualRegister, operands_[offset]);
  operands_[offset] = virtual_register;
}
609 
// Overwrites an already-set phi input; the slot must have been set before.
void PhiInstruction::RenameInput(size_t offset, int virtual_register) {
  DCHECK_NE(InstructionOperand::kInvalidVirtualRegister, operands_[offset]);
  operands_[offset] = virtual_register;
}
614 
// Constructs an instruction block with its RPO/loop/dominator structure; the
// assembly-order number and code range are filled in later, and all frame and
// alignment flags start cleared.
InstructionBlock::InstructionBlock(Zone* zone, RpoNumber rpo_number,
                                   RpoNumber loop_header, RpoNumber loop_end,
                                   RpoNumber dominator, bool deferred,
                                   bool handler)
    : successors_(zone),
      predecessors_(zone),
      phis_(zone),
      ao_number_(RpoNumber::Invalid()),
      rpo_number_(rpo_number),
      loop_header_(loop_header),
      loop_end_(loop_end),
      dominator_(dominator),
      deferred_(deferred),
      handler_(handler),
      switch_target_(false),
      code_target_alignment_(false),
      loop_header_alignment_(false),
      needs_frame_(false),
      must_construct_frame_(false),
      must_deconstruct_frame_(false) {}
635 
PredecessorIndexOf(RpoNumber rpo_number) const636 size_t InstructionBlock::PredecessorIndexOf(RpoNumber rpo_number) const {
637   size_t j = 0;
638   for (InstructionBlock::Predecessors::const_iterator i = predecessors_.begin();
639        i != predecessors_.end(); ++i, ++j) {
640     if (*i == rpo_number) break;
641   }
642   return j;
643 }
644 
GetRpo(const BasicBlock * block)645 static RpoNumber GetRpo(const BasicBlock* block) {
646   if (block == nullptr) return RpoNumber::Invalid();
647   return RpoNumber::FromInt(block->rpo_number());
648 }
649 
GetLoopEndRpo(const BasicBlock * block)650 static RpoNumber GetLoopEndRpo(const BasicBlock* block) {
651   if (!block->IsLoopHeader()) return RpoNumber::Invalid();
652   return RpoNumber::FromInt(block->loop_end()->rpo_number());
653 }
654 
// Builds an InstructionBlock mirroring a scheduler BasicBlock: copies its
// RPO/loop/dominator structure, successor and predecessor lists, and marks
// exception-handler entries and switch targets.
static InstructionBlock* InstructionBlockFor(Zone* zone,
                                             const BasicBlock* block) {
  // A block starting with IfException is an exception-handler entry.
  bool is_handler =
      !block->empty() && block->front()->opcode() == IrOpcode::kIfException;
  InstructionBlock* instr_block = zone->New<InstructionBlock>(
      zone, GetRpo(block), GetRpo(block->loop_header()), GetLoopEndRpo(block),
      GetRpo(block->dominator()), block->deferred(), is_handler);
  // Map successors and predecessors.
  instr_block->successors().reserve(block->SuccessorCount());
  for (BasicBlock* successor : block->successors()) {
    instr_block->successors().push_back(GetRpo(successor));
  }
  instr_block->predecessors().reserve(block->PredecessorCount());
  for (BasicBlock* predecessor : block->predecessors()) {
    instr_block->predecessors().push_back(GetRpo(predecessor));
  }
  // A block whose single predecessor ends in a switch is a switch target.
  if (block->PredecessorCount() == 1 &&
      block->predecessors()[0]->control() == BasicBlock::Control::kSwitch) {
    instr_block->set_switch_target(true);
  }
  return instr_block;
}
677 
// Prints one instruction block: header (RPO/AO numbers and flags), loop
// extent, instruction index range, predecessors, phis, the instructions
// themselves, and successors.
std::ostream& operator<<(std::ostream& os,
                         const PrintableInstructionBlock& printable_block) {
  const InstructionBlock* block = printable_block.block_;
  const InstructionSequence* code = printable_block.code_;

  os << "B" << block->rpo_number();
  if (block->ao_number().IsValid()) {
    os << ": AO#" << block->ao_number();
  } else {
    os << ": AO#?";
  }
  if (block->IsDeferred()) os << " (deferred)";
  if (!block->needs_frame()) os << " (no frame)";
  if (block->must_construct_frame()) os << " (construct frame)";
  if (block->must_deconstruct_frame()) os << " (deconstruct frame)";
  if (block->IsLoopHeader()) {
    os << " loop blocks: [" << block->rpo_number() << ", " << block->loop_end()
       << ")";
  }
  os << "  instructions: [" << block->code_start() << ", " << block->code_end()
     << ")" << std::endl
     << " predecessors:";

  for (RpoNumber pred : block->predecessors()) {
    os << " B" << pred.ToInt();
  }
  os << std::endl;

  for (const PhiInstruction* phi : block->phis()) {
    os << "     phi: " << phi->output() << " =";
    for (int input : phi->operands()) {
      os << " v" << input;
    }
    os << std::endl;
  }

  // Instructions are looked up in the sequence by absolute index.
  for (int j = block->first_instruction_index();
       j <= block->last_instruction_index(); j++) {
    os << "   " << std::setw(5) << j << ": " << *code->InstructionAt(j)
       << std::endl;
  }

  os << " successors:";
  for (RpoNumber succ : block->successors()) {
    os << " B" << succ.ToInt();
  }
  os << std::endl;
  return os;
}
727 
// Allocates the InstructionBlocks vector in |zone| (placement-new into
// zone-allocated raw storage, since the container itself must be zone-owned)
// and fills it with one InstructionBlock per scheduled basic block, indexed
// by RPO number.
InstructionBlocks* InstructionSequence::InstructionBlocksFor(
    Zone* zone, const Schedule* schedule) {
  InstructionBlocks* blocks = zone->NewArray<InstructionBlocks>(1);
  new (blocks) InstructionBlocks(
      static_cast<int>(schedule->rpo_order()->size()), nullptr, zone);
  size_t rpo_number = 0;
  for (BasicBlockVector::const_iterator it = schedule->rpo_order()->begin();
       it != schedule->rpo_order()->end(); ++it, ++rpo_number) {
    DCHECK(!(*blocks)[rpo_number]);
    // The schedule's RPO order must agree with each block's own RPO number.
    DCHECK(GetRpo(*it).ToSize() == rpo_number);
    (*blocks)[rpo_number] = InstructionBlockFor(zone, *it);
  }
  return blocks;
}
742 
void InstructionSequence::ValidateEdgeSplitForm() const {
  // Validate blocks are in edge-split form: no block with multiple successors
  // has an edge to a block (== a successor) with more than one predecessor.
  for (const InstructionBlock* block : instruction_blocks()) {
    if (block->SuccessorCount() > 1) {
      for (const RpoNumber& successor_id : block->successors()) {
        const InstructionBlock* successor = InstructionBlockAt(successor_id);
        // Expect precisely one predecessor: "block".
        CHECK(successor->PredecessorCount() == 1 &&
              successor->predecessors()[0] == block->rpo_number());
      }
    }
  }
}
757 
ValidateDeferredBlockExitPaths() const758 void InstructionSequence::ValidateDeferredBlockExitPaths() const {
759   // A deferred block with more than one successor must have all its successors
760   // deferred.
761   for (const InstructionBlock* block : instruction_blocks()) {
762     if (!block->IsDeferred() || block->SuccessorCount() <= 1) continue;
763     for (RpoNumber successor_id : block->successors()) {
764       CHECK(InstructionBlockAt(successor_id)->IsDeferred());
765     }
766   }
767 }
768 
void InstructionSequence::ValidateDeferredBlockEntryPaths() const {
  // If a deferred block has multiple predecessors, they have to
  // all be deferred. Otherwise, we can run into a situation where a range
  // that spills only in deferred blocks inserts its spill in the block, but
  // other ranges need moves inserted by ResolveControlFlow in the predecessors,
  // which may clobber the register of this range.
  for (const InstructionBlock* block : instruction_blocks()) {
    if (!block->IsDeferred() || block->PredecessorCount() <= 1) continue;
    for (RpoNumber predecessor_id : block->predecessors()) {
      CHECK(InstructionBlockAt(predecessor_id)->IsDeferred());
    }
  }
}
782 
ValidateSSA() const783 void InstructionSequence::ValidateSSA() const {
784   // TODO(mtrofin): We could use a local zone here instead.
785   BitVector definitions(VirtualRegisterCount(), zone());
786   for (const Instruction* instruction : *this) {
787     for (size_t i = 0; i < instruction->OutputCount(); ++i) {
788       const InstructionOperand* output = instruction->OutputAt(i);
789       int vreg = (output->IsConstant())
790                      ? ConstantOperand::cast(output)->virtual_register()
791                      : UnallocatedOperand::cast(output)->virtual_register();
792       CHECK(!definitions.Contains(vreg));
793       definitions.Add(vreg);
794     }
795   }
796 }
797 
// Computes the order in which blocks will be emitted (the "assembly order",
// ao) and materializes it in ao_blocks_: all non-deferred blocks first
// (in RPO, except where loop rotation moves a loop's last block ahead of
// its header), then all remaining (deferred) blocks. Also decides which
// blocks receive loop-header / code-target alignment.
void InstructionSequence::ComputeAssemblyOrder() {
  int ao = 0;  // Next assembly-order number to hand out.
  RpoNumber invalid = RpoNumber::Invalid();

  // ao_blocks_ lives in the zone: allocate raw storage, then placement-new
  // the vector into it.
  ao_blocks_ = zone()->NewArray<InstructionBlocks>(1);
  new (ao_blocks_) InstructionBlocks(zone());
  ao_blocks_->reserve(instruction_blocks_->size());

  // Place non-deferred blocks.
  for (InstructionBlock* const block : *instruction_blocks_) {
    DCHECK_NOT_NULL(block);
    if (block->IsDeferred()) continue;            // skip deferred blocks.
    if (block->ao_number() != invalid) continue;  // loop rotated.
    if (block->IsLoopHeader()) {
      bool header_align = true;
      if (FLAG_turbo_loop_rotation) {
        // Perform loop rotation for non-deferred loops.
        // loop_end() appears to index one past the loop's last block,
        // hence the -1 (NOTE(review): confirm against InstructionBlock).
        InstructionBlock* loop_end =
            instruction_blocks_->at(block->loop_end().ToSize() - 1);
        if (loop_end->SuccessorCount() == 1 && /* ends with goto */
            loop_end != block /* not a degenerate infinite loop */) {
          // If the last block has an unconditional jump back to the header,
          // then move it to be in front of the header in the assembly order.
          DCHECK_EQ(block->rpo_number(), loop_end->successors()[0]);
          loop_end->set_ao_number(RpoNumber::FromInt(ao++));
          ao_blocks_->push_back(loop_end);
          // This block will be the new machine-level loop header, so align
          // this block instead of the loop header block.
          loop_end->set_loop_header_alignment(true);
          header_align = false;
        }
      }
      block->set_loop_header_alignment(header_align);
    }
    // Switch (jump-table) targets inside a loop get code-target alignment.
    if (block->loop_header().IsValid() && block->IsSwitchTarget()) {
      block->set_code_target_alignment(true);
    }
    block->set_ao_number(RpoNumber::FromInt(ao++));
    ao_blocks_->push_back(block);
  }
  // Add all leftover (deferred) blocks.
  for (InstructionBlock* const block : *instruction_blocks_) {
    if (block->ao_number() == invalid) {
      block->set_ao_number(RpoNumber::FromInt(ao++));
      ao_blocks_->push_back(block);
    }
  }
  // Every block must have been placed exactly once.
  DCHECK_EQ(instruction_blocks_->size(), ao);
}
847 
RecomputeAssemblyOrderForTesting()848 void InstructionSequence::RecomputeAssemblyOrderForTesting() {
849   RpoNumber invalid = RpoNumber::Invalid();
850   for (InstructionBlock* block : *instruction_blocks_) {
851     block->set_ao_number(invalid);
852   }
853   ComputeAssemblyOrder();
854 }
855 
// Constructs an InstructionSequence over already-created instruction blocks.
// All member containers are allocated in |instruction_zone|; the assembly
// order is computed immediately.
InstructionSequence::InstructionSequence(Isolate* isolate,
                                         Zone* instruction_zone,
                                         InstructionBlocks* instruction_blocks)
    : isolate_(isolate),
      zone_(instruction_zone),
      instruction_blocks_(instruction_blocks),
      ao_blocks_(nullptr),  // Filled in by ComputeAssemblyOrder() below.
      source_positions_(zone()),
      constants_(ConstantMap::key_compare(),
                 ConstantMap::allocator_type(zone())),
      immediates_(zone()),
      rpo_immediates_(instruction_blocks->size(), zone()),
      instructions_(zone()),
      next_virtual_register_(0),
      reference_maps_(zone()),
      representations_(zone()),
      representation_mask_(0),
      deoptimization_entries_(zone()),
      current_block_(nullptr) {
  ComputeAssemblyOrder();
}
877 
NextVirtualRegister()878 int InstructionSequence::NextVirtualRegister() {
879   int virtual_register = next_virtual_register_++;
880   CHECK_NE(virtual_register, InstructionOperand::kInvalidVirtualRegister);
881   return virtual_register;
882 }
883 
GetBlockStart(RpoNumber rpo) const884 Instruction* InstructionSequence::GetBlockStart(RpoNumber rpo) const {
885   const InstructionBlock* block = InstructionBlockAt(rpo);
886   return InstructionAt(block->code_start());
887 }
888 
StartBlock(RpoNumber rpo)889 void InstructionSequence::StartBlock(RpoNumber rpo) {
890   DCHECK_NULL(current_block_);
891   current_block_ = InstructionBlockAt(rpo);
892   int code_start = static_cast<int>(instructions_.size());
893   current_block_->set_code_start(code_start);
894 }
895 
EndBlock(RpoNumber rpo)896 void InstructionSequence::EndBlock(RpoNumber rpo) {
897   int end = static_cast<int>(instructions_.size());
898   DCHECK_EQ(current_block_->rpo_number(), rpo);
899   CHECK(current_block_->code_start() >= 0 &&
900         current_block_->code_start() < end);
901   current_block_->set_code_end(end);
902   current_block_ = nullptr;
903 }
904 
AddInstruction(Instruction * instr)905 int InstructionSequence::AddInstruction(Instruction* instr) {
906   DCHECK_NOT_NULL(current_block_);
907   int index = static_cast<int>(instructions_.size());
908   instr->set_block(current_block_);
909   instructions_.push_back(instr);
910   if (instr->NeedsReferenceMap()) {
911     DCHECK_NULL(instr->reference_map());
912     ReferenceMap* reference_map = zone()->New<ReferenceMap>(zone());
913     reference_map->set_instruction_position(index);
914     instr->set_reference_map(reference_map);
915     reference_maps_.push_back(reference_map);
916   }
917   return index;
918 }
919 
GetInstructionBlock(int instruction_index) const920 InstructionBlock* InstructionSequence::GetInstructionBlock(
921     int instruction_index) const {
922   return instructions()[instruction_index]->block();
923 }
924 
// Normalizes a machine representation for bookkeeping in the sequence:
// sub-word values (bit, word8, word16) are widened to the default word
// representation, full-width representations pass through unchanged, and
// kNone/kMapWord must never reach this point.
static MachineRepresentation FilterRepresentation(MachineRepresentation rep) {
  switch (rep) {
    case MachineRepresentation::kBit:
    case MachineRepresentation::kWord8:
    case MachineRepresentation::kWord16:
      // Sub-word values are tracked at full word width.
      return InstructionSequence::DefaultRepresentation();
    case MachineRepresentation::kWord32:
    case MachineRepresentation::kWord64:
    case MachineRepresentation::kTaggedSigned:
    case MachineRepresentation::kTaggedPointer:
    case MachineRepresentation::kTagged:
    case MachineRepresentation::kFloat32:
    case MachineRepresentation::kFloat64:
    case MachineRepresentation::kSimd128:
    case MachineRepresentation::kCompressedPointer:
    case MachineRepresentation::kCompressed:
    case MachineRepresentation::kSandboxedPointer:
      return rep;
    case MachineRepresentation::kNone:
    case MachineRepresentation::kMapWord:
      break;
  }

  UNREACHABLE();
}
950 
GetRepresentation(int virtual_register) const951 MachineRepresentation InstructionSequence::GetRepresentation(
952     int virtual_register) const {
953   DCHECK_LE(0, virtual_register);
954   DCHECK_LT(virtual_register, VirtualRegisterCount());
955   if (virtual_register >= static_cast<int>(representations_.size())) {
956     return DefaultRepresentation();
957   }
958   return representations_[virtual_register];
959 }
960 
MarkAsRepresentation(MachineRepresentation rep,int virtual_register)961 void InstructionSequence::MarkAsRepresentation(MachineRepresentation rep,
962                                                int virtual_register) {
963   DCHECK_LE(0, virtual_register);
964   DCHECK_LT(virtual_register, VirtualRegisterCount());
965   if (virtual_register >= static_cast<int>(representations_.size())) {
966     representations_.resize(VirtualRegisterCount(), DefaultRepresentation());
967   }
968   rep = FilterRepresentation(rep);
969   DCHECK_IMPLIES(representations_[virtual_register] != rep,
970                  representations_[virtual_register] == DefaultRepresentation());
971   representations_[virtual_register] = rep;
972   representation_mask_ |= RepresentationBit(rep);
973 }
974 
AddDeoptimizationEntry(FrameStateDescriptor * descriptor,DeoptimizeKind kind,DeoptimizeReason reason,NodeId node_id,FeedbackSource const & feedback)975 int InstructionSequence::AddDeoptimizationEntry(
976     FrameStateDescriptor* descriptor, DeoptimizeKind kind,
977     DeoptimizeReason reason, NodeId node_id, FeedbackSource const& feedback) {
978   int deoptimization_id = static_cast<int>(deoptimization_entries_.size());
979   deoptimization_entries_.push_back(
980       DeoptimizationEntry(descriptor, kind, reason, node_id, feedback));
981   return deoptimization_id;
982 }
983 
GetDeoptimizationEntry(int state_id)984 DeoptimizationEntry const& InstructionSequence::GetDeoptimizationEntry(
985     int state_id) {
986   return deoptimization_entries_[state_id];
987 }
988 
InputRpo(Instruction * instr,size_t index)989 RpoNumber InstructionSequence::InputRpo(Instruction* instr, size_t index) {
990   InstructionOperand* operand = instr->InputAt(index);
991   Constant constant =
992       operand->IsImmediate()
993           ? GetImmediate(ImmediateOperand::cast(operand))
994           : GetConstant(ConstantOperand::cast(operand)->virtual_register());
995   return constant.ToRpoNumber();
996 }
997 
GetSourcePosition(const Instruction * instr,SourcePosition * result) const998 bool InstructionSequence::GetSourcePosition(const Instruction* instr,
999                                             SourcePosition* result) const {
1000   auto it = source_positions_.find(instr);
1001   if (it == source_positions_.end()) return false;
1002   *result = it->second;
1003   return true;
1004 }
1005 
SetSourcePosition(const Instruction * instr,SourcePosition value)1006 void InstructionSequence::SetSourcePosition(const Instruction* instr,
1007                                             SourcePosition value) {
1008   source_positions_.insert(std::make_pair(instr, value));
1009 }
1010 
Print() const1011 void InstructionSequence::Print() const {
1012   StdoutStream{} << *this << std::endl;
1013 }
1014 
PrintBlock(int block_id) const1015 void InstructionSequence::PrintBlock(int block_id) const {
1016   RpoNumber rpo = RpoNumber::FromInt(block_id);
1017   const InstructionBlock* block = InstructionBlockAt(rpo);
1018   CHECK(block->rpo_number() == rpo);
1019   StdoutStream{} << PrintableInstructionBlock{block, this} << std::endl;
1020 }
1021 
// Test-only override of the register configuration; installed via
// SetRegisterConfigurationForTesting() and null otherwise.
const RegisterConfiguration*
    InstructionSequence::registerConfigurationForTesting_ = nullptr;
1024 
1025 const RegisterConfiguration*
RegisterConfigurationForTesting()1026 InstructionSequence::RegisterConfigurationForTesting() {
1027   DCHECK_NOT_NULL(registerConfigurationForTesting_);
1028   return registerConfigurationForTesting_;
1029 }
1030 
SetRegisterConfigurationForTesting(const RegisterConfiguration * regConfig)1031 void InstructionSequence::SetRegisterConfigurationForTesting(
1032     const RegisterConfiguration* regConfig) {
1033   registerConfigurationForTesting_ = regConfig;
1034   GetRegConfig = InstructionSequence::RegisterConfigurationForTesting;
1035 }
1036 
1037 namespace {
1038 
// Returns a conservative estimate of the stack-frame size in bytes for a
// single frame state of the given |type|, based on its parameter and local
// counts (and, for builtin continuations, the builtin's call descriptor).
size_t GetConservativeFrameSizeInBytes(FrameStateType type,
                                       size_t parameters_count,
                                       size_t locals_count,
                                       BytecodeOffset bailout_id) {
  switch (type) {
    case FrameStateType::kUnoptimizedFunction: {
      auto info = UnoptimizedFrameInfo::Conservative(
          static_cast<int>(parameters_count), static_cast<int>(locals_count));
      return info.frame_size_in_bytes();
    }
    case FrameStateType::kArgumentsAdaptor:
      // The arguments adaptor frame state is only used in the deoptimizer and
      // does not occupy any extra space in the stack. Check out the design doc:
      // https://docs.google.com/document/d/150wGaUREaZI6YWqOQFD5l2mWQXaPbbZjcAIJLOFrzMs/edit
      // We just need to account for the additional parameters we might push
      // here.
      return UnoptimizedFrameInfo::GetStackSizeForAdditionalArguments(
          static_cast<int>(parameters_count));
    case FrameStateType::kConstructStub: {
      auto info = ConstructStubFrameInfo::Conservative(
          static_cast<int>(parameters_count));
      return info.frame_size_in_bytes();
    }
    case FrameStateType::kBuiltinContinuation:
#if V8_ENABLE_WEBASSEMBLY
    case FrameStateType::kJSToWasmBuiltinContinuation:
#endif  // V8_ENABLE_WEBASSEMBLY
    case FrameStateType::kJavaScriptBuiltinContinuation:
    case FrameStateType::kJavaScriptBuiltinContinuationWithCatch: {
      // The continued-into builtin is recovered from the bailout id; its
      // interface descriptor determines the register/stack parameter split.
      const RegisterConfiguration* config = RegisterConfiguration::Default();
      auto info = BuiltinContinuationFrameInfo::Conservative(
          static_cast<int>(parameters_count),
          Builtins::CallInterfaceDescriptorFor(
              Builtins::GetBuiltinFromBytecodeOffset(bailout_id)),
          config);
      return info.frame_size_in_bytes();
    }
  }
  UNREACHABLE();
}
1079 
GetTotalConservativeFrameSizeInBytes(FrameStateType type,size_t parameters_count,size_t locals_count,BytecodeOffset bailout_id,FrameStateDescriptor * outer_state)1080 size_t GetTotalConservativeFrameSizeInBytes(FrameStateType type,
1081                                             size_t parameters_count,
1082                                             size_t locals_count,
1083                                             BytecodeOffset bailout_id,
1084                                             FrameStateDescriptor* outer_state) {
1085   size_t outer_total_conservative_frame_size_in_bytes =
1086       (outer_state == nullptr)
1087           ? 0
1088           : outer_state->total_conservative_frame_size_in_bytes();
1089   return GetConservativeFrameSizeInBytes(type, parameters_count, locals_count,
1090                                          bailout_id) +
1091          outer_total_conservative_frame_size_in_bytes;
1092 }
1093 
1094 }  // namespace
1095 
// Describes one (possibly nested) frame state used for deoptimization.
// The total conservative frame size is pre-computed here, folding in the
// outer descriptor's total when |outer_state| is non-null.
FrameStateDescriptor::FrameStateDescriptor(
    Zone* zone, FrameStateType type, BytecodeOffset bailout_id,
    OutputFrameStateCombine state_combine, size_t parameters_count,
    size_t locals_count, size_t stack_count,
    MaybeHandle<SharedFunctionInfo> shared_info,
    FrameStateDescriptor* outer_state)
    : type_(type),
      bailout_id_(bailout_id),
      frame_state_combine_(state_combine),
      parameters_count_(parameters_count),
      locals_count_(locals_count),
      stack_count_(stack_count),
      total_conservative_frame_size_in_bytes_(
          GetTotalConservativeFrameSizeInBytes(
              type, parameters_count, locals_count, bailout_id, outer_state)),
      values_(zone),
      shared_info_(shared_info),
      outer_state_(outer_state) {}
1114 
// Returns this frame's "height": locals for unoptimized-function frames,
// parameters for every other frame type; see the per-case comments.
size_t FrameStateDescriptor::GetHeight() const {
  switch (type()) {
    case FrameStateType::kUnoptimizedFunction:
      return locals_count();  // The accumulator is *not* included.
    case FrameStateType::kBuiltinContinuation:
#if V8_ENABLE_WEBASSEMBLY
    case FrameStateType::kJSToWasmBuiltinContinuation:
#endif
      // Custom, non-JS calling convention (that does not have a notion of
      // a receiver or context).
      return parameters_count();
    case FrameStateType::kArgumentsAdaptor:
    case FrameStateType::kConstructStub:
    case FrameStateType::kJavaScriptBuiltinContinuation:
    case FrameStateType::kJavaScriptBuiltinContinuationWithCatch:
      // JS linkage. The parameters count
      // - includes the receiver (input 1 in CreateArtificialFrameState, and
      //   passed as part of stack parameters to
      //   CreateJavaScriptBuiltinContinuationFrameState), and
      // - does *not* include the context.
      return parameters_count();
  }
  UNREACHABLE();
}
1139 
GetSize() const1140 size_t FrameStateDescriptor::GetSize() const {
1141   return 1 + parameters_count() + locals_count() + stack_count() +
1142          (HasContext() ? 1 : 0);
1143 }
1144 
GetTotalSize() const1145 size_t FrameStateDescriptor::GetTotalSize() const {
1146   size_t total_size = 0;
1147   for (const FrameStateDescriptor* iter = this; iter != nullptr;
1148        iter = iter->outer_state_) {
1149     total_size += iter->GetSize();
1150   }
1151   return total_size;
1152 }
1153 
GetFrameCount() const1154 size_t FrameStateDescriptor::GetFrameCount() const {
1155   size_t count = 0;
1156   for (const FrameStateDescriptor* iter = this; iter != nullptr;
1157        iter = iter->outer_state_) {
1158     ++count;
1159   }
1160   return count;
1161 }
1162 
GetJSFrameCount() const1163 size_t FrameStateDescriptor::GetJSFrameCount() const {
1164   size_t count = 0;
1165   for (const FrameStateDescriptor* iter = this; iter != nullptr;
1166        iter = iter->outer_state_) {
1167     if (FrameStateFunctionInfo::IsJSFunctionType(iter->type_)) {
1168       ++count;
1169     }
1170   }
1171   return count;
1172 }
1173 
1174 #if V8_ENABLE_WEBASSEMBLY
// A FrameStateDescriptor for JS-to-Wasm calls that additionally records the
// wasm return kind derived from |wasm_signature|.
JSToWasmFrameStateDescriptor::JSToWasmFrameStateDescriptor(
    Zone* zone, FrameStateType type, BytecodeOffset bailout_id,
    OutputFrameStateCombine state_combine, size_t parameters_count,
    size_t locals_count, size_t stack_count,
    MaybeHandle<SharedFunctionInfo> shared_info,
    FrameStateDescriptor* outer_state, const wasm::FunctionSig* wasm_signature)
    : FrameStateDescriptor(zone, type, bailout_id, state_combine,
                           parameters_count, locals_count, stack_count,
                           shared_info, outer_state),
      return_kind_(wasm::WasmReturnTypeFromSignature(wasm_signature)) {}
1185 #endif  // V8_ENABLE_WEBASSEMBLY
1186 
operator <<(std::ostream & os,const RpoNumber & rpo)1187 std::ostream& operator<<(std::ostream& os, const RpoNumber& rpo) {
1188   return os << rpo.ToSize();
1189 }
1190 
operator <<(std::ostream & os,const InstructionSequence & code)1191 std::ostream& operator<<(std::ostream& os, const InstructionSequence& code) {
1192   for (size_t i = 0; i < code.immediates_.size(); ++i) {
1193     Constant constant = code.immediates_[i];
1194     os << "IMM#" << i << ": " << constant << "\n";
1195   }
1196   int n = 0;
1197   for (ConstantMap::const_iterator it = code.constants_.begin();
1198        it != code.constants_.end(); ++n, ++it) {
1199     os << "CST#" << n << ": v" << it->first << " = " << it->second << "\n";
1200   }
1201   for (int i = 0; i < code.InstructionBlockCount(); i++) {
1202     auto* block = code.InstructionBlockAt(RpoNumber::FromInt(i));
1203     os << PrintableInstructionBlock{block, &code};
1204   }
1205   return os;
1206 }
1207 
1208 }  // namespace compiler
1209 }  // namespace internal
1210 }  // namespace v8
1211