// Copyright 2014 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef V8_COMPILER_BACKEND_INSTRUCTION_H_
#define V8_COMPILER_BACKEND_INSTRUCTION_H_

#include <deque>
#include <iosfwd>
#include <map>
#include <set>

#include "src/base/compiler-specific.h"
#include "src/codegen/external-reference.h"
#include "src/codegen/register-arch.h"
#include "src/codegen/source-position.h"
#include "src/common/globals.h"
#include "src/compiler/backend/instruction-codes.h"
#include "src/compiler/common-operator.h"
#include "src/compiler/feedback-source.h"
#include "src/compiler/frame.h"
#include "src/compiler/opcodes.h"
#include "src/numbers/double.h"
#include "src/zone/zone-allocator.h"

namespace v8 {
namespace internal {

class RegisterConfiguration;

namespace compiler {

class Schedule;
class SourcePositionTable;

#if defined(V8_CC_MSVC) && defined(V8_TARGET_ARCH_IA32)
// MSVC on x86 has issues with ALIGNAS(8) on InstructionOperand, but does
// align the object to 8 bytes anyway (covered by a static assert below).
// See crbug.com/v8/10796
#define INSTRUCTION_OPERAND_ALIGN
#else
#define INSTRUCTION_OPERAND_ALIGN ALIGNAS(8)
#endif

class V8_EXPORT_PRIVATE INSTRUCTION_OPERAND_ALIGN InstructionOperand {
 public:
  static const int kInvalidVirtualRegister = -1;

  enum Kind {
    INVALID,
    UNALLOCATED,
    CONSTANT,
    IMMEDIATE,
    PENDING,
    // Location operand kinds.
    ALLOCATED,
    FIRST_LOCATION_OPERAND_KIND = ALLOCATED
    // Location operand kinds must be last.
  };

  InstructionOperand() : InstructionOperand(INVALID) {}

  Kind kind() const { return KindField::decode(value_); }

#define INSTRUCTION_OPERAND_PREDICATE(name, type) \
  bool Is##name() const { return kind() == type; }
  INSTRUCTION_OPERAND_PREDICATE(Invalid, INVALID)
  // UnallocatedOperands are place-holder operands created before register
  // allocation. They are later assigned registers and become AllocatedOperands.
  INSTRUCTION_OPERAND_PREDICATE(Unallocated, UNALLOCATED)
  // Constant operands participate in register allocation. They are allocated to
  // registers but have a special "spilling" behavior. When a ConstantOperand
  // value must be rematerialized, it is loaded from an immediate constant
  // rather than from an unspilled slot.
  INSTRUCTION_OPERAND_PREDICATE(Constant, CONSTANT)
  // ImmediateOperands do not participate in register allocation and are only
  // embedded directly in instructions, e.g. small integers and, on some
  // platforms, Objects.
  INSTRUCTION_OPERAND_PREDICATE(Immediate, IMMEDIATE)
  // PendingOperands are pending allocation during register allocation and
  // shouldn't be seen elsewhere. They chain together multiple operands that
  // will be replaced together with the same value when finalized.
  INSTRUCTION_OPERAND_PREDICATE(Pending, PENDING)
  // AllocatedOperands are registers or stack slots that are assigned by the
  // register allocator and are always associated with a virtual register.
  INSTRUCTION_OPERAND_PREDICATE(Allocated, ALLOCATED)
#undef INSTRUCTION_OPERAND_PREDICATE

  inline bool IsAnyLocationOperand() const;
  inline bool IsLocationOperand() const;
  inline bool IsFPLocationOperand() const;
  inline bool IsAnyRegister() const;
  inline bool IsRegister() const;
  inline bool IsFPRegister() const;
  inline bool IsFloatRegister() const;
  inline bool IsDoubleRegister() const;
  inline bool IsSimd128Register() const;
  inline bool IsAnyStackSlot() const;
  inline bool IsStackSlot() const;
  inline bool IsFPStackSlot() const;
  inline bool IsFloatStackSlot() const;
  inline bool IsDoubleStackSlot() const;
  inline bool IsSimd128StackSlot() const;

  template <typename SubKindOperand>
  static SubKindOperand* New(Zone* zone, const SubKindOperand& op) {
    return zone->New<SubKindOperand>(op);
  }

  static void ReplaceWith(InstructionOperand* dest,
                          const InstructionOperand* src) {
    *dest = *src;
  }

  bool Equals(const InstructionOperand& that) const {
    if (IsPending()) {
      // Pending operands are only equal if they are the same operand.
      return this == &that;
    }
    return this->value_ == that.value_;
  }

  bool Compare(const InstructionOperand& that) const {
    return this->value_ < that.value_;
  }

  bool EqualsCanonicalized(const InstructionOperand& that) const {
    if (IsPending()) {
      // Pending operands can't be canonicalized, so just compare for equality.
      return Equals(that);
    }
    return this->GetCanonicalizedValue() == that.GetCanonicalizedValue();
  }

  bool CompareCanonicalized(const InstructionOperand& that) const {
    DCHECK(!IsPending());
    return this->GetCanonicalizedValue() < that.GetCanonicalizedValue();
  }

  bool InterferesWith(const InstructionOperand& other) const;

  // APIs to aid debugging. For general-stream APIs, use operator<<.
  void Print() const;

 protected:
  explicit InstructionOperand(Kind kind) : value_(KindField::encode(kind)) {}

  inline uint64_t GetCanonicalizedValue() const;

  using KindField = base::BitField64<Kind, 0, 3>;

  uint64_t value_;
};

using InstructionOperandVector = ZoneVector<InstructionOperand>;

std::ostream& operator<<(std::ostream&, const InstructionOperand&);

#define INSTRUCTION_OPERAND_CASTS(OperandType, OperandKind)      \
                                                                  \
  static OperandType* cast(InstructionOperand* op) {             \
    DCHECK_EQ(OperandKind, op->kind());                          \
    return static_cast<OperandType*>(op);                        \
  }                                                              \
                                                                  \
  static const OperandType* cast(const InstructionOperand* op) { \
    DCHECK_EQ(OperandKind, op->kind());                          \
    return static_cast<const OperandType*>(op);                  \
  }                                                              \
                                                                  \
  static OperandType cast(const InstructionOperand& op) {        \
    DCHECK_EQ(OperandKind, op.kind());                           \
    return *static_cast<const OperandType*>(&op);                \
  }

class UnallocatedOperand final : public InstructionOperand {
 public:
  enum BasicPolicy { FIXED_SLOT, EXTENDED_POLICY };

  enum ExtendedPolicy {
    NONE,
    REGISTER_OR_SLOT,
    REGISTER_OR_SLOT_OR_CONSTANT,
    FIXED_REGISTER,
    FIXED_FP_REGISTER,
    MUST_HAVE_REGISTER,
    MUST_HAVE_SLOT,
    SAME_AS_FIRST_INPUT
  };

  // Lifetime of operand inside the instruction.
  enum Lifetime {
    // A USED_AT_START operand is guaranteed to be live only at instruction
    // start. The register allocator is free to assign the same register to
    // some other operand used inside the instruction (i.e. a temporary or an
    // output).
    USED_AT_START,

    // A USED_AT_END operand is treated as live until the end of the
    // instruction. This means that the register allocator will not reuse its
    // register for any other operand inside the instruction.
    USED_AT_END
  };

  UnallocatedOperand(ExtendedPolicy policy, int virtual_register)
      : UnallocatedOperand(virtual_register) {
    value_ |= BasicPolicyField::encode(EXTENDED_POLICY);
    value_ |= ExtendedPolicyField::encode(policy);
    value_ |= LifetimeField::encode(USED_AT_END);
  }

  UnallocatedOperand(BasicPolicy policy, int index, int virtual_register)
      : UnallocatedOperand(virtual_register) {
    DCHECK(policy == FIXED_SLOT);
    value_ |= BasicPolicyField::encode(policy);
    value_ |= static_cast<uint64_t>(static_cast<int64_t>(index))
              << FixedSlotIndexField::kShift;
    DCHECK(this->fixed_slot_index() == index);
  }

  UnallocatedOperand(ExtendedPolicy policy, int index, int virtual_register)
      : UnallocatedOperand(virtual_register) {
    DCHECK(policy == FIXED_REGISTER || policy == FIXED_FP_REGISTER);
    value_ |= BasicPolicyField::encode(EXTENDED_POLICY);
    value_ |= ExtendedPolicyField::encode(policy);
    value_ |= LifetimeField::encode(USED_AT_END);
    value_ |= FixedRegisterField::encode(index);
  }

  UnallocatedOperand(ExtendedPolicy policy, Lifetime lifetime,
                     int virtual_register)
      : UnallocatedOperand(virtual_register) {
    value_ |= BasicPolicyField::encode(EXTENDED_POLICY);
    value_ |= ExtendedPolicyField::encode(policy);
    value_ |= LifetimeField::encode(lifetime);
  }

  UnallocatedOperand(int reg_id, int slot_id, int virtual_register)
      : UnallocatedOperand(FIXED_REGISTER, reg_id, virtual_register) {
    value_ |= HasSecondaryStorageField::encode(true);
    value_ |= SecondaryStorageField::encode(slot_id);
  }

  UnallocatedOperand(const UnallocatedOperand& other, int virtual_register) {
    DCHECK_NE(kInvalidVirtualRegister, virtual_register);
    value_ = VirtualRegisterField::update(
        other.value_, static_cast<uint32_t>(virtual_register));
  }

  // Predicates for the operand policy.
  bool HasRegisterOrSlotPolicy() const {
    return basic_policy() == EXTENDED_POLICY &&
           extended_policy() == REGISTER_OR_SLOT;
  }
  bool HasRegisterOrSlotOrConstantPolicy() const {
    return basic_policy() == EXTENDED_POLICY &&
           extended_policy() == REGISTER_OR_SLOT_OR_CONSTANT;
  }
  bool HasFixedPolicy() const {
    return basic_policy() == FIXED_SLOT ||
           extended_policy() == FIXED_REGISTER ||
           extended_policy() == FIXED_FP_REGISTER;
  }
  bool HasRegisterPolicy() const {
    return basic_policy() == EXTENDED_POLICY &&
           extended_policy() == MUST_HAVE_REGISTER;
  }
  bool HasSlotPolicy() const {
    return basic_policy() == EXTENDED_POLICY &&
           extended_policy() == MUST_HAVE_SLOT;
  }
  bool HasSameAsInputPolicy() const {
    return basic_policy() == EXTENDED_POLICY &&
           extended_policy() == SAME_AS_FIRST_INPUT;
  }
  bool HasFixedSlotPolicy() const { return basic_policy() == FIXED_SLOT; }
  bool HasFixedRegisterPolicy() const {
    return basic_policy() == EXTENDED_POLICY &&
           extended_policy() == FIXED_REGISTER;
  }
  bool HasFixedFPRegisterPolicy() const {
    return basic_policy() == EXTENDED_POLICY &&
           extended_policy() == FIXED_FP_REGISTER;
  }
  bool HasSecondaryStorage() const {
    return basic_policy() == EXTENDED_POLICY &&
           extended_policy() == FIXED_REGISTER &&
           HasSecondaryStorageField::decode(value_);
  }
  int GetSecondaryStorage() const {
    DCHECK(HasSecondaryStorage());
    return SecondaryStorageField::decode(value_);
  }

  // [basic_policy]: Distinguish between FIXED_SLOT and all other policies.
  BasicPolicy basic_policy() const { return BasicPolicyField::decode(value_); }

  // [extended_policy]: Only for non-FIXED_SLOT. The finer-grained policy.
  ExtendedPolicy extended_policy() const {
    DCHECK(basic_policy() == EXTENDED_POLICY);
    return ExtendedPolicyField::decode(value_);
  }

  // [fixed_slot_index]: Only for FIXED_SLOT.
  int fixed_slot_index() const {
    DCHECK(HasFixedSlotPolicy());
    return static_cast<int>(static_cast<int64_t>(value_) >>
                            FixedSlotIndexField::kShift);
  }

  // [fixed_register_index]: Only for FIXED_REGISTER or FIXED_FP_REGISTER.
  int fixed_register_index() const {
    DCHECK(HasFixedRegisterPolicy() || HasFixedFPRegisterPolicy());
    return FixedRegisterField::decode(value_);
  }

  // [virtual_register]: The virtual register ID for this operand.
  int32_t virtual_register() const {
    return static_cast<int32_t>(VirtualRegisterField::decode(value_));
  }

  // [lifetime]: Only for non-FIXED_SLOT.
  bool IsUsedAtStart() const {
    return basic_policy() == EXTENDED_POLICY &&
           LifetimeField::decode(value_) == USED_AT_START;
  }

  INSTRUCTION_OPERAND_CASTS(UnallocatedOperand, UNALLOCATED)

  // The encoding used for UnallocatedOperand operands depends on the policy
  // that is stored within the operand. The FIXED_SLOT policy uses a compact
  // encoding because it accommodates a larger payload.
  //
  // For FIXED_SLOT policy:
  //     +------------------------------------------------+
  //     |      slot_index   | 0 | virtual_register | 001 |
  //     +------------------------------------------------+
  //
  // For all other (extended) policies:
  //     +-----------------------------------------------------+
  //     |  reg_index  | L | PPP |  1 | virtual_register | 001 |
  //     +-----------------------------------------------------+
  //     L ... Lifetime
  //     P ... Policy
  //
  // The slot index is a signed value which requires us to decode it manually
  // instead of using the base::BitField utility class.

  STATIC_ASSERT(KindField::kSize == 3);

  using VirtualRegisterField = base::BitField64<uint32_t, 3, 32>;

  // base::BitFields for all unallocated operands.
  using BasicPolicyField = base::BitField64<BasicPolicy, 35, 1>;

  // BitFields specific to BasicPolicy::FIXED_SLOT.
  using FixedSlotIndexField = base::BitField64<int, 36, 28>;

  // BitFields specific to BasicPolicy::EXTENDED_POLICY.
  using ExtendedPolicyField = base::BitField64<ExtendedPolicy, 36, 3>;
  using LifetimeField = base::BitField64<Lifetime, 39, 1>;
  using HasSecondaryStorageField = base::BitField64<bool, 40, 1>;
  using FixedRegisterField = base::BitField64<int, 41, 6>;
  using SecondaryStorageField = base::BitField64<int, 47, 3>;

 private:
  explicit UnallocatedOperand(int virtual_register)
      : InstructionOperand(UNALLOCATED) {
    value_ |=
        VirtualRegisterField::encode(static_cast<uint32_t>(virtual_register));
  }
};
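
// A minimal usage sketch (illustrative, not part of the original header),
// assuming virtual register 7 and machine register code 3:
//
//   // "v7 may live in any register", with the default USED_AT_END lifetime.
//   UnallocatedOperand any_reg(UnallocatedOperand::MUST_HAVE_REGISTER, 7);
//   // "Pin v7 to the register with code 3."
//   UnallocatedOperand fixed_reg(UnallocatedOperand::FIXED_REGISTER, 3, 7);
//   // "Pin v7 to frame slot -1" (uses the compact FIXED_SLOT encoding).
//   UnallocatedOperand fixed_slot(UnallocatedOperand::FIXED_SLOT, -1, 7);
//   DCHECK(fixed_reg.HasFixedRegisterPolicy());
//   DCHECK_EQ(3, fixed_reg.fixed_register_index());
//   DCHECK_EQ(-1, fixed_slot.fixed_slot_index());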

class ConstantOperand : public InstructionOperand {
 public:
  explicit ConstantOperand(int virtual_register)
      : InstructionOperand(CONSTANT) {
    value_ |=
        VirtualRegisterField::encode(static_cast<uint32_t>(virtual_register));
  }

  int32_t virtual_register() const {
    return static_cast<int32_t>(VirtualRegisterField::decode(value_));
  }

  static ConstantOperand* New(Zone* zone, int virtual_register) {
    return InstructionOperand::New(zone, ConstantOperand(virtual_register));
  }

  INSTRUCTION_OPERAND_CASTS(ConstantOperand, CONSTANT)

  STATIC_ASSERT(KindField::kSize == 3);
  using VirtualRegisterField = base::BitField64<uint32_t, 3, 32>;
};

class ImmediateOperand : public InstructionOperand {
 public:
  enum ImmediateType { INLINE, INDEXED };

  explicit ImmediateOperand(ImmediateType type, int32_t value)
      : InstructionOperand(IMMEDIATE) {
    value_ |= TypeField::encode(type);
    value_ |= static_cast<uint64_t>(static_cast<int64_t>(value))
              << ValueField::kShift;
  }

  ImmediateType type() const { return TypeField::decode(value_); }

  int32_t inline_value() const {
    DCHECK_EQ(INLINE, type());
    return static_cast<int64_t>(value_) >> ValueField::kShift;
  }

  int32_t indexed_value() const {
    DCHECK_EQ(INDEXED, type());
    return static_cast<int64_t>(value_) >> ValueField::kShift;
  }

  static ImmediateOperand* New(Zone* zone, ImmediateType type, int32_t value) {
    return InstructionOperand::New(zone, ImmediateOperand(type, value));
  }

  INSTRUCTION_OPERAND_CASTS(ImmediateOperand, IMMEDIATE)

  STATIC_ASSERT(KindField::kSize == 3);
  using TypeField = base::BitField64<ImmediateType, 3, 1>;
  using ValueField = base::BitField64<int32_t, 32, 32>;
};

class PendingOperand : public InstructionOperand {
 public:
  PendingOperand() : InstructionOperand(PENDING) {}
  explicit PendingOperand(PendingOperand* next_operand) : PendingOperand() {
    set_next(next_operand);
  }

  void set_next(PendingOperand* next) {
    DCHECK_NULL(this->next());
    uintptr_t shifted_value =
        reinterpret_cast<uintptr_t>(next) >> kPointerShift;
    DCHECK_EQ(reinterpret_cast<uintptr_t>(next),
              shifted_value << kPointerShift);
    value_ |= NextOperandField::encode(static_cast<uint64_t>(shifted_value));
  }

  PendingOperand* next() const {
    uintptr_t shifted_value =
        static_cast<uint64_t>(NextOperandField::decode(value_));
    return reinterpret_cast<PendingOperand*>(shifted_value << kPointerShift);
  }

  static PendingOperand* New(Zone* zone, PendingOperand* previous_operand) {
    return InstructionOperand::New(zone, PendingOperand(previous_operand));
  }

  INSTRUCTION_OPERAND_CASTS(PendingOperand, PENDING)

 private:
  // Operands are uint64_t values and so are aligned to 8 byte boundaries,
  // therefore we can shift off the bottom three zeros without losing data.
  static const uint64_t kPointerShift = 3;
  STATIC_ASSERT(alignof(InstructionOperand) >= (1 << kPointerShift));

  STATIC_ASSERT(KindField::kSize == 3);
  using NextOperandField = base::BitField64<uint64_t, 3, 61>;
};
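
// Worked example (illustrative, not part of the original header): because
// InstructionOperand is 8-byte aligned, the low three bits of a
// PendingOperand* are always zero, so the pointer round-trips through
// NextOperandField unchanged, e.g.:
//
//   pointer            0x00007f1234567008   (low 3 bits are zero)
//   >> kPointerShift   0x00000fe2468ace01   (fits the 61-bit field)
//   << kPointerShift   0x00007f1234567008   (original pointer restored)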

class LocationOperand : public InstructionOperand {
 public:
  enum LocationKind { REGISTER, STACK_SLOT };

  LocationOperand(InstructionOperand::Kind operand_kind,
                  LocationOperand::LocationKind location_kind,
                  MachineRepresentation rep, int index)
      : InstructionOperand(operand_kind) {
    DCHECK_IMPLIES(location_kind == REGISTER, index >= 0);
    DCHECK(IsSupportedRepresentation(rep));
    value_ |= LocationKindField::encode(location_kind);
    value_ |= RepresentationField::encode(rep);
    value_ |= static_cast<uint64_t>(static_cast<int64_t>(index))
              << IndexField::kShift;
  }

  int index() const {
    DCHECK(IsStackSlot() || IsFPStackSlot());
    return static_cast<int64_t>(value_) >> IndexField::kShift;
  }

  int register_code() const {
    DCHECK(IsRegister() || IsFPRegister());
    return static_cast<int64_t>(value_) >> IndexField::kShift;
  }

  Register GetRegister() const {
    DCHECK(IsRegister());
    return Register::from_code(register_code());
  }

  FloatRegister GetFloatRegister() const {
    DCHECK(IsFloatRegister());
    return FloatRegister::from_code(register_code());
  }

  DoubleRegister GetDoubleRegister() const {
    // On platforms where FloatRegister, DoubleRegister, and Simd128Register
    // are all the same type, it's convenient to treat everything as a
    // DoubleRegister, so be lax about type checking here.
    DCHECK(IsFPRegister());
    return DoubleRegister::from_code(register_code());
  }

  Simd128Register GetSimd128Register() const {
    DCHECK(IsSimd128Register());
    return Simd128Register::from_code(register_code());
  }

  LocationKind location_kind() const {
    return LocationKindField::decode(value_);
  }

  MachineRepresentation representation() const {
    return RepresentationField::decode(value_);
  }

  static bool IsSupportedRepresentation(MachineRepresentation rep) {
    switch (rep) {
      case MachineRepresentation::kWord32:
      case MachineRepresentation::kWord64:
      case MachineRepresentation::kFloat32:
      case MachineRepresentation::kFloat64:
      case MachineRepresentation::kSimd128:
      case MachineRepresentation::kTaggedSigned:
      case MachineRepresentation::kTaggedPointer:
      case MachineRepresentation::kTagged:
      case MachineRepresentation::kCompressedPointer:
      case MachineRepresentation::kCompressed:
        return true;
      case MachineRepresentation::kBit:
      case MachineRepresentation::kWord8:
      case MachineRepresentation::kWord16:
      case MachineRepresentation::kNone:
        return false;
    }
    UNREACHABLE();
  }

  // Return true if the locations can be moved to one another.
  bool IsCompatible(LocationOperand* op);

  static LocationOperand* cast(InstructionOperand* op) {
    DCHECK(op->IsAnyLocationOperand());
    return static_cast<LocationOperand*>(op);
  }

  static const LocationOperand* cast(const InstructionOperand* op) {
    DCHECK(op->IsAnyLocationOperand());
    return static_cast<const LocationOperand*>(op);
  }

  static LocationOperand cast(const InstructionOperand& op) {
    DCHECK(op.IsAnyLocationOperand());
    return *static_cast<const LocationOperand*>(&op);
  }

  STATIC_ASSERT(KindField::kSize == 3);
  using LocationKindField = base::BitField64<LocationKind, 3, 2>;
  using RepresentationField = base::BitField64<MachineRepresentation, 5, 8>;
  using IndexField = base::BitField64<int32_t, 35, 29>;
};

class AllocatedOperand : public LocationOperand {
 public:
  AllocatedOperand(LocationKind kind, MachineRepresentation rep, int index)
      : LocationOperand(ALLOCATED, kind, rep, index) {}

  static AllocatedOperand* New(Zone* zone, LocationKind kind,
                               MachineRepresentation rep, int index) {
    return InstructionOperand::New(zone, AllocatedOperand(kind, rep, index));
  }

  INSTRUCTION_OPERAND_CASTS(AllocatedOperand, ALLOCATED)
};
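
// A minimal sketch (illustrative, not part of the original header): after
// register allocation, a tagged value in the register with code 2 and a
// double spilled to frame slot 5 could be described as:
//
//   AllocatedOperand reg(LocationOperand::REGISTER,
//                        MachineRepresentation::kTagged, 2);
//   AllocatedOperand slot(LocationOperand::STACK_SLOT,
//                         MachineRepresentation::kFloat64, 5);
//   DCHECK(reg.IsRegister());           // non-FP register
//   DCHECK(slot.IsDoubleStackSlot());
//   DCHECK_EQ(2, reg.register_code());
//   DCHECK_EQ(5, slot.index());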

#undef INSTRUCTION_OPERAND_CASTS

bool InstructionOperand::IsAnyLocationOperand() const {
  return this->kind() >= FIRST_LOCATION_OPERAND_KIND;
}

bool InstructionOperand::IsLocationOperand() const {
  return IsAnyLocationOperand() &&
         !IsFloatingPoint(LocationOperand::cast(this)->representation());
}

bool InstructionOperand::IsFPLocationOperand() const {
  return IsAnyLocationOperand() &&
         IsFloatingPoint(LocationOperand::cast(this)->representation());
}

bool InstructionOperand::IsAnyRegister() const {
  return IsAnyLocationOperand() &&
         LocationOperand::cast(this)->location_kind() ==
             LocationOperand::REGISTER;
}

bool InstructionOperand::IsRegister() const {
  return IsAnyRegister() &&
         !IsFloatingPoint(LocationOperand::cast(this)->representation());
}

bool InstructionOperand::IsFPRegister() const {
  return IsAnyRegister() &&
         IsFloatingPoint(LocationOperand::cast(this)->representation());
}

bool InstructionOperand::IsFloatRegister() const {
  return IsAnyRegister() && LocationOperand::cast(this)->representation() ==
                                MachineRepresentation::kFloat32;
}

bool InstructionOperand::IsDoubleRegister() const {
  return IsAnyRegister() && LocationOperand::cast(this)->representation() ==
                                MachineRepresentation::kFloat64;
}

bool InstructionOperand::IsSimd128Register() const {
  return IsAnyRegister() && LocationOperand::cast(this)->representation() ==
                                MachineRepresentation::kSimd128;
}

bool InstructionOperand::IsAnyStackSlot() const {
  return IsAnyLocationOperand() &&
         LocationOperand::cast(this)->location_kind() ==
             LocationOperand::STACK_SLOT;
}

bool InstructionOperand::IsStackSlot() const {
  return IsAnyStackSlot() &&
         !IsFloatingPoint(LocationOperand::cast(this)->representation());
}

bool InstructionOperand::IsFPStackSlot() const {
  return IsAnyStackSlot() &&
         IsFloatingPoint(LocationOperand::cast(this)->representation());
}

bool InstructionOperand::IsFloatStackSlot() const {
  return IsAnyLocationOperand() &&
         LocationOperand::cast(this)->location_kind() ==
             LocationOperand::STACK_SLOT &&
         LocationOperand::cast(this)->representation() ==
             MachineRepresentation::kFloat32;
}

bool InstructionOperand::IsDoubleStackSlot() const {
  return IsAnyLocationOperand() &&
         LocationOperand::cast(this)->location_kind() ==
             LocationOperand::STACK_SLOT &&
         LocationOperand::cast(this)->representation() ==
             MachineRepresentation::kFloat64;
}

bool InstructionOperand::IsSimd128StackSlot() const {
  return IsAnyLocationOperand() &&
         LocationOperand::cast(this)->location_kind() ==
             LocationOperand::STACK_SLOT &&
         LocationOperand::cast(this)->representation() ==
             MachineRepresentation::kSimd128;
}

uint64_t InstructionOperand::GetCanonicalizedValue() const {
  if (IsAnyLocationOperand()) {
    MachineRepresentation canonical = MachineRepresentation::kNone;
    if (IsFPRegister()) {
      if (kSimpleFPAliasing) {
        // We treat all FP register operands the same for simple aliasing.
        canonical = MachineRepresentation::kFloat64;
      } else {
        // We need to distinguish FP register operands of different reps when
        // aliasing is not simple (e.g. ARM).
        canonical = LocationOperand::cast(this)->representation();
      }
    }
    return InstructionOperand::KindField::update(
        LocationOperand::RepresentationField::update(this->value_, canonical),
        LocationOperand::ALLOCATED);
  }
  return this->value_;
}

// Required for maps that don't care about machine type.
struct CompareOperandModuloType {
  bool operator()(const InstructionOperand& a,
                  const InstructionOperand& b) const {
    return a.CompareCanonicalized(b);
  }
};
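
// A minimal sketch (illustrative, not part of the original header):
// CompareOperandModuloType keys a map by location while ignoring the machine
// type, so on targets with simple FP aliasing a kFloat32 and a kFloat64
// operand for the same register collapse into a single entry:
//
//   std::map<InstructionOperand, int, CompareOperandModuloType> uses;
//   uses[AllocatedOperand(LocationOperand::REGISTER,
//                         MachineRepresentation::kFloat32, 0)] = 1;
//   uses[AllocatedOperand(LocationOperand::REGISTER,
//                         MachineRepresentation::kFloat64, 0)] = 2;
//   // uses.size() == 1 when kSimpleFPAliasing is true.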

class V8_EXPORT_PRIVATE MoveOperands final
    : public NON_EXPORTED_BASE(ZoneObject) {
 public:
  MoveOperands(const InstructionOperand& source,
               const InstructionOperand& destination)
      : source_(source), destination_(destination) {
    DCHECK(!source.IsInvalid() && !destination.IsInvalid());
  }

  MoveOperands(const MoveOperands&) = delete;
  MoveOperands& operator=(const MoveOperands&) = delete;

  const InstructionOperand& source() const { return source_; }
  InstructionOperand& source() { return source_; }
  void set_source(const InstructionOperand& operand) { source_ = operand; }

  const InstructionOperand& destination() const { return destination_; }
  InstructionOperand& destination() { return destination_; }
  void set_destination(const InstructionOperand& operand) {
    destination_ = operand;
  }

  // The gap resolver marks moves as "in-progress" by clearing the
  // destination (but not the source).
  bool IsPending() const {
    return destination_.IsInvalid() && !source_.IsInvalid();
  }
  void SetPending() { destination_ = InstructionOperand(); }

  // A move is redundant if it's been eliminated or if its source and
  // destination are the same.
  bool IsRedundant() const {
    DCHECK_IMPLIES(!destination_.IsInvalid(), !destination_.IsConstant());
    return IsEliminated() || source_.EqualsCanonicalized(destination_);
  }

  // We clear both operands to indicate a move that's been eliminated.
  void Eliminate() { source_ = destination_ = InstructionOperand(); }
  bool IsEliminated() const {
    DCHECK_IMPLIES(source_.IsInvalid(), destination_.IsInvalid());
    return source_.IsInvalid();
  }

  // APIs to aid debugging. For general-stream APIs, use operator<<.
  void Print() const;

 private:
  InstructionOperand source_;
  InstructionOperand destination_;
};

V8_EXPORT_PRIVATE std::ostream& operator<<(std::ostream&, const MoveOperands&);

class V8_EXPORT_PRIVATE ParallelMove final
    : public NON_EXPORTED_BASE(ZoneVector<MoveOperands*>),
      public NON_EXPORTED_BASE(ZoneObject) {
 public:
  explicit ParallelMove(Zone* zone) : ZoneVector<MoveOperands*>(zone) {}
  ParallelMove(const ParallelMove&) = delete;
  ParallelMove& operator=(const ParallelMove&) = delete;

  MoveOperands* AddMove(const InstructionOperand& from,
                        const InstructionOperand& to) {
    Zone* zone = get_allocator().zone();
    return AddMove(from, to, zone);
  }

  MoveOperands* AddMove(const InstructionOperand& from,
                        const InstructionOperand& to,
                        Zone* operand_allocation_zone) {
    if (from.EqualsCanonicalized(to)) return nullptr;
    MoveOperands* move = operand_allocation_zone->New<MoveOperands>(from, to);
    if (empty()) reserve(4);
    push_back(move);
    return move;
  }

  bool IsRedundant() const;

  // Prepare this ParallelMove to insert move as if it happened in a subsequent
  // ParallelMove. move->source() may be changed. Any MoveOperands added to
  // to_eliminate must be Eliminated.
  void PrepareInsertAfter(MoveOperands* move,
                          ZoneVector<MoveOperands*>* to_eliminate) const;
};
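
// A minimal sketch (illustrative, not part of the original header), assuming
// a Zone* zone and two allocated operands reg_a and reg_b: moves that are
// canonically no-ops are dropped as they are added.
//
//   ParallelMove* moves = zone->New<ParallelMove>(zone);
//   moves->AddMove(reg_a, reg_b);  // recorded
//   moves->AddMove(reg_a, reg_a);  // redundant, returns nullptr
//   DCHECK_EQ(1u, moves->size());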

std::ostream& operator<<(std::ostream&, const ParallelMove&);

class ReferenceMap final : public ZoneObject {
 public:
  explicit ReferenceMap(Zone* zone)
      : reference_operands_(8, zone), instruction_position_(-1) {}

  const ZoneVector<InstructionOperand>& reference_operands() const {
    return reference_operands_;
  }
  int instruction_position() const { return instruction_position_; }

  void set_instruction_position(int pos) {
    DCHECK_EQ(-1, instruction_position_);
    instruction_position_ = pos;
  }

  void RecordReference(const AllocatedOperand& op);

 private:
  friend std::ostream& operator<<(std::ostream&, const ReferenceMap&);

  ZoneVector<InstructionOperand> reference_operands_;
  int instruction_position_;
};

std::ostream& operator<<(std::ostream&, const ReferenceMap&);

class InstructionBlock;

class V8_EXPORT_PRIVATE Instruction final {
 public:
  Instruction(const Instruction&) = delete;
  Instruction& operator=(const Instruction&) = delete;

  size_t OutputCount() const { return OutputCountField::decode(bit_field_); }
  const InstructionOperand* OutputAt(size_t i) const {
    DCHECK_LT(i, OutputCount());
    return &operands_[i];
  }
  InstructionOperand* OutputAt(size_t i) {
    DCHECK_LT(i, OutputCount());
    return &operands_[i];
  }

  bool HasOutput() const { return OutputCount() > 0; }
  const InstructionOperand* Output() const { return OutputAt(0); }
  InstructionOperand* Output() { return OutputAt(0); }

  size_t InputCount() const { return InputCountField::decode(bit_field_); }
  const InstructionOperand* InputAt(size_t i) const {
    DCHECK_LT(i, InputCount());
    return &operands_[OutputCount() + i];
  }
  InstructionOperand* InputAt(size_t i) {
    DCHECK_LT(i, InputCount());
    return &operands_[OutputCount() + i];
  }

  size_t TempCount() const { return TempCountField::decode(bit_field_); }
  const InstructionOperand* TempAt(size_t i) const {
    DCHECK_LT(i, TempCount());
    return &operands_[OutputCount() + InputCount() + i];
  }
  InstructionOperand* TempAt(size_t i) {
    DCHECK_LT(i, TempCount());
    return &operands_[OutputCount() + InputCount() + i];
  }

  InstructionCode opcode() const { return opcode_; }
  ArchOpcode arch_opcode() const { return ArchOpcodeField::decode(opcode()); }
  AddressingMode addressing_mode() const {
    return AddressingModeField::decode(opcode());
  }
  FlagsMode flags_mode() const { return FlagsModeField::decode(opcode()); }
  FlagsCondition flags_condition() const {
    return FlagsConditionField::decode(opcode());
  }

  static Instruction* New(Zone* zone, InstructionCode opcode) {
    return New(zone, opcode, 0, nullptr, 0, nullptr, 0, nullptr);
  }

  static Instruction* New(Zone* zone, InstructionCode opcode,
                          size_t output_count, InstructionOperand* outputs,
                          size_t input_count, InstructionOperand* inputs,
                          size_t temp_count, InstructionOperand* temps) {
    DCHECK(output_count == 0 || outputs != nullptr);
    DCHECK(input_count == 0 || inputs != nullptr);
    DCHECK(temp_count == 0 || temps != nullptr);
    // TODO(turbofan): Handle this gracefully. See crbug.com/582702.
    CHECK(InputCountField::is_valid(input_count));

    size_t total_extra_ops = output_count + input_count + temp_count;
    if (total_extra_ops != 0) total_extra_ops--;
    int size = static_cast<int>(
        RoundUp(sizeof(Instruction), sizeof(InstructionOperand)) +
        total_extra_ops * sizeof(InstructionOperand));
    return new (zone->Allocate<Instruction>(size)) Instruction(
        opcode, output_count, outputs, input_count, inputs, temp_count, temps);
  }
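
  // Sizing sketch (illustrative, not part of the original header): the
  // trailing operands_[1] member already reserves room for one operand, so
  // for e.g. 1 output + 2 inputs + 0 temps the factory above allocates
  // total_extra_ops = 3 - 1 = 2 extra InstructionOperand slots on top of
  // RoundUp(sizeof(Instruction), sizeof(InstructionOperand)), leaving all
  // operands contiguous in operands_[].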

  Instruction* MarkAsCall() {
    bit_field_ = IsCallField::update(bit_field_, true);
    return this;
  }
  bool IsCall() const { return IsCallField::decode(bit_field_); }
  bool NeedsReferenceMap() const { return IsCall(); }
  bool HasReferenceMap() const { return reference_map_ != nullptr; }

  bool ClobbersRegisters() const { return IsCall(); }
  bool ClobbersTemps() const { return IsCall(); }
  bool ClobbersDoubleRegisters() const { return IsCall(); }
  ReferenceMap* reference_map() const { return reference_map_; }

  void set_reference_map(ReferenceMap* map) {
    DCHECK(NeedsReferenceMap());
    DCHECK(!reference_map_);
    reference_map_ = map;
  }

  void OverwriteWithNop() {
    opcode_ = ArchOpcodeField::encode(kArchNop);
    bit_field_ = 0;
    reference_map_ = nullptr;
  }

  bool IsNop() const { return arch_opcode() == kArchNop; }

  bool IsDeoptimizeCall() const {
    return arch_opcode() == ArchOpcode::kArchDeoptimize ||
           FlagsModeField::decode(opcode()) == kFlags_deoptimize ||
           FlagsModeField::decode(opcode()) == kFlags_deoptimize_and_poison;
  }

  bool IsTrap() const {
    return FlagsModeField::decode(opcode()) == kFlags_trap;
  }

  bool IsJump() const { return arch_opcode() == ArchOpcode::kArchJmp; }
  bool IsRet() const { return arch_opcode() == ArchOpcode::kArchRet; }
  bool IsTailCall() const {
    return arch_opcode() <= ArchOpcode::kArchTailCallWasm;
  }
  bool IsThrow() const {
    return arch_opcode() == ArchOpcode::kArchThrowTerminator;
  }

  static constexpr bool IsCallWithDescriptorFlags(InstructionCode arch_opcode) {
    return arch_opcode <= ArchOpcode::kArchCallBuiltinPointer;
  }
  bool IsCallWithDescriptorFlags() const {
    return IsCallWithDescriptorFlags(arch_opcode());
  }
  bool HasCallDescriptorFlag(CallDescriptor::Flag flag) const {
    DCHECK(IsCallWithDescriptorFlags());
    STATIC_ASSERT(CallDescriptor::kFlagsBitsEncodedInInstructionCode == 10);
#ifdef DEBUG
    static constexpr int kInstructionCodeFlagsMask =
        ((1 << CallDescriptor::kFlagsBitsEncodedInInstructionCode) - 1);
    DCHECK_EQ(static_cast<int>(flag) & kInstructionCodeFlagsMask, flag);
#endif
    return MiscField::decode(opcode()) & flag;
  }

  enum GapPosition {
    START,
    END,
    FIRST_GAP_POSITION = START,
    LAST_GAP_POSITION = END
  };

  ParallelMove* GetOrCreateParallelMove(GapPosition pos, Zone* zone) {
    if (parallel_moves_[pos] == nullptr) {
      parallel_moves_[pos] = zone->New<ParallelMove>(zone);
    }
    return parallel_moves_[pos];
  }

  ParallelMove* GetParallelMove(GapPosition pos) {
    return parallel_moves_[pos];
  }

  const ParallelMove* GetParallelMove(GapPosition pos) const {
    return parallel_moves_[pos];
  }

  bool AreMovesRedundant() const;

  ParallelMove* const* parallel_moves() const { return &parallel_moves_[0]; }
  ParallelMove** parallel_moves() { return &parallel_moves_[0]; }

  // The block_id may be invalidated in JumpThreading. It is only important for
  // register allocation, to avoid searching for blocks from instruction
  // indexes.
  InstructionBlock* block() const { return block_; }
  void set_block(InstructionBlock* block) {
    DCHECK_NOT_NULL(block);
    block_ = block;
  }

  // APIs to aid debugging. For general-stream APIs, use operator<<.
  void Print() const;

  using OutputCountField = base::BitField<size_t, 0, 8>;
  using InputCountField = base::BitField<size_t, 8, 16>;
  using TempCountField = base::BitField<size_t, 24, 6>;

  static const size_t kMaxOutputCount = OutputCountField::kMax;
  static const size_t kMaxInputCount = InputCountField::kMax;
  static const size_t kMaxTempCount = TempCountField::kMax;

 private:
  explicit Instruction(InstructionCode opcode);

  Instruction(InstructionCode opcode, size_t output_count,
              InstructionOperand* outputs, size_t input_count,
              InstructionOperand* inputs, size_t temp_count,
              InstructionOperand* temps);

  using IsCallField = base::BitField<bool, 30, 1>;

  InstructionCode opcode_;
  uint32_t bit_field_;
  ParallelMove* parallel_moves_[2];
  ReferenceMap* reference_map_;
  InstructionBlock* block_;
  InstructionOperand operands_[1];
};

std::ostream& operator<<(std::ostream&, const Instruction&);

class RpoNumber final {
 public:
  static const int kInvalidRpoNumber = -1;
  int ToInt() const {
    DCHECK(IsValid());
    return index_;
  }
  size_t ToSize() const {
    DCHECK(IsValid());
    return static_cast<size_t>(index_);
  }
  bool IsValid() const { return index_ >= 0; }
  static RpoNumber FromInt(int index) { return RpoNumber(index); }
  static RpoNumber Invalid() { return RpoNumber(kInvalidRpoNumber); }

  bool IsNext(const RpoNumber other) const {
    DCHECK(IsValid());
    return other.index_ == this->index_ + 1;
  }

  RpoNumber Next() const {
    DCHECK(IsValid());
    return RpoNumber(index_ + 1);
  }

  // Comparison operators.
  bool operator==(RpoNumber other) const { return index_ == other.index_; }
  bool operator!=(RpoNumber other) const { return index_ != other.index_; }
  bool operator>(RpoNumber other) const { return index_ > other.index_; }
  bool operator<(RpoNumber other) const { return index_ < other.index_; }
  bool operator<=(RpoNumber other) const { return index_ <= other.index_; }
  bool operator>=(RpoNumber other) const { return index_ >= other.index_; }

 private:
  explicit RpoNumber(int32_t index) : index_(index) {}
  int32_t index_;
};

V8_EXPORT_PRIVATE std::ostream& operator<<(std::ostream&, const RpoNumber&);

class V8_EXPORT_PRIVATE Constant final {
 public:
  enum Type {
    kInt32,
    kInt64,
    kFloat32,
    kFloat64,
    kExternalReference,
    kCompressedHeapObject,
    kHeapObject,
    kRpoNumber,
    kDelayedStringConstant
  };

  explicit Constant(int32_t v);
  explicit Constant(int64_t v) : type_(kInt64), value_(v) {}
  explicit Constant(float v) : type_(kFloat32), value_(bit_cast<int32_t>(v)) {}
  explicit Constant(double v) : type_(kFloat64), value_(bit_cast<int64_t>(v)) {}
  explicit Constant(ExternalReference ref)
      : type_(kExternalReference), value_(bit_cast<intptr_t>(ref.address())) {}
  explicit Constant(Handle<HeapObject> obj, bool is_compressed = false)
      : type_(is_compressed ? kCompressedHeapObject : kHeapObject),
        value_(bit_cast<intptr_t>(obj)) {}
  explicit Constant(RpoNumber rpo) : type_(kRpoNumber), value_(rpo.ToInt()) {}
  explicit Constant(const StringConstantBase* str)
      : type_(kDelayedStringConstant), value_(bit_cast<intptr_t>(str)) {}
  explicit Constant(RelocatablePtrConstantInfo info);

  Type type() const { return type_; }

  RelocInfo::Mode rmode() const { return rmode_; }

  int32_t ToInt32() const {
    DCHECK(type() == kInt32 || type() == kInt64);
    const int32_t value = static_cast<int32_t>(value_);
    DCHECK_EQ(value_, static_cast<int64_t>(value));
    return value;
  }

  int64_t ToInt64() const {
    if (type() == kInt32) return ToInt32();
    DCHECK_EQ(kInt64, type());
    return value_;
  }

  float ToFloat32() const {
    // TODO(ahaas): We should remove this function. If value_ has the bit
    // representation of a signalling NaN, then returning it as float can cause
    // the signalling bit to flip, and value_ is returned as a quiet NaN.
    DCHECK_EQ(kFloat32, type());
    return bit_cast<float>(static_cast<int32_t>(value_));
  }

  uint32_t ToFloat32AsInt() const {
    DCHECK_EQ(kFloat32, type());
    return bit_cast<uint32_t>(static_cast<int32_t>(value_));
  }

  Double ToFloat64() const {
    DCHECK_EQ(kFloat64, type());
    return Double(bit_cast<uint64_t>(value_));
  }

  ExternalReference ToExternalReference() const {
    DCHECK_EQ(kExternalReference, type());
    return ExternalReference::FromRawAddress(static_cast<Address>(value_));
  }

  RpoNumber ToRpoNumber() const {
    DCHECK_EQ(kRpoNumber, type());
    return RpoNumber::FromInt(static_cast<int>(value_));
  }

  Handle<HeapObject> ToHeapObject() const;
  Handle<Code> ToCode() const;
  const StringConstantBase* ToDelayedStringConstant() const;

 private:
  Type type_;
  RelocInfo::Mode rmode_ = RelocInfo::NONE;
  int64_t value_;
};
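
// A minimal sketch (illustrative, not part of the original header): read
// float32 constants back via ToFloat32AsInt() when the exact bit pattern
// matters (e.g. signalling NaNs), since ToFloat32() may quieten the NaN:
//
//   Constant c(1.5f);
//   uint32_t bits = c.ToFloat32AsInt();  // exact IEEE-754 bits of 1.5f
//   float value = c.ToFloat32();         // fine for non-NaN payloads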

std::ostream& operator<<(std::ostream&, const Constant&);

// Forward declarations.
class FrameStateDescriptor;

enum class StateValueKind : uint8_t {
  kArgumentsElements,
  kArgumentsLength,
  kPlain,
  kOptimizedOut,
  kNested,
  kDuplicate
};

class StateValueDescriptor {
 public:
  StateValueDescriptor()
      : kind_(StateValueKind::kPlain), type_(MachineType::AnyTagged()) {}

  static StateValueDescriptor ArgumentsElements(ArgumentsStateType type) {
    StateValueDescriptor descr(StateValueKind::kArgumentsElements,
                               MachineType::AnyTagged());
    descr.args_type_ = type;
    return descr;
  }
  static StateValueDescriptor ArgumentsLength() {
    return StateValueDescriptor(StateValueKind::kArgumentsLength,
                                MachineType::AnyTagged());
  }
  static StateValueDescriptor Plain(MachineType type) {
    return StateValueDescriptor(StateValueKind::kPlain, type);
  }
  static StateValueDescriptor OptimizedOut() {
    return StateValueDescriptor(StateValueKind::kOptimizedOut,
                                MachineType::AnyTagged());
  }
  static StateValueDescriptor Recursive(size_t id) {
    StateValueDescriptor descr(StateValueKind::kNested,
                               MachineType::AnyTagged());
    descr.id_ = id;
    return descr;
  }
  static StateValueDescriptor Duplicate(size_t id) {
    StateValueDescriptor descr(StateValueKind::kDuplicate,
                               MachineType::AnyTagged());
    descr.id_ = id;
    return descr;
  }

  bool IsArgumentsElements() const {
    return kind_ == StateValueKind::kArgumentsElements;
  }
  bool IsArgumentsLength() const {
    return kind_ == StateValueKind::kArgumentsLength;
  }
  bool IsPlain() const { return kind_ == StateValueKind::kPlain; }
  bool IsOptimizedOut() const { return kind_ == StateValueKind::kOptimizedOut; }
  bool IsNested() const { return kind_ == StateValueKind::kNested; }
  bool IsDuplicate() const { return kind_ == StateValueKind::kDuplicate; }
  MachineType type() const { return type_; }
  size_t id() const {
    DCHECK(kind_ == StateValueKind::kDuplicate ||
           kind_ == StateValueKind::kNested);
    return id_;
  }
  ArgumentsStateType arguments_type() const {
    DCHECK(kind_ == StateValueKind::kArgumentsElements);
    return args_type_;
  }

 private:
  StateValueDescriptor(StateValueKind kind, MachineType type)
      : kind_(kind), type_(type) {}

  StateValueKind kind_;
  MachineType type_;
  union {
    size_t id_;
    ArgumentsStateType args_type_;
  };
};

class StateValueList {
 public:
  explicit StateValueList(Zone* zone) : fields_(zone), nested_(zone) {}

  size_t size() { return fields_.size(); }

  struct Value {
    StateValueDescriptor* desc;
    StateValueList* nested;

    Value(StateValueDescriptor* desc, StateValueList* nested)
        : desc(desc), nested(nested) {}
  };

  class iterator {
   public:
    // Bare minimum of operators needed for range iteration.
    bool operator!=(const iterator& other) const {
      return field_iterator != other.field_iterator;
    }
    bool operator==(const iterator& other) const {
      return field_iterator == other.field_iterator;
    }
    iterator& operator++() {
      if (field_iterator->IsNested()) {
        nested_iterator++;
      }
      ++field_iterator;
      return *this;
    }
    Value operator*() {
      StateValueDescriptor* desc = &(*field_iterator);
      StateValueList* nested = desc->IsNested() ? *nested_iterator : nullptr;
      return Value(desc, nested);
    }

   private:
    friend class StateValueList;

    iterator(ZoneVector<StateValueDescriptor>::iterator it,
             ZoneVector<StateValueList*>::iterator nested)
        : field_iterator(it), nested_iterator(nested) {}

    ZoneVector<StateValueDescriptor>::iterator field_iterator;
    ZoneVector<StateValueList*>::iterator nested_iterator;
  };

  void ReserveSize(size_t size) { fields_.reserve(size); }

  StateValueList* PushRecursiveField(Zone* zone, size_t id) {
    fields_.push_back(StateValueDescriptor::Recursive(id));
    StateValueList* nested = zone->New<StateValueList>(zone);
    nested_.push_back(nested);
    return nested;
  }
  void PushArgumentsElements(ArgumentsStateType type) {
    fields_.push_back(StateValueDescriptor::ArgumentsElements(type));
  }
  void PushArgumentsLength() {
    fields_.push_back(StateValueDescriptor::ArgumentsLength());
  }
  void PushDuplicate(size_t id) {
    fields_.push_back(StateValueDescriptor::Duplicate(id));
  }
  void PushPlain(MachineType type) {
    fields_.push_back(StateValueDescriptor::Plain(type));
  }
  void PushOptimizedOut(size_t num = 1) {
    fields_.insert(fields_.end(), num, StateValueDescriptor::OptimizedOut());
  }

  iterator begin() { return iterator(fields_.begin(), nested_.begin()); }
  iterator end() { return iterator(fields_.end(), nested_.end()); }

 private:
  ZoneVector<StateValueDescriptor> fields_;
  ZoneVector<StateValueList*> nested_;
};
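
// A minimal sketch (illustrative, not part of the original header), assuming
// a Zone* zone: a frame state with one tagged local, one optimized-out value,
// and a nested object of two tagged fields could be described as:
//
//   StateValueList values(zone);
//   values.PushPlain(MachineType::AnyTagged());
//   values.PushOptimizedOut();
//   StateValueList* object = values.PushRecursiveField(zone, /*id=*/0);
//   object->PushPlain(MachineType::AnyTagged());
//   object->PushPlain(MachineType::AnyTagged());
//   // Iterating |values| yields three entries; the kNested entry carries
//   // |object| as Value::nested.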

class FrameStateDescriptor : public ZoneObject {
 public:
  FrameStateDescriptor(Zone* zone, FrameStateType type, BailoutId bailout_id,
                       OutputFrameStateCombine state_combine,
                       size_t parameters_count, size_t locals_count,
                       size_t stack_count,
                       MaybeHandle<SharedFunctionInfo> shared_info,
                       FrameStateDescriptor* outer_state = nullptr);

  FrameStateType type() const { return type_; }
  BailoutId bailout_id() const { return bailout_id_; }
  OutputFrameStateCombine state_combine() const { return frame_state_combine_; }
  size_t parameters_count() const { return parameters_count_; }
  size_t locals_count() const { return locals_count_; }
  size_t stack_count() const { return stack_count_; }
  MaybeHandle<SharedFunctionInfo> shared_info() const { return shared_info_; }
  FrameStateDescriptor* outer_state() const { return outer_state_; }
  bool HasContext() const {
    return FrameStateFunctionInfo::IsJSFunctionType(type_) ||
           type_ == FrameStateType::kBuiltinContinuation ||
           type_ == FrameStateType::kConstructStub;
  }

  // The frame height on the stack, in number of slots, as serialized into a
  // Translation and later used by the deoptimizer. Does *not* include
  // information from the chain of outer states. Unlike |GetSize| this does not
  // always include parameters, locals, and stack slots; instead, the returned
  // slot kinds depend on the frame type.
  size_t GetHeight() const;

  // Returns an overapproximation of the unoptimized stack frame size in bytes,
  // as later produced by the deoptimizer. Considers both this and the chain of
  // outer states.
  size_t total_conservative_frame_size_in_bytes() const {
    return total_conservative_frame_size_in_bytes_;
  }

  size_t GetSize() const;
  size_t GetTotalSize() const;
  size_t GetFrameCount() const;
  size_t GetJSFrameCount() const;

  StateValueList* GetStateValueDescriptors() { return &values_; }

  static const int kImpossibleValue = 0xdead;

 private:
  FrameStateType type_;
  BailoutId bailout_id_;
  OutputFrameStateCombine frame_state_combine_;
  const size_t parameters_count_;
  const size_t locals_count_;
  const size_t stack_count_;
  const size_t total_conservative_frame_size_in_bytes_;
  StateValueList values_;
  MaybeHandle<SharedFunctionInfo> const shared_info_;
  FrameStateDescriptor* const outer_state_;
};
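
// Usage sketch (illustrative only; the concrete argument values are made up):
// inlined frames are described by chaining descriptors through |outer_state|,
// so GetTotalSize() and GetFrameCount() walk the whole inlining chain while
// GetSize() and GetHeight() describe only this frame:
//
//   FrameStateDescriptor* caller = zone->New<FrameStateDescriptor>(
//       zone, FrameStateType::kInterpretedFunction, caller_bailout_id,
//       OutputFrameStateCombine::Ignore(), /*parameters_count=*/2,
//       /*locals_count=*/3, /*stack_count=*/0, caller_shared_info);
//   FrameStateDescriptor* callee = zone->New<FrameStateDescriptor>(
//       zone, FrameStateType::kInterpretedFunction, callee_bailout_id,
//       OutputFrameStateCombine::Ignore(), /*parameters_count=*/1,
//       /*locals_count=*/0, /*stack_count=*/0, callee_shared_info, caller);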

// A deoptimization entry is a pair of the reason why we deoptimize and the
// frame state descriptor that we have to go back to.
class DeoptimizationEntry final {
 public:
  DeoptimizationEntry() = default;
  DeoptimizationEntry(FrameStateDescriptor* descriptor, DeoptimizeKind kind,
                      DeoptimizeReason reason, FeedbackSource const& feedback)
      : descriptor_(descriptor),
        kind_(kind),
        reason_(reason),
        feedback_(feedback) {}

  FrameStateDescriptor* descriptor() const { return descriptor_; }
  DeoptimizeKind kind() const { return kind_; }
  DeoptimizeReason reason() const { return reason_; }
  FeedbackSource const& feedback() const { return feedback_; }

 private:
  FrameStateDescriptor* descriptor_ = nullptr;
  DeoptimizeKind kind_ = DeoptimizeKind::kEager;
  DeoptimizeReason reason_ = DeoptimizeReason::kUnknown;
  FeedbackSource feedback_ = FeedbackSource();
};
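
// Usage sketch (illustrative only): entries are normally created via
// InstructionSequence::AddDeoptimizationEntry() (declared below); the
// returned id can later be resolved back to the entry:
//
//   int deopt_id = sequence->AddDeoptimizationEntry(
//       frame_state_descriptor, DeoptimizeKind::kEager,
//       DeoptimizeReason::kWrongMap, FeedbackSource());
//   DeoptimizationEntry const& entry =
//       sequence->GetDeoptimizationEntry(deopt_id);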

using DeoptimizationVector = ZoneVector<DeoptimizationEntry>;

class V8_EXPORT_PRIVATE PhiInstruction final
    : public NON_EXPORTED_BASE(ZoneObject) {
 public:
  using Inputs = ZoneVector<InstructionOperand>;

  PhiInstruction(Zone* zone, int virtual_register, size_t input_count);

  void SetInput(size_t offset, int virtual_register);
  void RenameInput(size_t offset, int virtual_register);

  int virtual_register() const { return virtual_register_; }
  const IntVector& operands() const { return operands_; }

  // TODO(dcarney): this has no real business being here, since it's internal
  // to the register allocator, but putting it here was convenient.
  const InstructionOperand& output() const { return output_; }
  InstructionOperand& output() { return output_; }

 private:
  const int virtual_register_;
  InstructionOperand output_;
  IntVector operands_;
};
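
// Usage sketch (illustrative only): a phi merging two virtual registers at a
// control-flow join is allocated in the zone and attached to its block:
//
//   PhiInstruction* phi = zone->New<PhiInstruction>(
//       zone, /*virtual_register=*/42, /*input_count=*/2);
//   phi->SetInput(0, 17);  // value arriving from the first predecessor
//   phi->SetInput(1, 23);  // value arriving from the second predecessor
//   block->AddPhi(phi);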

// Analogue of BasicBlock for Instructions instead of Nodes.
class V8_EXPORT_PRIVATE InstructionBlock final
    : public NON_EXPORTED_BASE(ZoneObject) {
 public:
  InstructionBlock(Zone* zone, RpoNumber rpo_number, RpoNumber loop_header,
                   RpoNumber loop_end, RpoNumber dominator, bool deferred,
                   bool handler);

  // Instruction indexes (used by the register allocator).
  int first_instruction_index() const {
    DCHECK_LE(0, code_start_);
    DCHECK_LT(0, code_end_);
    DCHECK_GE(code_end_, code_start_);
    return code_start_;
  }
  int last_instruction_index() const {
    DCHECK_LE(0, code_start_);
    DCHECK_LT(0, code_end_);
    DCHECK_GE(code_end_, code_start_);
    return code_end_ - 1;
  }

  int32_t code_start() const { return code_start_; }
  void set_code_start(int32_t start) { code_start_ = start; }

  int32_t code_end() const { return code_end_; }
  void set_code_end(int32_t end) { code_end_ = end; }

  bool IsDeferred() const { return deferred_; }
  bool IsHandler() const { return handler_; }
  void MarkHandler() { handler_ = true; }
  void UnmarkHandler() { handler_ = false; }

  RpoNumber ao_number() const { return ao_number_; }
  RpoNumber rpo_number() const { return rpo_number_; }
  RpoNumber loop_header() const { return loop_header_; }
  RpoNumber loop_end() const {
    DCHECK(IsLoopHeader());
    return loop_end_;
  }
  inline bool IsLoopHeader() const { return loop_end_.IsValid(); }
  inline bool IsSwitchTarget() const { return switch_target_; }
  inline bool ShouldAlign() const { return alignment_; }

  using Predecessors = ZoneVector<RpoNumber>;
  Predecessors& predecessors() { return predecessors_; }
  const Predecessors& predecessors() const { return predecessors_; }
  size_t PredecessorCount() const { return predecessors_.size(); }
  size_t PredecessorIndexOf(RpoNumber rpo_number) const;

  using Successors = ZoneVector<RpoNumber>;
  Successors& successors() { return successors_; }
  const Successors& successors() const { return successors_; }
  size_t SuccessorCount() const { return successors_.size(); }

  RpoNumber dominator() const { return dominator_; }
  void set_dominator(RpoNumber dominator) { dominator_ = dominator; }

  using PhiInstructions = ZoneVector<PhiInstruction*>;
  const PhiInstructions& phis() const { return phis_; }
  PhiInstruction* PhiAt(size_t i) const { return phis_[i]; }
  void AddPhi(PhiInstruction* phi) { phis_.push_back(phi); }

  void set_ao_number(RpoNumber ao_number) { ao_number_ = ao_number; }

  void set_alignment(bool val) { alignment_ = val; }

  void set_switch_target(bool val) { switch_target_ = val; }

  bool needs_frame() const { return needs_frame_; }
  void mark_needs_frame() { needs_frame_ = true; }

  bool must_construct_frame() const { return must_construct_frame_; }
  void mark_must_construct_frame() { must_construct_frame_ = true; }

  bool must_deconstruct_frame() const { return must_deconstruct_frame_; }
  void mark_must_deconstruct_frame() { must_deconstruct_frame_ = true; }
  void clear_must_deconstruct_frame() { must_deconstruct_frame_ = false; }

 private:
  Successors successors_;
  Predecessors predecessors_;
  PhiInstructions phis_;
  RpoNumber ao_number_;       // Assembly order number.
  const RpoNumber rpo_number_;
  const RpoNumber loop_header_;
  const RpoNumber loop_end_;
  RpoNumber dominator_;
  int32_t code_start_;        // start index of arch-specific code.
  int32_t code_end_ = -1;     // end index of arch-specific code.
  const bool deferred_;       // Block contains deferred code.
  bool handler_;              // Block is a handler entry point.
  bool switch_target_ = false;
  bool alignment_ = false;    // insert alignment before this block
  bool needs_frame_ = false;
  bool must_construct_frame_ = false;
  bool must_deconstruct_frame_ = false;
};
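
// Usage sketch (illustrative only): once the code_start_/code_end_ indices
// have been assigned, a block's instructions can be walked through the owning
// InstructionSequence (declared below):
//
//   const InstructionBlock* block = sequence->InstructionBlockAt(rpo);
//   for (int i = block->first_instruction_index();
//        i <= block->last_instruction_index(); ++i) {
//     Instruction* instr = sequence->InstructionAt(i);
//     // ... inspect instr ...
//   }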

class InstructionSequence;

struct PrintableInstructionBlock {
  const InstructionBlock* block_;
  const InstructionSequence* code_;
};

std::ostream& operator<<(std::ostream&, const PrintableInstructionBlock&);
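
// Usage sketch (illustrative only): pairing a block with its sequence makes
// the block printable via the streaming operator above:
//
//   os << PrintableInstructionBlock{block, sequence};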

using ConstantDeque = ZoneDeque<Constant>;
using ConstantMap = std::map<int, Constant, std::less<int>,
                             ZoneAllocator<std::pair<const int, Constant> > >;

using InstructionDeque = ZoneDeque<Instruction*>;
using ReferenceMapDeque = ZoneDeque<ReferenceMap*>;
using InstructionBlocks = ZoneVector<InstructionBlock*>;

// Represents architecture-specific generated code before, during, and after
// register allocation.
class V8_EXPORT_PRIVATE InstructionSequence final
    : public NON_EXPORTED_BASE(ZoneObject) {
 public:
  static InstructionBlocks* InstructionBlocksFor(Zone* zone,
                                                 const Schedule* schedule);
  InstructionSequence(Isolate* isolate, Zone* zone,
                      InstructionBlocks* instruction_blocks);
  InstructionSequence(const InstructionSequence&) = delete;
  InstructionSequence& operator=(const InstructionSequence&) = delete;

  int NextVirtualRegister();
  int VirtualRegisterCount() const { return next_virtual_register_; }

  const InstructionBlocks& instruction_blocks() const {
    return *instruction_blocks_;
  }

  const InstructionBlocks& ao_blocks() const { return *ao_blocks_; }

  int InstructionBlockCount() const {
    return static_cast<int>(instruction_blocks_->size());
  }

  InstructionBlock* InstructionBlockAt(RpoNumber rpo_number) {
    return instruction_blocks_->at(rpo_number.ToSize());
  }

  int LastLoopInstructionIndex(const InstructionBlock* block) {
    return instruction_blocks_->at(block->loop_end().ToSize() - 1)
        ->last_instruction_index();
  }

  const InstructionBlock* InstructionBlockAt(RpoNumber rpo_number) const {
    return instruction_blocks_->at(rpo_number.ToSize());
  }

  InstructionBlock* GetInstructionBlock(int instruction_index) const;

  static MachineRepresentation DefaultRepresentation() {
    return MachineType::PointerRepresentation();
  }
  MachineRepresentation GetRepresentation(int virtual_register) const;
  void MarkAsRepresentation(MachineRepresentation rep, int virtual_register);

  bool IsReference(int virtual_register) const {
    return CanBeTaggedOrCompressedPointer(GetRepresentation(virtual_register));
  }
  bool IsFP(int virtual_register) const {
    return IsFloatingPoint(GetRepresentation(virtual_register));
  }
  int representation_mask() const { return representation_mask_; }
  bool HasFPVirtualRegisters() const {
    constexpr int kFPRepMask =
        RepresentationBit(MachineRepresentation::kFloat32) |
        RepresentationBit(MachineRepresentation::kFloat64) |
        RepresentationBit(MachineRepresentation::kSimd128);
    return (representation_mask() & kFPRepMask) != 0;
  }

  Instruction* GetBlockStart(RpoNumber rpo) const;

  using const_iterator = InstructionDeque::const_iterator;
  const_iterator begin() const { return instructions_.begin(); }
  const_iterator end() const { return instructions_.end(); }
  const InstructionDeque& instructions() const { return instructions_; }
  int LastInstructionIndex() const {
    return static_cast<int>(instructions().size()) - 1;
  }

  Instruction* InstructionAt(int index) const {
    DCHECK_LE(0, index);
    DCHECK_GT(instructions_.size(), index);
    return instructions_[index];
  }

  Isolate* isolate() const { return isolate_; }
  const ReferenceMapDeque* reference_maps() const { return &reference_maps_; }
  Zone* zone() const { return zone_; }

  // Used by the instruction selector while adding instructions.
  int AddInstruction(Instruction* instr);
  void StartBlock(RpoNumber rpo);
  void EndBlock(RpoNumber rpo);

  int AddConstant(int virtual_register, Constant constant) {
    // TODO(titzer): allow RPO numbers as constants?
    DCHECK_NE(Constant::kRpoNumber, constant.type());
    DCHECK(virtual_register >= 0 && virtual_register < next_virtual_register_);
    DCHECK(constants_.find(virtual_register) == constants_.end());
    constants_.insert(std::make_pair(virtual_register, constant));
    return virtual_register;
  }
  Constant GetConstant(int virtual_register) const {
    auto it = constants_.find(virtual_register);
    DCHECK(it != constants_.end());
    DCHECK_EQ(virtual_register, it->first);
    return it->second;
  }

  using Immediates = ZoneVector<Constant>;
  Immediates& immediates() { return immediates_; }

  ImmediateOperand AddImmediate(const Constant& constant) {
    if (constant.type() == Constant::kInt32 &&
        RelocInfo::IsNone(constant.rmode())) {
      return ImmediateOperand(ImmediateOperand::INLINE, constant.ToInt32());
    }
    int index = static_cast<int>(immediates_.size());
    immediates_.push_back(constant);
    return ImmediateOperand(ImmediateOperand::INDEXED, index);
  }

  Constant GetImmediate(const ImmediateOperand* op) const {
    switch (op->type()) {
      case ImmediateOperand::INLINE:
        return Constant(op->inline_value());
      case ImmediateOperand::INDEXED: {
        int index = op->indexed_value();
        DCHECK_LE(0, index);
        DCHECK_GT(immediates_.size(), index);
        return immediates_[index];
      }
    }
    UNREACHABLE();
  }

  int AddDeoptimizationEntry(FrameStateDescriptor* descriptor,
                             DeoptimizeKind kind, DeoptimizeReason reason,
                             FeedbackSource const& feedback);
  DeoptimizationEntry const& GetDeoptimizationEntry(int deoptimization_id);
  int GetDeoptimizationEntryCount() const {
    return static_cast<int>(deoptimization_entries_.size());
  }

  RpoNumber InputRpo(Instruction* instr, size_t index);

  bool GetSourcePosition(const Instruction* instr,
                         SourcePosition* result) const;
  void SetSourcePosition(const Instruction* instr, SourcePosition value);

  bool ContainsCall() const {
    for (Instruction* instr : instructions_) {
      if (instr->IsCall()) return true;
    }
    return false;
  }

  // APIs to aid debugging. For general-stream APIs, use operator<<.
  void Print() const;

  void PrintBlock(int block_id) const;

  void ValidateEdgeSplitForm() const;
  void ValidateDeferredBlockExitPaths() const;
  void ValidateDeferredBlockEntryPaths() const;
  void ValidateSSA() const;

  static void SetRegisterConfigurationForTesting(
      const RegisterConfiguration* regConfig);
  static void ClearRegisterConfigurationForTesting();

  void RecomputeAssemblyOrderForTesting();

 private:
  friend V8_EXPORT_PRIVATE std::ostream& operator<<(std::ostream&,
                                                    const InstructionSequence&);

  using SourcePositionMap = ZoneMap<const Instruction*, SourcePosition>;

  static const RegisterConfiguration* RegisterConfigurationForTesting();
  static const RegisterConfiguration* registerConfigurationForTesting_;

  // Puts the deferred blocks last and may rotate loops.
  void ComputeAssemblyOrder();

  Isolate* isolate_;
  Zone* const zone_;
  InstructionBlocks* const instruction_blocks_;
  InstructionBlocks* ao_blocks_;
  SourcePositionMap source_positions_;
  ConstantMap constants_;
  Immediates immediates_;
  InstructionDeque instructions_;
  int next_virtual_register_;
  ReferenceMapDeque reference_maps_;
  ZoneVector<MachineRepresentation> representations_;
  int representation_mask_;
  DeoptimizationVector deoptimization_entries_;

  // Used at construction time
  InstructionBlock* current_block_;
};
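
// Usage sketch (illustrative only): the instruction selector drives the
// sequence roughly as follows, emitting each scheduled block between
// StartBlock() and EndBlock() and registering constants and immediates as
// operands are created:
//
//   InstructionBlocks* blocks =
//       InstructionSequence::InstructionBlocksFor(zone, schedule);
//   InstructionSequence* sequence =
//       zone->New<InstructionSequence>(isolate, zone, blocks);
//   sequence->StartBlock(RpoNumber::FromInt(0));
//   int vreg = sequence->NextVirtualRegister();
//   sequence->AddConstant(vreg, Constant(42));
//   ImmediateOperand imm = sequence->AddImmediate(Constant(1));  // INLINE
//   sequence->AddInstruction(instr);  // |instr| built elsewhere
//   sequence->EndBlock(RpoNumber::FromInt(0));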

V8_EXPORT_PRIVATE std::ostream& operator<<(std::ostream&,
                                           const InstructionSequence&);
#undef INSTRUCTION_OPERAND_ALIGN

}  // namespace compiler
}  // namespace internal
}  // namespace v8

#endif  // V8_COMPILER_BACKEND_INSTRUCTION_H_