// Copyright 2014 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef V8_COMPILER_INSTRUCTION_H_
#define V8_COMPILER_INSTRUCTION_H_

#include <deque>
#include <iosfwd>
#include <map>
#include <set>

#include "src/base/compiler-specific.h"
#include "src/compiler/common-operator.h"
#include "src/compiler/frame.h"
#include "src/compiler/instruction-codes.h"
#include "src/compiler/opcodes.h"
#include "src/globals.h"
#include "src/macro-assembler.h"
#include "src/register-configuration.h"
#include "src/zone/zone-allocator.h"

namespace v8 {
namespace internal {

class SourcePosition;

namespace compiler {

class Schedule;
class SourcePositionTable;

class V8_EXPORT_PRIVATE InstructionOperand {
 public:
  static const int kInvalidVirtualRegister = -1;

  // TODO(dcarney): recover bit. INVALID can be represented as UNALLOCATED with
  // kInvalidVirtualRegister and some DCHECKS.
  enum Kind {
    INVALID,
    UNALLOCATED,
    CONSTANT,
    IMMEDIATE,
    // Location operand kinds.
    EXPLICIT,
    ALLOCATED,
    FIRST_LOCATION_OPERAND_KIND = EXPLICIT
    // Location operand kinds must be last.
  };

  InstructionOperand() : InstructionOperand(INVALID) {}

  Kind kind() const { return KindField::decode(value_); }

#define INSTRUCTION_OPERAND_PREDICATE(name, type) \
  bool Is##name() const { return kind() == type; }
  INSTRUCTION_OPERAND_PREDICATE(Invalid, INVALID)
  // UnallocatedOperands are place-holder operands created before register
  // allocation. They are later assigned registers and become
  // AllocatedOperands.
  INSTRUCTION_OPERAND_PREDICATE(Unallocated, UNALLOCATED)
  // Constant operands participate in register allocation. They are allocated
  // to registers but have a special "spilling" behavior. When a
  // ConstantOperand value must be rematerialized, it is loaded from an
  // immediate constant rather than reloaded from a spill slot.
  INSTRUCTION_OPERAND_PREDICATE(Constant, CONSTANT)
  // ImmediateOperands do not participate in register allocation and are only
  // embedded directly in instructions, e.g. small integers and on some
  // platforms Objects.
  INSTRUCTION_OPERAND_PREDICATE(Immediate, IMMEDIATE)
  // ExplicitOperands do not participate in register allocation. They are
  // created by the instruction selector for direct access to registers and
  // stack slots, completely bypassing the register allocator. They are never
  // associated with a virtual register.
  INSTRUCTION_OPERAND_PREDICATE(Explicit, EXPLICIT)
  // AllocatedOperands are registers or stack slots that are assigned by the
  // register allocator and are always associated with a virtual register.
  INSTRUCTION_OPERAND_PREDICATE(Allocated, ALLOCATED)
#undef INSTRUCTION_OPERAND_PREDICATE

  inline bool IsAnyLocationOperand() const;
  inline bool IsLocationOperand() const;
  inline bool IsFPLocationOperand() const;
  inline bool IsAnyRegister() const;
  inline bool IsRegister() const;
  inline bool IsFPRegister() const;
  inline bool IsFloatRegister() const;
  inline bool IsDoubleRegister() const;
  inline bool IsSimd128Register() const;
  inline bool IsAnyStackSlot() const;
  inline bool IsStackSlot() const;
  inline bool IsFPStackSlot() const;
  inline bool IsFloatStackSlot() const;
  inline bool IsDoubleStackSlot() const;
  inline bool IsSimd128StackSlot() const;

  template <typename SubKindOperand>
  static SubKindOperand* New(Zone* zone, const SubKindOperand& op) {
    void* buffer = zone->New(sizeof(op));
    return new (buffer) SubKindOperand(op);
  }

  static void ReplaceWith(InstructionOperand* dest,
                          const InstructionOperand* src) {
    *dest = *src;
  }

  bool Equals(const InstructionOperand& that) const {
    return this->value_ == that.value_;
  }

  bool Compare(const InstructionOperand& that) const {
    return this->value_ < that.value_;
  }

  bool EqualsCanonicalized(const InstructionOperand& that) const {
    return this->GetCanonicalizedValue() == that.GetCanonicalizedValue();
  }

  bool CompareCanonicalized(const InstructionOperand& that) const {
    return this->GetCanonicalizedValue() < that.GetCanonicalizedValue();
  }

  bool InterferesWith(const InstructionOperand& other) const;

  // APIs to aid debugging. For general-stream APIs, use operator<<.
  void Print(const RegisterConfiguration* config) const;
  void Print() const;

 protected:
  explicit InstructionOperand(Kind kind) : value_(KindField::encode(kind)) {}

  inline uint64_t GetCanonicalizedValue() const;

  class KindField : public BitField64<Kind, 0, 3> {};

  uint64_t value_;
};


typedef ZoneVector<InstructionOperand> InstructionOperandVector;


struct PrintableInstructionOperand {
  const RegisterConfiguration* register_configuration_;
  InstructionOperand op_;
};


std::ostream& operator<<(std::ostream& os,
                         const PrintableInstructionOperand& op);


#define INSTRUCTION_OPERAND_CASTS(OperandType, OperandKind)      \
                                                                  \
  static OperandType* cast(InstructionOperand* op) {              \
    DCHECK_EQ(OperandKind, op->kind());                           \
    return static_cast<OperandType*>(op);                         \
  }                                                               \
                                                                  \
  static const OperandType* cast(const InstructionOperand* op) {  \
    DCHECK_EQ(OperandKind, op->kind());                           \
    return static_cast<const OperandType*>(op);                   \
  }                                                               \
                                                                  \
  static OperandType cast(const InstructionOperand& op) {         \
    DCHECK_EQ(OperandKind, op.kind());                            \
    return *static_cast<const OperandType*>(&op);                 \
  }

class UnallocatedOperand : public InstructionOperand {
 public:
  enum BasicPolicy { FIXED_SLOT, EXTENDED_POLICY };

  enum ExtendedPolicy {
    NONE,
    ANY,
    FIXED_REGISTER,
    FIXED_FP_REGISTER,
    MUST_HAVE_REGISTER,
    MUST_HAVE_SLOT,
    SAME_AS_FIRST_INPUT
  };

  // Lifetime of operand inside the instruction.
  enum Lifetime {
    // A USED_AT_START operand is guaranteed to be live only at instruction
    // start. The register allocator is free to assign the same register to
    // some other operand used inside the instruction (i.e. a temporary or
    // an output).
    USED_AT_START,

    // A USED_AT_END operand is treated as live until the end of the
    // instruction. This means that the register allocator will not reuse its
    // register for any other operand inside the instruction.
    USED_AT_END
  };

  UnallocatedOperand(ExtendedPolicy policy, int virtual_register)
      : UnallocatedOperand(virtual_register) {
    value_ |= BasicPolicyField::encode(EXTENDED_POLICY);
    value_ |= ExtendedPolicyField::encode(policy);
    value_ |= LifetimeField::encode(USED_AT_END);
  }

  UnallocatedOperand(BasicPolicy policy, int index, int virtual_register)
      : UnallocatedOperand(virtual_register) {
    DCHECK(policy == FIXED_SLOT);
    value_ |= BasicPolicyField::encode(policy);
    value_ |= static_cast<int64_t>(index) << FixedSlotIndexField::kShift;
    DCHECK(this->fixed_slot_index() == index);
  }

  UnallocatedOperand(ExtendedPolicy policy, int index, int virtual_register)
      : UnallocatedOperand(virtual_register) {
    DCHECK(policy == FIXED_REGISTER || policy == FIXED_FP_REGISTER);
    value_ |= BasicPolicyField::encode(EXTENDED_POLICY);
    value_ |= ExtendedPolicyField::encode(policy);
    value_ |= LifetimeField::encode(USED_AT_END);
    value_ |= FixedRegisterField::encode(index);
  }

  UnallocatedOperand(ExtendedPolicy policy, Lifetime lifetime,
                     int virtual_register)
      : UnallocatedOperand(virtual_register) {
    value_ |= BasicPolicyField::encode(EXTENDED_POLICY);
    value_ |= ExtendedPolicyField::encode(policy);
    value_ |= LifetimeField::encode(lifetime);
  }

  UnallocatedOperand(int reg_id, int slot_id, int virtual_register)
      : UnallocatedOperand(FIXED_REGISTER, reg_id, virtual_register) {
    value_ |= HasSecondaryStorageField::encode(true);
    value_ |= SecondaryStorageField::encode(slot_id);
  }

  // Predicates for the operand policy.
  bool HasAnyPolicy() const {
    return basic_policy() == EXTENDED_POLICY && extended_policy() == ANY;
  }
  bool HasFixedPolicy() const {
    return basic_policy() == FIXED_SLOT ||
           extended_policy() == FIXED_REGISTER ||
           extended_policy() == FIXED_FP_REGISTER;
  }
  bool HasRegisterPolicy() const {
    return basic_policy() == EXTENDED_POLICY &&
           extended_policy() == MUST_HAVE_REGISTER;
  }
  bool HasSlotPolicy() const {
    return basic_policy() == EXTENDED_POLICY &&
           extended_policy() == MUST_HAVE_SLOT;
  }
  bool HasSameAsInputPolicy() const {
    return basic_policy() == EXTENDED_POLICY &&
           extended_policy() == SAME_AS_FIRST_INPUT;
  }
  bool HasFixedSlotPolicy() const { return basic_policy() == FIXED_SLOT; }
  bool HasFixedRegisterPolicy() const {
    return basic_policy() == EXTENDED_POLICY &&
           extended_policy() == FIXED_REGISTER;
  }
  bool HasFixedFPRegisterPolicy() const {
    return basic_policy() == EXTENDED_POLICY &&
           extended_policy() == FIXED_FP_REGISTER;
  }
  bool HasSecondaryStorage() const {
    return basic_policy() == EXTENDED_POLICY &&
           extended_policy() == FIXED_REGISTER &&
           HasSecondaryStorageField::decode(value_);
  }
  int GetSecondaryStorage() const {
    DCHECK(HasSecondaryStorage());
    return SecondaryStorageField::decode(value_);
  }

  // [basic_policy]: Distinguish between FIXED_SLOT and all other policies.
  BasicPolicy basic_policy() const {
    DCHECK_EQ(UNALLOCATED, kind());
    return BasicPolicyField::decode(value_);
  }

  // [extended_policy]: Only for non-FIXED_SLOT. The finer-grained policy.
  ExtendedPolicy extended_policy() const {
    DCHECK(basic_policy() == EXTENDED_POLICY);
    return ExtendedPolicyField::decode(value_);
  }

  // [fixed_slot_index]: Only for FIXED_SLOT.
  int fixed_slot_index() const {
    DCHECK(HasFixedSlotPolicy());
    return static_cast<int>(static_cast<int64_t>(value_) >>
                            FixedSlotIndexField::kShift);
  }

  // [fixed_register_index]: Only for FIXED_REGISTER or FIXED_FP_REGISTER.
  int fixed_register_index() const {
    DCHECK(HasFixedRegisterPolicy() || HasFixedFPRegisterPolicy());
    return FixedRegisterField::decode(value_);
  }

  // [virtual_register]: The virtual register ID for this operand.
  int32_t virtual_register() const {
    DCHECK_EQ(UNALLOCATED, kind());
    return static_cast<int32_t>(VirtualRegisterField::decode(value_));
  }

  // TODO(dcarney): remove this.
  void set_virtual_register(int32_t id) {
    DCHECK_EQ(UNALLOCATED, kind());
    value_ = VirtualRegisterField::update(value_, static_cast<uint32_t>(id));
  }

  // [lifetime]: Only for non-FIXED_SLOT.
  bool IsUsedAtStart() const {
    DCHECK(basic_policy() == EXTENDED_POLICY);
    return LifetimeField::decode(value_) == USED_AT_START;
  }

  INSTRUCTION_OPERAND_CASTS(UnallocatedOperand, UNALLOCATED);

  // The encoding used for UnallocatedOperand operands depends on the policy
  // stored within the operand. The FIXED_SLOT policy uses a compact encoding
  // because it accommodates a larger payload.
  //
  // For FIXED_SLOT policy:
  //     +------------------------------------------------+
  //     |      slot_index    | 0 | virtual_register | 001 |
  //     +------------------------------------------------+
  //
  // For all other (extended) policies:
  //     +-----------------------------------------------------+
  //     |  reg_index  | L | PPP | 1 | virtual_register | 001 |
  //     +-----------------------------------------------------+
  //     L ... Lifetime
  //     P ... Policy
  //
  // The slot index is a signed value which requires us to decode it manually
  // instead of using the BitField utility class.

  STATIC_ASSERT(KindField::kSize == 3);

  class VirtualRegisterField : public BitField64<uint32_t, 3, 32> {};

  // BitFields for all unallocated operands.
  class BasicPolicyField : public BitField64<BasicPolicy, 35, 1> {};

  // BitFields specific to BasicPolicy::FIXED_SLOT.
  class FixedSlotIndexField : public BitField64<int, 36, 28> {};

  // BitFields specific to BasicPolicy::EXTENDED_POLICY.
  class ExtendedPolicyField : public BitField64<ExtendedPolicy, 36, 3> {};
  class LifetimeField : public BitField64<Lifetime, 39, 1> {};
  class HasSecondaryStorageField : public BitField64<bool, 40, 1> {};
  class FixedRegisterField : public BitField64<int, 41, 6> {};
  class SecondaryStorageField : public BitField64<int, 47, 3> {};

 private:
  explicit UnallocatedOperand(int virtual_register)
      : InstructionOperand(UNALLOCATED) {
    value_ |=
        VirtualRegisterField::encode(static_cast<uint32_t>(virtual_register));
  }
};
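
// Illustrative example (not part of the original header): typical ways the
// instruction selector expresses operand constraints with UnallocatedOperand.
// The virtual register numbers, register code and slot index below are made
// up for the example.
//
//   // vreg 7 must end up in some general-purpose register:
//   UnallocatedOperand any_reg(UnallocatedOperand::MUST_HAVE_REGISTER, 7);
//
//   // vreg 8 must be placed in the fixed register with code 3:
//   UnallocatedOperand fixed(UnallocatedOperand::FIXED_REGISTER, 3, 8);
//   DCHECK(fixed.HasFixedRegisterPolicy());
//
//   // vreg 9 must live in stack slot 2 (the BasicPolicy overload):
//   UnallocatedOperand slot(UnallocatedOperand::FIXED_SLOT, 2, 9);
//   DCHECK_EQ(2, slot.fixed_slot_index());
//
//   // vreg 10 needs a register, but only until the instruction starts:
//   UnallocatedOperand tmp(UnallocatedOperand::MUST_HAVE_REGISTER,
//                          UnallocatedOperand::USED_AT_START, 10);
//   DCHECK(tmp.IsUsedAtStart());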


class ConstantOperand : public InstructionOperand {
 public:
  explicit ConstantOperand(int virtual_register)
      : InstructionOperand(CONSTANT) {
    value_ |=
        VirtualRegisterField::encode(static_cast<uint32_t>(virtual_register));
  }

  int32_t virtual_register() const {
    return static_cast<int32_t>(VirtualRegisterField::decode(value_));
  }

  static ConstantOperand* New(Zone* zone, int virtual_register) {
    return InstructionOperand::New(zone, ConstantOperand(virtual_register));
  }

  INSTRUCTION_OPERAND_CASTS(ConstantOperand, CONSTANT);

  STATIC_ASSERT(KindField::kSize == 3);
  class VirtualRegisterField : public BitField64<uint32_t, 3, 32> {};
};


class ImmediateOperand : public InstructionOperand {
 public:
  enum ImmediateType { INLINE, INDEXED };

  explicit ImmediateOperand(ImmediateType type, int32_t value)
      : InstructionOperand(IMMEDIATE) {
    value_ |= TypeField::encode(type);
    value_ |= static_cast<int64_t>(value) << ValueField::kShift;
  }

  ImmediateType type() const { return TypeField::decode(value_); }

  int32_t inline_value() const {
    DCHECK_EQ(INLINE, type());
    return static_cast<int64_t>(value_) >> ValueField::kShift;
  }

  int32_t indexed_value() const {
    DCHECK_EQ(INDEXED, type());
    return static_cast<int64_t>(value_) >> ValueField::kShift;
  }

  static ImmediateOperand* New(Zone* zone, ImmediateType type, int32_t value) {
    return InstructionOperand::New(zone, ImmediateOperand(type, value));
  }

  INSTRUCTION_OPERAND_CASTS(ImmediateOperand, IMMEDIATE);

  STATIC_ASSERT(KindField::kSize == 3);
  class TypeField : public BitField64<ImmediateType, 3, 1> {};
  class ValueField : public BitField64<int32_t, 32, 32> {};
};
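
// Illustrative example (not part of the original header): small constants are
// encoded INLINE in the operand itself, while larger or relocatable constants
// are stored out-of-line and referenced by an INDEXED position (see
// InstructionSequence::AddImmediate() further below). The values here are
// arbitrary.
//
//   ImmediateOperand small(ImmediateOperand::INLINE, 42);
//   DCHECK_EQ(42, small.inline_value());
//
//   ImmediateOperand big(ImmediateOperand::INDEXED, 0);  // immediates_[0]
//   DCHECK_EQ(0, big.indexed_value());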


class LocationOperand : public InstructionOperand {
 public:
  enum LocationKind { REGISTER, STACK_SLOT };

  LocationOperand(InstructionOperand::Kind operand_kind,
                  LocationOperand::LocationKind location_kind,
                  MachineRepresentation rep, int index)
      : InstructionOperand(operand_kind) {
    DCHECK_IMPLIES(location_kind == REGISTER, index >= 0);
    DCHECK(IsSupportedRepresentation(rep));
    value_ |= LocationKindField::encode(location_kind);
    value_ |= RepresentationField::encode(rep);
    value_ |= static_cast<int64_t>(index) << IndexField::kShift;
  }

  int index() const {
    DCHECK(IsStackSlot() || IsFPStackSlot());
    return static_cast<int64_t>(value_) >> IndexField::kShift;
  }

  int register_code() const {
    DCHECK(IsRegister() || IsFPRegister());
    return static_cast<int64_t>(value_) >> IndexField::kShift;
  }

  Register GetRegister() const {
    DCHECK(IsRegister());
    return Register::from_code(register_code());
  }

  FloatRegister GetFloatRegister() const {
    DCHECK(IsFloatRegister());
    return FloatRegister::from_code(register_code());
  }

  DoubleRegister GetDoubleRegister() const {
    // On platforms where FloatRegister, DoubleRegister, and Simd128Register
    // are all the same type, it's convenient to treat everything as a
    // DoubleRegister, so be lax about type checking here.
    DCHECK(IsFPRegister());
    return DoubleRegister::from_code(register_code());
  }

  Simd128Register GetSimd128Register() const {
    DCHECK(IsSimd128Register());
    return Simd128Register::from_code(register_code());
  }

  LocationKind location_kind() const {
    return LocationKindField::decode(value_);
  }

  MachineRepresentation representation() const {
    return RepresentationField::decode(value_);
  }

  static bool IsSupportedRepresentation(MachineRepresentation rep) {
    switch (rep) {
      case MachineRepresentation::kWord32:
      case MachineRepresentation::kWord64:
      case MachineRepresentation::kFloat32:
      case MachineRepresentation::kFloat64:
      case MachineRepresentation::kSimd128:
      case MachineRepresentation::kSimd1x4:
      case MachineRepresentation::kSimd1x8:
      case MachineRepresentation::kSimd1x16:
      case MachineRepresentation::kTaggedSigned:
      case MachineRepresentation::kTaggedPointer:
      case MachineRepresentation::kTagged:
        return true;
      case MachineRepresentation::kBit:
      case MachineRepresentation::kWord8:
      case MachineRepresentation::kWord16:
      case MachineRepresentation::kNone:
        return false;
    }
    UNREACHABLE();
    return false;
  }

  static LocationOperand* cast(InstructionOperand* op) {
    DCHECK(op->IsAnyLocationOperand());
    return static_cast<LocationOperand*>(op);
  }

  static const LocationOperand* cast(const InstructionOperand* op) {
    DCHECK(op->IsAnyLocationOperand());
    return static_cast<const LocationOperand*>(op);
  }

  static LocationOperand cast(const InstructionOperand& op) {
    DCHECK(op.IsAnyLocationOperand());
    return *static_cast<const LocationOperand*>(&op);
  }

  STATIC_ASSERT(KindField::kSize == 3);
  class LocationKindField : public BitField64<LocationKind, 3, 2> {};
  class RepresentationField : public BitField64<MachineRepresentation, 5, 8> {};
  class IndexField : public BitField64<int32_t, 35, 29> {};
};

class V8_EXPORT_PRIVATE ExplicitOperand
    : public NON_EXPORTED_BASE(LocationOperand) {
 public:
  ExplicitOperand(LocationKind kind, MachineRepresentation rep, int index);

  static ExplicitOperand* New(Zone* zone, LocationKind kind,
                              MachineRepresentation rep, int index) {
    return InstructionOperand::New(zone, ExplicitOperand(kind, rep, index));
  }

  INSTRUCTION_OPERAND_CASTS(ExplicitOperand, EXPLICIT);
};


class AllocatedOperand : public LocationOperand {
 public:
  AllocatedOperand(LocationKind kind, MachineRepresentation rep, int index)
      : LocationOperand(ALLOCATED, kind, rep, index) {}

  static AllocatedOperand* New(Zone* zone, LocationKind kind,
                               MachineRepresentation rep, int index) {
    return InstructionOperand::New(zone, AllocatedOperand(kind, rep, index));
  }

  INSTRUCTION_OPERAND_CASTS(AllocatedOperand, ALLOCATED);
};
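
// Illustrative example (not part of the original header): how the register
// allocator materializes a location for a value and how that location is
// later queried. Register code 2 and slot index 1 are arbitrary example
// values.
//
//   AllocatedOperand in_reg(LocationOperand::REGISTER,
//                           MachineRepresentation::kWord32, 2);
//   DCHECK(in_reg.IsRegister());
//   DCHECK_EQ(2, LocationOperand::cast(in_reg).register_code());
//
//   AllocatedOperand on_stack(LocationOperand::STACK_SLOT,
//                             MachineRepresentation::kFloat64, 1);
//   DCHECK(on_stack.IsDoubleStackSlot());
//   DCHECK_EQ(1, LocationOperand::cast(on_stack).index());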


#undef INSTRUCTION_OPERAND_CASTS

bool InstructionOperand::IsAnyLocationOperand() const {
  return this->kind() >= FIRST_LOCATION_OPERAND_KIND;
}

bool InstructionOperand::IsLocationOperand() const {
  return IsAnyLocationOperand() &&
         !IsFloatingPoint(LocationOperand::cast(this)->representation());
}

bool InstructionOperand::IsFPLocationOperand() const {
  return IsAnyLocationOperand() &&
         IsFloatingPoint(LocationOperand::cast(this)->representation());
}

bool InstructionOperand::IsAnyRegister() const {
  return IsAnyLocationOperand() &&
         LocationOperand::cast(this)->location_kind() ==
             LocationOperand::REGISTER;
}


bool InstructionOperand::IsRegister() const {
  return IsAnyRegister() &&
         !IsFloatingPoint(LocationOperand::cast(this)->representation());
}

bool InstructionOperand::IsFPRegister() const {
  return IsAnyRegister() &&
         IsFloatingPoint(LocationOperand::cast(this)->representation());
}

bool InstructionOperand::IsFloatRegister() const {
  return IsAnyRegister() &&
         LocationOperand::cast(this)->representation() ==
             MachineRepresentation::kFloat32;
}

bool InstructionOperand::IsDoubleRegister() const {
  return IsAnyRegister() &&
         LocationOperand::cast(this)->representation() ==
             MachineRepresentation::kFloat64;
}

bool InstructionOperand::IsSimd128Register() const {
  return IsAnyRegister() &&
         LocationOperand::cast(this)->representation() ==
             MachineRepresentation::kSimd128;
}

bool InstructionOperand::IsAnyStackSlot() const {
  return IsAnyLocationOperand() &&
         LocationOperand::cast(this)->location_kind() ==
             LocationOperand::STACK_SLOT;
}

bool InstructionOperand::IsStackSlot() const {
  return IsAnyStackSlot() &&
         !IsFloatingPoint(LocationOperand::cast(this)->representation());
}

bool InstructionOperand::IsFPStackSlot() const {
  return IsAnyStackSlot() &&
         IsFloatingPoint(LocationOperand::cast(this)->representation());
}

bool InstructionOperand::IsFloatStackSlot() const {
  return IsAnyLocationOperand() &&
         LocationOperand::cast(this)->location_kind() ==
             LocationOperand::STACK_SLOT &&
         LocationOperand::cast(this)->representation() ==
             MachineRepresentation::kFloat32;
}

bool InstructionOperand::IsDoubleStackSlot() const {
  return IsAnyLocationOperand() &&
         LocationOperand::cast(this)->location_kind() ==
             LocationOperand::STACK_SLOT &&
         LocationOperand::cast(this)->representation() ==
             MachineRepresentation::kFloat64;
}

bool InstructionOperand::IsSimd128StackSlot() const {
  return IsAnyLocationOperand() &&
         LocationOperand::cast(this)->location_kind() ==
             LocationOperand::STACK_SLOT &&
         LocationOperand::cast(this)->representation() ==
             MachineRepresentation::kSimd128;
}

uint64_t InstructionOperand::GetCanonicalizedValue() const {
  if (IsAnyLocationOperand()) {
    MachineRepresentation canonical = MachineRepresentation::kNone;
    if (IsFPRegister()) {
      if (kSimpleFPAliasing) {
        // We treat all FP register operands the same for simple aliasing.
        canonical = MachineRepresentation::kFloat64;
      } else {
        // We need to distinguish FP register operands of different reps when
        // aliasing is not simple (e.g. ARM).
        canonical = LocationOperand::cast(this)->representation();
      }
    }
    return InstructionOperand::KindField::update(
        LocationOperand::RepresentationField::update(this->value_, canonical),
        LocationOperand::EXPLICIT);
  }
  return this->value_;
}

// Required for maps that don't care about machine type.
struct CompareOperandModuloType {
  bool operator()(const InstructionOperand& a,
                  const InstructionOperand& b) const {
    return a.CompareCanonicalized(b);
  }
};

class V8_EXPORT_PRIVATE MoveOperands final
    : public NON_EXPORTED_BASE(ZoneObject) {
 public:
  MoveOperands(const InstructionOperand& source,
               const InstructionOperand& destination)
      : source_(source), destination_(destination) {
    DCHECK(!source.IsInvalid() && !destination.IsInvalid());
  }

  const InstructionOperand& source() const { return source_; }
  InstructionOperand& source() { return source_; }
  void set_source(const InstructionOperand& operand) { source_ = operand; }

  const InstructionOperand& destination() const { return destination_; }
  InstructionOperand& destination() { return destination_; }
  void set_destination(const InstructionOperand& operand) {
    destination_ = operand;
  }

  // The gap resolver marks moves as "in-progress" by clearing the
  // destination (but not the source).
  bool IsPending() const {
    return destination_.IsInvalid() && !source_.IsInvalid();
  }
  void SetPending() { destination_ = InstructionOperand(); }

  // A move is redundant if it has been eliminated or if its source and
  // destination are the same.
  bool IsRedundant() const {
    DCHECK_IMPLIES(!destination_.IsInvalid(), !destination_.IsConstant());
    return IsEliminated() || source_.EqualsCanonicalized(destination_);
  }

  // We clear both operands to indicate a move that has been eliminated.
  void Eliminate() { source_ = destination_ = InstructionOperand(); }
  bool IsEliminated() const {
    DCHECK_IMPLIES(source_.IsInvalid(), destination_.IsInvalid());
    return source_.IsInvalid();
  }

  // APIs to aid debugging. For general-stream APIs, use operator<<.
  void Print(const RegisterConfiguration* config) const;
  void Print() const;

 private:
  InstructionOperand source_;
  InstructionOperand destination_;

  DISALLOW_COPY_AND_ASSIGN(MoveOperands);
};


struct PrintableMoveOperands {
  const RegisterConfiguration* register_configuration_;
  const MoveOperands* move_operands_;
};


std::ostream& operator<<(std::ostream& os, const PrintableMoveOperands& mo);

class V8_EXPORT_PRIVATE ParallelMove final
    : public NON_EXPORTED_BASE(ZoneVector<MoveOperands*>),
      public NON_EXPORTED_BASE(ZoneObject) {
 public:
  explicit ParallelMove(Zone* zone) : ZoneVector<MoveOperands*>(zone) {
    reserve(4);
  }

  MoveOperands* AddMove(const InstructionOperand& from,
                        const InstructionOperand& to) {
    Zone* zone = get_allocator().zone();
    return AddMove(from, to, zone);
  }

  MoveOperands* AddMove(const InstructionOperand& from,
                        const InstructionOperand& to,
                        Zone* operand_allocation_zone) {
    MoveOperands* move = new (operand_allocation_zone) MoveOperands(from, to);
    push_back(move);
    return move;
  }

  bool IsRedundant() const;

  // Prepare this ParallelMove to insert move as if it happened in a
  // subsequent ParallelMove. move->source() may be changed. Any MoveOperands
  // added to to_eliminate must be Eliminated.
  void PrepareInsertAfter(MoveOperands* move,
                          ZoneVector<MoveOperands*>* to_eliminate) const;

 private:
  DISALLOW_COPY_AND_ASSIGN(ParallelMove);
};
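
// Illustrative example (not part of the original header): a gap's parallel
// move is a list of (source -> destination) pairs; moves whose source equals
// their destination (modulo representation) are redundant. The zone, register
// codes and representation below are example values.
//
//   ParallelMove* moves = new (zone) ParallelMove(zone);
//   AllocatedOperand r0(LocationOperand::REGISTER,
//                       MachineRepresentation::kTagged, 0);
//   AllocatedOperand r1(LocationOperand::REGISTER,
//                       MachineRepresentation::kTagged, 1);
//   MoveOperands* move = moves->AddMove(r0, r1);  // r1 <- r0
//   DCHECK(!move->IsRedundant());
//   MoveOperands* nop = moves->AddMove(r1, r1);   // r1 <- r1
//   DCHECK(nop->IsRedundant());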


struct PrintableParallelMove {
  const RegisterConfiguration* register_configuration_;
  const ParallelMove* parallel_move_;
};


std::ostream& operator<<(std::ostream& os, const PrintableParallelMove& pm);


class ReferenceMap final : public ZoneObject {
 public:
  explicit ReferenceMap(Zone* zone)
      : reference_operands_(8, zone), instruction_position_(-1) {}

  const ZoneVector<InstructionOperand>& reference_operands() const {
    return reference_operands_;
  }
  int instruction_position() const { return instruction_position_; }

  void set_instruction_position(int pos) {
    DCHECK(instruction_position_ == -1);
    instruction_position_ = pos;
  }

  void RecordReference(const AllocatedOperand& op);

 private:
  friend std::ostream& operator<<(std::ostream& os, const ReferenceMap& pm);

  ZoneVector<InstructionOperand> reference_operands_;
  int instruction_position_;
};

std::ostream& operator<<(std::ostream& os, const ReferenceMap& pm);

class InstructionBlock;

class V8_EXPORT_PRIVATE Instruction final {
 public:
  size_t OutputCount() const { return OutputCountField::decode(bit_field_); }
  const InstructionOperand* OutputAt(size_t i) const {
    DCHECK(i < OutputCount());
    return &operands_[i];
  }
  InstructionOperand* OutputAt(size_t i) {
    DCHECK(i < OutputCount());
    return &operands_[i];
  }

  bool HasOutput() const { return OutputCount() == 1; }
  const InstructionOperand* Output() const { return OutputAt(0); }
  InstructionOperand* Output() { return OutputAt(0); }

  size_t InputCount() const { return InputCountField::decode(bit_field_); }
  const InstructionOperand* InputAt(size_t i) const {
    DCHECK(i < InputCount());
    return &operands_[OutputCount() + i];
  }
  InstructionOperand* InputAt(size_t i) {
    DCHECK(i < InputCount());
    return &operands_[OutputCount() + i];
  }

  size_t TempCount() const { return TempCountField::decode(bit_field_); }
  const InstructionOperand* TempAt(size_t i) const {
    DCHECK(i < TempCount());
    return &operands_[OutputCount() + InputCount() + i];
  }
  InstructionOperand* TempAt(size_t i) {
    DCHECK(i < TempCount());
    return &operands_[OutputCount() + InputCount() + i];
  }

  InstructionCode opcode() const { return opcode_; }
  ArchOpcode arch_opcode() const { return ArchOpcodeField::decode(opcode()); }
  AddressingMode addressing_mode() const {
    return AddressingModeField::decode(opcode());
  }
  FlagsMode flags_mode() const { return FlagsModeField::decode(opcode()); }
  FlagsCondition flags_condition() const {
    return FlagsConditionField::decode(opcode());
  }

  static Instruction* New(Zone* zone, InstructionCode opcode) {
    return New(zone, opcode, 0, nullptr, 0, nullptr, 0, nullptr);
  }

  static Instruction* New(Zone* zone, InstructionCode opcode,
                          size_t output_count, InstructionOperand* outputs,
                          size_t input_count, InstructionOperand* inputs,
                          size_t temp_count, InstructionOperand* temps) {
    DCHECK(opcode >= 0);
    DCHECK(output_count == 0 || outputs != nullptr);
    DCHECK(input_count == 0 || inputs != nullptr);
    DCHECK(temp_count == 0 || temps != nullptr);
    // TODO(jarin/mstarzinger): Handle this gracefully. See crbug.com/582702.
    CHECK(InputCountField::is_valid(input_count));

    size_t total_extra_ops = output_count + input_count + temp_count;
    if (total_extra_ops != 0) total_extra_ops--;
    int size = static_cast<int>(
        RoundUp(sizeof(Instruction), sizeof(InstructionOperand)) +
        total_extra_ops * sizeof(InstructionOperand));
    return new (zone->New(size)) Instruction(
        opcode, output_count, outputs, input_count, inputs, temp_count, temps);
  }

  Instruction* MarkAsCall() {
    bit_field_ = IsCallField::update(bit_field_, true);
    return this;
  }
  bool IsCall() const { return IsCallField::decode(bit_field_); }
  bool NeedsReferenceMap() const { return IsCall(); }
  bool HasReferenceMap() const { return reference_map_ != nullptr; }

  bool ClobbersRegisters() const { return IsCall(); }
  bool ClobbersTemps() const { return IsCall(); }
  bool ClobbersDoubleRegisters() const { return IsCall(); }
  ReferenceMap* reference_map() const { return reference_map_; }

  void set_reference_map(ReferenceMap* map) {
    DCHECK(NeedsReferenceMap());
    DCHECK(!reference_map_);
    reference_map_ = map;
  }

  void OverwriteWithNop() {
    opcode_ = ArchOpcodeField::encode(kArchNop);
    bit_field_ = 0;
    reference_map_ = nullptr;
  }

  bool IsNop() const { return arch_opcode() == kArchNop; }

  bool IsDeoptimizeCall() const {
    return arch_opcode() == ArchOpcode::kArchDeoptimize ||
           FlagsModeField::decode(opcode()) == kFlags_deoptimize;
  }

  bool IsJump() const { return arch_opcode() == ArchOpcode::kArchJmp; }
  bool IsRet() const { return arch_opcode() == ArchOpcode::kArchRet; }
  bool IsTailCall() const {
    return arch_opcode() == ArchOpcode::kArchTailCallCodeObject ||
           arch_opcode() == ArchOpcode::kArchTailCallCodeObjectFromJSFunction ||
           arch_opcode() == ArchOpcode::kArchTailCallJSFunctionFromJSFunction ||
           arch_opcode() == ArchOpcode::kArchTailCallAddress;
  }
  bool IsThrow() const {
    return arch_opcode() == ArchOpcode::kArchThrowTerminator;
  }

  enum GapPosition {
    START,
    END,
    FIRST_GAP_POSITION = START,
    LAST_GAP_POSITION = END
  };

  ParallelMove* GetOrCreateParallelMove(GapPosition pos, Zone* zone) {
    if (parallel_moves_[pos] == nullptr) {
      parallel_moves_[pos] = new (zone) ParallelMove(zone);
    }
    return parallel_moves_[pos];
  }

  ParallelMove* GetParallelMove(GapPosition pos) {
    return parallel_moves_[pos];
  }

  const ParallelMove* GetParallelMove(GapPosition pos) const {
    return parallel_moves_[pos];
  }

  bool AreMovesRedundant() const;

  ParallelMove* const* parallel_moves() const { return &parallel_moves_[0]; }
  ParallelMove** parallel_moves() { return &parallel_moves_[0]; }

  // The block_id may be invalidated in JumpThreading. It is only important
  // for register allocation, to avoid searching for blocks from instruction
  // indexes.
  InstructionBlock* block() const { return block_; }
  void set_block(InstructionBlock* block) {
    DCHECK_NOT_NULL(block);
    block_ = block;
  }

  // APIs to aid debugging. For general-stream APIs, use operator<<.
  void Print(const RegisterConfiguration* config) const;
  void Print() const;

  typedef BitField<size_t, 0, 8> OutputCountField;
  typedef BitField<size_t, 8, 16> InputCountField;
  typedef BitField<size_t, 24, 6> TempCountField;

  static const size_t kMaxOutputCount = OutputCountField::kMax;
  static const size_t kMaxInputCount = InputCountField::kMax;
  static const size_t kMaxTempCount = TempCountField::kMax;

 private:
  explicit Instruction(InstructionCode opcode);

  Instruction(InstructionCode opcode, size_t output_count,
              InstructionOperand* outputs, size_t input_count,
              InstructionOperand* inputs, size_t temp_count,
              InstructionOperand* temps);

  typedef BitField<bool, 30, 1> IsCallField;

  InstructionCode opcode_;
  uint32_t bit_field_;
  ParallelMove* parallel_moves_[2];
  ReferenceMap* reference_map_;
  InstructionBlock* block_;
  InstructionOperand operands_[1];

  DISALLOW_COPY_AND_ASSIGN(Instruction);
};
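
// Illustrative example (not part of the original header): building an
// instruction with one output and two inputs, roughly as the instruction
// selector does. `zone` and `kSomeArchAddOpcode` are hypothetical stand-ins
// for a real Zone and a real InstructionCode; the virtual register numbers
// are made up.
//
//   InstructionOperand outputs[] = {
//       UnallocatedOperand(UnallocatedOperand::MUST_HAVE_REGISTER, 10)};
//   InstructionOperand inputs[] = {
//       UnallocatedOperand(UnallocatedOperand::MUST_HAVE_REGISTER, 11),
//       UnallocatedOperand(UnallocatedOperand::MUST_HAVE_REGISTER, 12)};
//   Instruction* add = Instruction::New(zone, kSomeArchAddOpcode, 1, outputs,
//                                       2, inputs, 0, nullptr);
//   DCHECK(add->HasOutput());
//   DCHECK_EQ(2u, add->InputCount());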


struct PrintableInstruction {
  const RegisterConfiguration* register_configuration_;
  const Instruction* instr_;
};
std::ostream& operator<<(std::ostream& os, const PrintableInstruction& instr);


class RpoNumber final {
 public:
  static const int kInvalidRpoNumber = -1;
  int ToInt() const {
    DCHECK(IsValid());
    return index_;
  }
  size_t ToSize() const {
    DCHECK(IsValid());
    return static_cast<size_t>(index_);
  }
  bool IsValid() const { return index_ >= 0; }
  static RpoNumber FromInt(int index) { return RpoNumber(index); }
  static RpoNumber Invalid() { return RpoNumber(kInvalidRpoNumber); }

  bool IsNext(const RpoNumber other) const {
    DCHECK(IsValid());
    return other.index_ == this->index_ + 1;
  }

  // Comparison operators.
  bool operator==(RpoNumber other) const { return index_ == other.index_; }
  bool operator!=(RpoNumber other) const { return index_ != other.index_; }
  bool operator>(RpoNumber other) const { return index_ > other.index_; }
  bool operator<(RpoNumber other) const { return index_ < other.index_; }
  bool operator<=(RpoNumber other) const { return index_ <= other.index_; }
  bool operator>=(RpoNumber other) const { return index_ >= other.index_; }

 private:
  explicit RpoNumber(int32_t index) : index_(index) {}
  int32_t index_;
};


std::ostream& operator<<(std::ostream&, const RpoNumber&);

class V8_EXPORT_PRIVATE Constant final {
 public:
  enum Type {
    kInt32,
    kInt64,
    kFloat32,
    kFloat64,
    kExternalReference,
    kHeapObject,
    kRpoNumber
  };

  explicit Constant(int32_t v);
  explicit Constant(int64_t v) : type_(kInt64), value_(v) {}
  explicit Constant(float v) : type_(kFloat32), value_(bit_cast<int32_t>(v)) {}
  explicit Constant(double v) : type_(kFloat64), value_(bit_cast<int64_t>(v)) {}
  explicit Constant(ExternalReference ref)
      : type_(kExternalReference), value_(bit_cast<intptr_t>(ref)) {}
  explicit Constant(Handle<HeapObject> obj)
      : type_(kHeapObject), value_(bit_cast<intptr_t>(obj)) {}
  explicit Constant(RpoNumber rpo) : type_(kRpoNumber), value_(rpo.ToInt()) {}
  explicit Constant(RelocatablePtrConstantInfo info);

  Type type() const { return type_; }

  RelocInfo::Mode rmode() const { return rmode_; }

  int32_t ToInt32() const {
    DCHECK(type() == kInt32 || type() == kInt64);
    const int32_t value = static_cast<int32_t>(value_);
    DCHECK_EQ(value_, static_cast<int64_t>(value));
    return value;
  }

  int64_t ToInt64() const {
    if (type() == kInt32) return ToInt32();
    DCHECK_EQ(kInt64, type());
    return value_;
  }

  float ToFloat32() const {
    // TODO(ahaas): We should remove this function. If value_ has the bit
    // representation of a signalling NaN, then returning it as float can
    // cause the signalling bit to flip, and value_ is returned as a quiet
    // NaN.
    DCHECK_EQ(kFloat32, type());
    return bit_cast<float>(static_cast<int32_t>(value_));
  }

  uint32_t ToFloat32AsInt() const {
    DCHECK_EQ(kFloat32, type());
    return bit_cast<uint32_t>(static_cast<int32_t>(value_));
  }

  double ToFloat64() const {
    // TODO(ahaas): We should remove this function. If value_ has the bit
    // representation of a signalling NaN, then returning it as double can
    // cause the signalling bit to flip, and value_ is returned as a quiet
    // NaN.
    if (type() == kInt32) return ToInt32();
    DCHECK_EQ(kFloat64, type());
    return bit_cast<double>(value_);
  }

  uint64_t ToFloat64AsInt() const {
    if (type() == kInt32) return ToInt32();
    DCHECK_EQ(kFloat64, type());
    return bit_cast<uint64_t>(value_);
  }

  ExternalReference ToExternalReference() const {
    DCHECK_EQ(kExternalReference, type());
    return bit_cast<ExternalReference>(static_cast<intptr_t>(value_));
  }

  RpoNumber ToRpoNumber() const {
    DCHECK_EQ(kRpoNumber, type());
    return RpoNumber::FromInt(static_cast<int>(value_));
  }

  Handle<HeapObject> ToHeapObject() const;

 private:
  Type type_;
  int64_t value_;
#if V8_TARGET_ARCH_32_BIT
  RelocInfo::Mode rmode_ = RelocInfo::NONE32;
#else
  RelocInfo::Mode rmode_ = RelocInfo::NONE64;
#endif
};
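
// Illustrative example (not part of the original header): a Constant is a
// tagged union over the value kinds above; the accessors check the stored
// type. The values are arbitrary.
//
//   Constant c1(int32_t{7});
//   DCHECK_EQ(Constant::kInt32, c1.type());
//   DCHECK_EQ(7, c1.ToInt32());
//   DCHECK_EQ(7, c1.ToInt64());  // kInt32 widens transparently.
//
//   Constant c2(1.5);            // double overload -> kFloat64
//   DCHECK_EQ(Constant::kFloat64, c2.type());
//   DCHECK_EQ(1.5, c2.ToFloat64());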


std::ostream& operator<<(std::ostream& os, const Constant& constant);


// Forward declarations.
class FrameStateDescriptor;

enum class StateValueKind : uint8_t {
  kArguments,
  kPlain,
  kOptimizedOut,
  kNested,
  kDuplicate
};

class StateValueDescriptor {
 public:
  StateValueDescriptor()
      : kind_(StateValueKind::kPlain),
        type_(MachineType::AnyTagged()),
        id_(0) {}

  static StateValueDescriptor Arguments() {
    return StateValueDescriptor(StateValueKind::kArguments,
                                MachineType::AnyTagged(), 0);
  }
  static StateValueDescriptor Plain(MachineType type) {
    return StateValueDescriptor(StateValueKind::kPlain, type, 0);
  }
  static StateValueDescriptor OptimizedOut() {
    return StateValueDescriptor(StateValueKind::kOptimizedOut,
                                MachineType::AnyTagged(), 0);
  }
  static StateValueDescriptor Recursive(size_t id) {
    return StateValueDescriptor(StateValueKind::kNested,
                                MachineType::AnyTagged(), id);
  }
  static StateValueDescriptor Duplicate(size_t id) {
    return StateValueDescriptor(StateValueKind::kDuplicate,
                                MachineType::AnyTagged(), id);
  }

  bool IsArguments() const { return kind_ == StateValueKind::kArguments; }
  bool IsPlain() const { return kind_ == StateValueKind::kPlain; }
  bool IsOptimizedOut() const { return kind_ == StateValueKind::kOptimizedOut; }
  bool IsNested() const { return kind_ == StateValueKind::kNested; }
  bool IsDuplicate() const { return kind_ == StateValueKind::kDuplicate; }
  MachineType type() const { return type_; }
  size_t id() const { return id_; }

 private:
  StateValueDescriptor(StateValueKind kind, MachineType type, size_t id)
      : kind_(kind), type_(type), id_(id) {}

  StateValueKind kind_;
  MachineType type_;
  size_t id_;
};

class StateValueList {
 public:
  explicit StateValueList(Zone* zone) : fields_(zone), nested_(zone) {}

  size_t size() { return fields_.size(); }

  struct Value {
    StateValueDescriptor* desc;
    StateValueList* nested;

    Value(StateValueDescriptor* desc, StateValueList* nested)
        : desc(desc), nested(nested) {}
  };

  class iterator {
   public:
    // Bare minimum of operators needed for range iteration.
    bool operator!=(const iterator& other) const {
      return field_iterator != other.field_iterator;
    }
    bool operator==(const iterator& other) const {
      return field_iterator == other.field_iterator;
    }
    iterator& operator++() {
      if (field_iterator->IsNested()) {
        nested_iterator++;
      }
      ++field_iterator;
      return *this;
    }
    Value operator*() {
      StateValueDescriptor* desc = &(*field_iterator);
      StateValueList* nested = desc->IsNested() ? *nested_iterator : nullptr;
      return Value(desc, nested);
    }

   private:
    friend class StateValueList;

    iterator(ZoneVector<StateValueDescriptor>::iterator it,
             ZoneVector<StateValueList*>::iterator nested)
        : field_iterator(it), nested_iterator(nested) {}

    ZoneVector<StateValueDescriptor>::iterator field_iterator;
    ZoneVector<StateValueList*>::iterator nested_iterator;
  };

  void ReserveSize(size_t size) { fields_.reserve(size); }

  StateValueList* PushRecursiveField(Zone* zone, size_t id) {
    fields_.push_back(StateValueDescriptor::Recursive(id));
    StateValueList* nested =
        new (zone->New(sizeof(StateValueList))) StateValueList(zone);
    nested_.push_back(nested);
    return nested;
  }
  void PushArguments() { fields_.push_back(StateValueDescriptor::Arguments()); }
  void PushDuplicate(size_t id) {
    fields_.push_back(StateValueDescriptor::Duplicate(id));
  }
  void PushPlain(MachineType type) {
    fields_.push_back(StateValueDescriptor::Plain(type));
  }
  void PushOptimizedOut() {
    fields_.push_back(StateValueDescriptor::OptimizedOut());
  }

  iterator begin() { return iterator(fields_.begin(), nested_.begin()); }
  iterator end() { return iterator(fields_.end(), nested_.end()); }

 private:
  ZoneVector<StateValueDescriptor> fields_;
  ZoneVector<StateValueList*> nested_;
};
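
// Illustrative example (not part of the original header): describing the
// values of a frame state, including one nested (inlined) frame. `zone` is a
// hypothetical Zone; the machine types and the nesting id are example values.
//
//   StateValueList values(zone);
//   values.ReserveSize(3);
//   values.PushPlain(MachineType::AnyTagged());  // e.g. the receiver
//   values.PushOptimizedOut();                   // a value that was elided
//   StateValueList* inlined = values.PushRecursiveField(zone, 0);
//   inlined->PushPlain(MachineType::Int32());
//   for (StateValueList::Value entry : values) {
//     if (entry.desc->IsNested()) { /* recurse into entry.nested */ }
//   }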

class FrameStateDescriptor : public ZoneObject {
 public:
  FrameStateDescriptor(Zone* zone, FrameStateType type, BailoutId bailout_id,
                       OutputFrameStateCombine state_combine,
                       size_t parameters_count, size_t locals_count,
                       size_t stack_count,
                       MaybeHandle<SharedFunctionInfo> shared_info,
                       FrameStateDescriptor* outer_state = nullptr);

  FrameStateType type() const { return type_; }
  BailoutId bailout_id() const { return bailout_id_; }
  OutputFrameStateCombine state_combine() const { return frame_state_combine_; }
  size_t parameters_count() const { return parameters_count_; }
  size_t locals_count() const { return locals_count_; }
  size_t stack_count() const { return stack_count_; }
  MaybeHandle<SharedFunctionInfo> shared_info() const { return shared_info_; }
  FrameStateDescriptor* outer_state() const { return outer_state_; }
  bool HasContext() const {
    return FrameStateFunctionInfo::IsJSFunctionType(type_);
  }

  size_t GetSize(OutputFrameStateCombine combine =
                     OutputFrameStateCombine::Ignore()) const;
  size_t GetTotalSize() const;
  size_t GetFrameCount() const;
  size_t GetJSFrameCount() const;

  StateValueList* GetStateValueDescriptors() { return &values_; }

  static const int kImpossibleValue = 0xdead;

 private:
  FrameStateType type_;
  BailoutId bailout_id_;
  OutputFrameStateCombine frame_state_combine_;
  size_t parameters_count_;
  size_t locals_count_;
  size_t stack_count_;
  StateValueList values_;
  MaybeHandle<SharedFunctionInfo> const shared_info_;
  FrameStateDescriptor* outer_state_;
};

// A deoptimization entry captures the frame state descriptor that we have to
// go back to, together with the kind of deoptimization and the reason for it.
class DeoptimizationEntry final {
 public:
  DeoptimizationEntry() {}
  DeoptimizationEntry(FrameStateDescriptor* descriptor, DeoptimizeKind kind,
                      DeoptimizeReason reason)
      : descriptor_(descriptor), kind_(kind), reason_(reason) {}

  FrameStateDescriptor* descriptor() const { return descriptor_; }
  DeoptimizeKind kind() const { return kind_; }
  DeoptimizeReason reason() const { return reason_; }

 private:
  FrameStateDescriptor* descriptor_ = nullptr;
  DeoptimizeKind kind_ = DeoptimizeKind::kEager;
  DeoptimizeReason reason_ = DeoptimizeReason::kNoReason;
};

typedef ZoneVector<DeoptimizationEntry> DeoptimizationVector;

class V8_EXPORT_PRIVATE PhiInstruction final
    : public NON_EXPORTED_BASE(ZoneObject) {
 public:
  typedef ZoneVector<InstructionOperand> Inputs;

  PhiInstruction(Zone* zone, int virtual_register, size_t input_count);

  void SetInput(size_t offset, int virtual_register);
  void RenameInput(size_t offset, int virtual_register);

  int virtual_register() const { return virtual_register_; }
  const IntVector& operands() const { return operands_; }

  // TODO(dcarney): this has no real business being here, since it's internal
  // to the register allocator, but putting it here was convenient.
  const InstructionOperand& output() const { return output_; }
  InstructionOperand& output() { return output_; }

 private:
  const int virtual_register_;
  InstructionOperand output_;
  IntVector operands_;
};


// Analogue of BasicBlock for Instructions instead of Nodes.
class V8_EXPORT_PRIVATE InstructionBlock final
    : public NON_EXPORTED_BASE(ZoneObject) {
 public:
  InstructionBlock(Zone* zone, RpoNumber rpo_number, RpoNumber loop_header,
                   RpoNumber loop_end, bool deferred, bool handler);

  // Instruction indexes (used by the register allocator).
  int first_instruction_index() const {
    DCHECK(code_start_ >= 0);
    DCHECK(code_end_ > 0);
    DCHECK(code_end_ >= code_start_);
    return code_start_;
  }
  int last_instruction_index() const {
    DCHECK(code_start_ >= 0);
    DCHECK(code_end_ > 0);
    DCHECK(code_end_ >= code_start_);
    return code_end_ - 1;
  }

  int32_t code_start() const { return code_start_; }
  void set_code_start(int32_t start) { code_start_ = start; }

  int32_t code_end() const { return code_end_; }
  void set_code_end(int32_t end) { code_end_ = end; }

  bool IsDeferred() const { return deferred_; }
  bool IsHandler() const { return handler_; }

  RpoNumber ao_number() const { return ao_number_; }
  RpoNumber rpo_number() const { return rpo_number_; }
  RpoNumber loop_header() const { return loop_header_; }
  RpoNumber loop_end() const {
    DCHECK(IsLoopHeader());
    return loop_end_;
  }
  inline bool IsLoopHeader() const { return loop_end_.IsValid(); }

  typedef ZoneVector<RpoNumber> Predecessors;
  Predecessors& predecessors() { return predecessors_; }
  const Predecessors& predecessors() const { return predecessors_; }
  size_t PredecessorCount() const { return predecessors_.size(); }
  size_t PredecessorIndexOf(RpoNumber rpo_number) const;

  typedef ZoneVector<RpoNumber> Successors;
  Successors& successors() { return successors_; }
  const Successors& successors() const { return successors_; }
  size_t SuccessorCount() const { return successors_.size(); }

  typedef ZoneVector<PhiInstruction*> PhiInstructions;
  const PhiInstructions& phis() const { return phis_; }
  PhiInstruction* PhiAt(size_t i) const { return phis_[i]; }
  void AddPhi(PhiInstruction* phi) { phis_.push_back(phi); }

  void set_ao_number(RpoNumber ao_number) { ao_number_ = ao_number; }

  bool needs_frame() const { return needs_frame_; }
  void mark_needs_frame() { needs_frame_ = true; }

  bool must_construct_frame() const { return must_construct_frame_; }
  void mark_must_construct_frame() { must_construct_frame_ = true; }

  bool must_deconstruct_frame() const { return must_deconstruct_frame_; }
  void mark_must_deconstruct_frame() { must_deconstruct_frame_ = true; }

 private:
  Successors successors_;
  Predecessors predecessors_;
  PhiInstructions phis_;
  RpoNumber ao_number_;  // Assembly order number.
  const RpoNumber rpo_number_;
  const RpoNumber loop_header_;
  const RpoNumber loop_end_;
  int32_t code_start_;   // Start index of arch-specific code.
  int32_t code_end_;     // End index of arch-specific code.
  const bool deferred_;  // Block contains deferred code.
  const bool handler_;   // Block is a handler entry point.
  bool needs_frame_;
  bool must_construct_frame_;
  bool must_deconstruct_frame_;
};

class InstructionSequence;

struct PrintableInstructionBlock {
  const RegisterConfiguration* register_configuration_;
  const InstructionBlock* block_;
  const InstructionSequence* code_;
};

std::ostream& operator<<(std::ostream& os,
                         const PrintableInstructionBlock& printable_block);

typedef ZoneDeque<Constant> ConstantDeque;
typedef std::map<int, Constant, std::less<int>,
                 zone_allocator<std::pair<const int, Constant> > > ConstantMap;

typedef ZoneDeque<Instruction*> InstructionDeque;
typedef ZoneDeque<ReferenceMap*> ReferenceMapDeque;
typedef ZoneVector<InstructionBlock*> InstructionBlocks;


// Forward declarations.
struct PrintableInstructionSequence;


// Represents architecture-specific generated code before, during, and after
// register allocation.
class V8_EXPORT_PRIVATE InstructionSequence final
    : public NON_EXPORTED_BASE(ZoneObject) {
 public:
  static InstructionBlocks* InstructionBlocksFor(Zone* zone,
                                                 const Schedule* schedule);
  // Puts the deferred blocks last.
  static void ComputeAssemblyOrder(InstructionBlocks* blocks);

  InstructionSequence(Isolate* isolate, Zone* zone,
                      InstructionBlocks* instruction_blocks);

  int NextVirtualRegister();
  int VirtualRegisterCount() const { return next_virtual_register_; }

  const InstructionBlocks& instruction_blocks() const {
    return *instruction_blocks_;
  }

  int InstructionBlockCount() const {
    return static_cast<int>(instruction_blocks_->size());
  }

  InstructionBlock* InstructionBlockAt(RpoNumber rpo_number) {
    return instruction_blocks_->at(rpo_number.ToSize());
  }

  int LastLoopInstructionIndex(const InstructionBlock* block) {
    return instruction_blocks_->at(block->loop_end().ToSize() - 1)
        ->last_instruction_index();
  }

  const InstructionBlock* InstructionBlockAt(RpoNumber rpo_number) const {
    return instruction_blocks_->at(rpo_number.ToSize());
  }

  InstructionBlock* GetInstructionBlock(int instruction_index) const;

  static MachineRepresentation DefaultRepresentation() {
    return MachineType::PointerRepresentation();
  }
  MachineRepresentation GetRepresentation(int virtual_register) const;
  void MarkAsRepresentation(MachineRepresentation rep, int virtual_register);
  int representation_mask() const { return representation_mask_; }

  bool IsReference(int virtual_register) const {
    return CanBeTaggedPointer(GetRepresentation(virtual_register));
  }
  bool IsFP(int virtual_register) const {
    return IsFloatingPoint(GetRepresentation(virtual_register));
  }

  Instruction* GetBlockStart(RpoNumber rpo) const;

  typedef InstructionDeque::const_iterator const_iterator;
  const_iterator begin() const { return instructions_.begin(); }
  const_iterator end() const { return instructions_.end(); }
  const InstructionDeque& instructions() const { return instructions_; }
  int LastInstructionIndex() const {
    return static_cast<int>(instructions().size()) - 1;
  }

  Instruction* InstructionAt(int index) const {
    DCHECK(index >= 0);
    DCHECK(index < static_cast<int>(instructions_.size()));
    return instructions_[index];
  }

  Isolate* isolate() const { return isolate_; }
  const ReferenceMapDeque* reference_maps() const { return &reference_maps_; }
  Zone* zone() const { return zone_; }

  // Used by the instruction selector while adding instructions.
  int AddInstruction(Instruction* instr);
  void StartBlock(RpoNumber rpo);
  void EndBlock(RpoNumber rpo);

  int AddConstant(int virtual_register, Constant constant) {
    // TODO(titzer): allow RPO numbers as constants?
    DCHECK(constant.type() != Constant::kRpoNumber);
    DCHECK(virtual_register >= 0 && virtual_register < next_virtual_register_);
    DCHECK(constants_.find(virtual_register) == constants_.end());
    constants_.insert(std::make_pair(virtual_register, constant));
    return virtual_register;
  }
  Constant GetConstant(int virtual_register) const {
    ConstantMap::const_iterator it = constants_.find(virtual_register);
    DCHECK(it != constants_.end());
    DCHECK_EQ(virtual_register, it->first);
    return it->second;
  }

  typedef ZoneVector<Constant> Immediates;
  Immediates& immediates() { return immediates_; }

  ImmediateOperand AddImmediate(const Constant& constant) {
    if (constant.type() == Constant::kInt32 &&
        RelocInfo::IsNone(constant.rmode())) {
      return ImmediateOperand(ImmediateOperand::INLINE, constant.ToInt32());
    }
    int index = static_cast<int>(immediates_.size());
    immediates_.push_back(constant);
    return ImmediateOperand(ImmediateOperand::INDEXED, index);
  }

  Constant GetImmediate(const ImmediateOperand* op) const {
    switch (op->type()) {
      case ImmediateOperand::INLINE:
        return Constant(op->inline_value());
      case ImmediateOperand::INDEXED: {
        int index = op->indexed_value();
        DCHECK(index >= 0);
        DCHECK(index < static_cast<int>(immediates_.size()));
        return immediates_[index];
      }
    }
    UNREACHABLE();
    return Constant(static_cast<int32_t>(0));
  }

  int AddDeoptimizationEntry(FrameStateDescriptor* descriptor,
                             DeoptimizeKind kind, DeoptimizeReason reason);
  DeoptimizationEntry const& GetDeoptimizationEntry(int deoptimization_id);
  int GetDeoptimizationEntryCount() const {
    return static_cast<int>(deoptimization_entries_.size());
  }

  RpoNumber InputRpo(Instruction* instr, size_t index);

  bool GetSourcePosition(const Instruction* instr,
                         SourcePosition* result) const;
  void SetSourcePosition(const Instruction* instr, SourcePosition value);

  bool ContainsCall() const {
    for (Instruction* instr : instructions_) {
      if (instr->IsCall()) return true;
    }
    return false;
  }

  // APIs to aid debugging. For general-stream APIs, use operator<<.
  void Print(const RegisterConfiguration* config) const;
  void Print() const;

  void PrintBlock(const RegisterConfiguration* config, int block_id) const;
  void PrintBlock(int block_id) const;

  void ValidateEdgeSplitForm() const;
  void ValidateDeferredBlockExitPaths() const;
  void ValidateDeferredBlockEntryPaths() const;
  void ValidateSSA() const;

  static void SetRegisterConfigurationForTesting(
      const RegisterConfiguration* regConfig);
  static void ClearRegisterConfigurationForTesting();

 private:
  friend V8_EXPORT_PRIVATE std::ostream& operator<<(
      std::ostream& os, const PrintableInstructionSequence& code);

  typedef ZoneMap<const Instruction*, SourcePosition> SourcePositionMap;

  static const RegisterConfiguration* RegisterConfigurationForTesting();
  static const RegisterConfiguration* registerConfigurationForTesting_;

  Isolate* isolate_;
  Zone* const zone_;
  InstructionBlocks* const instruction_blocks_;
  SourcePositionMap source_positions_;
  ConstantMap constants_;
  Immediates immediates_;
  InstructionDeque instructions_;
  int next_virtual_register_;
  ReferenceMapDeque reference_maps_;
  ZoneVector<MachineRepresentation> representations_;
  int representation_mask_;
  DeoptimizationVector deoptimization_entries_;

  // Used at construction time.
  InstructionBlock* current_block_;

  DISALLOW_COPY_AND_ASSIGN(InstructionSequence);
};
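
// Illustrative example (not part of the original header): walking a finished
// InstructionSequence block by block. `sequence` is a hypothetical
// InstructionSequence* produced elsewhere (e.g. by the instruction selector).
//
//   for (const InstructionBlock* block : sequence->instruction_blocks()) {
//     for (int i = block->first_instruction_index();
//          i <= block->last_instruction_index(); ++i) {
//       Instruction* instr = sequence->InstructionAt(i);
//       if (instr->IsCall()) { /* calls need reference maps */ }
//     }
//   }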


struct PrintableInstructionSequence {
  const RegisterConfiguration* register_configuration_;
  const InstructionSequence* sequence_;
};

V8_EXPORT_PRIVATE std::ostream& operator<<(
    std::ostream& os, const PrintableInstructionSequence& code);

}  // namespace compiler
}  // namespace internal
}  // namespace v8

#endif  // V8_COMPILER_INSTRUCTION_H_