// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/crankshaft/hydrogen-instructions.h"

#include "src/base/bits.h"
#include "src/base/ieee754.h"
#include "src/base/safe_math.h"
#include "src/codegen.h"
#include "src/crankshaft/hydrogen-infer-representation.h"
#include "src/double.h"
#include "src/elements.h"
#include "src/factory.h"
#include "src/objects-inl.h"

#if V8_TARGET_ARCH_IA32
#include "src/crankshaft/ia32/lithium-ia32.h"  // NOLINT
#elif V8_TARGET_ARCH_X64
#include "src/crankshaft/x64/lithium-x64.h"  // NOLINT
#elif V8_TARGET_ARCH_ARM64
#include "src/crankshaft/arm64/lithium-arm64.h"  // NOLINT
#elif V8_TARGET_ARCH_ARM
#include "src/crankshaft/arm/lithium-arm.h"  // NOLINT
#elif V8_TARGET_ARCH_PPC
#include "src/crankshaft/ppc/lithium-ppc.h"  // NOLINT
#elif V8_TARGET_ARCH_MIPS
#include "src/crankshaft/mips/lithium-mips.h"  // NOLINT
#elif V8_TARGET_ARCH_MIPS64
#include "src/crankshaft/mips64/lithium-mips64.h"  // NOLINT
#elif V8_TARGET_ARCH_S390
#include "src/crankshaft/s390/lithium-s390.h"  // NOLINT
#elif V8_TARGET_ARCH_X87
#include "src/crankshaft/x87/lithium-x87.h"  // NOLINT
#else
#error Unsupported target architecture.
#endif

namespace v8 {
namespace internal {

#define DEFINE_COMPILE(type)                                         \
  LInstruction* H##type::CompileToLithium(LChunkBuilder* builder) {  \
    return builder->Do##type(this);                                  \
  }
HYDROGEN_CONCRETE_INSTRUCTION_LIST(DEFINE_COMPILE)
#undef DEFINE_COMPILE

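// Maps a MachineType onto the Crankshaft representation lattice. Anything
// that is not an int32, a tagged signed value (Smi) or a raw pointer is
// conservatively treated as Tagged.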
Representation RepresentationFromMachineType(MachineType type) {
  if (type == MachineType::Int32()) {
    return Representation::Integer32();
  }

  if (type == MachineType::TaggedSigned()) {
    return Representation::Smi();
  }

  if (type == MachineType::Pointer()) {
    return Representation::External();
  }

  return Representation::Tagged();
}

Isolate* HValue::isolate() const {
  DCHECK(block() != NULL);
  return block()->isolate();
}


void HValue::AssumeRepresentation(Representation r) {
  if (CheckFlag(kFlexibleRepresentation)) {
    ChangeRepresentation(r);
    // The representation of the value is dictated by type feedback and
    // will not be changed later.
    ClearFlag(kFlexibleRepresentation);
  }
}


void HValue::InferRepresentation(HInferRepresentationPhase* h_infer) {
  DCHECK(CheckFlag(kFlexibleRepresentation));
  Representation new_rep = RepresentationFromInputs();
  UpdateRepresentation(new_rep, h_infer, "inputs");
  new_rep = RepresentationFromUses();
  UpdateRepresentation(new_rep, h_infer, "uses");
  if (representation().IsSmi() && HasNonSmiUse()) {
    UpdateRepresentation(
        Representation::Integer32(), h_infer, "use requirements");
  }
}


Representation HValue::RepresentationFromUses() {
  if (HasNoUses()) return Representation::None();
  Representation result = Representation::None();

  for (HUseIterator it(uses()); !it.Done(); it.Advance()) {
    HValue* use = it.value();
    Representation rep = use->observed_input_representation(it.index());
    result = result.generalize(rep);

    if (FLAG_trace_representation) {
      PrintF("#%d %s is used by #%d %s as %s%s\n",
             id(), Mnemonic(), use->id(), use->Mnemonic(), rep.Mnemonic(),
             (use->CheckFlag(kTruncatingToInt32) ? "-trunc" : ""));
    }
  }
  if (IsPhi()) {
    result = result.generalize(
        HPhi::cast(this)->representation_from_indirect_uses());
  }

  // External representations are dealt with separately.
  return result.IsExternal() ? Representation::None() : result;
}


void HValue::UpdateRepresentation(Representation new_rep,
                                  HInferRepresentationPhase* h_infer,
                                  const char* reason) {
  Representation r = representation();
  if (new_rep.is_more_general_than(r)) {
    if (CheckFlag(kCannotBeTagged) && new_rep.IsTagged()) return;
    if (FLAG_trace_representation) {
      PrintF("Changing #%d %s representation %s -> %s based on %s\n",
             id(), Mnemonic(), r.Mnemonic(), new_rep.Mnemonic(), reason);
    }
    ChangeRepresentation(new_rep);
    AddDependantsToWorklist(h_infer);
  }
}


void HValue::AddDependantsToWorklist(HInferRepresentationPhase* h_infer) {
  for (HUseIterator it(uses()); !it.Done(); it.Advance()) {
    h_infer->AddToWorklist(it.value());
  }
  for (int i = 0; i < OperandCount(); ++i) {
    h_infer->AddToWorklist(OperandAt(i));
  }
}

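// Clamps a 64-bit intermediate result to the value range of the given
// representation (Smi or Integer32) and records in *overflow whether the
// result had to be clamped.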
static int32_t ConvertAndSetOverflow(Representation r,
                                     int64_t result,
                                     bool* overflow) {
  if (r.IsSmi()) {
    if (result > Smi::kMaxValue) {
      *overflow = true;
      return Smi::kMaxValue;
    }
    if (result < Smi::kMinValue) {
      *overflow = true;
      return Smi::kMinValue;
    }
  } else {
    if (result > kMaxInt) {
      *overflow = true;
      return kMaxInt;
    }
    if (result < kMinInt) {
      *overflow = true;
      return kMinInt;
    }
  }
  return static_cast<int32_t>(result);
}


static int32_t AddWithoutOverflow(Representation r,
                                  int32_t a,
                                  int32_t b,
                                  bool* overflow) {
  int64_t result = static_cast<int64_t>(a) + static_cast<int64_t>(b);
  return ConvertAndSetOverflow(r, result, overflow);
}


static int32_t SubWithoutOverflow(Representation r,
                                  int32_t a,
                                  int32_t b,
                                  bool* overflow) {
  int64_t result = static_cast<int64_t>(a) - static_cast<int64_t>(b);
  return ConvertAndSetOverflow(r, result, overflow);
}


static int32_t MulWithoutOverflow(const Representation& r,
                                  int32_t a,
                                  int32_t b,
                                  bool* overflow) {
  int64_t result = static_cast<int64_t>(a) * static_cast<int64_t>(b);
  return ConvertAndSetOverflow(r, result, overflow);
}

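// Returns a bitmask that covers every value in the range: the exact value
// for a singleton range, the smallest mask of the form 2^n - 1 that is at
// least upper_ for non-negative ranges, and all 32 bits otherwise.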
int32_t Range::Mask() const {
  if (lower_ == upper_) return lower_;
  if (lower_ >= 0) {
    int32_t res = 1;
    while (res < upper_) {
      res = (res << 1) | 1;
    }
    return res;
  }
  return 0xffffffff;
}


void Range::AddConstant(int32_t value) {
  if (value == 0) return;
  bool may_overflow = false;  // Overflow is ignored here.
  Representation r = Representation::Integer32();
  lower_ = AddWithoutOverflow(r, lower_, value, &may_overflow);
  upper_ = AddWithoutOverflow(r, upper_, value, &may_overflow);
#ifdef DEBUG
  Verify();
#endif
}


void Range::Intersect(Range* other) {
  upper_ = Min(upper_, other->upper_);
  lower_ = Max(lower_, other->lower_);
  bool b = CanBeMinusZero() && other->CanBeMinusZero();
  set_can_be_minus_zero(b);
}


void Range::Union(Range* other) {
  upper_ = Max(upper_, other->upper_);
  lower_ = Min(lower_, other->lower_);
  bool b = CanBeMinusZero() || other->CanBeMinusZero();
  set_can_be_minus_zero(b);
}


void Range::CombinedMax(Range* other) {
  upper_ = Max(upper_, other->upper_);
  lower_ = Max(lower_, other->lower_);
  set_can_be_minus_zero(CanBeMinusZero() || other->CanBeMinusZero());
}


void Range::CombinedMin(Range* other) {
  upper_ = Min(upper_, other->upper_);
  lower_ = Min(lower_, other->lower_);
  set_can_be_minus_zero(CanBeMinusZero() || other->CanBeMinusZero());
}


void Range::Sar(int32_t value) {
  int32_t bits = value & 0x1F;
  lower_ = lower_ >> bits;
  upper_ = upper_ >> bits;
  set_can_be_minus_zero(false);
}

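// Shifting left can drop significant bits; detect this by shifting the
// result back and widen to the full int32 range if any information was lost.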
void Range::Shl(int32_t value) {
  int32_t bits = value & 0x1F;
  int old_lower = lower_;
  int old_upper = upper_;
  lower_ = lower_ << bits;
  upper_ = upper_ << bits;
  if (old_lower != lower_ >> bits || old_upper != upper_ >> bits) {
    upper_ = kMaxInt;
    lower_ = kMinInt;
  }
  set_can_be_minus_zero(false);
}


bool Range::AddAndCheckOverflow(const Representation& r, Range* other) {
  bool may_overflow = false;
  lower_ = AddWithoutOverflow(r, lower_, other->lower(), &may_overflow);
  upper_ = AddWithoutOverflow(r, upper_, other->upper(), &may_overflow);
  if (may_overflow) {
    Clear();
  } else {
    KeepOrder();
  }
#ifdef DEBUG
  Verify();
#endif
  return may_overflow;
}


bool Range::SubAndCheckOverflow(const Representation& r, Range* other) {
  bool may_overflow = false;
  lower_ = SubWithoutOverflow(r, lower_, other->upper(), &may_overflow);
  upper_ = SubWithoutOverflow(r, upper_, other->lower(), &may_overflow);
  if (may_overflow) {
    Clear();
  } else {
    KeepOrder();
  }
#ifdef DEBUG
  Verify();
#endif
  return may_overflow;
}

void Range::Clear() {
  lower_ = kMinInt;
  upper_ = kMaxInt;
}

void Range::KeepOrder() {
  if (lower_ > upper_) {
    int32_t tmp = lower_;
    lower_ = upper_;
    upper_ = tmp;
  }
}


#ifdef DEBUG
void Range::Verify() const {
  DCHECK(lower_ <= upper_);
}
#endif


bool Range::MulAndCheckOverflow(const Representation& r, Range* other) {
  bool may_overflow = false;
  int v1 = MulWithoutOverflow(r, lower_, other->lower(), &may_overflow);
  int v2 = MulWithoutOverflow(r, lower_, other->upper(), &may_overflow);
  int v3 = MulWithoutOverflow(r, upper_, other->lower(), &may_overflow);
  int v4 = MulWithoutOverflow(r, upper_, other->upper(), &may_overflow);
  if (may_overflow) {
    Clear();
  } else {
    lower_ = Min(Min(v1, v2), Min(v3, v4));
    upper_ = Max(Max(v1, v2), Max(v3, v4));
  }
#ifdef DEBUG
  Verify();
#endif
  return may_overflow;
}


bool HValue::IsDefinedAfter(HBasicBlock* other) const {
  return block()->block_id() > other->block_id();
}


HUseListNode* HUseListNode::tail() {
  // Skip and remove dead items in the use list.
  while (tail_ != NULL && tail_->value()->CheckFlag(HValue::kIsDead)) {
    tail_ = tail_->tail_;
  }
  return tail_;
}


bool HValue::CheckUsesForFlag(Flag f) const {
  for (HUseIterator it(uses()); !it.Done(); it.Advance()) {
    if (it.value()->IsSimulate()) continue;
    if (!it.value()->CheckFlag(f)) return false;
  }
  return true;
}


bool HValue::CheckUsesForFlag(Flag f, HValue** value) const {
  for (HUseIterator it(uses()); !it.Done(); it.Advance()) {
    if (it.value()->IsSimulate()) continue;
    if (!it.value()->CheckFlag(f)) {
      *value = it.value();
      return false;
    }
  }
  return true;
}

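// Returns true iff there is at least one non-Simulate use and every such
// use has the given flag set.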
bool HValue::HasAtLeastOneUseWithFlagAndNoneWithout(Flag f) const {
  bool return_value = false;
  for (HUseIterator it(uses()); !it.Done(); it.Advance()) {
    if (it.value()->IsSimulate()) continue;
    if (!it.value()->CheckFlag(f)) return false;
    return_value = true;
  }
  return return_value;
}


HUseIterator::HUseIterator(HUseListNode* head) : next_(head) {
  Advance();
}


void HUseIterator::Advance() {
  current_ = next_;
  if (current_ != NULL) {
    next_ = current_->tail();
    value_ = current_->value();
    index_ = current_->index();
  }
}


int HValue::UseCount() const {
  int count = 0;
  for (HUseIterator it(uses()); !it.Done(); it.Advance()) ++count;
  return count;
}


HUseListNode* HValue::RemoveUse(HValue* value, int index) {
  HUseListNode* previous = NULL;
  HUseListNode* current = use_list_;
  while (current != NULL) {
    if (current->value() == value && current->index() == index) {
      if (previous == NULL) {
        use_list_ = current->tail();
      } else {
        previous->set_tail(current->tail());
      }
      break;
    }

    previous = current;
    current = current->tail();
  }

#ifdef DEBUG
  // Do not reuse use list nodes in debug mode, zap them.
  if (current != NULL) {
    HUseListNode* temp =
        new(block()->zone())
        HUseListNode(current->value(), current->index(), NULL);
    current->Zap();
    current = temp;
  }
#endif
  return current;
}


bool HValue::Equals(HValue* other) {
  if (other->opcode() != opcode()) return false;
  if (!other->representation().Equals(representation())) return false;
  if (!other->type_.Equals(type_)) return false;
  if (other->flags() != flags()) return false;
  if (OperandCount() != other->OperandCount()) return false;
  for (int i = 0; i < OperandCount(); ++i) {
    if (OperandAt(i)->id() != other->OperandAt(i)->id()) return false;
  }
  bool result = DataEquals(other);
  DCHECK(!result || Hashcode() == other->Hashcode());
  return result;
}

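// Hash code for GVN: combines the opcode with the operand ids. Values that
// compare Equals() must produce the same hash code (see the DCHECK above).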
intptr_t HValue::Hashcode() {
  intptr_t result = opcode();
  int count = OperandCount();
  for (int i = 0; i < count; ++i) {
    result = result * 19 + OperandAt(i)->id() + (result >> 7);
  }
  return result;
}


const char* HValue::Mnemonic() const {
  switch (opcode()) {
#define MAKE_CASE(type) case k##type: return #type;
    HYDROGEN_CONCRETE_INSTRUCTION_LIST(MAKE_CASE)
#undef MAKE_CASE
    case kPhi: return "Phi";
    default: return "";
  }
}


bool HValue::CanReplaceWithDummyUses() {
  return FLAG_unreachable_code_elimination &&
      !(block()->IsReachable() ||
        IsBlockEntry() ||
        IsControlInstruction() ||
        IsArgumentsObject() ||
        IsCapturedObject() ||
        IsSimulate() ||
        IsEnterInlined() ||
        IsLeaveInlined());
}


bool HValue::IsInteger32Constant() {
  return IsConstant() && HConstant::cast(this)->HasInteger32Value();
}


int32_t HValue::GetInteger32Constant() {
  return HConstant::cast(this)->Integer32Value();
}


bool HValue::EqualsInteger32Constant(int32_t value) {
  return IsInteger32Constant() && GetInteger32Constant() == value;
}


void HValue::SetOperandAt(int index, HValue* value) {
  RegisterUse(index, value);
  InternalSetOperandAt(index, value);
}


void HValue::DeleteAndReplaceWith(HValue* other) {
  // We replace all uses first, so Delete can assert that there are none.
  if (other != NULL) ReplaceAllUsesWith(other);
  Kill();
  DeleteFromGraph();
}

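// Rewrites every use of this value to use |other| instead, splicing each
// use-list node onto |other|'s use list as it goes.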
void HValue::ReplaceAllUsesWith(HValue* other) {
  while (use_list_ != NULL) {
    HUseListNode* list_node = use_list_;
    HValue* value = list_node->value();
    DCHECK(!value->block()->IsStartBlock());
    value->InternalSetOperandAt(list_node->index(), other);
    use_list_ = list_node->tail();
    list_node->set_tail(other->use_list_);
    other->use_list_ = list_node;
  }
}


void HValue::Kill() {
  // Instead of going through the entire use list of each operand, we only
  // check the first item in each use list and rely on the tail() method to
  // skip dead items, removing them lazily next time we traverse the list.
  SetFlag(kIsDead);
  for (int i = 0; i < OperandCount(); ++i) {
    HValue* operand = OperandAt(i);
    if (operand == NULL) continue;
    HUseListNode* first = operand->use_list_;
    if (first != NULL && first->value()->CheckFlag(kIsDead)) {
      operand->use_list_ = first->tail();
    }
  }
}


void HValue::SetBlock(HBasicBlock* block) {
  DCHECK(block_ == NULL || block == NULL);
  block_ = block;
  if (id_ == kNoNumber && block != NULL) {
    id_ = block->graph()->GetNextValueID(this);
  }
}


std::ostream& operator<<(std::ostream& os, const HValue& v) {
  return v.PrintTo(os);
}


std::ostream& operator<<(std::ostream& os, const TypeOf& t) {
  if (t.value->representation().IsTagged() &&
      !t.value->type().Equals(HType::Tagged()))
    return os;
  return os << " type:" << t.value->type();
}


std::ostream& operator<<(std::ostream& os, const ChangesOf& c) {
  GVNFlagSet changes_flags = c.value->ChangesFlags();
  if (changes_flags.IsEmpty()) return os;
  os << " changes[";
  if (changes_flags == c.value->AllSideEffectsFlagSet()) {
    os << "*";
  } else {
    bool add_comma = false;
#define PRINT_DO(Type)                   \
  if (changes_flags.Contains(k##Type)) { \
    if (add_comma) os << ",";            \
    add_comma = true;                    \
    os << #Type;                         \
  }
    GVN_TRACKED_FLAG_LIST(PRINT_DO);
    GVN_UNTRACKED_FLAG_LIST(PRINT_DO);
#undef PRINT_DO
  }
  return os << "]";
}


bool HValue::HasMonomorphicJSObjectType() {
  return !GetMonomorphicJSObjectMap().is_null();
}


bool HValue::UpdateInferredType() {
  HType type = CalculateInferredType();
  bool result = (!type.Equals(type_));
  type_ = type;
  return result;
}


void HValue::RegisterUse(int index, HValue* new_value) {
  HValue* old_value = OperandAt(index);
  if (old_value == new_value) return;

  HUseListNode* removed = NULL;
  if (old_value != NULL) {
    removed = old_value->RemoveUse(this, index);
  }

  if (new_value != NULL) {
    if (removed == NULL) {
      new_value->use_list_ = new(new_value->block()->zone()) HUseListNode(
          this, index, new_value->use_list_);
    } else {
      removed->set_tail(new_value->use_list_);
      new_value->use_list_ = removed;
    }
  }
}


void HValue::AddNewRange(Range* r, Zone* zone) {
  if (!HasRange()) ComputeInitialRange(zone);
  if (!HasRange()) range_ = new(zone) Range();
  DCHECK(HasRange());
  r->StackUpon(range_);
  range_ = r;
}


void HValue::RemoveLastAddedRange() {
  DCHECK(HasRange());
  DCHECK(range_->next() != NULL);
  range_ = range_->next();
}


void HValue::ComputeInitialRange(Zone* zone) {
  DCHECK(!HasRange());
  range_ = InferRange(zone);
  DCHECK(HasRange());
}


std::ostream& HInstruction::PrintTo(std::ostream& os) const {  // NOLINT
  os << Mnemonic() << " ";
  PrintDataTo(os) << ChangesOf(this) << TypeOf(this);
  if (CheckFlag(HValue::kHasNoObservableSideEffects)) os << " [noOSE]";
  if (CheckFlag(HValue::kIsDead)) os << " [dead]";
  return os;
}


std::ostream& HInstruction::PrintDataTo(std::ostream& os) const {  // NOLINT
  for (int i = 0; i < OperandCount(); ++i) {
    if (i > 0) os << " ";
    os << NameOf(OperandAt(i));
  }
  return os;
}


void HInstruction::Unlink() {
  DCHECK(IsLinked());
  DCHECK(!IsControlInstruction());  // Must never move control instructions.
  DCHECK(!IsBlockEntry());  // Doesn't make sense to delete these.
  DCHECK(previous_ != NULL);
  previous_->next_ = next_;
  if (next_ == NULL) {
    DCHECK(block()->last() == this);
    block()->set_last(previous_);
  } else {
    next_->previous_ = previous_;
  }
  clear_block();
}


void HInstruction::InsertBefore(HInstruction* next) {
  DCHECK(!IsLinked());
  DCHECK(!next->IsBlockEntry());
  DCHECK(!IsControlInstruction());
  DCHECK(!next->block()->IsStartBlock());
  DCHECK(next->previous_ != NULL);
  HInstruction* prev = next->previous();
  prev->next_ = this;
  next->previous_ = this;
  next_ = next;
  previous_ = prev;
  SetBlock(next->block());
  if (!has_position() && next->has_position()) {
    set_position(next->position());
  }
}


void HInstruction::InsertAfter(HInstruction* previous) {
  DCHECK(!IsLinked());
  DCHECK(!previous->IsControlInstruction());
  DCHECK(!IsControlInstruction() || previous->next_ == NULL);
  HBasicBlock* block = previous->block();
  // Never insert anything except constants into the start block after
  // finishing it.
  if (block->IsStartBlock() && block->IsFinished() && !IsConstant()) {
    DCHECK(block->end()->SecondSuccessor() == NULL);
    InsertAfter(block->end()->FirstSuccessor()->first());
    return;
  }

  // If we're inserting after an instruction with side-effects that is
  // followed by a simulate instruction, we need to insert after the
  // simulate instruction instead.
  HInstruction* next = previous->next_;
  if (previous->HasObservableSideEffects() && next != NULL) {
    DCHECK(next->IsSimulate());
    previous = next;
    next = previous->next_;
  }

  previous_ = previous;
  next_ = next;
  SetBlock(block);
  previous->next_ = this;
  if (next != NULL) next->previous_ = this;
  if (block->last() == previous) {
    block->set_last(this);
  }
  if (!has_position() && previous->has_position()) {
    set_position(previous->position());
  }
}


bool HInstruction::Dominates(HInstruction* other) {
  if (block() != other->block()) {
    return block()->Dominates(other->block());
  }
  // Both instructions are in the same basic block. This instruction
  // should precede the other one in order to dominate it.
  for (HInstruction* instr = next(); instr != NULL; instr = instr->next()) {
    if (instr == other) {
      return true;
    }
  }
  return false;
}


#ifdef DEBUG
void HInstruction::Verify() {
  // Verify that input operands are defined before use.
  HBasicBlock* cur_block = block();
  for (int i = 0; i < OperandCount(); ++i) {
    HValue* other_operand = OperandAt(i);
    if (other_operand == NULL) continue;
    HBasicBlock* other_block = other_operand->block();
    if (cur_block == other_block) {
      if (!other_operand->IsPhi()) {
        HInstruction* cur = this->previous();
        while (cur != NULL) {
          if (cur == other_operand) break;
          cur = cur->previous();
        }
        // Must reach other operand in the same block!
        DCHECK(cur == other_operand);
      }
    } else {
      // If the following assert fires, you may have forgotten an
      // AddInstruction.
      DCHECK(other_block->Dominates(cur_block));
    }
  }

  // Verify that instructions that may have side-effects are followed
  // by a simulate instruction.
  if (HasObservableSideEffects() && !IsOsrEntry()) {
    DCHECK(next()->IsSimulate());
  }

  // Verify that instructions that can be eliminated by GVN have overridden
  // HValue::DataEquals. The default implementation is UNREACHABLE. We
  // don't actually care whether DataEquals returns true or false here.
  if (CheckFlag(kUseGVN)) DataEquals(this);

  // Verify that all uses are in the graph.
  for (HUseIterator use = uses(); !use.Done(); use.Advance()) {
    if (use.value()->IsInstruction()) {
      DCHECK(HInstruction::cast(use.value())->IsLinked());
    }
  }
}
#endif


bool HInstruction::CanDeoptimize() {
  switch (opcode()) {
    case HValue::kAbnormalExit:
    case HValue::kAccessArgumentsAt:
    case HValue::kAllocate:
    case HValue::kArgumentsElements:
    case HValue::kArgumentsLength:
    case HValue::kArgumentsObject:
    case HValue::kBlockEntry:
    case HValue::kCallNewArray:
    case HValue::kCapturedObject:
    case HValue::kClassOfTestAndBranch:
    case HValue::kCompareGeneric:
    case HValue::kCompareHoleAndBranch:
    case HValue::kCompareMap:
    case HValue::kCompareNumericAndBranch:
    case HValue::kCompareObjectEqAndBranch:
    case HValue::kConstant:
    case HValue::kContext:
    case HValue::kDebugBreak:
    case HValue::kDeclareGlobals:
    case HValue::kDummyUse:
    case HValue::kEnterInlined:
    case HValue::kEnvironmentMarker:
    case HValue::kForceRepresentation:
    case HValue::kGoto:
    case HValue::kHasInstanceTypeAndBranch:
    case HValue::kInnerAllocatedObject:
    case HValue::kIsSmiAndBranch:
    case HValue::kIsStringAndBranch:
    case HValue::kIsUndetectableAndBranch:
    case HValue::kLeaveInlined:
    case HValue::kLoadFieldByIndex:
    case HValue::kLoadNamedField:
    case HValue::kLoadRoot:
    case HValue::kMathMinMax:
    case HValue::kParameter:
    case HValue::kPhi:
    case HValue::kPushArguments:
    case HValue::kReturn:
    case HValue::kSeqStringGetChar:
    case HValue::kStoreCodeEntry:
    case HValue::kStoreKeyed:
    case HValue::kStoreNamedField:
    case HValue::kStringCharCodeAt:
    case HValue::kStringCharFromCode:
    case HValue::kThisFunction:
    case HValue::kTypeofIsAndBranch:
    case HValue::kUnknownOSRValue:
    case HValue::kUseConst:
      return false;

    case HValue::kAdd:
    case HValue::kApplyArguments:
    case HValue::kBitwise:
    case HValue::kBoundsCheck:
    case HValue::kBranch:
    case HValue::kCallRuntime:
    case HValue::kCallWithDescriptor:
    case HValue::kChange:
    case HValue::kCheckArrayBufferNotNeutered:
    case HValue::kCheckHeapObject:
    case HValue::kCheckInstanceType:
    case HValue::kCheckMapValue:
    case HValue::kCheckMaps:
    case HValue::kCheckSmi:
    case HValue::kCheckValue:
    case HValue::kClampToUint8:
    case HValue::kDeoptimize:
    case HValue::kDiv:
    case HValue::kForInCacheArray:
    case HValue::kForInPrepareMap:
    case HValue::kHasInPrototypeChainAndBranch:
    case HValue::kInvokeFunction:
    case HValue::kLoadContextSlot:
    case HValue::kLoadFunctionPrototype:
    case HValue::kLoadKeyed:
    case HValue::kMathFloorOfDiv:
    case HValue::kMaybeGrowElements:
    case HValue::kMod:
    case HValue::kMul:
    case HValue::kOsrEntry:
    case HValue::kPower:
    case HValue::kPrologue:
    case HValue::kRor:
    case HValue::kSar:
    case HValue::kSeqStringSetChar:
    case HValue::kShl:
    case HValue::kShr:
    case HValue::kSimulate:
    case HValue::kStackCheck:
    case HValue::kStoreContextSlot:
    case HValue::kStringAdd:
    case HValue::kStringCompareAndBranch:
    case HValue::kSub:
    case HValue::kTransitionElementsKind:
    case HValue::kTrapAllocationMemento:
    case HValue::kTypeof:
    case HValue::kUnaryMathOperation:
    case HValue::kWrapReceiver:
      return true;
  }
  UNREACHABLE();
  return true;
}


std::ostream& operator<<(std::ostream& os, const NameOf& v) {
  return os << v.value->representation().Mnemonic() << v.value->id();
}

std::ostream& HDummyUse::PrintDataTo(std::ostream& os) const {  // NOLINT
  return os << NameOf(value());
}


std::ostream& HEnvironmentMarker::PrintDataTo(
    std::ostream& os) const {  // NOLINT
  return os << (kind() == BIND ? "bind" : "lookup") << " var[" << index()
            << "]";
}


std::ostream& HUnaryCall::PrintDataTo(std::ostream& os) const {  // NOLINT
  return os << NameOf(value()) << " #" << argument_count();
}


std::ostream& HBinaryCall::PrintDataTo(std::ostream& os) const {  // NOLINT
  return os << NameOf(first()) << " " << NameOf(second()) << " #"
            << argument_count();
}

std::ostream& HInvokeFunction::PrintTo(std::ostream& os) const {  // NOLINT
  if (tail_call_mode() == TailCallMode::kAllow) os << "Tail";
  return HBinaryCall::PrintTo(os);
}

std::ostream& HInvokeFunction::PrintDataTo(std::ostream& os) const {  // NOLINT
  HBinaryCall::PrintDataTo(os);
  if (syntactic_tail_call_mode() == TailCallMode::kAllow) {
    os << ", JSTailCall";
  }
  return os;
}

std::ostream& HBoundsCheck::PrintDataTo(std::ostream& os) const {  // NOLINT
  os << NameOf(index()) << " " << NameOf(length());
  if (base() != NULL && (offset() != 0 || scale() != 0)) {
    os << " base: ((";
    if (base() != index()) {
      os << NameOf(index());
    } else {
      os << "index";
    }
    os << " + " << offset() << ") >> " << scale() << ")";
  }
  if (skip_check()) os << " [DISABLED]";
  return os;
}

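// Picks the narrowest representation that can hold both the index and the
// length (taking Smi type feedback into account), but never anything wider
// than Integer32.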
void HBoundsCheck::InferRepresentation(HInferRepresentationPhase* h_infer) {
  DCHECK(CheckFlag(kFlexibleRepresentation));
  HValue* actual_index = index()->ActualValue();
  HValue* actual_length = length()->ActualValue();
  Representation index_rep = actual_index->representation();
  Representation length_rep = actual_length->representation();
  if (index_rep.IsTagged() && actual_index->type().IsSmi()) {
    index_rep = Representation::Smi();
  }
  if (length_rep.IsTagged() && actual_length->type().IsSmi()) {
    length_rep = Representation::Smi();
  }
  Representation r = index_rep.generalize(length_rep);
  if (r.is_more_general_than(Representation::Integer32())) {
    r = Representation::Integer32();
  }
  UpdateRepresentation(r, h_infer, "boundscheck");
}


Range* HBoundsCheck::InferRange(Zone* zone) {
  Representation r = representation();
  if (r.IsSmiOrInteger32() && length()->HasRange()) {
    int upper = length()->range()->upper() - (allow_equality() ? 0 : 1);
    int lower = 0;

    Range* result = new(zone) Range(lower, upper);
    if (index()->HasRange()) {
      result->Intersect(index()->range());
    }

    // In case of Smi representation, clamp result to Smi::kMaxValue.
    if (r.IsSmi()) result->ClampToSmi();
    return result;
  }
  return HValue::InferRange(zone);
}


std::ostream& HCallWithDescriptor::PrintDataTo(
    std::ostream& os) const {  // NOLINT
  for (int i = 0; i < OperandCount(); i++) {
    os << NameOf(OperandAt(i)) << " ";
  }
  os << "#" << argument_count();
  if (syntactic_tail_call_mode() == TailCallMode::kAllow) {
    os << ", JSTailCall";
  }
  return os;
}


std::ostream& HCallNewArray::PrintDataTo(std::ostream& os) const {  // NOLINT
  os << ElementsKindToString(elements_kind()) << " ";
  return HBinaryCall::PrintDataTo(os);
}


std::ostream& HCallRuntime::PrintDataTo(std::ostream& os) const {  // NOLINT
  os << function()->name << " ";
  if (save_doubles() == kSaveFPRegs) os << "[save doubles] ";
  return os << "#" << argument_count();
}


std::ostream& HClassOfTestAndBranch::PrintDataTo(
    std::ostream& os) const {  // NOLINT
  return os << "class_of_test(" << NameOf(value()) << ", \""
            << class_name()->ToCString().get() << "\")";
}


std::ostream& HWrapReceiver::PrintDataTo(std::ostream& os) const {  // NOLINT
  return os << NameOf(receiver()) << " " << NameOf(function());
}


std::ostream& HAccessArgumentsAt::PrintDataTo(
    std::ostream& os) const {  // NOLINT
  return os << NameOf(arguments()) << "[" << NameOf(index()) << "], length "
            << NameOf(length());
}


std::ostream& HControlInstruction::PrintDataTo(
    std::ostream& os) const {  // NOLINT
  os << " goto (";
  bool first_block = true;
  for (HSuccessorIterator it(this); !it.Done(); it.Advance()) {
    if (!first_block) os << ", ";
    os << *it.Current();
    first_block = false;
  }
  return os << ")";
}


std::ostream& HUnaryControlInstruction::PrintDataTo(
    std::ostream& os) const {  // NOLINT
  os << NameOf(value());
  return HControlInstruction::PrintDataTo(os);
}


std::ostream& HReturn::PrintDataTo(std::ostream& os) const {  // NOLINT
  return os << NameOf(value()) << " (pop " << NameOf(parameter_count())
            << " values)";
}

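// Derives an input representation for the branch from its ToBoolean
// type-feedback hints, falling back to None when no hint applies.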
Representation HBranch::observed_input_representation(int index) {
  if (expected_input_types_ &
      (ToBooleanHint::kNull | ToBooleanHint::kReceiver |
       ToBooleanHint::kString | ToBooleanHint::kSymbol)) {
    return Representation::Tagged();
  }
  if (expected_input_types_ & ToBooleanHint::kUndefined) {
    if (expected_input_types_ & ToBooleanHint::kHeapNumber) {
      return Representation::Double();
    }
    return Representation::Tagged();
  }
  if (expected_input_types_ & ToBooleanHint::kHeapNumber) {
    return Representation::Double();
  }
  if (expected_input_types_ & ToBooleanHint::kSmallInteger) {
    return Representation::Smi();
  }
  return Representation::None();
}


bool HBranch::KnownSuccessorBlock(HBasicBlock** block) {
  HValue* value = this->value();
  if (value->EmitAtUses()) {
    DCHECK(value->IsConstant());
    DCHECK(!value->representation().IsDouble());
    *block = HConstant::cast(value)->BooleanValue()
        ? FirstSuccessor()
        : SecondSuccessor();
    return true;
  }
  *block = NULL;
  return false;
}


std::ostream& HBranch::PrintDataTo(std::ostream& os) const {  // NOLINT
  return HUnaryControlInstruction::PrintDataTo(os) << " "
                                                   << expected_input_types();
}


std::ostream& HCompareMap::PrintDataTo(std::ostream& os) const {  // NOLINT
  os << NameOf(value()) << " (" << *map().handle() << ")";
  HControlInstruction::PrintDataTo(os);
  if (known_successor_index() == 0) {
    os << " [true]";
  } else if (known_successor_index() == 1) {
    os << " [false]";
  }
  return os;
}


const char* HUnaryMathOperation::OpName() const {
  switch (op()) {
    case kMathFloor:
      return "floor";
    case kMathFround:
      return "fround";
    case kMathRound:
      return "round";
    case kMathAbs:
      return "abs";
    case kMathCos:
      return "cos";
    case kMathLog:
      return "log";
    case kMathExp:
      return "exp";
    case kMathSin:
      return "sin";
    case kMathSqrt:
      return "sqrt";
    case kMathPowHalf:
      return "pow-half";
    case kMathClz32:
      return "clz32";
    default:
      UNREACHABLE();
      return NULL;
  }
}


Range* HUnaryMathOperation::InferRange(Zone* zone) {
  Representation r = representation();
  if (op() == kMathClz32) return new(zone) Range(0, 32);
  if (r.IsSmiOrInteger32() && value()->HasRange()) {
    if (op() == kMathAbs) {
      int upper = value()->range()->upper();
      int lower = value()->range()->lower();
      bool spans_zero = value()->range()->CanBeZero();
      // Math.abs(kMinInt) overflows its representation, on which the
      // instruction deopts. Hence clamp it to kMaxInt.
      int abs_upper = upper == kMinInt ? kMaxInt : abs(upper);
      int abs_lower = lower == kMinInt ? kMaxInt : abs(lower);
      Range* result =
          new(zone) Range(spans_zero ? 0 : Min(abs_lower, abs_upper),
                          Max(abs_lower, abs_upper));
      // In case of Smi representation, clamp Math.abs(Smi::kMinValue) to
      // Smi::kMaxValue.
      if (r.IsSmi()) result->ClampToSmi();
      return result;
    }
  }
  return HValue::InferRange(zone);
}


std::ostream& HUnaryMathOperation::PrintDataTo(
    std::ostream& os) const {  // NOLINT
  return os << OpName() << " " << NameOf(value());
}


std::ostream& HUnaryOperation::PrintDataTo(std::ostream& os) const {  // NOLINT
  return os << NameOf(value());
}


std::ostream& HHasInstanceTypeAndBranch::PrintDataTo(
    std::ostream& os) const {  // NOLINT
  os << NameOf(value());
  switch (from_) {
    case FIRST_JS_RECEIVER_TYPE:
      if (to_ == LAST_TYPE) os << " spec_object";
      break;
    case JS_REGEXP_TYPE:
      if (to_ == JS_REGEXP_TYPE) os << " reg_exp";
      break;
    case JS_ARRAY_TYPE:
      if (to_ == JS_ARRAY_TYPE) os << " array";
      break;
    case JS_FUNCTION_TYPE:
      if (to_ == JS_FUNCTION_TYPE) os << " function";
      break;
    default:
      break;
  }
  return os;
}


std::ostream& HTypeofIsAndBranch::PrintDataTo(
    std::ostream& os) const {  // NOLINT
  os << NameOf(value()) << " == " << type_literal()->ToCString().get();
  return HControlInstruction::PrintDataTo(os);
}


namespace {

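// Computes the result of the typeof operator for a constant, mirroring the
// runtime semantics (note that typeof null is "object" and undetectable
// objects report "undefined").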
String* TypeOfString(HConstant* constant, Isolate* isolate) {
  Heap* heap = isolate->heap();
  if (constant->HasNumberValue()) return heap->number_string();
  if (constant->HasStringValue()) return heap->string_string();
  switch (constant->GetInstanceType()) {
    case ODDBALL_TYPE: {
      Unique<Object> unique = constant->GetUnique();
      if (unique.IsKnownGlobal(heap->true_value()) ||
          unique.IsKnownGlobal(heap->false_value())) {
        return heap->boolean_string();
      }
      if (unique.IsKnownGlobal(heap->null_value())) {
        return heap->object_string();
      }
      DCHECK(unique.IsKnownGlobal(heap->undefined_value()));
      return heap->undefined_string();
    }
    case SYMBOL_TYPE:
      return heap->symbol_string();
    default:
      if (constant->IsUndetectable()) return heap->undefined_string();
      if (constant->IsCallable()) return heap->function_string();
      return heap->object_string();
  }
}

}  // namespace


bool HTypeofIsAndBranch::KnownSuccessorBlock(HBasicBlock** block) {
  if (FLAG_fold_constants && value()->IsConstant()) {
    HConstant* constant = HConstant::cast(value());
    String* type_string = TypeOfString(constant, isolate());
    bool same_type = type_literal_.IsKnownGlobal(type_string);
    *block = same_type ? FirstSuccessor() : SecondSuccessor();
    return true;
  } else if (value()->representation().IsSpecialization()) {
    bool number_type =
        type_literal_.IsKnownGlobal(isolate()->heap()->number_string());
    *block = number_type ? FirstSuccessor() : SecondSuccessor();
    return true;
  }
  *block = NULL;
  return false;
}


std::ostream& HCheckMapValue::PrintDataTo(std::ostream& os) const {  // NOLINT
  return os << NameOf(value()) << " " << NameOf(map());
}


HValue* HCheckMapValue::Canonicalize() {
  if (map()->IsConstant()) {
    HConstant* c_map = HConstant::cast(map());
    return HCheckMaps::CreateAndInsertAfter(
        block()->graph()->zone(), value(), c_map->MapValue(),
        c_map->HasStableMapValue(), this);
  }
  return this;
}


std::ostream& HForInPrepareMap::PrintDataTo(std::ostream& os) const {  // NOLINT
  return os << NameOf(enumerable());
}


std::ostream& HForInCacheArray::PrintDataTo(std::ostream& os) const {  // NOLINT
  return os << NameOf(enumerable()) << " " << NameOf(map()) << "[" << idx_
            << "]";
}


std::ostream& HLoadFieldByIndex::PrintDataTo(
    std::ostream& os) const {  // NOLINT
  return os << NameOf(object()) << " " << NameOf(index());
}

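// Helpers for recognizing the ~~x (double bitwise negation) pattern that
// JavaScript code commonly uses as a cheap ToInt32 conversion.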
static bool MatchLeftIsOnes(HValue* l, HValue* r, HValue** negated) {
  if (!l->EqualsInteger32Constant(~0)) return false;
  *negated = r;
  return true;
}


static bool MatchNegationViaXor(HValue* instr, HValue** negated) {
  if (!instr->IsBitwise()) return false;
  HBitwise* b = HBitwise::cast(instr);
  return (b->op() == Token::BIT_XOR) &&
      (MatchLeftIsOnes(b->left(), b->right(), negated) ||
       MatchLeftIsOnes(b->right(), b->left(), negated));
}


static bool MatchDoubleNegation(HValue* instr, HValue** arg) {
  HValue* negated;
  return MatchNegationViaXor(instr, &negated) &&
      MatchNegationViaXor(negated, arg);
}


HValue* HBitwise::Canonicalize() {
  if (!representation().IsSmiOrInteger32()) return this;
  // If x is an int32, then x & -1 == x, x | 0 == x and x ^ 0 == x.
  int32_t nop_constant = (op() == Token::BIT_AND) ? -1 : 0;
  if (left()->EqualsInteger32Constant(nop_constant) &&
      !right()->CheckFlag(kUint32)) {
    return right();
  }
  if (right()->EqualsInteger32Constant(nop_constant) &&
      !left()->CheckFlag(kUint32)) {
    return left();
  }
  // Optimize double negation, a common pattern used for ToInt32(x).
  HValue* arg;
  if (MatchDoubleNegation(this, &arg) && !arg->CheckFlag(kUint32)) {
    return arg;
  }
  return this;
}


// static
HInstruction* HAdd::New(Isolate* isolate, Zone* zone, HValue* context,
                        HValue* left, HValue* right,
                        ExternalAddType external_add_type) {
  // For everything else, you should use the other factory method without
  // ExternalAddType.
  DCHECK_EQ(external_add_type, AddOfExternalAndTagged);
  return new (zone) HAdd(context, left, right, external_add_type);
}


Representation HAdd::RepresentationFromInputs() {
  Representation left_rep = left()->representation();
  if (left_rep.IsExternal()) {
    return Representation::External();
  }
  return HArithmeticBinaryOperation::RepresentationFromInputs();
}


Representation HAdd::RequiredInputRepresentation(int index) {
  if (index == 2) {
    Representation left_rep = left()->representation();
    if (left_rep.IsExternal()) {
      if (external_add_type_ == AddOfExternalAndTagged) {
        return Representation::Tagged();
      } else {
        return Representation::Integer32();
      }
    }
  }
  return HArithmeticBinaryOperation::RequiredInputRepresentation(index);
}


static bool IsIdentityOperation(HValue* arg1, HValue* arg2, int32_t identity) {
  return arg1->representation().IsSpecialization() &&
      arg2->EqualsInteger32Constant(identity);
}


HValue* HAdd::Canonicalize() {
  // Adding 0 is an identity operation except in case of -0: -0 + 0 = +0
  if (IsIdentityOperation(left(), right(), 0) &&
      !left()->representation().IsDouble()) {  // Left could be -0.
    return left();
  }
  if (IsIdentityOperation(right(), left(), 0) &&
      !left()->representation().IsDouble()) {  // Right could be -0.
    return right();
  }
  return this;
}


HValue* HSub::Canonicalize() {
  if (IsIdentityOperation(left(), right(), 0)) return left();
  return this;
}


HValue* HMul::Canonicalize() {
  if (IsIdentityOperation(left(), right(), 1)) return left();
  if (IsIdentityOperation(right(), left(), 1)) return right();
  return this;
}

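// Returns true if either operand is the int32 constant -1; such
// multiplications get special treatment during range inference below.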
bool HMul::MulMinusOne() {
  if (left()->EqualsInteger32Constant(-1) ||
      right()->EqualsInteger32Constant(-1)) {
    return true;
  }

  return false;
}


HValue* HMod::Canonicalize() {
  return this;
}


HValue* HDiv::Canonicalize() {
  if (IsIdentityOperation(left(), right(), 1)) return left();
  return this;
}


HValue* HChange::Canonicalize() {
  return (from().Equals(to())) ? value() : this;
}


HValue* HWrapReceiver::Canonicalize() {
  if (HasNoUses()) return NULL;
  if (receiver()->type().IsJSReceiver()) {
    return receiver();
  }
  return this;
}


std::ostream& HTypeof::PrintDataTo(std::ostream& os) const {  // NOLINT
  return os << NameOf(value());
}


HInstruction* HForceRepresentation::New(Isolate* isolate, Zone* zone,
                                        HValue* context, HValue* value,
                                        Representation representation) {
  if (FLAG_fold_constants && value->IsConstant()) {
    HConstant* c = HConstant::cast(value);
    c = c->CopyToRepresentation(representation, zone);
    if (c != NULL) return c;
  }
  return new(zone) HForceRepresentation(value, representation);
}


std::ostream& HForceRepresentation::PrintDataTo(
    std::ostream& os) const {  // NOLINT
  return os << representation().Mnemonic() << " " << NameOf(value());
}


std::ostream& HChange::PrintDataTo(std::ostream& os) const {  // NOLINT
  HUnaryOperation::PrintDataTo(os);
  os << " " << from().Mnemonic() << " to " << to().Mnemonic();

  if (CanTruncateToSmi()) os << " truncating-smi";
  if (CanTruncateToInt32()) os << " truncating-int32";
  if (CanTruncateToNumber()) os << " truncating-number";
  if (CheckFlag(kBailoutOnMinusZero)) os << " -0?";
  return os;
}

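// Math.round/Math.floor of a value that is already an int32 is the value
// itself, and Math.floor of an int32 division can be strength-reduced to a
// single HMathFloorOfDiv instruction.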
HValue* HUnaryMathOperation::Canonicalize() {
  if (op() == kMathRound || op() == kMathFloor) {
    HValue* val = value();
    if (val->IsChange()) val = HChange::cast(val)->value();
    if (val->representation().IsSmiOrInteger32()) {
      if (val->representation().Equals(representation())) return val;
      return Prepend(new (block()->zone())
                         HChange(val, representation(), false, false, true));
    }
  }
  if (op() == kMathFloor && representation().IsSmiOrInteger32() &&
      value()->IsDiv() && value()->HasOneUse()) {
    HDiv* hdiv = HDiv::cast(value());

    HValue* left = hdiv->left();
    if (left->representation().IsInteger32() && !left->CheckFlag(kUint32)) {
      // A value with an integer representation does not need to be
      // transformed.
    } else if (left->IsChange() && HChange::cast(left)->from().IsInteger32() &&
               !HChange::cast(left)->value()->CheckFlag(kUint32)) {
      // A change from an integer32 can be replaced by the integer32 value.
      left = HChange::cast(left)->value();
    } else if (hdiv->observed_input_representation(1).IsSmiOrInteger32()) {
      left = Prepend(new (block()->zone()) HChange(
          left, Representation::Integer32(), false, false, true));
    } else {
      return this;
    }

    HValue* right = hdiv->right();
    if (right->IsInteger32Constant()) {
      right = Prepend(HConstant::cast(right)->CopyToRepresentation(
          Representation::Integer32(), right->block()->zone()));
    } else if (right->representation().IsInteger32() &&
               !right->CheckFlag(kUint32)) {
      // A value with an integer representation does not need to be
      // transformed.
    } else if (right->IsChange() &&
               HChange::cast(right)->from().IsInteger32() &&
               !HChange::cast(right)->value()->CheckFlag(kUint32)) {
      // A change from an integer32 can be replaced by the integer32 value.
      right = HChange::cast(right)->value();
    } else if (hdiv->observed_input_representation(2).IsSmiOrInteger32()) {
      right = Prepend(new (block()->zone()) HChange(
          right, Representation::Integer32(), false, false, true));
    } else {
      return this;
    }

    return Prepend(HMathFloorOfDiv::New(
        block()->graph()->isolate(), block()->zone(), context(), left, right));
  }
  return this;
}


HValue* HCheckInstanceType::Canonicalize() {
  if ((check_ == IS_JS_RECEIVER && value()->type().IsJSReceiver()) ||
      (check_ == IS_JS_ARRAY && value()->type().IsJSArray()) ||
      (check_ == IS_STRING && value()->type().IsString())) {
    return value();
  }

  if (check_ == IS_INTERNALIZED_STRING && value()->IsConstant()) {
    if (HConstant::cast(value())->HasInternalizedStringValue()) {
      return value();
    }
  }
  return this;
}


void HCheckInstanceType::GetCheckInterval(InstanceType* first,
                                          InstanceType* last) {
  DCHECK(is_interval_check());
  switch (check_) {
    case IS_JS_RECEIVER:
      *first = FIRST_JS_RECEIVER_TYPE;
      *last = LAST_JS_RECEIVER_TYPE;
      return;
    case IS_JS_ARRAY:
      *first = *last = JS_ARRAY_TYPE;
      return;
    case IS_JS_FUNCTION:
      *first = *last = JS_FUNCTION_TYPE;
      return;
    case IS_JS_DATE:
      *first = *last = JS_DATE_TYPE;
      return;
    default:
      UNREACHABLE();
  }
}


void HCheckInstanceType::GetCheckMaskAndTag(uint8_t* mask, uint8_t* tag) {
  DCHECK(!is_interval_check());
  switch (check_) {
    case IS_STRING:
      *mask = kIsNotStringMask;
      *tag = kStringTag;
      return;
    case IS_INTERNALIZED_STRING:
      *mask = kIsNotStringMask | kIsNotInternalizedMask;
      *tag = kInternalizedTag;
      return;
    default:
      UNREACHABLE();
  }
}


std::ostream& HCheckMaps::PrintDataTo(std::ostream& os) const {  // NOLINT
  os << NameOf(value()) << " [" << *maps()->at(0).handle();
  for (int i = 1; i < maps()->size(); ++i) {
    os << "," << *maps()->at(i).handle();
  }
  os << "]";
  if (IsStabilityCheck()) os << "(stability-check)";
  return os;
}


HValue* HCheckMaps::Canonicalize() {
  if (!IsStabilityCheck() && maps_are_stable() && value()->IsConstant()) {
    HConstant* c_value = HConstant::cast(value());
    if (c_value->HasObjectMap()) {
      for (int i = 0; i < maps()->size(); ++i) {
        if (c_value->ObjectMap() == maps()->at(i)) {
          if (maps()->size() > 1) {
            set_maps(new(block()->graph()->zone()) UniqueSet<Map>(
                maps()->at(i), block()->graph()->zone()));
          }
          MarkAsStabilityCheck();
          break;
        }
      }
    }
  }
  return this;
}


std::ostream& HCheckValue::PrintDataTo(std::ostream& os) const {  // NOLINT
  return os << NameOf(value()) << " " << Brief(*object().handle());
}


HValue* HCheckValue::Canonicalize() {
  return (value()->IsConstant() &&
          HConstant::cast(value())->EqualsUnique(object_)) ? NULL : this;
}


const char* HCheckInstanceType::GetCheckName() const {
  switch (check_) {
    case IS_JS_RECEIVER: return "object";
    case IS_JS_ARRAY: return "array";
    case IS_JS_FUNCTION: return "function";
    case IS_JS_DATE: return "date";
    case IS_STRING: return "string";
    case IS_INTERNALIZED_STRING: return "internalized_string";
  }
  UNREACHABLE();
  return "";
}


std::ostream& HCheckInstanceType::PrintDataTo(
    std::ostream& os) const {  // NOLINT
  os << GetCheckName() << " ";
  return HUnaryOperation::PrintDataTo(os);
}


std::ostream& HUnknownOSRValue::PrintDataTo(std::ostream& os) const {  // NOLINT
  const char* type = "expression";
  if (environment_->is_local_index(index_)) type = "local";
  if (environment_->is_special_index(index_)) type = "special";
  if (environment_->is_parameter_index(index_)) type = "parameter";
  return os << type << " @ " << index_;
}

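// Default range for a value: the full Smi range for Smi-typed values,
// otherwise an unconstrained range whose minus-zero bit depends on whether
// all uses truncate to int32.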
Range* HValue::InferRange(Zone* zone) {
  Range* result;
  if (representation().IsSmi() || type().IsSmi()) {
    result = new(zone) Range(Smi::kMinValue, Smi::kMaxValue);
    result->set_can_be_minus_zero(false);
  } else {
    result = new(zone) Range();
    result->set_can_be_minus_zero(!CheckFlag(kAllUsesTruncatingToInt32));
    // TODO(jkummerow): The range cannot be minus zero when the upper type
    // bound is Integer32.
  }
  return result;
}


Range* HChange::InferRange(Zone* zone) {
  Range* input_range = value()->range();
  if (from().IsInteger32() && !value()->CheckFlag(HInstruction::kUint32) &&
      (to().IsSmi() ||
       (to().IsTagged() &&
        input_range != NULL &&
        input_range->IsInSmiRange()))) {
    set_type(HType::Smi());
    ClearChangesFlag(kNewSpacePromotion);
  }
  if (to().IsSmiOrTagged() &&
      input_range != NULL &&
      input_range->IsInSmiRange() &&
      (!SmiValuesAre32Bits() ||
       !value()->CheckFlag(HValue::kUint32) ||
       input_range->upper() != kMaxInt)) {
    // The Range class can't express upper bounds in the (kMaxInt, kMaxUint32]
    // interval, so we treat kMaxInt as a sentinel for this entire interval.
    ClearFlag(kCanOverflow);
  }
  Range* result = (input_range != NULL)
      ? input_range->Copy(zone)
      : HValue::InferRange(zone);
  result->set_can_be_minus_zero(!to().IsSmiOrInteger32() ||
                                !(CheckFlag(kAllUsesTruncatingToInt32) ||
                                  CheckFlag(kAllUsesTruncatingToSmi)));
  if (to().IsSmi()) result->ClampToSmi();
  return result;
}


Range* HConstant::InferRange(Zone* zone) {
  if (HasInteger32Value()) {
    Range* result = new(zone) Range(int32_value_, int32_value_);
    result->set_can_be_minus_zero(false);
    return result;
  }
  return HValue::InferRange(zone);
}


SourcePosition HPhi::position() const { return block()->first()->position(); }


Range* HPhi::InferRange(Zone* zone) {
  Representation r = representation();
  if (r.IsSmiOrInteger32()) {
    if (block()->IsLoopHeader()) {
      Range* range = r.IsSmi()
          ? new(zone) Range(Smi::kMinValue, Smi::kMaxValue)
          : new(zone) Range(kMinInt, kMaxInt);
      return range;
    } else {
      Range* range = OperandAt(0)->range()->Copy(zone);
      for (int i = 1; i < OperandCount(); ++i) {
        range->Union(OperandAt(i)->range());
      }
      return range;
    }
  } else {
    return HValue::InferRange(zone);
  }
}

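// Besides computing the result range, the arithmetic InferRange
// implementations below also clear kCanOverflow when the operand ranges
// (or truncating uses) prove that overflow cannot be observed.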
InferRange(Zone * zone)1755 Range* HAdd::InferRange(Zone* zone) {
1756 Representation r = representation();
1757 if (r.IsSmiOrInteger32()) {
1758 Range* a = left()->range();
1759 Range* b = right()->range();
1760 Range* res = a->Copy(zone);
1761 if (!res->AddAndCheckOverflow(r, b) ||
1762 (r.IsInteger32() && CheckFlag(kAllUsesTruncatingToInt32)) ||
1763 (r.IsSmi() && CheckFlag(kAllUsesTruncatingToSmi))) {
1764 ClearFlag(kCanOverflow);
1765 }
1766 res->set_can_be_minus_zero(!CheckFlag(kAllUsesTruncatingToSmi) &&
1767 !CheckFlag(kAllUsesTruncatingToInt32) &&
1768 a->CanBeMinusZero() && b->CanBeMinusZero());
1769 return res;
1770 } else {
1771 return HValue::InferRange(zone);
1772 }
1773 }
1774
1775
1776 Range* HSub::InferRange(Zone* zone) {
1777 Representation r = representation();
1778 if (r.IsSmiOrInteger32()) {
1779 Range* a = left()->range();
1780 Range* b = right()->range();
1781 Range* res = a->Copy(zone);
1782 if (!res->SubAndCheckOverflow(r, b) ||
1783 (r.IsInteger32() && CheckFlag(kAllUsesTruncatingToInt32)) ||
1784 (r.IsSmi() && CheckFlag(kAllUsesTruncatingToSmi))) {
1785 ClearFlag(kCanOverflow);
1786 }
1787 res->set_can_be_minus_zero(!CheckFlag(kAllUsesTruncatingToSmi) &&
1788 !CheckFlag(kAllUsesTruncatingToInt32) &&
1789 a->CanBeMinusZero() && b->CanBeZero());
1790 return res;
1791 } else {
1792 return HValue::InferRange(zone);
1793 }
1794 }
1795
1796
1797 Range* HMul::InferRange(Zone* zone) {
1798 Representation r = representation();
1799 if (r.IsSmiOrInteger32()) {
1800 Range* a = left()->range();
1801 Range* b = right()->range();
1802 Range* res = a->Copy(zone);
1803 if (!res->MulAndCheckOverflow(r, b) ||
1804 (((r.IsInteger32() && CheckFlag(kAllUsesTruncatingToInt32)) ||
1805 (r.IsSmi() && CheckFlag(kAllUsesTruncatingToSmi))) &&
1806 MulMinusOne())) {
1807 // Truncated int multiplication is too precise and therefore not the
1808 // same as converting to Double and back.
1809 // Handle truncated integer multiplication by -1 specially.
1810 ClearFlag(kCanOverflow);
1811 }
1812 res->set_can_be_minus_zero(!CheckFlag(kAllUsesTruncatingToSmi) &&
1813 !CheckFlag(kAllUsesTruncatingToInt32) &&
1814 ((a->CanBeZero() && b->CanBeNegative()) ||
1815 (a->CanBeNegative() && b->CanBeZero())));
1816 return res;
1817 } else {
1818 return HValue::InferRange(zone);
1819 }
1820 }
1821
1822
1823 Range* HDiv::InferRange(Zone* zone) {
1824 if (representation().IsInteger32()) {
1825 Range* a = left()->range();
1826 Range* b = right()->range();
1827 Range* result = new(zone) Range();
1828 result->set_can_be_minus_zero(!CheckFlag(kAllUsesTruncatingToInt32) &&
1829 (a->CanBeMinusZero() ||
1830 (a->CanBeZero() && b->CanBeNegative())));
1831 if (!a->Includes(kMinInt) || !b->Includes(-1)) {
1832 ClearFlag(kCanOverflow);
1833 }
1834
1835 if (!b->CanBeZero()) {
1836 ClearFlag(kCanBeDivByZero);
1837 }
1838 return result;
1839 } else {
1840 return HValue::InferRange(zone);
1841 }
1842 }
1843
1844
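// Range inference for flooring division: clears the min-int, left-operand
// sign, overflow and divide-by-zero flags whenever the operand ranges rule
// those cases out.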
1845 Range* HMathFloorOfDiv::InferRange(Zone* zone) {
1846 if (representation().IsInteger32()) {
1847 Range* a = left()->range();
1848 Range* b = right()->range();
1849 Range* result = new(zone) Range();
1850 result->set_can_be_minus_zero(!CheckFlag(kAllUsesTruncatingToInt32) &&
1851 (a->CanBeMinusZero() ||
1852 (a->CanBeZero() && b->CanBeNegative())));
1853 if (!a->Includes(kMinInt)) {
1854 ClearFlag(kLeftCanBeMinInt);
1855 }
1856
1857 if (!a->CanBeNegative()) {
1858 ClearFlag(HValue::kLeftCanBeNegative);
1859 }
1860
1861 if (!a->CanBePositive()) {
1862 ClearFlag(HValue::kLeftCanBePositive);
1863 }
1864
1865 if (!a->Includes(kMinInt) || !b->Includes(-1)) {
1866 ClearFlag(kCanOverflow);
1867 }
1868
1869 if (!b->CanBeZero()) {
1870 ClearFlag(kCanBeDivByZero);
1871 }
1872 return result;
1873 } else {
1874 return HValue::InferRange(zone);
1875 }
1876 }
1877
1878
1879 // Returns the absolute value of its argument minus one, avoiding undefined
1880 // behavior at kMinInt.
1881 static int32_t AbsMinus1(int32_t a) { return a < 0 ? -(a + 1) : (a - 1); }
1882
1883
1884 Range* HMod::InferRange(Zone* zone) {
1885 if (representation().IsInteger32()) {
1886 Range* a = left()->range();
1887 Range* b = right()->range();
1888
1889 // The magnitude of the modulus is bounded by the right operand.
1890 int32_t positive_bound = Max(AbsMinus1(b->lower()), AbsMinus1(b->upper()));
1891
1892 // The result of the modulo operation has the sign of its left operand.
1893 bool left_can_be_negative = a->CanBeMinusZero() || a->CanBeNegative();
1894 Range* result = new(zone) Range(left_can_be_negative ? -positive_bound : 0,
1895 a->CanBePositive() ? positive_bound : 0);
1896
1897 result->set_can_be_minus_zero(!CheckFlag(kAllUsesTruncatingToInt32) &&
1898 left_can_be_negative);
1899
1900 if (!a->CanBeNegative()) {
1901 ClearFlag(HValue::kLeftCanBeNegative);
1902 }
1903
1904 if (!a->Includes(kMinInt) || !b->Includes(-1)) {
1905 ClearFlag(HValue::kCanOverflow);
1906 }
1907
1908 if (!b->CanBeZero()) {
1909 ClearFlag(HValue::kCanBeDivByZero);
1910 }
1911 return result;
1912 } else {
1913 return HValue::InferRange(zone);
1914 }
1915 }
1916
1917
1918 Range* HMathMinMax::InferRange(Zone* zone) {
1919 if (representation().IsSmiOrInteger32()) {
1920 Range* a = left()->range();
1921 Range* b = right()->range();
1922 Range* res = a->Copy(zone);
1923 if (operation_ == kMathMax) {
1924 res->CombinedMax(b);
1925 } else {
1926 DCHECK(operation_ == kMathMin);
1927 res->CombinedMin(b);
1928 }
1929 return res;
1930 } else {
1931 return HValue::InferRange(zone);
1932 }
1933 }
1934
1935
1936 void HPushArguments::AddInput(HValue* value) {
1937 inputs_.Add(NULL, value->block()->zone());
1938 SetOperandAt(OperandCount() - 1, value);
1939 }
1940
1941
1942 std::ostream& HPhi::PrintTo(std::ostream& os) const { // NOLINT
1943 os << "[";
1944 for (int i = 0; i < OperandCount(); ++i) {
1945 os << " " << NameOf(OperandAt(i)) << " ";
1946 }
1947 return os << " uses" << UseCount()
1948 << representation_from_indirect_uses().Mnemonic() << " "
1949 << TypeOf(this) << "]";
1950 }
1951
1952
1953 void HPhi::AddInput(HValue* value) {
1954 inputs_.Add(NULL, value->block()->zone());
1955 SetOperandAt(OperandCount() - 1, value);
1956 // Mark phis that may have 'arguments' directly or indirectly as an operand.
1957 if (!CheckFlag(kIsArguments) && value->CheckFlag(kIsArguments)) {
1958 SetFlag(kIsArguments);
1959 }
1960 }
1961
1962
1963 bool HPhi::HasRealUses() {
1964 for (HUseIterator it(uses()); !it.Done(); it.Advance()) {
1965 if (!it.value()->IsPhi()) return true;
1966 }
1967 return false;
1968 }
1969
1970
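// Returns the single value (other than this phi itself) that all operands
// agree on, or NULL if the phi is not redundant.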
1971 HValue* HPhi::GetRedundantReplacement() {
1972 HValue* candidate = NULL;
1973 int count = OperandCount();
1974 int position = 0;
1975 while (position < count && candidate == NULL) {
1976 HValue* current = OperandAt(position++);
1977 if (current != this) candidate = current;
1978 }
1979 while (position < count) {
1980 HValue* current = OperandAt(position++);
1981 if (current != this && current != candidate) return NULL;
1982 }
1983 DCHECK(candidate != this);
1984 return candidate;
1985 }
1986
1987
1988 void HPhi::DeleteFromGraph() {
1989 DCHECK(block() != NULL);
1990 block()->RemovePhi(this);
1991 DCHECK(block() == NULL);
1992 }
1993
1994
1995 void HPhi::InitRealUses(int phi_id) {
1996 // Initialize real uses.
1997 phi_id_ = phi_id;
1998 // Compute a conservative approximation of truncating uses before inferring
1999 // representations. The proper, exact computation will be done later, when
2000 // inserting representation changes.
2001 SetFlag(kTruncatingToSmi);
2002 SetFlag(kTruncatingToInt32);
2003 for (HUseIterator it(uses()); !it.Done(); it.Advance()) {
2004 HValue* value = it.value();
2005 if (!value->IsPhi()) {
2006 Representation rep = value->observed_input_representation(it.index());
2007 representation_from_non_phi_uses_ =
2008 representation_from_non_phi_uses().generalize(rep);
2009 if (rep.IsSmi() || rep.IsInteger32() || rep.IsDouble()) {
2010 has_type_feedback_from_uses_ = true;
2011 }
2012
2013 if (FLAG_trace_representation) {
2014 PrintF("#%d Phi is used by real #%d %s as %s\n",
2015 id(), value->id(), value->Mnemonic(), rep.Mnemonic());
2016 }
2017 if (!value->IsSimulate()) {
2018 if (!value->CheckFlag(kTruncatingToSmi)) {
2019 ClearFlag(kTruncatingToSmi);
2020 }
2021 if (!value->CheckFlag(kTruncatingToInt32)) {
2022 ClearFlag(kTruncatingToInt32);
2023 }
2024 }
2025 }
2026 }
2027 }
2028
2029
2030 void HPhi::AddNonPhiUsesFrom(HPhi* other) {
2031 if (FLAG_trace_representation) {
2032 PrintF(
2033 "generalizing use representation '%s' of #%d Phi "
2034 "with uses of #%d Phi '%s'\n",
2035 representation_from_indirect_uses().Mnemonic(), id(), other->id(),
2036 other->representation_from_non_phi_uses().Mnemonic());
2037 }
2038
2039 representation_from_indirect_uses_ =
2040 representation_from_indirect_uses().generalize(
2041 other->representation_from_non_phi_uses());
2042 }
2043
2044
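// Folds the given simulates into this one: assigned slots are merged unless
// already present, and pushed values are either cancelled against pending
// pops or appended.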
2045 void HSimulate::MergeWith(ZoneList<HSimulate*>* list) {
2046 while (!list->is_empty()) {
2047 HSimulate* from = list->RemoveLast();
2048 ZoneList<HValue*>* from_values = &from->values_;
2049 for (int i = 0; i < from_values->length(); ++i) {
2050 if (from->HasAssignedIndexAt(i)) {
2051 int index = from->GetAssignedIndexAt(i);
2052 if (HasValueForIndex(index)) continue;
2053 AddAssignedValue(index, from_values->at(i));
2054 } else {
2055 if (pop_count_ > 0) {
2056 pop_count_--;
2057 } else {
2058 AddPushedValue(from_values->at(i));
2059 }
2060 }
2061 }
2062 pop_count_ += from->pop_count_;
2063 from->DeleteAndReplaceWith(NULL);
2064 }
2065 }
2066
2067
2068 std::ostream& HSimulate::PrintDataTo(std::ostream& os) const { // NOLINT
2069 os << "id=" << ast_id().ToInt();
2070 if (pop_count_ > 0) os << " pop " << pop_count_;
2071 if (values_.length() > 0) {
2072 if (pop_count_ > 0) os << " /";
2073 for (int i = values_.length() - 1; i >= 0; --i) {
2074 if (HasAssignedIndexAt(i)) {
2075 os << " var[" << GetAssignedIndexAt(i) << "] = ";
2076 } else {
2077 os << " push ";
2078 }
2079 os << NameOf(values_[i]);
2080 if (i > 0) os << ",";
2081 }
2082 }
2083 return os;
2084 }
2085
2086
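// Replays the recorded pops, slot bindings and pushes on the given
// environment; the replay happens at most once per simulate.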
2087 void HSimulate::ReplayEnvironment(HEnvironment* env) {
2088 if (is_done_with_replay()) return;
2089 DCHECK(env != NULL);
2090 env->set_ast_id(ast_id());
2091 env->Drop(pop_count());
2092 for (int i = values()->length() - 1; i >= 0; --i) {
2093 HValue* value = values()->at(i);
2094 if (HasAssignedIndexAt(i)) {
2095 env->Bind(GetAssignedIndexAt(i), value);
2096 } else {
2097 env->Push(value);
2098 }
2099 }
2100 set_done_with_replay();
2101 }
2102
2103
2104 static void ReplayEnvironmentNested(const ZoneList<HValue*>* values,
2105 HCapturedObject* other) {
2106 for (int i = 0; i < values->length(); ++i) {
2107 HValue* value = values->at(i);
2108 if (value->IsCapturedObject()) {
2109 if (HCapturedObject::cast(value)->capture_id() == other->capture_id()) {
2110 values->at(i) = other;
2111 } else {
2112 ReplayEnvironmentNested(HCapturedObject::cast(value)->values(), other);
2113 }
2114 }
2115 }
2116 }
2117
2118
2119 // Replay captured objects by replacing all captured objects with the
2120 // same capture id in the current and all outer environments.
2121 void HCapturedObject::ReplayEnvironment(HEnvironment* env) {
2122 DCHECK(env != NULL);
2123 while (env != NULL) {
2124 ReplayEnvironmentNested(env->values(), this);
2125 env = env->outer();
2126 }
2127 }
2128
2129
2130 std::ostream& HCapturedObject::PrintDataTo(std::ostream& os) const { // NOLINT
2131 os << "#" << capture_id() << " ";
2132 return HDematerializedObject::PrintDataTo(os);
2133 }
2134
2135
2136 void HEnterInlined::RegisterReturnTarget(HBasicBlock* return_target,
2137 Zone* zone) {
2138 DCHECK(return_target->IsInlineReturnTarget());
2139 return_targets_.Add(return_target, zone);
2140 }
2141
2142
2143 std::ostream& HEnterInlined::PrintDataTo(std::ostream& os) const { // NOLINT
2144 os << function()->debug_name()->ToCString().get();
2145 if (syntactic_tail_call_mode() == TailCallMode::kAllow) {
2146 os << ", JSTailCall";
2147 }
2148 return os;
2149 }
2150
2151
2152 static bool IsInteger32(double value) {
2153 if (value >= std::numeric_limits<int32_t>::min() &&
2154 value <= std::numeric_limits<int32_t>::max()) {
2155 double roundtrip_value = static_cast<double>(static_cast<int32_t>(value));
2156 return bit_cast<int64_t>(roundtrip_value) == bit_cast<int64_t>(value);
2157 }
2158 return false;
2159 }
2160
2161
2162 HConstant::HConstant(Special special)
2163 : HTemplateInstruction<0>(HType::TaggedNumber()),
2164 object_(Handle<Object>::null()),
2165 object_map_(Handle<Map>::null()),
2166 bit_field_(HasDoubleValueField::encode(true) |
2167 InstanceTypeField::encode(kUnknownInstanceType)),
2168 int32_value_(0) {
2169 DCHECK_EQ(kHoleNaN, special);
2170 // Manipulating the signaling NaN used for the hole in C++ (e.g. with
2171 // bit_cast) will change its value on ia32: the x87 stack is used to return
2172 // values, and stores to the stack silently clear the signaling bit.
2173 // Therefore we have to use memcpy for initializing |double_value_| with
2174 // kHoleNanInt64 here.
2175 std::memcpy(&double_value_, &kHoleNanInt64, sizeof(double_value_));
2176 Initialize(Representation::Double());
2177 }
2178
2179
2180 HConstant::HConstant(Handle<Object> object, Representation r)
2181 : HTemplateInstruction<0>(HType::FromValue(object)),
2182 object_(Unique<Object>::CreateUninitialized(object)),
2183 object_map_(Handle<Map>::null()),
2184 bit_field_(
2185 HasStableMapValueField::encode(false) |
2186 HasSmiValueField::encode(false) | HasInt32ValueField::encode(false) |
2187 HasDoubleValueField::encode(false) |
2188 HasExternalReferenceValueField::encode(false) |
2189 IsNotInNewSpaceField::encode(true) |
2190 BooleanValueField::encode(object->BooleanValue()) |
2191 IsUndetectableField::encode(false) | IsCallableField::encode(false) |
2192 InstanceTypeField::encode(kUnknownInstanceType)) {
2193 if (object->IsNumber()) {
2194 double n = object->Number();
2195 bool has_int32_value = IsInteger32(n);
2196 bit_field_ = HasInt32ValueField::update(bit_field_, has_int32_value);
2197 int32_value_ = DoubleToInt32(n);
2198 bit_field_ = HasSmiValueField::update(
2199 bit_field_, has_int32_value && Smi::IsValid(int32_value_));
2200 if (std::isnan(n)) {
2201 double_value_ = std::numeric_limits<double>::quiet_NaN();
2202 // Canonicalize object with NaN value.
2203 DCHECK(object->IsHeapObject()); // NaN can't be a Smi.
2204 Isolate* isolate = HeapObject::cast(*object)->GetIsolate();
2205 object = isolate->factory()->nan_value();
2206 object_ = Unique<Object>::CreateUninitialized(object);
2207 } else {
2208 double_value_ = n;
2209 // Canonicalize object with -0.0 value.
2210 if (bit_cast<int64_t>(n) == bit_cast<int64_t>(-0.0)) {
2211 DCHECK(object->IsHeapObject()); // -0.0 can't be a Smi.
2212 Isolate* isolate = HeapObject::cast(*object)->GetIsolate();
2213 object = isolate->factory()->minus_zero_value();
2214 object_ = Unique<Object>::CreateUninitialized(object);
2215 }
2216 }
2217 bit_field_ = HasDoubleValueField::update(bit_field_, true);
2218 }
2219 if (object->IsHeapObject()) {
2220 Handle<HeapObject> heap_object = Handle<HeapObject>::cast(object);
2221 Isolate* isolate = heap_object->GetIsolate();
2222 Handle<Map> map(heap_object->map(), isolate);
2223 bit_field_ = IsNotInNewSpaceField::update(
2224 bit_field_, !isolate->heap()->InNewSpace(*object));
2225 bit_field_ = InstanceTypeField::update(bit_field_, map->instance_type());
2226 bit_field_ =
2227 IsUndetectableField::update(bit_field_, map->is_undetectable());
2228 bit_field_ = IsCallableField::update(bit_field_, map->is_callable());
2229 if (map->is_stable()) object_map_ = Unique<Map>::CreateImmovable(map);
2230 bit_field_ = HasStableMapValueField::update(
2231 bit_field_,
2232 HasMapValue() && Handle<Map>::cast(heap_object)->is_stable());
2233 }
2234
2235 Initialize(r);
2236 }
2237
2238
2239 HConstant::HConstant(Unique<Object> object, Unique<Map> object_map,
2240 bool has_stable_map_value, Representation r, HType type,
2241 bool is_not_in_new_space, bool boolean_value,
2242 bool is_undetectable, InstanceType instance_type)
2243 : HTemplateInstruction<0>(type),
2244 object_(object),
2245 object_map_(object_map),
2246 bit_field_(HasStableMapValueField::encode(has_stable_map_value) |
2247 HasSmiValueField::encode(false) |
2248 HasInt32ValueField::encode(false) |
2249 HasDoubleValueField::encode(false) |
2250 HasExternalReferenceValueField::encode(false) |
2251 IsNotInNewSpaceField::encode(is_not_in_new_space) |
2252 BooleanValueField::encode(boolean_value) |
2253 IsUndetectableField::encode(is_undetectable) |
2254 InstanceTypeField::encode(instance_type)) {
2255 DCHECK(!object.handle().is_null());
2256 DCHECK(!type.IsTaggedNumber() || type.IsNone());
2257 Initialize(r);
2258 }
2259
2260
2261 HConstant::HConstant(int32_t integer_value, Representation r,
2262 bool is_not_in_new_space, Unique<Object> object)
2263 : object_(object),
2264 object_map_(Handle<Map>::null()),
2265 bit_field_(HasStableMapValueField::encode(false) |
2266 HasSmiValueField::encode(Smi::IsValid(integer_value)) |
2267 HasInt32ValueField::encode(true) |
2268 HasDoubleValueField::encode(true) |
2269 HasExternalReferenceValueField::encode(false) |
2270 IsNotInNewSpaceField::encode(is_not_in_new_space) |
2271 BooleanValueField::encode(integer_value != 0) |
2272 IsUndetectableField::encode(false) |
2273 InstanceTypeField::encode(kUnknownInstanceType)),
2274 int32_value_(integer_value),
2275 double_value_(FastI2D(integer_value)) {
2276 // It's possible to create a constant with a value in Smi-range but stored
2277 // in a (pre-existing) HeapNumber. See crbug.com/349878.
2278 bool could_be_heapobject = r.IsTagged() && !object.handle().is_null();
2279 bool is_smi = HasSmiValue() && !could_be_heapobject;
2280 set_type(is_smi ? HType::Smi() : HType::TaggedNumber());
2281 Initialize(r);
2282 }
2283
2284 HConstant::HConstant(double double_value, Representation r,
2285 bool is_not_in_new_space, Unique<Object> object)
2286 : object_(object),
2287 object_map_(Handle<Map>::null()),
2288 bit_field_(HasStableMapValueField::encode(false) |
2289 HasInt32ValueField::encode(IsInteger32(double_value)) |
2290 HasDoubleValueField::encode(true) |
2291 HasExternalReferenceValueField::encode(false) |
2292 IsNotInNewSpaceField::encode(is_not_in_new_space) |
2293 BooleanValueField::encode(double_value != 0 &&
2294 !std::isnan(double_value)) |
2295 IsUndetectableField::encode(false) |
2296 InstanceTypeField::encode(kUnknownInstanceType)),
2297 int32_value_(DoubleToInt32(double_value)) {
2298 bit_field_ = HasSmiValueField::update(
2299 bit_field_, HasInteger32Value() && Smi::IsValid(int32_value_));
2300 // It's possible to create a constant with a value in Smi-range but stored
2301 // in a (pre-existing) HeapNumber. See crbug.com/349878.
2302 bool could_be_heapobject = r.IsTagged() && !object.handle().is_null();
2303 bool is_smi = HasSmiValue() && !could_be_heapobject;
2304 set_type(is_smi ? HType::Smi() : HType::TaggedNumber());
2305 if (std::isnan(double_value)) {
2306 double_value_ = std::numeric_limits<double>::quiet_NaN();
2307 } else {
2308 double_value_ = double_value;
2309 }
2310 Initialize(r);
2311 }
2312
2313
2314 HConstant::HConstant(ExternalReference reference)
2315 : HTemplateInstruction<0>(HType::Any()),
2316 object_(Unique<Object>(Handle<Object>::null())),
2317 object_map_(Handle<Map>::null()),
2318 bit_field_(
2319 HasStableMapValueField::encode(false) |
2320 HasSmiValueField::encode(false) | HasInt32ValueField::encode(false) |
2321 HasDoubleValueField::encode(false) |
2322 HasExternalReferenceValueField::encode(true) |
2323 IsNotInNewSpaceField::encode(true) | BooleanValueField::encode(true) |
2324 IsUndetectableField::encode(false) |
2325 InstanceTypeField::encode(kUnknownInstanceType)),
2326 external_reference_value_(reference) {
2327 Initialize(Representation::External());
2328 }
2329
2330
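// Picks the most specific representation this constant supports when none
// was requested, and drops the object handle for Smi representation so a
// possibly heap-allocated number is not reused later.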
2331 void HConstant::Initialize(Representation r) {
2332 if (r.IsNone()) {
2333 if (HasSmiValue() && SmiValuesAre31Bits()) {
2334 r = Representation::Smi();
2335 } else if (HasInteger32Value()) {
2336 r = Representation::Integer32();
2337 } else if (HasDoubleValue()) {
2338 r = Representation::Double();
2339 } else if (HasExternalReferenceValue()) {
2340 r = Representation::External();
2341 } else {
2342 Handle<Object> object = object_.handle();
2343 if (object->IsJSObject()) {
2344 // Try to eagerly migrate JSObjects that have deprecated maps.
2345 Handle<JSObject> js_object = Handle<JSObject>::cast(object);
2346 if (js_object->map()->is_deprecated()) {
2347 JSObject::TryMigrateInstance(js_object);
2348 }
2349 }
2350 r = Representation::Tagged();
2351 }
2352 }
2353 if (r.IsSmi()) {
2354 // If we have an existing handle, zap it, because it might be a heap
2355 // number which we must not re-use when copying this HConstant to
2356 // Tagged representation later, because having Smi representation now
2357 // could cause heap object checks not to get emitted.
2358 object_ = Unique<Object>(Handle<Object>::null());
2359 }
2360 if (r.IsSmiOrInteger32() && object_.handle().is_null()) {
2361 // If it's not a heap object, it can't be in new space.
2362 bit_field_ = IsNotInNewSpaceField::update(bit_field_, true);
2363 }
2364 set_representation(r);
2365 SetFlag(kUseGVN);
2366 }
2367
2368
2369 bool HConstant::ImmortalImmovable() const {
2370 if (HasInteger32Value()) {
2371 return false;
2372 }
2373 if (HasDoubleValue()) {
2374 if (IsSpecialDouble()) {
2375 return true;
2376 }
2377 return false;
2378 }
2379 if (HasExternalReferenceValue()) {
2380 return false;
2381 }
2382
2383 DCHECK(!object_.handle().is_null());
2384 Heap* heap = isolate()->heap();
2385 DCHECK(!object_.IsKnownGlobal(heap->minus_zero_value()));
2386 DCHECK(!object_.IsKnownGlobal(heap->nan_value()));
2387 return
2388 #define IMMORTAL_IMMOVABLE_ROOT(name) \
2389 object_.IsKnownGlobal(heap->root(Heap::k##name##RootIndex)) ||
2390 IMMORTAL_IMMOVABLE_ROOT_LIST(IMMORTAL_IMMOVABLE_ROOT)
2391 #undef IMMORTAL_IMMOVABLE_ROOT
2392 #define INTERNALIZED_STRING(name, value) \
2393 object_.IsKnownGlobal(heap->name()) ||
2394 INTERNALIZED_STRING_LIST(INTERNALIZED_STRING)
2395 #undef INTERNALIZED_STRING
2396 #define STRING_TYPE(NAME, size, name, Name) \
2397 object_.IsKnownGlobal(heap->name##_map()) ||
2398 STRING_TYPE_LIST(STRING_TYPE)
2399 #undef STRING_TYPE
2400 false;
2401 }
2402
2403
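// Returns true if this constant should be (re)materialized at each use
// instead of being emitted once at its definition.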
2404 bool HConstant::EmitAtUses() {
2405 DCHECK(IsLinked());
2406 if (block()->graph()->has_osr() &&
2407 block()->graph()->IsStandardConstant(this)) {
2408 return true;
2409 }
2410 if (HasNoUses()) return true;
2411 if (IsCell()) return false;
2412 if (representation().IsDouble()) return false;
2413 if (representation().IsExternal()) return false;
2414 return true;
2415 }
2416
2417
2418 HConstant* HConstant::CopyToRepresentation(Representation r, Zone* zone) const {
2419 if (r.IsSmi() && !HasSmiValue()) return NULL;
2420 if (r.IsInteger32() && !HasInteger32Value()) return NULL;
2421 if (r.IsDouble() && !HasDoubleValue()) return NULL;
2422 if (r.IsExternal() && !HasExternalReferenceValue()) return NULL;
2423 if (HasInteger32Value()) {
2424 return new (zone) HConstant(int32_value_, r, NotInNewSpace(), object_);
2425 }
2426 if (HasDoubleValue()) {
2427 return new (zone) HConstant(double_value_, r, NotInNewSpace(), object_);
2428 }
2429 if (HasExternalReferenceValue()) {
2430 return new(zone) HConstant(external_reference_value_);
2431 }
2432 DCHECK(!object_.handle().is_null());
2433 return new (zone) HConstant(object_, object_map_, HasStableMapValue(), r,
2434 type_, NotInNewSpace(), BooleanValue(),
2435 IsUndetectable(), GetInstanceType());
2436 }
2437
2438
2439 Maybe<HConstant*> HConstant::CopyToTruncatedInt32(Zone* zone) {
2440 HConstant* res = NULL;
2441 if (HasInteger32Value()) {
2442 res = new (zone) HConstant(int32_value_, Representation::Integer32(),
2443 NotInNewSpace(), object_);
2444 } else if (HasDoubleValue()) {
2445 res = new (zone)
2446 HConstant(DoubleToInt32(double_value_), Representation::Integer32(),
2447 NotInNewSpace(), object_);
2448 }
2449 return res != NULL ? Just(res) : Nothing<HConstant*>();
2450 }
2451
2452
2453 Maybe<HConstant*> HConstant::CopyToTruncatedNumber(Isolate* isolate,
2454 Zone* zone) {
2455 HConstant* res = NULL;
2456 Handle<Object> handle = this->handle(isolate);
2457 if (handle->IsBoolean()) {
2458 res = handle->BooleanValue() ?
2459 new(zone) HConstant(1) : new(zone) HConstant(0);
2460 } else if (handle->IsUndefined(isolate)) {
2461 res = new (zone) HConstant(std::numeric_limits<double>::quiet_NaN());
2462 } else if (handle->IsNull(isolate)) {
2463 res = new(zone) HConstant(0);
2464 } else if (handle->IsString()) {
2465 res = new(zone) HConstant(String::ToNumber(Handle<String>::cast(handle)));
2466 }
2467 return res != NULL ? Just(res) : Nothing<HConstant*>();
2468 }
2469
2470
2471 std::ostream& HConstant::PrintDataTo(std::ostream& os) const { // NOLINT
2472 if (HasInteger32Value()) {
2473 os << int32_value_ << " ";
2474 } else if (HasDoubleValue()) {
2475 os << double_value_ << " ";
2476 } else if (HasExternalReferenceValue()) {
2477 os << reinterpret_cast<void*>(external_reference_value_.address()) << " ";
2478 } else {
2479 // The handle() method is silently and lazily mutating the object.
2480 Handle<Object> h = const_cast<HConstant*>(this)->handle(isolate());
2481 os << Brief(*h) << " ";
2482 if (HasStableMapValue()) os << "[stable-map] ";
2483 if (HasObjectMap()) os << "[map " << *ObjectMap().handle() << "] ";
2484 }
2485 if (!NotInNewSpace()) os << "[new space] ";
2486 return os;
2487 }
2488
2489
2490 std::ostream& HBinaryOperation::PrintDataTo(std::ostream& os) const { // NOLINT
2491 os << NameOf(left()) << " " << NameOf(right());
2492 if (CheckFlag(kCanOverflow)) os << " !";
2493 if (CheckFlag(kBailoutOnMinusZero)) os << " -0?";
2494 return os;
2495 }
2496
2497
2498 void HBinaryOperation::InferRepresentation(HInferRepresentationPhase* h_infer) {
2499 DCHECK(CheckFlag(kFlexibleRepresentation));
2500 Representation new_rep = RepresentationFromInputs();
2501 UpdateRepresentation(new_rep, h_infer, "inputs");
2502
2503 if (representation().IsSmi() && HasNonSmiUse()) {
2504 UpdateRepresentation(
2505 Representation::Integer32(), h_infer, "use requirements");
2506 }
2507
2508 if (observed_output_representation_.IsNone()) {
2509 new_rep = RepresentationFromUses();
2510 UpdateRepresentation(new_rep, h_infer, "uses");
2511 } else {
2512 new_rep = RepresentationFromOutput();
2513 UpdateRepresentation(new_rep, h_infer, "output");
2514 }
2515 }
2516
2517
2518 Representation HBinaryOperation::RepresentationFromInputs() {
2519 // Determine the worst case of observed input representations and
2520 // the currently assumed output representation.
2521 Representation rep = representation();
2522 for (int i = 1; i <= 2; ++i) {
2523 rep = rep.generalize(observed_input_representation(i));
2524 }
2525 // If any of the actual input representations is more general than what we
2526 // have so far but not Tagged, use that representation instead.
2527 Representation left_rep = left()->representation();
2528 Representation right_rep = right()->representation();
2529 if (!left_rep.IsTagged()) rep = rep.generalize(left_rep);
2530 if (!right_rep.IsTagged()) rep = rep.generalize(right_rep);
2531
2532 return rep;
2533 }
2534
2535
2536 bool HBinaryOperation::IgnoreObservedOutputRepresentation(
2537 Representation current_rep) {
2538 return ((current_rep.IsInteger32() && CheckUsesForFlag(kTruncatingToInt32)) ||
2539 (current_rep.IsSmi() && CheckUsesForFlag(kTruncatingToSmi))) &&
2540 // Mul in Integer32 mode would be too precise.
2541 (!this->IsMul() || HMul::cast(this)->MulMinusOne());
2542 }
2543
2544
2545 Representation HBinaryOperation::RepresentationFromOutput() {
2546 Representation rep = representation();
2547 // Consider observed output representation, but ignore it if it's Double,
2548 // this instruction is not a division, and all its uses are truncating
2549 // to Integer32.
2550 if (observed_output_representation_.is_more_general_than(rep) &&
2551 !IgnoreObservedOutputRepresentation(rep)) {
2552 return observed_output_representation_;
2553 }
2554 return Representation::None();
2555 }
2556
2557
2558 void HBinaryOperation::AssumeRepresentation(Representation r) {
2559 set_observed_input_representation(1, r);
2560 set_observed_input_representation(2, r);
2561 HValue::AssumeRepresentation(r);
2562 }
2563
2564
2565 void HMathMinMax::InferRepresentation(HInferRepresentationPhase* h_infer) {
2566 DCHECK(CheckFlag(kFlexibleRepresentation));
2567 Representation new_rep = RepresentationFromInputs();
2568 UpdateRepresentation(new_rep, h_infer, "inputs");
2569 // Do not care about uses.
2570 }
2571
2572
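// Range inference for bitwise operations: XOR derives a power-of-two bound
// from the operands' most significant bits, while AND/OR combine the
// operands' masks.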
2573 Range* HBitwise::InferRange(Zone* zone) {
2574 if (op() == Token::BIT_XOR) {
2575 if (left()->HasRange() && right()->HasRange()) {
2576 // The maximum value has the high bit, and all bits below, set:
2577 // (1 << high) - 1.
2578 // If the range can be negative, the minimum int is a negative number with
2579 // the high bit, and all bits below, unset:
2580 // -(1 << high).
2581 // If it cannot be negative, conservatively choose 0 as minimum int.
2582 int64_t left_upper = left()->range()->upper();
2583 int64_t left_lower = left()->range()->lower();
2584 int64_t right_upper = right()->range()->upper();
2585 int64_t right_lower = right()->range()->lower();
2586
2587 if (left_upper < 0) left_upper = ~left_upper;
2588 if (left_lower < 0) left_lower = ~left_lower;
2589 if (right_upper < 0) right_upper = ~right_upper;
2590 if (right_lower < 0) right_lower = ~right_lower;
2591
2592 int high = MostSignificantBit(
2593 static_cast<uint32_t>(
2594 left_upper | left_lower | right_upper | right_lower));
2595
2596 int64_t limit = 1;
2597 limit <<= high;
2598 int32_t min = (left()->range()->CanBeNegative() ||
2599 right()->range()->CanBeNegative())
2600 ? static_cast<int32_t>(-limit) : 0;
2601 return new(zone) Range(min, static_cast<int32_t>(limit - 1));
2602 }
2603 Range* result = HValue::InferRange(zone);
2604 result->set_can_be_minus_zero(false);
2605 return result;
2606 }
2607 const int32_t kDefaultMask = static_cast<int32_t>(0xffffffff);
2608 int32_t left_mask = (left()->range() != NULL)
2609 ? left()->range()->Mask()
2610 : kDefaultMask;
2611 int32_t right_mask = (right()->range() != NULL)
2612 ? right()->range()->Mask()
2613 : kDefaultMask;
2614 int32_t result_mask = (op() == Token::BIT_AND)
2615 ? left_mask & right_mask
2616 : left_mask | right_mask;
2617 if (result_mask >= 0) return new(zone) Range(0, result_mask);
2618
2619 Range* result = HValue::InferRange(zone);
2620 result->set_can_be_minus_zero(false);
2621 return result;
2622 }
2623
2624
2625 Range* HSar::InferRange(Zone* zone) {
2626 if (right()->IsConstant()) {
2627 HConstant* c = HConstant::cast(right());
2628 if (c->HasInteger32Value()) {
2629 Range* result = (left()->range() != NULL)
2630 ? left()->range()->Copy(zone)
2631 : new(zone) Range();
2632 result->Sar(c->Integer32Value());
2633 return result;
2634 }
2635 }
2636 return HValue::InferRange(zone);
2637 }
2638
2639
2640 Range* HShr::InferRange(Zone* zone) {
2641 if (right()->IsConstant()) {
2642 HConstant* c = HConstant::cast(right());
2643 if (c->HasInteger32Value()) {
2644 int shift_count = c->Integer32Value() & 0x1f;
2645 if (left()->range()->CanBeNegative()) {
2646 // Only compute bounds if the result always fits into an int32.
2647 return (shift_count >= 1)
2648 ? new(zone) Range(0,
2649 static_cast<uint32_t>(0xffffffff) >> shift_count)
2650 : new(zone) Range();
2651 } else {
2652 // For positive inputs we can use the >> operator.
2653 Range* result = (left()->range() != NULL)
2654 ? left()->range()->Copy(zone)
2655 : new(zone) Range();
2656 result->Sar(c->Integer32Value());
2657 return result;
2658 }
2659 }
2660 }
2661 return HValue::InferRange(zone);
2662 }
2663
2664
2665 Range* HShl::InferRange(Zone* zone) {
2666 if (right()->IsConstant()) {
2667 HConstant* c = HConstant::cast(right());
2668 if (c->HasInteger32Value()) {
2669 Range* result = (left()->range() != NULL)
2670 ? left()->range()->Copy(zone)
2671 : new(zone) Range();
2672 result->Shl(c->Integer32Value());
2673 return result;
2674 }
2675 }
2676 return HValue::InferRange(zone);
2677 }
2678
2679
2680 Range* HLoadNamedField::InferRange(Zone* zone) {
2681 if (access().representation().IsInteger8()) {
2682 return new(zone) Range(kMinInt8, kMaxInt8);
2683 }
2684 if (access().representation().IsUInteger8()) {
2685 return new(zone) Range(kMinUInt8, kMaxUInt8);
2686 }
2687 if (access().representation().IsInteger16()) {
2688 return new(zone) Range(kMinInt16, kMaxInt16);
2689 }
2690 if (access().representation().IsUInteger16()) {
2691 return new(zone) Range(kMinUInt16, kMaxUInt16);
2692 }
2693 if (access().IsStringLength()) {
2694 return new(zone) Range(0, String::kMaxLength);
2695 }
2696 return HValue::InferRange(zone);
2697 }
2698
2699
2700 Range* HLoadKeyed::InferRange(Zone* zone) {
2701 switch (elements_kind()) {
2702 case INT8_ELEMENTS:
2703 return new(zone) Range(kMinInt8, kMaxInt8);
2704 case UINT8_ELEMENTS:
2705 case UINT8_CLAMPED_ELEMENTS:
2706 return new(zone) Range(kMinUInt8, kMaxUInt8);
2707 case INT16_ELEMENTS:
2708 return new(zone) Range(kMinInt16, kMaxInt16);
2709 case UINT16_ELEMENTS:
2710 return new(zone) Range(kMinUInt16, kMaxUInt16);
2711 default:
2712 return HValue::InferRange(zone);
2713 }
2714 }
2715
2716
2717 std::ostream& HCompareGeneric::PrintDataTo(std::ostream& os) const { // NOLINT
2718 os << Token::Name(token()) << " ";
2719 return HBinaryOperation::PrintDataTo(os);
2720 }
2721
2722
2723 std::ostream& HStringCompareAndBranch::PrintDataTo(
2724 std::ostream& os) const { // NOLINT
2725 os << Token::Name(token()) << " ";
2726 return HControlInstruction::PrintDataTo(os);
2727 }
2728
2729
2730 std::ostream& HCompareNumericAndBranch::PrintDataTo(
2731 std::ostream& os) const { // NOLINT
2732 os << Token::Name(token()) << " " << NameOf(left()) << " " << NameOf(right());
2733 return HControlInstruction::PrintDataTo(os);
2734 }
2735
2736
2737 std::ostream& HCompareObjectEqAndBranch::PrintDataTo(
2738 std::ostream& os) const { // NOLINT
2739 os << NameOf(left()) << " " << NameOf(right());
2740 return HControlInstruction::PrintDataTo(os);
2741 }
2742
2743
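// Tries to resolve the branch target statically, either from a precomputed
// successor index or by folding a comparison of two constants.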
2744 bool HCompareObjectEqAndBranch::KnownSuccessorBlock(HBasicBlock** block) {
2745 if (known_successor_index() != kNoKnownSuccessorIndex) {
2746 *block = SuccessorAt(known_successor_index());
2747 return true;
2748 }
2749 if (FLAG_fold_constants && left()->IsConstant() && right()->IsConstant()) {
2750 *block = HConstant::cast(left())->DataEquals(HConstant::cast(right()))
2751 ? FirstSuccessor() : SecondSuccessor();
2752 return true;
2753 }
2754 *block = NULL;
2755 return false;
2756 }
2757
2758
2759 bool HIsStringAndBranch::KnownSuccessorBlock(HBasicBlock** block) {
2760 if (known_successor_index() != kNoKnownSuccessorIndex) {
2761 *block = SuccessorAt(known_successor_index());
2762 return true;
2763 }
2764 if (FLAG_fold_constants && value()->IsConstant()) {
2765 *block = HConstant::cast(value())->HasStringValue()
2766 ? FirstSuccessor() : SecondSuccessor();
2767 return true;
2768 }
2769 if (value()->type().IsString()) {
2770 *block = FirstSuccessor();
2771 return true;
2772 }
2773 if (value()->type().IsSmi() ||
2774 value()->type().IsNull() ||
2775 value()->type().IsBoolean() ||
2776 value()->type().IsUndefined() ||
2777 value()->type().IsJSReceiver()) {
2778 *block = SecondSuccessor();
2779 return true;
2780 }
2781 *block = NULL;
2782 return false;
2783 }
2784
2785
2786 bool HIsUndetectableAndBranch::KnownSuccessorBlock(HBasicBlock** block) {
2787 if (FLAG_fold_constants && value()->IsConstant()) {
2788 *block = HConstant::cast(value())->IsUndetectable()
2789 ? FirstSuccessor() : SecondSuccessor();
2790 return true;
2791 }
2792 if (value()->type().IsNull() || value()->type().IsUndefined()) {
2793 *block = FirstSuccessor();
2794 return true;
2795 }
2796 if (value()->type().IsBoolean() ||
2797 value()->type().IsSmi() ||
2798 value()->type().IsString() ||
2799 value()->type().IsJSReceiver()) {
2800 *block = SecondSuccessor();
2801 return true;
2802 }
2803 *block = NULL;
2804 return false;
2805 }
2806
2807
2808 bool HHasInstanceTypeAndBranch::KnownSuccessorBlock(HBasicBlock** block) {
2809 if (FLAG_fold_constants && value()->IsConstant()) {
2810 InstanceType type = HConstant::cast(value())->GetInstanceType();
2811 *block = (from_ <= type) && (type <= to_)
2812 ? FirstSuccessor() : SecondSuccessor();
2813 return true;
2814 }
2815 *block = NULL;
2816 return false;
2817 }
2818
2819
2820 void HCompareHoleAndBranch::InferRepresentation(
2821 HInferRepresentationPhase* h_infer) {
2822 ChangeRepresentation(value()->representation());
2823 }
2824
2825
2826 bool HCompareNumericAndBranch::KnownSuccessorBlock(HBasicBlock** block) {
2827 if (left() == right() &&
2828 left()->representation().IsSmiOrInteger32()) {
2829 *block = (token() == Token::EQ ||
2830 token() == Token::EQ_STRICT ||
2831 token() == Token::LTE ||
2832 token() == Token::GTE)
2833 ? FirstSuccessor() : SecondSuccessor();
2834 return true;
2835 }
2836 *block = NULL;
2837 return false;
2838 }
2839
2840
2841 std::ostream& HGoto::PrintDataTo(std::ostream& os) const { // NOLINT
2842 return os << *SuccessorAt(0);
2843 }
2844
2845
2846 void HCompareNumericAndBranch::InferRepresentation(
2847 HInferRepresentationPhase* h_infer) {
2848 Representation left_rep = left()->representation();
2849 Representation right_rep = right()->representation();
2850 Representation observed_left = observed_input_representation(0);
2851 Representation observed_right = observed_input_representation(1);
2852
2853 Representation rep = Representation::None();
2854 rep = rep.generalize(observed_left);
2855 rep = rep.generalize(observed_right);
2856 if (rep.IsNone() || rep.IsSmiOrInteger32()) {
2857 if (!left_rep.IsTagged()) rep = rep.generalize(left_rep);
2858 if (!right_rep.IsTagged()) rep = rep.generalize(right_rep);
2859 } else {
2860 rep = Representation::Double();
2861 }
2862
2863 if (rep.IsDouble()) {
2864 // According to the ES5 spec (11.9.3, 11.8.5), Equality comparisons (==, ===
2865 // and !=) have special handling of undefined, e.g. undefined == undefined
2866 // is 'true'. Relational comparisons have a different semantic, first
2867 // calling ToPrimitive() on their arguments. The standard Crankshaft
2868 // tagged-to-double conversion to ensure the HCompareNumericAndBranch's
2869 // inputs are doubles caused 'undefined' to be converted to NaN. That's
2870 // compatible out-of-the box with ordered relational comparisons (<, >, <=,
2871 // >=). However, for equality comparisons (and for 'in' and 'instanceof'),
2872 // it is not consistent with the spec. For example, it would cause undefined
2873 // == undefined (should be true) to be evaluated as NaN == NaN
2874 // (false). Therefore, any comparisons other than ordered relational
2875 // comparisons must cause a deopt when one of their arguments is undefined.
2876 // See also v8:1434
2877 if (Token::IsOrderedRelationalCompareOp(token_)) {
2878 SetFlag(kTruncatingToNumber);
2879 }
2880 }
2881 ChangeRepresentation(rep);
2882 }
2883
2884
2885 std::ostream& HParameter::PrintDataTo(std::ostream& os) const { // NOLINT
2886 return os << index();
2887 }
2888
2889
2890 std::ostream& HLoadNamedField::PrintDataTo(std::ostream& os) const { // NOLINT
2891 os << NameOf(object()) << access_;
2892
2893 if (maps() != NULL) {
2894 os << " [" << *maps()->at(0).handle();
2895 for (int i = 1; i < maps()->size(); ++i) {
2896 os << "," << *maps()->at(i).handle();
2897 }
2898 os << "]";
2899 }
2900
2901 if (HasDependency()) os << " " << NameOf(dependency());
2902 return os;
2903 }
2904
2905
2906 std::ostream& HLoadKeyed::PrintDataTo(std::ostream& os) const { // NOLINT
2907 if (!is_fixed_typed_array()) {
2908 os << NameOf(elements());
2909 } else {
2910 DCHECK(elements_kind() >= FIRST_FIXED_TYPED_ARRAY_ELEMENTS_KIND &&
2911 elements_kind() <= LAST_FIXED_TYPED_ARRAY_ELEMENTS_KIND);
2912 os << NameOf(elements()) << "." << ElementsKindToString(elements_kind());
2913 }
2914
2915 os << "[" << NameOf(key());
2916 if (IsDehoisted()) os << " + " << base_offset();
2917 os << "]";
2918
2919 if (HasDependency()) os << " " << NameOf(dependency());
2920 if (RequiresHoleCheck()) os << " check_hole";
2921 return os;
2922 }
2923
2924
2925 bool HLoadKeyed::TryIncreaseBaseOffset(uint32_t increase_by_value) {
2926 // The base offset is usually simply the size of the array header, except
2927 // that dehoisting can add an extra offset due to array index key
2928 // manipulation, in which case it becomes (array header size +
2929 // constant-offset-from-key * kPointerSize).
2930 uint32_t base_offset = BaseOffsetField::decode(bit_field_);
2931 v8::base::internal::CheckedNumeric<uint32_t> addition_result = base_offset;
2932 addition_result += increase_by_value;
2933 if (!addition_result.IsValid()) return false;
2934 base_offset = addition_result.ValueOrDie();
2935 if (!BaseOffsetField::is_valid(base_offset)) return false;
2936 bit_field_ = BaseOffsetField::update(bit_field_, base_offset);
2937 return true;
2938 }
2939
2940
2941 bool HLoadKeyed::UsesMustHandleHole() const {
2942 if (IsFastPackedElementsKind(elements_kind())) {
2943 return false;
2944 }
2945
2946 if (IsFixedTypedArrayElementsKind(elements_kind())) {
2947 return false;
2948 }
2949
2950 if (hole_mode() == ALLOW_RETURN_HOLE) {
2951 if (IsFastDoubleElementsKind(elements_kind())) {
2952 return AllUsesCanTreatHoleAsNaN();
2953 }
2954 return true;
2955 }
2956
2957 if (IsFastDoubleElementsKind(elements_kind())) {
2958 return false;
2959 }
2960
2961 // Holes are only returned as tagged values.
2962 if (!representation().IsTagged()) {
2963 return false;
2964 }
2965
2966 for (HUseIterator it(uses()); !it.Done(); it.Advance()) {
2967 HValue* use = it.value();
2968 if (!use->IsChange()) return false;
2969 }
2970
2971 return true;
2972 }
2973
2974
2975 bool HLoadKeyed::AllUsesCanTreatHoleAsNaN() const {
2976 return IsFastDoubleElementsKind(elements_kind()) &&
2977 CheckUsesForFlag(HValue::kTruncatingToNumber);
2978 }
2979
2980
2981 bool HLoadKeyed::RequiresHoleCheck() const {
2982 if (IsFastPackedElementsKind(elements_kind())) {
2983 return false;
2984 }
2985
2986 if (IsFixedTypedArrayElementsKind(elements_kind())) {
2987 return false;
2988 }
2989
2990 if (hole_mode() == CONVERT_HOLE_TO_UNDEFINED) {
2991 return false;
2992 }
2993
2994 return !UsesMustHandleHole();
2995 }
2996
2997 HValue* HCallWithDescriptor::Canonicalize() {
2998 if (kind() != Code::KEYED_LOAD_IC) return this;
2999
3000 // Recognize generic keyed loads that use a property name generated by a
3001 // for-in statement as their key and rewrite them into a fast property
3002 // load by index.
3003 typedef LoadWithVectorDescriptor Descriptor;
3004 HValue* key = parameter(Descriptor::kName);
3005 if (key->IsLoadKeyed()) {
3006 HLoadKeyed* key_load = HLoadKeyed::cast(key);
3007 if (key_load->elements()->IsForInCacheArray()) {
3008 HForInCacheArray* names_cache =
3009 HForInCacheArray::cast(key_load->elements());
3010
3011 HValue* object = parameter(Descriptor::kReceiver);
3012 if (names_cache->enumerable() == object) {
3013 HForInCacheArray* index_cache =
3014 names_cache->index_cache();
3015 HCheckMapValue* map_check = HCheckMapValue::New(
3016 block()->graph()->isolate(), block()->graph()->zone(),
3017 block()->graph()->GetInvalidContext(), object, names_cache->map());
3018 HInstruction* index = HLoadKeyed::New(
3019 block()->graph()->isolate(), block()->graph()->zone(),
3020 block()->graph()->GetInvalidContext(), index_cache, key_load->key(),
3021 key_load->key(), nullptr, key_load->elements_kind());
3022 map_check->InsertBefore(this);
3023 index->InsertBefore(this);
3024 return Prepend(new (block()->zone()) HLoadFieldByIndex(object, index));
3025 }
3026 }
3027 }
3028 return this;
3029 }
3030
3031 std::ostream& HStoreNamedField::PrintDataTo(std::ostream& os) const { // NOLINT
3032 os << NameOf(object()) << access_ << " = " << NameOf(value());
3033 if (NeedsWriteBarrier()) os << " (write-barrier)";
3034 if (has_transition()) os << " (transition map " << *transition_map() << ")";
3035 return os;
3036 }
3037
3038
3039 std::ostream& HStoreKeyed::PrintDataTo(std::ostream& os) const { // NOLINT
3040 if (!is_fixed_typed_array()) {
3041 os << NameOf(elements());
3042 } else {
3043 DCHECK(elements_kind() >= FIRST_FIXED_TYPED_ARRAY_ELEMENTS_KIND &&
3044 elements_kind() <= LAST_FIXED_TYPED_ARRAY_ELEMENTS_KIND);
3045 os << NameOf(elements()) << "." << ElementsKindToString(elements_kind());
3046 }
3047
3048 os << "[" << NameOf(key());
3049 if (IsDehoisted()) os << " + " << base_offset();
3050 return os << "] = " << NameOf(value());
3051 }
3052
3053
3054 std::ostream& HTransitionElementsKind::PrintDataTo(
3055 std::ostream& os) const { // NOLINT
3056 os << NameOf(object());
3057 ElementsKind from_kind = original_map().handle()->elements_kind();
3058 ElementsKind to_kind = transitioned_map().handle()->elements_kind();
3059 os << " " << *original_map().handle() << " ["
3060 << ElementsAccessor::ForKind(from_kind)->name() << "] -> "
3061 << *transitioned_map().handle() << " ["
3062 << ElementsAccessor::ForKind(to_kind)->name() << "]";
3063 if (IsSimpleMapChangeTransition(from_kind, to_kind)) os << " (simple)";
3064 return os;
3065 }
3066
3067
3068 std::ostream& HInnerAllocatedObject::PrintDataTo(
3069 std::ostream& os) const { // NOLINT
3070 os << NameOf(base_object()) << " offset ";
3071 return offset()->PrintTo(os);
3072 }
3073
3074
3075 std::ostream& HLoadContextSlot::PrintDataTo(std::ostream& os) const { // NOLINT
3076 return os << NameOf(value()) << "[" << slot_index() << "]";
3077 }
3078
3079
3080 std::ostream& HStoreContextSlot::PrintDataTo(
3081 std::ostream& os) const { // NOLINT
3082 return os << NameOf(context()) << "[" << slot_index()
3083 << "] = " << NameOf(value());
3084 }
3085
3086
3087 // Implementation of type inference and type conversions. Calculates
3088 // the inferred type of this instruction based on the input operands.
3089
3090 HType HValue::CalculateInferredType() {
3091 return type_;
3092 }
3093
3094
3095 HType HPhi::CalculateInferredType() {
3096 if (OperandCount() == 0) return HType::Tagged();
3097 HType result = OperandAt(0)->type();
3098 for (int i = 1; i < OperandCount(); ++i) {
3099 HType current = OperandAt(i)->type();
3100 result = result.Combine(current);
3101 }
3102 return result;
3103 }
3104
3105
3106 HType HChange::CalculateInferredType() {
3107 if (from().IsDouble() && to().IsTagged()) return HType::HeapNumber();
3108 return type();
3109 }
3110
3111
3112 Representation HUnaryMathOperation::RepresentationFromInputs() {
3113 if (SupportsFlexibleFloorAndRound() &&
3114 (op_ == kMathFloor || op_ == kMathRound)) {
3115 // Floor and Round always take a double input. The integral result can be
3116 // used as an integer or a double. Infer the representation from the uses.
3117 return Representation::None();
3118 }
3119 Representation rep = representation();
3120 // If the actual input representation is more general than what we have
3121 // so far but not Tagged, use that representation instead.
3122 Representation input_rep = value()->representation();
3123 if (!input_rep.IsTagged()) {
3124 rep = rep.generalize(input_rep);
3125 }
3126 return rep;
3127 }
3128
3129
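// Attempts allocation folding: grows a dominating allocation by this
// allocation's (constant) size and marks this instruction as folded, so both
// objects are carved out of a single allocation.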
3130 bool HAllocate::HandleSideEffectDominator(GVNFlag side_effect,
3131 HValue* dominator) {
3132 DCHECK(side_effect == kNewSpacePromotion);
3133 DCHECK(!IsAllocationFolded());
3134 Zone* zone = block()->zone();
3135 Isolate* isolate = block()->isolate();
3136 if (!FLAG_use_allocation_folding) return false;
3137
3138 // Try to fold allocations together with their dominating allocations.
3139 if (!dominator->IsAllocate()) {
3140 if (FLAG_trace_allocation_folding) {
3141 PrintF("#%d (%s) cannot fold into #%d (%s)\n",
3142 id(), Mnemonic(), dominator->id(), dominator->Mnemonic());
3143 }
3144 return false;
3145 }
3146
3147 // Check whether we are folding within the same block for local folding.
3148 if (FLAG_use_local_allocation_folding && dominator->block() != block()) {
3149 if (FLAG_trace_allocation_folding) {
3150 PrintF("#%d (%s) cannot fold into #%d (%s), crosses basic blocks\n",
3151 id(), Mnemonic(), dominator->id(), dominator->Mnemonic());
3152 }
3153 return false;
3154 }
3155
3156 HAllocate* dominator_allocate = HAllocate::cast(dominator);
3157 HValue* dominator_size = dominator_allocate->size();
3158 HValue* current_size = size();
3159
3160 // TODO(hpayer): Add support for non-constant allocation in dominator.
3161 if (!current_size->IsInteger32Constant() ||
3162 !dominator_size->IsInteger32Constant()) {
3163 if (FLAG_trace_allocation_folding) {
3164 PrintF("#%d (%s) cannot fold into #%d (%s), "
3165 "dynamic allocation size in dominator\n",
3166 id(), Mnemonic(), dominator->id(), dominator->Mnemonic());
3167 }
3168 return false;
3169 }
3170
3171 if (IsAllocationFoldingDominator()) {
3172 if (FLAG_trace_allocation_folding) {
3173 PrintF("#%d (%s) cannot fold into #%d (%s), already dominator\n", id(),
3174 Mnemonic(), dominator->id(), dominator->Mnemonic());
3175 }
3176 return false;
3177 }
3178
3179 if (!IsFoldable(dominator_allocate)) {
3180 if (FLAG_trace_allocation_folding) {
3181 PrintF("#%d (%s) cannot fold into #%d (%s), different spaces\n", id(),
3182 Mnemonic(), dominator->id(), dominator->Mnemonic());
3183 }
3184 return false;
3185 }
3186
3187 DCHECK(
3188 (IsNewSpaceAllocation() && dominator_allocate->IsNewSpaceAllocation()) ||
3189 (IsOldSpaceAllocation() && dominator_allocate->IsOldSpaceAllocation()));
3190
3191 // First update the size of the dominator allocate instruction.
3192 dominator_size = dominator_allocate->size();
3193 int32_t original_object_size =
3194 HConstant::cast(dominator_size)->GetInteger32Constant();
3195 int32_t dominator_size_constant = original_object_size;
3196
3197 if (MustAllocateDoubleAligned()) {
3198 if ((dominator_size_constant & kDoubleAlignmentMask) != 0) {
3199 dominator_size_constant += kDoubleSize / 2;
3200 }
3201 }
3202
3203 int32_t current_size_max_value = size()->GetInteger32Constant();
3204 int32_t new_dominator_size = dominator_size_constant + current_size_max_value;
3205
3206 // Since we clear the first word after folded memory, we cannot use the
3207 // whole kMaxRegularHeapObjectSize memory.
3208 if (new_dominator_size > kMaxRegularHeapObjectSize - kPointerSize) {
3209 if (FLAG_trace_allocation_folding) {
3210 PrintF("#%d (%s) cannot fold into #%d (%s) due to size: %d\n",
3211 id(), Mnemonic(), dominator_allocate->id(),
3212 dominator_allocate->Mnemonic(), new_dominator_size);
3213 }
3214 return false;
3215 }
3216
3217 HInstruction* new_dominator_size_value = HConstant::CreateAndInsertBefore(
3218 isolate, zone, context(), new_dominator_size, Representation::None(),
3219 dominator_allocate);
3220
3221 dominator_allocate->UpdateSize(new_dominator_size_value);
3222
3223 if (MustAllocateDoubleAligned()) {
3224 if (!dominator_allocate->MustAllocateDoubleAligned()) {
3225 dominator_allocate->MakeDoubleAligned();
3226 }
3227 }
3228
3229 if (!dominator_allocate->IsAllocationFoldingDominator()) {
3230 HAllocate* first_alloc =
3231 HAllocate::New(isolate, zone, dominator_allocate->context(),
3232 dominator_size, dominator_allocate->type(),
3233 IsNewSpaceAllocation() ? NOT_TENURED : TENURED,
3234 JS_OBJECT_TYPE, block()->graph()->GetConstant0());
3235 first_alloc->InsertAfter(dominator_allocate);
3236 dominator_allocate->ReplaceAllUsesWith(first_alloc);
3237 dominator_allocate->MakeAllocationFoldingDominator();
3238 first_alloc->MakeFoldedAllocation(dominator_allocate);
3239 if (FLAG_trace_allocation_folding) {
3240 PrintF("#%d (%s) inserted for dominator #%d (%s)\n", first_alloc->id(),
3241 first_alloc->Mnemonic(), dominator_allocate->id(),
3242 dominator_allocate->Mnemonic());
3243 }
3244 }
3245
3246 MakeFoldedAllocation(dominator_allocate);
3247
3248 if (FLAG_trace_allocation_folding) {
3249 PrintF("#%d (%s) folded into #%d (%s), new dominator size: %d\n", id(),
3250 Mnemonic(), dominator_allocate->id(), dominator_allocate->Mnemonic(),
3251 new_dominator_size);
3252 }
3253 return true;
3254 }
3255
3256
3257 std::ostream& HAllocate::PrintDataTo(std::ostream& os) const { // NOLINT
3258 os << NameOf(size()) << " (";
3259 if (IsNewSpaceAllocation()) os << "N";
3260 if (IsOldSpaceAllocation()) os << "P";
3261 if (MustAllocateDoubleAligned()) os << "A";
3262 if (MustPrefillWithFiller()) os << "F";
3263 if (IsAllocationFoldingDominator()) os << "d";
3264 if (IsAllocationFolded()) os << "f";
3265 return os << ")";
3266 }
3267
3268
3269 bool HStoreKeyed::TryIncreaseBaseOffset(uint32_t increase_by_value) {
3270 // The base offset is usually simply the size of the array header, except
3271 // that dehoisting can add an extra offset due to array index key
3272 // manipulation, in which case it becomes (array header size +
3273 // constant-offset-from-key * kPointerSize).
3274 v8::base::internal::CheckedNumeric<uint32_t> addition_result = base_offset_;
3275 addition_result += increase_by_value;
3276 if (!addition_result.IsValid()) return false;
3277 base_offset_ = addition_result.ValueOrDie();
3278 return true;
3279 }
3280
3281
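// Determines whether the stored value may be a non-canonical double, e.g.
// when it comes from a float/double-typed load or a tagged-to-double change.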
3282 bool HStoreKeyed::NeedsCanonicalization() {
3283 switch (value()->opcode()) {
3284 case kLoadKeyed: {
3285 ElementsKind load_kind = HLoadKeyed::cast(value())->elements_kind();
3286 return IsFixedFloatElementsKind(load_kind);
3287 }
3288 case kChange: {
3289 Representation from = HChange::cast(value())->from();
3290 return from.IsTagged() || from.IsHeapObject();
3291 }
3292 case kConstant:
3293 // Double constants are canonicalized upon construction.
3294 return false;
3295 default:
3296 return !value()->IsBinaryOperation();
3297 }
3298 }
3299
3300
3301 #define H_CONSTANT_INT(val) \
3302 HConstant::New(isolate, zone, context, static_cast<int32_t>(val))
3303 #define H_CONSTANT_DOUBLE(val) \
3304 HConstant::New(isolate, zone, context, static_cast<double>(val))
3305
3306 #define DEFINE_NEW_H_SIMPLE_ARITHMETIC_INSTR(HInstr, op) \
3307 HInstruction* HInstr::New(Isolate* isolate, Zone* zone, HValue* context, \
3308 HValue* left, HValue* right) { \
3309 if (FLAG_fold_constants && left->IsConstant() && right->IsConstant()) { \
3310 HConstant* c_left = HConstant::cast(left); \
3311 HConstant* c_right = HConstant::cast(right); \
3312 if ((c_left->HasNumberValue() && c_right->HasNumberValue())) { \
3313 double double_res = c_left->DoubleValue() op c_right->DoubleValue(); \
3314 if (IsInt32Double(double_res)) { \
3315 return H_CONSTANT_INT(double_res); \
3316 } \
3317 return H_CONSTANT_DOUBLE(double_res); \
3318 } \
3319 } \
3320 return new (zone) HInstr(context, left, right); \
3321 }
3322
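// Constant-fold +, * and - on number constants; a result that is exactly
// representable as an int32 is emitted as an Integer32 constant, otherwise as
// a double constant.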
3323 DEFINE_NEW_H_SIMPLE_ARITHMETIC_INSTR(HAdd, +)
3324 DEFINE_NEW_H_SIMPLE_ARITHMETIC_INSTR(HMul, *)
3325 DEFINE_NEW_H_SIMPLE_ARITHMETIC_INSTR(HSub, -)
3326
3327 #undef DEFINE_NEW_H_SIMPLE_ARITHMETIC_INSTR
3328
3329
3330 HInstruction* HStringAdd::New(Isolate* isolate, Zone* zone, HValue* context,
3331 HValue* left, HValue* right,
3332 PretenureFlag pretenure_flag,
3333 StringAddFlags flags,
3334 Handle<AllocationSite> allocation_site) {
3335 if (FLAG_fold_constants && left->IsConstant() && right->IsConstant()) {
3336 HConstant* c_right = HConstant::cast(right);
3337 HConstant* c_left = HConstant::cast(left);
3338 if (c_left->HasStringValue() && c_right->HasStringValue()) {
3339 Handle<String> left_string = c_left->StringValue();
3340 Handle<String> right_string = c_right->StringValue();
3341 // Prevent a possible exception due to an invalid string length.
3342 if (left_string->length() + right_string->length() < String::kMaxLength) {
3343 MaybeHandle<String> concat = isolate->factory()->NewConsString(
3344 c_left->StringValue(), c_right->StringValue());
3345 return HConstant::New(isolate, zone, context, concat.ToHandleChecked());
3346 }
3347 }
3348 }
3349 return new (zone)
3350 HStringAdd(context, left, right, pretenure_flag, flags, allocation_site);
3351 }
3352
3353
3354 std::ostream& HStringAdd::PrintDataTo(std::ostream& os) const { // NOLINT
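// Legend: _CheckBoth/_CheckLeft/_CheckRight reflect the string-check flags;
// (N) = not tenured, (D) = tenured.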
3355 if ((flags() & STRING_ADD_CHECK_BOTH) == STRING_ADD_CHECK_BOTH) {
3356 os << "_CheckBoth";
3357 } else if ((flags() & STRING_ADD_CHECK_BOTH) == STRING_ADD_CHECK_LEFT) {
3358 os << "_CheckLeft";
3359 } else if ((flags() & STRING_ADD_CHECK_BOTH) == STRING_ADD_CHECK_RIGHT) {
3360 os << "_CheckRight";
3361 }
3362 HBinaryOperation::PrintDataTo(os);
3363 os << " (";
3364 if (pretenure_flag() == NOT_TENURED)
3365 os << "N";
3366 else if (pretenure_flag() == TENURED)
3367 os << "D";
3368 return os << ")";
3369 }
3370
3371
3372 HInstruction* HStringCharFromCode::New(Isolate* isolate, Zone* zone,
3373 HValue* context, HValue* char_code) {
3374 if (FLAG_fold_constants && char_code->IsConstant()) {
3375 HConstant* c_code = HConstant::cast(char_code);
3376 if (c_code->HasNumberValue()) {
3377 if (std::isfinite(c_code->DoubleValue())) {
3378 uint32_t code = c_code->NumberValueAsInteger32() & 0xffff;
3379 return HConstant::New(
3380 isolate, zone, context,
3381 isolate->factory()->LookupSingleCharacterStringFromCode(code));
3382 }
3383 return HConstant::New(isolate, zone, context,
3384 isolate->factory()->empty_string());
3385 }
3386 }
3387 return new(zone) HStringCharFromCode(context, char_code);
3388 }
3389
3390
3391 HInstruction* HUnaryMathOperation::New(Isolate* isolate, Zone* zone,
3392 HValue* context, HValue* value,
3393 BuiltinFunctionId op) {
3394 do {
3395 if (!FLAG_fold_constants) break;
3396 if (!value->IsConstant()) break;
3397 HConstant* constant = HConstant::cast(value);
3398 if (!constant->HasNumberValue()) break;
3399 double d = constant->DoubleValue();
3400 if (std::isnan(d)) { // NaN poisons everything.
3401 return H_CONSTANT_DOUBLE(std::numeric_limits<double>::quiet_NaN());
3402 }
3403 if (std::isinf(d)) { // +Infinity and -Infinity.
3404 switch (op) {
3405 case kMathCos:
3406 case kMathSin:
3407 return H_CONSTANT_DOUBLE(std::numeric_limits<double>::quiet_NaN());
3408 case kMathExp:
3409 return H_CONSTANT_DOUBLE((d > 0.0) ? d : 0.0);
3410 case kMathLog:
3411 case kMathSqrt:
3412 return H_CONSTANT_DOUBLE(
3413 (d > 0.0) ? d : std::numeric_limits<double>::quiet_NaN());
3414 case kMathPowHalf:
3415 case kMathAbs:
3416 return H_CONSTANT_DOUBLE((d > 0.0) ? d : -d);
3417 case kMathRound:
3418 case kMathFround:
3419 case kMathFloor:
3420 return H_CONSTANT_DOUBLE(d);
3421 case kMathClz32:
3422 return H_CONSTANT_INT(32);
3423 default:
3424 UNREACHABLE();
3425 break;
3426 }
3427 }
3428 switch (op) {
3429 case kMathCos:
3430 return H_CONSTANT_DOUBLE(base::ieee754::cos(d));
3431 case kMathExp:
3432 return H_CONSTANT_DOUBLE(base::ieee754::exp(d));
3433 case kMathLog:
3434 return H_CONSTANT_DOUBLE(base::ieee754::log(d));
3435 case kMathSin:
3436 return H_CONSTANT_DOUBLE(base::ieee754::sin(d));
3437 case kMathSqrt:
3438 lazily_initialize_fast_sqrt(isolate);
3439 return H_CONSTANT_DOUBLE(fast_sqrt(d, isolate));
3440 case kMathPowHalf:
3441 return H_CONSTANT_DOUBLE(power_double_double(d, 0.5));
3442 case kMathAbs:
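// Adding 0.0 normalizes -0 to +0 (note that -0.0 >= 0.0 is true).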
3443 return H_CONSTANT_DOUBLE((d >= 0.0) ? d + 0.0 : -d);
3444 case kMathRound:
3445 // -0.5 .. -0.0 round to -0.0.
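// e.g. Math.round(-0.3) evaluates to -0.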
3446 if ((d >= -0.5 && Double(d).Sign() < 0)) return H_CONSTANT_DOUBLE(-0.0);
3447 // Doubles are represented as Significand * 2 ^ Exponent. If the
3448 // Exponent is not negative, the double value is already an integer.
3449 if (Double(d).Exponent() >= 0) return H_CONSTANT_DOUBLE(d);
3450 return H_CONSTANT_DOUBLE(Floor(d + 0.5));
3451 case kMathFround:
3452 return H_CONSTANT_DOUBLE(static_cast<double>(static_cast<float>(d)));
3453 case kMathFloor:
3454 return H_CONSTANT_DOUBLE(Floor(d));
3455 case kMathClz32: {
3456 uint32_t i = DoubleToUint32(d);
3457 return H_CONSTANT_INT(base::bits::CountLeadingZeros32(i));
3458 }
3459 default:
3460 UNREACHABLE();
3461 break;
3462 }
3463 } while (false);
3464 return new(zone) HUnaryMathOperation(context, value, op);
3465 }
3466
3467
3468 Representation HUnaryMathOperation::RepresentationFromUses() {
3469 if (op_ != kMathFloor && op_ != kMathRound) {
3470 return HValue::RepresentationFromUses();
3471 }
3472
3473 // The instruction can have an int32 or double output. Prefer a double
3474 // representation if there are double uses.
3475 bool use_double = false;
3476
3477 for (HUseIterator it(uses()); !it.Done(); it.Advance()) {
3478 HValue* use = it.value();
3479 int use_index = it.index();
3480 Representation rep_observed = use->observed_input_representation(use_index);
3481 Representation rep_required = use->RequiredInputRepresentation(use_index);
3482 use_double |= (rep_observed.IsDouble() || rep_required.IsDouble());
3483 if (use_double && !FLAG_trace_representation) {
3484 // Having seen one double is enough.
3485 break;
3486 }
3487 if (FLAG_trace_representation) {
3488 if (!rep_required.IsDouble() || rep_observed.IsDouble()) {
3489 PrintF("#%d %s is used by #%d %s as %s%s\n",
3490 id(), Mnemonic(), use->id(),
3491 use->Mnemonic(), rep_observed.Mnemonic(),
3492 (use->CheckFlag(kTruncatingToInt32) ? "-trunc" : ""));
3493 } else {
3494 PrintF("#%d %s is required by #%d %s as %s%s\n",
3495 id(), Mnemonic(), use->id(),
3496 use->Mnemonic(), rep_required.Mnemonic(),
3497 (use->CheckFlag(kTruncatingToInt32) ? "-trunc" : ""));
3498 }
3499 }
3500 }
3501 return use_double ? Representation::Double() : Representation::Integer32();
3502 }
3503
3504
3505 HInstruction* HPower::New(Isolate* isolate, Zone* zone, HValue* context,
3506 HValue* left, HValue* right) {
3507 if (FLAG_fold_constants && left->IsConstant() && right->IsConstant()) {
3508 HConstant* c_left = HConstant::cast(left);
3509 HConstant* c_right = HConstant::cast(right);
3510 if (c_left->HasNumberValue() && c_right->HasNumberValue()) {
3511 double result =
3512 power_helper(isolate, c_left->DoubleValue(), c_right->DoubleValue());
3513 return H_CONSTANT_DOUBLE(std::isnan(result)
3514 ? std::numeric_limits<double>::quiet_NaN()
3515 : result);
3516 }
3517 }
3518 return new(zone) HPower(left, right);
3519 }
3520
3521
3522 HInstruction* HMathMinMax::New(Isolate* isolate, Zone* zone, HValue* context,
3523 HValue* left, HValue* right, Operation op) {
3524 if (FLAG_fold_constants && left->IsConstant() && right->IsConstant()) {
3525 HConstant* c_left = HConstant::cast(left);
3526 HConstant* c_right = HConstant::cast(right);
3527 if (c_left->HasNumberValue() && c_right->HasNumberValue()) {
3528 double d_left = c_left->DoubleValue();
3529 double d_right = c_right->DoubleValue();
3530 if (op == kMathMin) {
3531 if (d_left > d_right) return H_CONSTANT_DOUBLE(d_right);
3532 if (d_left < d_right) return H_CONSTANT_DOUBLE(d_left);
3533 if (d_left == d_right) {
3534 // Handle +0 and -0.
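// e.g. Math.min(+0, -0) is -0, so prefer the operand with the sign bit set.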
3535 return H_CONSTANT_DOUBLE((Double(d_left).Sign() == -1) ? d_left
3536 : d_right);
3537 }
3538 } else {
3539 if (d_left < d_right) return H_CONSTANT_DOUBLE(d_right);
3540 if (d_left > d_right) return H_CONSTANT_DOUBLE(d_left);
3541 if (d_left == d_right) {
3542 // Handle +0 and -0.
3543 return H_CONSTANT_DOUBLE((Double(d_left).Sign() == -1) ? d_right
3544 : d_left);
3545 }
3546 }
3547 // All comparisons failed, must be NaN.
3548 return H_CONSTANT_DOUBLE(std::numeric_limits<double>::quiet_NaN());
3549 }
3550 }
3551 return new(zone) HMathMinMax(context, left, right, op);
3552 }
3553
3554 HInstruction* HMod::New(Isolate* isolate, Zone* zone, HValue* context,
3555 HValue* left, HValue* right) {
3556 if (FLAG_fold_constants && left->IsConstant() && right->IsConstant()) {
3557 HConstant* c_left = HConstant::cast(left);
3558 HConstant* c_right = HConstant::cast(right);
3559 if (c_left->HasInteger32Value() && c_right->HasInteger32Value()) {
3560 int32_t dividend = c_left->Integer32Value();
3561 int32_t divisor = c_right->Integer32Value();
3562 if (dividend == kMinInt && divisor == -1) {
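// kMinInt % -1 would overflow in C++; the JavaScript result is -0.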
3563 return H_CONSTANT_DOUBLE(-0.0);
3564 }
3565 if (divisor != 0) {
3566 int32_t res = dividend % divisor;
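// The result takes the sign of the dividend, so a zero result from a
// negative dividend must be -0.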
3567 if ((res == 0) && (dividend < 0)) {
3568 return H_CONSTANT_DOUBLE(-0.0);
3569 }
3570 return H_CONSTANT_INT(res);
3571 }
3572 }
3573 }
3574 return new (zone) HMod(context, left, right);
3575 }
3576
3577 HInstruction* HDiv::New(Isolate* isolate, Zone* zone, HValue* context,
3578 HValue* left, HValue* right) {
3579 // If left and right are constant values, try to return a constant value.
3580 if (FLAG_fold_constants && left->IsConstant() && right->IsConstant()) {
3581 HConstant* c_left = HConstant::cast(left);
3582 HConstant* c_right = HConstant::cast(right);
3583 if ((c_left->HasNumberValue() && c_right->HasNumberValue())) {
3584 if (std::isnan(c_left->DoubleValue()) ||
3585 std::isnan(c_right->DoubleValue())) {
3586 return H_CONSTANT_DOUBLE(std::numeric_limits<double>::quiet_NaN());
3587 } else if (c_right->DoubleValue() != 0) {
3588 double double_res = c_left->DoubleValue() / c_right->DoubleValue();
3589 if (IsInt32Double(double_res)) {
3590 return H_CONSTANT_INT(double_res);
3591 }
3592 return H_CONSTANT_DOUBLE(double_res);
3593 } else if (c_left->DoubleValue() != 0) {
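// Division of a non-zero value by zero yields a signed infinity,
// e.g. 1 / -0 == -Infinity.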
3594 int sign = Double(c_left->DoubleValue()).Sign() *
3595 Double(c_right->DoubleValue()).Sign(); // Right could be -0.
3596 return H_CONSTANT_DOUBLE(sign * V8_INFINITY);
3597 } else {
3598 return H_CONSTANT_DOUBLE(std::numeric_limits<double>::quiet_NaN());
3599 }
3600 }
3601 }
3602 return new (zone) HDiv(context, left, right);
3603 }
3604
3605 HInstruction* HBitwise::New(Isolate* isolate, Zone* zone, HValue* context,
3606 Token::Value op, HValue* left, HValue* right) {
3607 if (FLAG_fold_constants && left->IsConstant() && right->IsConstant()) {
3608 HConstant* c_left = HConstant::cast(left);
3609 HConstant* c_right = HConstant::cast(right);
3610 if ((c_left->HasNumberValue() && c_right->HasNumberValue())) {
3611 int32_t result;
3612 int32_t v_left = c_left->NumberValueAsInteger32();
3613 int32_t v_right = c_right->NumberValueAsInteger32();
3614 switch (op) {
3615 case Token::BIT_XOR:
3616 result = v_left ^ v_right;
3617 break;
3618 case Token::BIT_AND:
3619 result = v_left & v_right;
3620 break;
3621 case Token::BIT_OR:
3622 result = v_left | v_right;
3623 break;
3624 default:
3625 result = 0; // Please the compiler.
3626 UNREACHABLE();
3627 }
3628 return H_CONSTANT_INT(result);
3629 }
3630 }
3631 return new (zone) HBitwise(context, op, left, right);
3632 }
3633
3634 #define DEFINE_NEW_H_BITWISE_INSTR(HInstr, result) \
3635 HInstruction* HInstr::New(Isolate* isolate, Zone* zone, HValue* context, \
3636 HValue* left, HValue* right) { \
3637 if (FLAG_fold_constants && left->IsConstant() && right->IsConstant()) { \
3638 HConstant* c_left = HConstant::cast(left); \
3639 HConstant* c_right = HConstant::cast(right); \
3640 if ((c_left->HasNumberValue() && c_right->HasNumberValue())) { \
3641 return H_CONSTANT_INT(result); \
3642 } \
3643 } \
3644 return new (zone) HInstr(context, left, right); \
3645 }
3646
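// JavaScript shift counts are taken modulo 32, hence the & 0x1f on the right
// operand.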
3647 DEFINE_NEW_H_BITWISE_INSTR(HSar,
3648 c_left->NumberValueAsInteger32() >> (c_right->NumberValueAsInteger32() & 0x1f))
3649 DEFINE_NEW_H_BITWISE_INSTR(HShl,
3650 c_left->NumberValueAsInteger32() << (c_right->NumberValueAsInteger32() & 0x1f))
3651
3652 #undef DEFINE_NEW_H_BITWISE_INSTR
3653
3654 HInstruction* HShr::New(Isolate* isolate, Zone* zone, HValue* context,
3655 HValue* left, HValue* right) {
3656 if (FLAG_fold_constants && left->IsConstant() && right->IsConstant()) {
3657 HConstant* c_left = HConstant::cast(left);
3658 HConstant* c_right = HConstant::cast(right);
3659 if ((c_left->HasNumberValue() && c_right->HasNumberValue())) {
3660 int32_t left_val = c_left->NumberValueAsInteger32();
3661 int32_t right_val = c_right->NumberValueAsInteger32() & 0x1f;
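// A zero-bit unsigned shift of a negative value yields a result above
// kMaxInt, which cannot be represented as an Integer32 constant, so emit it
// as a double.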
3662 if ((right_val == 0) && (left_val < 0)) {
3663 return H_CONSTANT_DOUBLE(static_cast<uint32_t>(left_val));
3664 }
3665 return H_CONSTANT_INT(static_cast<uint32_t>(left_val) >> right_val);
3666 }
3667 }
3668 return new (zone) HShr(context, left, right);
3669 }
3670
3671
3672 HInstruction* HSeqStringGetChar::New(Isolate* isolate, Zone* zone,
3673 HValue* context, String::Encoding encoding,
3674 HValue* string, HValue* index) {
3675 if (FLAG_fold_constants && string->IsConstant() && index->IsConstant()) {
3676 HConstant* c_string = HConstant::cast(string);
3677 HConstant* c_index = HConstant::cast(index);
3678 if (c_string->HasStringValue() && c_index->HasInteger32Value()) {
3679 Handle<String> s = c_string->StringValue();
3680 int32_t i = c_index->Integer32Value();
3681 DCHECK_LE(0, i);
3682 DCHECK_LT(i, s->length());
3683 return H_CONSTANT_INT(s->Get(i));
3684 }
3685 }
3686 return new(zone) HSeqStringGetChar(encoding, string, index);
3687 }
3688
3689
3690 #undef H_CONSTANT_INT
3691 #undef H_CONSTANT_DOUBLE
3692
3693
3694 std::ostream& HBitwise::PrintDataTo(std::ostream& os) const { // NOLINT
3695 os << Token::Name(op_) << " ";
3696 return HBitwiseBinaryOperation::PrintDataTo(os);
3697 }
3698
3699
3700 void HPhi::SimplifyConstantInputs() {
3701 // Convert constant inputs to integers when all uses are truncating.
3702 // This must happen before representation inference takes place.
3703 if (!CheckUsesForFlag(kTruncatingToInt32)) return;
3704 for (int i = 0; i < OperandCount(); ++i) {
3705 if (!OperandAt(i)->IsConstant()) return;
3706 }
3707 HGraph* graph = block()->graph();
3708 for (int i = 0; i < OperandCount(); ++i) {
3709 HConstant* operand = HConstant::cast(OperandAt(i));
3710 if (operand->HasInteger32Value()) {
3711 continue;
3712 } else if (operand->HasDoubleValue()) {
3713 HConstant* integer_input = HConstant::New(
3714 graph->isolate(), graph->zone(), graph->GetInvalidContext(),
3715 DoubleToInt32(operand->DoubleValue()));
3716 integer_input->InsertAfter(operand);
3717 SetOperandAt(i, integer_input);
3718 } else if (operand->HasBooleanValue()) {
3719 SetOperandAt(i, operand->BooleanValue() ? graph->GetConstant1()
3720 : graph->GetConstant0());
3721 } else if (operand->ImmortalImmovable()) {
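// Any other immortal immovable root (e.g. undefined) is replaced by zero for
// the truncating uses.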
3722 SetOperandAt(i, graph->GetConstant0());
3723 }
3724 }
3725 // Overwrite observed input representations because they are likely Tagged.
3726 for (HUseIterator it(uses()); !it.Done(); it.Advance()) {
3727 HValue* use = it.value();
3728 if (use->IsBinaryOperation()) {
3729 HBinaryOperation::cast(use)->set_observed_input_representation(
3730 it.index(), Representation::Smi());
3731 }
3732 }
3733 }
3734
3735
3736 void HPhi::InferRepresentation(HInferRepresentationPhase* h_infer) {
3737 DCHECK(CheckFlag(kFlexibleRepresentation));
3738 Representation new_rep = RepresentationFromUses();
3739 UpdateRepresentation(new_rep, h_infer, "uses");
3740 new_rep = RepresentationFromInputs();
3741 UpdateRepresentation(new_rep, h_infer, "inputs");
3742 new_rep = RepresentationFromUseRequirements();
3743 UpdateRepresentation(new_rep, h_infer, "use requirements");
3744 }
3745
3746
3747 Representation HPhi::RepresentationFromInputs() {
3748 Representation r = representation();
3749 for (int i = 0; i < OperandCount(); ++i) {
3750 // Ignore conservative Tagged assumption of parameters if we have
3751 // reason to believe that it's too conservative.
3752 if (has_type_feedback_from_uses() && OperandAt(i)->IsParameter()) {
3753 continue;
3754 }
3755
3756 r = r.generalize(OperandAt(i)->KnownOptimalRepresentation());
3757 }
3758 return r;
3759 }
3760
3761
3762 // Returns a representation if all uses agree on the same representation.
3763 // Integer32 is also returned when some uses are Smi but others are Integer32.
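// If the uses disagree beyond that (e.g. Double vs. Integer32), None is
// returned and no requirement is imposed.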
3764 Representation HValue::RepresentationFromUseRequirements() {
3765 Representation rep = Representation::None();
3766 for (HUseIterator it(uses()); !it.Done(); it.Advance()) {
3767 // Ignore use requirements from code that never runs.
3768 if (it.value()->block()->IsUnreachable()) continue;
3769
3770 // We check for observed_input_representation elsewhere.
3771 Representation use_rep =
3772 it.value()->RequiredInputRepresentation(it.index());
3773 if (rep.IsNone()) {
3774 rep = use_rep;
3775 continue;
3776 }
3777 if (use_rep.IsNone() || rep.Equals(use_rep)) continue;
3778 if (rep.generalize(use_rep).IsInteger32()) {
3779 rep = Representation::Integer32();
3780 continue;
3781 }
3782 return Representation::None();
3783 }
3784 return rep;
3785 }
3786
3787
3788 bool HValue::HasNonSmiUse() {
3789 for (HUseIterator it(uses()); !it.Done(); it.Advance()) {
3790 // We check for observed_input_representation elsewhere.
3791 Representation use_rep =
3792 it.value()->RequiredInputRepresentation(it.index());
3793 if (!use_rep.IsNone() &&
3794 !use_rep.IsSmi() &&
3795 !use_rep.IsTagged()) {
3796 return true;
3797 }
3798 }
3799 return false;
3800 }
3801
3802
3803 // Node-specific verification code is only included in debug mode.
3804 #ifdef DEBUG
3805
3806 void HPhi::Verify() {
3807 DCHECK(OperandCount() == block()->predecessors()->length());
3808 for (int i = 0; i < OperandCount(); ++i) {
3809 HValue* value = OperandAt(i);
3810 HBasicBlock* defining_block = value->block();
3811 HBasicBlock* predecessor_block = block()->predecessors()->at(i);
3812 DCHECK(defining_block == predecessor_block ||
3813 defining_block->Dominates(predecessor_block));
3814 }
3815 }
3816
3817
3818 void HSimulate::Verify() {
3819 HInstruction::Verify();
3820 DCHECK(HasAstId() || next()->IsEnterInlined());
3821 }
3822
3823
3824 void HCheckHeapObject::Verify() {
3825 HInstruction::Verify();
3826 DCHECK(HasNoUses());
3827 }
3828
3829
3830 void HCheckValue::Verify() {
3831 HInstruction::Verify();
3832 DCHECK(HasNoUses());
3833 }
3834
3835 #endif
3836
3837
3838 HObjectAccess HObjectAccess::ForFixedArrayHeader(int offset) {
3839 DCHECK(offset >= 0);
3840 DCHECK(offset < FixedArray::kHeaderSize);
3841 if (offset == FixedArray::kLengthOffset) return ForFixedArrayLength();
3842 return HObjectAccess(kInobject, offset);
3843 }
3844
3845
3846 HObjectAccess HObjectAccess::ForMapAndOffset(Handle<Map> map, int offset,
3847 Representation representation) {
3848 DCHECK(offset >= 0);
3849 Portion portion = kInobject;
3850
3851 if (offset == JSObject::kElementsOffset) {
3852 portion = kElementsPointer;
3853 } else if (offset == JSObject::kMapOffset) {
3854 portion = kMaps;
3855 }
3856 bool existing_inobject_property = true;
3857 if (!map.is_null()) {
3858 existing_inobject_property = (offset <
3859 map->instance_size() - map->unused_property_fields() * kPointerSize);
3860 }
3861 return HObjectAccess(portion, offset, representation, Handle<String>::null(),
3862 false, existing_inobject_property);
3863 }
3864
3865
3866 HObjectAccess HObjectAccess::ForAllocationSiteOffset(int offset) {
3867 switch (offset) {
3868 case AllocationSite::kTransitionInfoOffset:
3869 return HObjectAccess(kInobject, offset, Representation::Tagged());
3870 case AllocationSite::kNestedSiteOffset:
3871 return HObjectAccess(kInobject, offset, Representation::Tagged());
3872 case AllocationSite::kPretenureDataOffset:
3873 return HObjectAccess(kInobject, offset, Representation::Smi());
3874 case AllocationSite::kPretenureCreateCountOffset:
3875 return HObjectAccess(kInobject, offset, Representation::Smi());
3876 case AllocationSite::kDependentCodeOffset:
3877 return HObjectAccess(kInobject, offset, Representation::Tagged());
3878 case AllocationSite::kWeakNextOffset:
3879 return HObjectAccess(kInobject, offset, Representation::Tagged());
3880 default:
3881 UNREACHABLE();
3882 }
3883 return HObjectAccess(kInobject, offset);
3884 }
3885
3886
3887 HObjectAccess HObjectAccess::ForContextSlot(int index) {
3888 DCHECK(index >= 0);
3889 Portion portion = kInobject;
3890 int offset = Context::kHeaderSize + index * kPointerSize;
3891 DCHECK_EQ(offset, Context::SlotOffset(index) + kHeapObjectTag);
3892 return HObjectAccess(portion, offset, Representation::Tagged());
3893 }
3894
3895
3896 HObjectAccess HObjectAccess::ForScriptContext(int index) {
3897 DCHECK(index >= 0);
3898 Portion portion = kInobject;
3899 int offset = ScriptContextTable::GetContextOffset(index);
3900 return HObjectAccess(portion, offset, Representation::Tagged());
3901 }
3902
3903
3904 HObjectAccess HObjectAccess::ForJSArrayOffset(int offset) {
3905 DCHECK(offset >= 0);
3906 Portion portion = kInobject;
3907
3908 if (offset == JSObject::kElementsOffset) {
3909 portion = kElementsPointer;
3910 } else if (offset == JSArray::kLengthOffset) {
3911 portion = kArrayLengths;
3912 } else if (offset == JSObject::kMapOffset) {
3913 portion = kMaps;
3914 }
3915 return HObjectAccess(portion, offset);
3916 }
3917
3918
3919 HObjectAccess HObjectAccess::ForBackingStoreOffset(int offset,
3920 Representation representation) {
3921 DCHECK(offset >= 0);
3922 return HObjectAccess(kBackingStore, offset, representation,
3923 Handle<String>::null(), false, false);
3924 }
3925
3926
3927 HObjectAccess HObjectAccess::ForField(Handle<Map> map, int index,
3928 Representation representation,
3929 Handle<Name> name) {
3930 if (index < 0) {
3931 // Negative property indices are in-object properties, indexed
3932 // from the end of the fixed part of the object.
3933 int offset = (index * kPointerSize) + map->instance_size();
3934 return HObjectAccess(kInobject, offset, representation, name, false, true);
3935 } else {
3936 // Non-negative property indices are in the properties array.
3937 int offset = (index * kPointerSize) + FixedArray::kHeaderSize;
3938 return HObjectAccess(kBackingStore, offset, representation, name,
3939 false, false);
3940 }
3941 }
3942
3943
3944 void HObjectAccess::SetGVNFlags(HValue *instr, PropertyAccessType access_type) {
3945 // Set the appropriate GVN flags for a given load or store instruction.
3946 if (access_type == STORE) {
3947 // Track dominating allocations in order to eliminate write barriers.
3948 instr->SetDependsOnFlag(::v8::internal::kNewSpacePromotion);
3949 instr->SetFlag(HValue::kTrackSideEffectDominators);
3950 } else {
3951 // Try to GVN loads, but don't hoist them above map changes.
3952 instr->SetFlag(HValue::kUseGVN);
3953 instr->SetDependsOnFlag(::v8::internal::kMaps);
3954 }
3955
3956 switch (portion()) {
3957 case kArrayLengths:
3958 if (access_type == STORE) {
3959 instr->SetChangesFlag(::v8::internal::kArrayLengths);
3960 } else {
3961 instr->SetDependsOnFlag(::v8::internal::kArrayLengths);
3962 }
3963 break;
3964 case kStringLengths:
3965 if (access_type == STORE) {
3966 instr->SetChangesFlag(::v8::internal::kStringLengths);
3967 } else {
3968 instr->SetDependsOnFlag(::v8::internal::kStringLengths);
3969 }
3970 break;
3971 case kInobject:
3972 if (access_type == STORE) {
3973 instr->SetChangesFlag(::v8::internal::kInobjectFields);
3974 } else {
3975 instr->SetDependsOnFlag(::v8::internal::kInobjectFields);
3976 }
3977 break;
3978 case kDouble:
3979 if (access_type == STORE) {
3980 instr->SetChangesFlag(::v8::internal::kDoubleFields);
3981 } else {
3982 instr->SetDependsOnFlag(::v8::internal::kDoubleFields);
3983 }
3984 break;
3985 case kBackingStore:
3986 if (access_type == STORE) {
3987 instr->SetChangesFlag(::v8::internal::kBackingStoreFields);
3988 } else {
3989 instr->SetDependsOnFlag(::v8::internal::kBackingStoreFields);
3990 }
3991 break;
3992 case kElementsPointer:
3993 if (access_type == STORE) {
3994 instr->SetChangesFlag(::v8::internal::kElementsPointer);
3995 } else {
3996 instr->SetDependsOnFlag(::v8::internal::kElementsPointer);
3997 }
3998 break;
3999 case kMaps:
4000 if (access_type == STORE) {
4001 instr->SetChangesFlag(::v8::internal::kMaps);
4002 } else {
4003 instr->SetDependsOnFlag(::v8::internal::kMaps);
4004 }
4005 break;
4006 case kExternalMemory:
4007 if (access_type == STORE) {
4008 instr->SetChangesFlag(::v8::internal::kExternalMemory);
4009 } else {
4010 instr->SetDependsOnFlag(::v8::internal::kExternalMemory);
4011 }
4012 break;
4013 }
4014 }
4015
4016
4017 std::ostream& operator<<(std::ostream& os, const HObjectAccess& access) {
4018 os << ".";
4019
4020 switch (access.portion()) {
4021 case HObjectAccess::kArrayLengths:
4022 case HObjectAccess::kStringLengths:
4023 os << "%length";
4024 break;
4025 case HObjectAccess::kElementsPointer:
4026 os << "%elements";
4027 break;
4028 case HObjectAccess::kMaps:
4029 os << "%map";
4030 break;
4031 case HObjectAccess::kDouble: // fall through
4032 case HObjectAccess::kInobject:
4033 if (!access.name().is_null() && access.name()->IsString()) {
4034 os << Handle<String>::cast(access.name())->ToCString().get();
4035 }
4036 os << "[in-object]";
4037 break;
4038 case HObjectAccess::kBackingStore:
4039 if (!access.name().is_null() && access.name()->IsString()) {
4040 os << Handle<String>::cast(access.name())->ToCString().get();
4041 }
4042 os << "[backing-store]";
4043 break;
4044 case HObjectAccess::kExternalMemory:
4045 os << "[external-memory]";
4046 break;
4047 }
4048
4049 return os << "@" << access.offset();
4050 }
4051
4052 } // namespace internal
4053 } // namespace v8
4054