// Copyright 2014 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef V8_COMPILER_NODE_MATCHERS_H_
#define V8_COMPILER_NODE_MATCHERS_H_

#include <cmath>
#include <limits>

#include "src/base/bounds.h"
#include "src/base/compiler-specific.h"
#include "src/base/numbers/double.h"
#include "src/codegen/external-reference.h"
#include "src/common/globals.h"
#include "src/compiler/common-operator.h"
#include "src/compiler/machine-operator.h"
#include "src/compiler/node.h"
#include "src/compiler/opcodes.h"
#include "src/compiler/operator.h"
#include "src/objects/heap-object.h"

namespace v8 {
namespace internal {
namespace compiler {

class JSHeapBroker;

// A pattern matcher for nodes.
struct NodeMatcher {
  explicit NodeMatcher(Node* node) : node_(node) {}

  Node* node() const { return node_; }
  const Operator* op() const { return node()->op(); }
  IrOpcode::Value opcode() const { return node()->opcode(); }

  bool HasProperty(Operator::Property property) const {
    return op()->HasProperty(property);
  }
  Node* InputAt(int index) const { return node()->InputAt(index); }

  bool Equals(const Node* node) const { return node_ == node; }

  bool IsComparison() const;

#define DEFINE_IS_OPCODE(Opcode, ...) \
  bool Is##Opcode() const { return opcode() == IrOpcode::k##Opcode; }
  ALL_OP_LIST(DEFINE_IS_OPCODE)
#undef DEFINE_IS_OPCODE

 private:
  Node* node_;
};
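
// Usage sketch (illustrative only; {node} below is a placeholder): NodeMatcher
// wraps a Node* and exposes per-opcode predicates generated from ALL_OP_LIST,
// plus convenient access to the operator and inputs.
//
//   NodeMatcher m(node);
//   if (m.IsInt32Add()) {
//     Node* lhs = m.InputAt(0);  // first value input of the addition
//     Node* rhs = m.InputAt(1);  // second value input
//     // ...
//   }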

inline Node* SkipValueIdentities(Node* node) {
#ifdef DEBUG
  bool seen_fold_constant = false;
#endif
  do {
#ifdef DEBUG
    if (node->opcode() == IrOpcode::kFoldConstant) {
      DCHECK(!seen_fold_constant);
      seen_fold_constant = true;
    }
#endif
  } while (NodeProperties::IsValueIdentity(node, &node));
  DCHECK_NOT_NULL(node);
  return node;
}

// A pattern matcher for arbitrary value constants.
//
// Note that value identities on the input node are skipped when matching. The
// resolved value may not be a parameter of the input node. The node() method
// returns the unmodified input node. This is by design, as reducers may wish to
// match value constants but delay reducing the node until a later phase. For
// example, binary operator reducers may opt to keep FoldConstant operands while
// applying a reduction that matches on the constant value of the FoldConstant.
template <typename T, IrOpcode::Value kOpcode>
struct ValueMatcher : public NodeMatcher {
  using ValueType = T;

  explicit ValueMatcher(Node* node)
      : NodeMatcher(node), resolved_value_(), has_resolved_value_(false) {
    node = SkipValueIdentities(node);
    has_resolved_value_ = node->opcode() == kOpcode;
    if (has_resolved_value_) {
      resolved_value_ = OpParameter<T>(node->op());
    }
  }

  bool HasResolvedValue() const { return has_resolved_value_; }
  const T& ResolvedValue() const {
    CHECK(HasResolvedValue());
    return resolved_value_;
  }

 private:
  T resolved_value_;
  bool has_resolved_value_;
};
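
// Usage sketch (illustrative only; {input} is a placeholder): matching skips
// value identities, so the constant may sit behind e.g. a FoldConstant, while
// node() still returns the unmodified input.
//
//   ValueMatcher<int32_t, IrOpcode::kInt32Constant> m(input);
//   if (m.HasResolvedValue()) {
//     int32_t c = m.ResolvedValue();  // constant value behind identities
//     Node* original = m.node();      // the original, unreduced input node
//   }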

template <>
inline ValueMatcher<uint32_t, IrOpcode::kInt32Constant>::ValueMatcher(
    Node* node)
    : NodeMatcher(node), resolved_value_(), has_resolved_value_(false) {
  node = SkipValueIdentities(node);
  has_resolved_value_ = node->opcode() == IrOpcode::kInt32Constant;
  if (has_resolved_value_) {
    resolved_value_ = static_cast<uint32_t>(OpParameter<int32_t>(node->op()));
  }
}

template <>
inline ValueMatcher<int64_t, IrOpcode::kInt64Constant>::ValueMatcher(Node* node)
    : NodeMatcher(node), resolved_value_(), has_resolved_value_(false) {
  node = SkipValueIdentities(node);
  if (node->opcode() == IrOpcode::kInt32Constant) {
    resolved_value_ = OpParameter<int32_t>(node->op());
    has_resolved_value_ = true;
  } else if (node->opcode() == IrOpcode::kInt64Constant) {
    resolved_value_ = OpParameter<int64_t>(node->op());
    has_resolved_value_ = true;
  }
}

template <>
inline ValueMatcher<uint64_t, IrOpcode::kInt64Constant>::ValueMatcher(
    Node* node)
    : NodeMatcher(node), resolved_value_(), has_resolved_value_(false) {
  node = SkipValueIdentities(node);
  if (node->opcode() == IrOpcode::kInt32Constant) {
    resolved_value_ = static_cast<uint32_t>(OpParameter<int32_t>(node->op()));
    has_resolved_value_ = true;
  } else if (node->opcode() == IrOpcode::kInt64Constant) {
    resolved_value_ = static_cast<uint64_t>(OpParameter<int64_t>(node->op()));
    has_resolved_value_ = true;
  }
}

// A pattern matcher for integer constants.
template <typename T, IrOpcode::Value kOpcode>
struct IntMatcher final : public ValueMatcher<T, kOpcode> {
  explicit IntMatcher(Node* node) : ValueMatcher<T, kOpcode>(node) {}

  bool Is(const T& value) const {
    return this->HasResolvedValue() && this->ResolvedValue() == value;
  }
  bool IsInRange(const T& low, const T& high) const {
    return this->HasResolvedValue() &&
           base::IsInRange(this->ResolvedValue(), low, high);
  }
  bool IsMultipleOf(T n) const {
    return this->HasResolvedValue() && (this->ResolvedValue() % n) == 0;
  }
  bool IsPowerOf2() const {
    return this->HasResolvedValue() && this->ResolvedValue() > 0 &&
           (this->ResolvedValue() & (this->ResolvedValue() - 1)) == 0;
  }
  bool IsNegativePowerOf2() const {
    return this->HasResolvedValue() && this->ResolvedValue() < 0 &&
           ((this->ResolvedValue() == std::numeric_limits<T>::min()) ||
            (-this->ResolvedValue() & (-this->ResolvedValue() - 1)) == 0);
  }
  bool IsNegative() const {
    return this->HasResolvedValue() && this->ResolvedValue() < 0;
  }
};

using Int32Matcher = IntMatcher<int32_t, IrOpcode::kInt32Constant>;
using Uint32Matcher = IntMatcher<uint32_t, IrOpcode::kInt32Constant>;
using Int64Matcher = IntMatcher<int64_t, IrOpcode::kInt64Constant>;
using Uint64Matcher = IntMatcher<uint64_t, IrOpcode::kInt64Constant>;
using V128ConstMatcher =
    ValueMatcher<S128ImmediateParameter, IrOpcode::kS128Const>;
#if V8_HOST_ARCH_32_BIT
using IntPtrMatcher = Int32Matcher;
using UintPtrMatcher = Uint32Matcher;
#else
using IntPtrMatcher = Int64Matcher;
using UintPtrMatcher = Uint64Matcher;
#endif
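
// Usage sketch (illustrative only; {node} is a placeholder): the IntMatcher
// predicates are typically used by reducers and instruction selectors, e.g. to
// detect a power-of-two multiplier that can become a shift.
//
//   Int32Matcher m(node->InputAt(1));
//   if (m.IsPowerOf2()) {
//     // e.g. turn a multiplication by 2^k into a left shift by k.
//   } else if (m.IsInRange(0, 31)) {
//     // e.g. a valid 32-bit shift amount.
//   }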


// A pattern matcher for floating point constants.
template <typename T, IrOpcode::Value kOpcode>
struct FloatMatcher final : public ValueMatcher<T, kOpcode> {
  explicit FloatMatcher(Node* node) : ValueMatcher<T, kOpcode>(node) {}

  bool Is(const T& value) const {
    return this->HasResolvedValue() && this->ResolvedValue() == value;
  }
  bool IsInRange(const T& low, const T& high) const {
    return this->HasResolvedValue() && low <= this->ResolvedValue() &&
           this->ResolvedValue() <= high;
  }
  bool IsMinusZero() const {
    return this->Is(0.0) && std::signbit(this->ResolvedValue());
  }
  bool IsNegative() const {
    return this->HasResolvedValue() && this->ResolvedValue() < 0.0;
  }
  bool IsNaN() const {
    return this->HasResolvedValue() && std::isnan(this->ResolvedValue());
  }
  bool IsZero() const {
    return this->Is(0.0) && !std::signbit(this->ResolvedValue());
  }
  bool IsNormal() const {
    return this->HasResolvedValue() && std::isnormal(this->ResolvedValue());
  }
  bool IsInteger() const {
    return this->HasResolvedValue() &&
           std::nearbyint(this->ResolvedValue()) == this->ResolvedValue();
  }
  bool IsPositiveOrNegativePowerOf2() const {
    if (!this->HasResolvedValue() || (this->ResolvedValue() == 0.0)) {
      return false;
    }
    base::Double value = base::Double(this->ResolvedValue());
    return !value.IsInfinite() && base::bits::IsPowerOfTwo(value.Significand());
  }
};

using Float32Matcher = FloatMatcher<float, IrOpcode::kFloat32Constant>;
using Float64Matcher = FloatMatcher<double, IrOpcode::kFloat64Constant>;
using NumberMatcher = FloatMatcher<double, IrOpcode::kNumberConstant>;
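
// Usage sketch (illustrative only; {node} is a placeholder): float matchers
// guard identities that only hold for specific constants, where +0.0 and -0.0
// must be distinguished via the sign bit.
//
//   Float64Matcher m(node->InputAt(1));
//   if (m.Is(1.0)) {
//     // e.g. x * 1.0 may be reducible, subject to NaN/-0 considerations.
//   } else if (m.IsMinusZero()) {
//     // -0.0 usually needs separate handling.
//   }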

// A pattern matcher for heap object constants.
template <IrOpcode::Value kHeapConstantOpcode>
struct HeapObjectMatcherImpl final
    : public ValueMatcher<Handle<HeapObject>, kHeapConstantOpcode> {
  explicit HeapObjectMatcherImpl(Node* node)
      : ValueMatcher<Handle<HeapObject>, kHeapConstantOpcode>(node) {}

  bool Is(Handle<HeapObject> const& value) const {
    return this->HasResolvedValue() &&
           this->ResolvedValue().address() == value.address();
  }

  HeapObjectRef Ref(JSHeapBroker* broker) const {
    // TODO(jgruber,chromium:1209798): Using kAssumeMemoryFence works around
    // the fact that the graph stores handles (and not refs). The assumption is
    // that any handle inserted into the graph is safe to read; but we don't
    // preserve the reason why it is safe to read. Thus we must over-approximate
    // here and assume the existence of a memory fence. In the future, we should
    // consider having the graph store ObjectRefs or ObjectData pointer instead,
    // which would make new ref construction here unnecessary.
    return MakeRefAssumeMemoryFence(broker, this->ResolvedValue());
  }
};

using HeapObjectMatcher = HeapObjectMatcherImpl<IrOpcode::kHeapConstant>;
using CompressedHeapObjectMatcher =
    HeapObjectMatcherImpl<IrOpcode::kCompressedHeapConstant>;

// A pattern matcher for external reference constants.
struct ExternalReferenceMatcher final
    : public ValueMatcher<ExternalReference, IrOpcode::kExternalConstant> {
  explicit ExternalReferenceMatcher(Node* node)
      : ValueMatcher<ExternalReference, IrOpcode::kExternalConstant>(node) {}
  bool Is(const ExternalReference& value) const {
    return this->HasResolvedValue() && this->ResolvedValue() == value;
  }
};


// For shorter pattern matching code, this struct matches the inputs to
// machine-level load operations.
template <typename Object>
struct LoadMatcher : public NodeMatcher {
  explicit LoadMatcher(Node* node)
      : NodeMatcher(node), object_(InputAt(0)), index_(InputAt(1)) {}

  using ObjectMatcher = Object;

  Object const& object() const { return object_; }
  IntPtrMatcher const& index() const { return index_; }

 private:
  Object const object_;
  IntPtrMatcher const index_;
};
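
// Usage sketch (illustrative only; {load_node} is a placeholder): LoadMatcher
// wires the first two inputs of a load into an object matcher and an
// IntPtrMatcher for the index.
//
//   LoadMatcher<ExternalReferenceMatcher> m(load_node);
//   if (m.object().HasResolvedValue() && m.index().Is(0)) {
//     ExternalReference ref = m.object().ResolvedValue();
//     // load from a known external reference at offset 0
//   }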


// For shorter pattern matching code, this struct matches both the left and
// right hand sides of a binary operation and can put constants on the right
// if they appear on the left hand side of a commutative operation.
template <typename Left, typename Right>
struct BinopMatcher : public NodeMatcher {
  explicit BinopMatcher(Node* node)
      : NodeMatcher(node), left_(InputAt(0)), right_(InputAt(1)) {
    if (HasProperty(Operator::kCommutative)) PutConstantOnRight();
  }
  BinopMatcher(Node* node, bool allow_input_swap)
      : NodeMatcher(node), left_(InputAt(0)), right_(InputAt(1)) {
    if (allow_input_swap) PutConstantOnRight();
  }

  using LeftMatcher = Left;
  using RightMatcher = Right;

  const Left& left() const { return left_; }
  const Right& right() const { return right_; }

  bool IsFoldable() const {
    return left().HasResolvedValue() && right().HasResolvedValue();
  }
  bool LeftEqualsRight() const { return left().node() == right().node(); }

  bool OwnsInput(Node* input) {
    for (Node* use : input->uses()) {
      if (use != node()) {
        return false;
      }
    }
    return true;
  }

 protected:
  void SwapInputs() {
    std::swap(left_, right_);
    // TODO(turbofan): This modification should notify the reducers using
    // BinopMatcher. Alternatively, all reducers (especially value numbering)
    // could ignore the ordering for commutative binops.
    node()->ReplaceInput(0, left().node());
    node()->ReplaceInput(1, right().node());
  }

 private:
  void PutConstantOnRight() {
    if (left().HasResolvedValue() && !right().HasResolvedValue()) {
      SwapInputs();
    }
  }

  Left left_;
  Right right_;
};

using Int32BinopMatcher = BinopMatcher<Int32Matcher, Int32Matcher>;
using Uint32BinopMatcher = BinopMatcher<Uint32Matcher, Uint32Matcher>;
using Int64BinopMatcher = BinopMatcher<Int64Matcher, Int64Matcher>;
using Uint64BinopMatcher = BinopMatcher<Uint64Matcher, Uint64Matcher>;
using IntPtrBinopMatcher = BinopMatcher<IntPtrMatcher, IntPtrMatcher>;
using UintPtrBinopMatcher = BinopMatcher<UintPtrMatcher, UintPtrMatcher>;
using Float32BinopMatcher = BinopMatcher<Float32Matcher, Float32Matcher>;
using Float64BinopMatcher = BinopMatcher<Float64Matcher, Float64Matcher>;
using NumberBinopMatcher = BinopMatcher<NumberMatcher, NumberMatcher>;
using HeapObjectBinopMatcher =
    BinopMatcher<HeapObjectMatcher, HeapObjectMatcher>;
using CompressedHeapObjectBinopMatcher =
    BinopMatcher<CompressedHeapObjectMatcher, CompressedHeapObjectMatcher>;
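
// Usage sketch (illustrative only; {add_node} is a placeholder): the
// BinopMatcher constructors canonicalize commutative operations so that a
// constant operand ends up on the right, which keeps reducer code short.
//
//   Int32BinopMatcher m(add_node);
//   if (m.right().Is(0)) {
//     // x + 0 => x
//   } else if (m.IsFoldable()) {
//     // both operands are constants; the operation can be folded outright.
//   }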

template <class BinopMatcher, IrOpcode::Value kMulOpcode,
          IrOpcode::Value kShiftOpcode>
struct ScaleMatcher {
  explicit ScaleMatcher(Node* node, bool allow_power_of_two_plus_one = false)
      : scale_(-1), power_of_two_plus_one_(false) {
    if (node->InputCount() < 2) return;
    BinopMatcher m(node);
    if (node->opcode() == kShiftOpcode) {
      if (m.right().HasResolvedValue()) {
        typename BinopMatcher::RightMatcher::ValueType value =
            m.right().ResolvedValue();
        if (value >= 0 && value <= 3) {
          scale_ = static_cast<int>(value);
        }
      }
    } else if (node->opcode() == kMulOpcode) {
      if (m.right().HasResolvedValue()) {
        typename BinopMatcher::RightMatcher::ValueType value =
            m.right().ResolvedValue();
        if (value == 1) {
          scale_ = 0;
        } else if (value == 2) {
          scale_ = 1;
        } else if (value == 4) {
          scale_ = 2;
        } else if (value == 8) {
          scale_ = 3;
        } else if (allow_power_of_two_plus_one) {
          if (value == 3) {
            scale_ = 1;
            power_of_two_plus_one_ = true;
          } else if (value == 5) {
            scale_ = 2;
            power_of_two_plus_one_ = true;
          } else if (value == 9) {
            scale_ = 3;
            power_of_two_plus_one_ = true;
          }
        }
      }
    }
  }

  bool matches() const { return scale_ != -1; }
  int scale() const { return scale_; }
  bool power_of_two_plus_one() const { return power_of_two_plus_one_; }

 private:
  int scale_;
  bool power_of_two_plus_one_;
};

using Int32ScaleMatcher =
    ScaleMatcher<Int32BinopMatcher, IrOpcode::kInt32Mul, IrOpcode::kWord32Shl>;
using Int64ScaleMatcher =
    ScaleMatcher<Int64BinopMatcher, IrOpcode::kInt64Mul, IrOpcode::kWord64Shl>;
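
// Usage sketch (illustrative only; {node} is a placeholder): ScaleMatcher
// recognizes index * {1,2,4,8} and index << {0,1,2,3}, and optionally
// multiplications by 3, 5 or 9 as "power of two plus one", so that the factor
// can be encoded as an addressing-mode scale.
//
//   Int32ScaleMatcher m(node, /*allow_power_of_two_plus_one=*/true);
//   if (m.matches()) {
//     int scale = m.scale();  // log2 of the scale factor, in 0..3
//     bool plus_one = m.power_of_two_plus_one();
//   }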

template <class BinopMatcher, IrOpcode::Value AddOpcode,
          IrOpcode::Value SubOpcode, IrOpcode::Value kMulOpcode,
          IrOpcode::Value kShiftOpcode>
struct AddMatcher : public BinopMatcher {
  static const IrOpcode::Value kAddOpcode = AddOpcode;
  static const IrOpcode::Value kSubOpcode = SubOpcode;
  using Matcher = ScaleMatcher<BinopMatcher, kMulOpcode, kShiftOpcode>;

  AddMatcher(Node* node, bool allow_input_swap)
      : BinopMatcher(node, allow_input_swap),
        scale_(-1),
        power_of_two_plus_one_(false) {
    Initialize(node, allow_input_swap);
  }
  explicit AddMatcher(Node* node)
      : BinopMatcher(node, node->op()->HasProperty(Operator::kCommutative)),
        scale_(-1),
        power_of_two_plus_one_(false) {
    Initialize(node, node->op()->HasProperty(Operator::kCommutative));
  }

  bool HasIndexInput() const { return scale_ != -1; }
  Node* IndexInput() const {
    DCHECK(HasIndexInput());
    return this->left().node()->InputAt(0);
  }
  int scale() const {
    DCHECK(HasIndexInput());
    return scale_;
  }
  bool power_of_two_plus_one() const { return power_of_two_plus_one_; }

 private:
  void Initialize(Node* node, bool allow_input_swap) {
    Matcher left_matcher(this->left().node(), true);
    if (left_matcher.matches()) {
      scale_ = left_matcher.scale();
      power_of_two_plus_one_ = left_matcher.power_of_two_plus_one();
      return;
    }

    if (!allow_input_swap) {
      return;
    }

    Matcher right_matcher(this->right().node(), true);
    if (right_matcher.matches()) {
      scale_ = right_matcher.scale();
      power_of_two_plus_one_ = right_matcher.power_of_two_plus_one();
      this->SwapInputs();
      return;
    }

    if ((this->left().opcode() != kSubOpcode &&
         this->left().opcode() != kAddOpcode) &&
        (this->right().opcode() == kAddOpcode ||
         this->right().opcode() == kSubOpcode)) {
      this->SwapInputs();
    }
  }

  int scale_;
  bool power_of_two_plus_one_;
};

using Int32AddMatcher =
    AddMatcher<Int32BinopMatcher, IrOpcode::kInt32Add, IrOpcode::kInt32Sub,
               IrOpcode::kInt32Mul, IrOpcode::kWord32Shl>;
using Int64AddMatcher =
    AddMatcher<Int64BinopMatcher, IrOpcode::kInt64Add, IrOpcode::kInt64Sub,
               IrOpcode::kInt64Mul, IrOpcode::kWord64Shl>;
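
// Usage sketch (illustrative only; {add_node} is a placeholder): AddMatcher
// extends BinopMatcher for additions and, after canonicalization, reports
// whether its left input is a scaled index expression.
//
//   Int32AddMatcher m(add_node);
//   if (m.HasIndexInput()) {
//     Node* index = m.IndexInput();    // the value being scaled
//     int scale = m.scale();           // log2 of the scale factor
//     Node* other = m.right().node();  // the non-scaled operand
//   }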

enum DisplacementMode { kPositiveDisplacement, kNegativeDisplacement };

enum class AddressOption : uint8_t {
  kAllowNone = 0u,
  kAllowInputSwap = 1u << 0,
  kAllowScale = 1u << 1,
  kAllowAll = kAllowInputSwap | kAllowScale
};

using AddressOptions = base::Flags<AddressOption, uint8_t>;
DEFINE_OPERATORS_FOR_FLAGS(AddressOptions)

template <class AddMatcher>
struct BaseWithIndexAndDisplacementMatcher {
  BaseWithIndexAndDisplacementMatcher(Node* node, AddressOptions options)
      : matches_(false),
        index_(nullptr),
        scale_(0),
        base_(nullptr),
        displacement_(nullptr),
        displacement_mode_(kPositiveDisplacement) {
    Initialize(node, options);
  }

  explicit BaseWithIndexAndDisplacementMatcher(Node* node)
      : matches_(false),
        index_(nullptr),
        scale_(0),
        base_(nullptr),
        displacement_(nullptr),
        displacement_mode_(kPositiveDisplacement) {
    Initialize(node, AddressOption::kAllowScale |
                         (node->op()->HasProperty(Operator::kCommutative)
                              ? AddressOption::kAllowInputSwap
                              : AddressOption::kAllowNone));
  }

  bool matches() const { return matches_; }
  Node* index() const { return index_; }
  int scale() const { return scale_; }
  Node* base() const { return base_; }
  Node* displacement() const { return displacement_; }
  DisplacementMode displacement_mode() const { return displacement_mode_; }

 private:
  bool matches_;
  Node* index_;
  int scale_;
  Node* base_;
  Node* displacement_;
  DisplacementMode displacement_mode_;

  void Initialize(Node* node, AddressOptions options) {
    // The BaseWithIndexAndDisplacementMatcher canonicalizes the order of
    // displacements and scale factors that are used as inputs, so instead of
    // enumerating all possible patterns by brute force, checking for node
    // clusters using the following templates in the following order suffices to
    // find all of the interesting cases (S = index * scale, B = base input, D =
    // displacement input):
    // (S + (B + D))
    // (S + (B + B))
    // (S + D)
    // (S + B)
    // ((S + D) + B)
    // ((S + B) + D)
    // ((B + D) + B)
    // ((B + B) + D)
    // (B + D)
    // (B + B)
    if (node->InputCount() < 2) return;
    AddMatcher m(node, options & AddressOption::kAllowInputSwap);
    Node* left = m.left().node();
    Node* right = m.right().node();
    Node* displacement = nullptr;
    Node* base = nullptr;
    Node* index = nullptr;
    Node* scale_expression = nullptr;
    bool power_of_two_plus_one = false;
    DisplacementMode displacement_mode = kPositiveDisplacement;
    int scale = 0;
    if (m.HasIndexInput() && OwnedByAddressingOperand(left)) {
      index = m.IndexInput();
      scale = m.scale();
      scale_expression = left;
      power_of_two_plus_one = m.power_of_two_plus_one();
      bool match_found = false;
      if (right->opcode() == AddMatcher::kSubOpcode &&
          OwnedByAddressingOperand(right)) {
        AddMatcher right_matcher(right);
        if (right_matcher.right().HasResolvedValue()) {
          // (S + (B - D))
          base = right_matcher.left().node();
          displacement = right_matcher.right().node();
          displacement_mode = kNegativeDisplacement;
          match_found = true;
        }
      }
      if (!match_found) {
        if (right->opcode() == AddMatcher::kAddOpcode &&
            OwnedByAddressingOperand(right)) {
          AddMatcher right_matcher(right);
          if (right_matcher.right().HasResolvedValue()) {
            // (S + (B + D))
            base = right_matcher.left().node();
            displacement = right_matcher.right().node();
          } else {
            // (S + (B + B))
            base = right;
          }
        } else if (m.right().HasResolvedValue()) {
          // (S + D)
          displacement = right;
        } else {
          // (S + B)
          base = right;
        }
      }
    } else {
      bool match_found = false;
      if (left->opcode() == AddMatcher::kSubOpcode &&
          OwnedByAddressingOperand(left)) {
        AddMatcher left_matcher(left);
        Node* left_left = left_matcher.left().node();
        Node* left_right = left_matcher.right().node();
        if (left_matcher.right().HasResolvedValue()) {
          if (left_matcher.HasIndexInput() &&
              OwnedByAddressingOperand(left_left)) {
            // ((S - D) + B)
            index = left_matcher.IndexInput();
            scale = left_matcher.scale();
            scale_expression = left_left;
            power_of_two_plus_one = left_matcher.power_of_two_plus_one();
            displacement = left_right;
            displacement_mode = kNegativeDisplacement;
            base = right;
          } else {
            // ((B - D) + B)
            index = left_left;
            displacement = left_right;
            displacement_mode = kNegativeDisplacement;
            base = right;
          }
          match_found = true;
        }
      }
      if (!match_found) {
        if (left->opcode() == AddMatcher::kAddOpcode &&
            OwnedByAddressingOperand(left)) {
          AddMatcher left_matcher(left);
          Node* left_left = left_matcher.left().node();
          Node* left_right = left_matcher.right().node();
          if (left_matcher.HasIndexInput() &&
              OwnedByAddressingOperand(left_left)) {
            if (left_matcher.right().HasResolvedValue()) {
              // ((S + D) + B)
              index = left_matcher.IndexInput();
              scale = left_matcher.scale();
              scale_expression = left_left;
              power_of_two_plus_one = left_matcher.power_of_two_plus_one();
              displacement = left_right;
              base = right;
            } else if (m.right().HasResolvedValue()) {
              if (left->OwnedBy(node)) {
                // ((S + B) + D)
                index = left_matcher.IndexInput();
                scale = left_matcher.scale();
                scale_expression = left_left;
                power_of_two_plus_one = left_matcher.power_of_two_plus_one();
                base = left_right;
                displacement = right;
              } else {
                // (B + D)
                base = left;
                displacement = right;
              }
            } else {
              // (B + B)
              index = left;
              base = right;
            }
          } else {
            if (left_matcher.right().HasResolvedValue()) {
              // ((B + D) + B)
              index = left_left;
              displacement = left_right;
              base = right;
            } else if (m.right().HasResolvedValue()) {
              if (left->OwnedBy(node)) {
                // ((B + B) + D)
                index = left_left;
                base = left_right;
                displacement = right;
              } else {
                // (B + D)
                base = left;
                displacement = right;
              }
            } else {
              // (B + B)
              index = left;
              base = right;
            }
          }
        } else {
          if (m.right().HasResolvedValue()) {
            // (B + D)
            base = left;
            displacement = right;
          } else {
            // (B + B)
            base = left;
            index = right;
          }
        }
      }
    }
    int64_t value = 0;
    if (displacement != nullptr) {
      switch (displacement->opcode()) {
        case IrOpcode::kInt32Constant: {
          value = OpParameter<int32_t>(displacement->op());
          break;
        }
        case IrOpcode::kInt64Constant: {
          value = OpParameter<int64_t>(displacement->op());
          break;
        }
        default:
          UNREACHABLE();
          break;
      }
      if (value == 0) {
        displacement = nullptr;
      }
    }
    if (power_of_two_plus_one) {
      if (base != nullptr) {
        // If the scale requires explicitly using the index as the base, but a
        // base is already part of the match, then the (1 << N + 1) scale factor
        // can't be folded into the match and the entire index * scale
        // calculation must be computed separately.
        index = scale_expression;
        scale = 0;
      } else {
        base = index;
      }
    }
    if (!(options & AddressOption::kAllowScale) && scale != 0) {
      index = scale_expression;
      scale = 0;
    }
    base_ = base;
    displacement_ = displacement;
    displacement_mode_ = displacement_mode;
    index_ = index;
    scale_ = scale;
    matches_ = true;
  }

  // Warning: When {node} is used by an Add/Sub instruction, this function does
  // not guarantee the Add/Sub will be part of an addressing operand.
  static bool OwnedByAddressingOperand(Node* node) {
    for (auto use : node->use_edges()) {
      Node* from = use.from();
      switch (from->opcode()) {
        case IrOpcode::kLoad:
        case IrOpcode::kLoadImmutable:
        case IrOpcode::kProtectedLoad:
        case IrOpcode::kInt32Add:
        case IrOpcode::kInt64Add:
          // Skip addressing uses.
          break;
        case IrOpcode::kInt32Sub:
          // If the subtrahend is not a constant, it is not an addressing use.
          if (from->InputAt(1)->opcode() != IrOpcode::kInt32Constant)
            return false;
          break;
        case IrOpcode::kInt64Sub:
          // If the subtrahend is not a constant, it is not an addressing use.
          if (from->InputAt(1)->opcode() != IrOpcode::kInt64Constant)
            return false;
          break;
        case IrOpcode::kStore:
        case IrOpcode::kProtectedStore:
          // If the stored value is this node, it is not an addressing use.
          if (from->InputAt(2) == node) return false;
          // Otherwise it is used as an address and skipped.
          break;
        default:
          // Non-addressing use found.
          return false;
      }
    }
    return true;
  }
};

using BaseWithIndexAndDisplacement32Matcher =
    BaseWithIndexAndDisplacementMatcher<Int32AddMatcher>;
using BaseWithIndexAndDisplacement64Matcher =
    BaseWithIndexAndDisplacementMatcher<Int64AddMatcher>;
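
// Usage sketch (illustrative only; {address_node} is a placeholder):
// instruction selectors typically run this matcher over the address input of a
// load or store to see whether it decomposes into
// base + index * scale + displacement.
//
//   BaseWithIndexAndDisplacement64Matcher m(address_node);
//   if (m.matches()) {
//     Node* base = m.base();                  // may be nullptr
//     Node* index = m.index();                // may be nullptr
//     int scale = m.scale();                  // in 0..3
//     Node* displacement = m.displacement();  // nullptr when zero
//   }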

struct V8_EXPORT_PRIVATE BranchMatcher : public NON_EXPORTED_BASE(NodeMatcher) {
  explicit BranchMatcher(Node* branch);

  bool Matched() const { return if_true_ && if_false_; }

  Node* Branch() const { return node(); }
  Node* IfTrue() const { return if_true_; }
  Node* IfFalse() const { return if_false_; }

 private:
  Node* if_true_;
  Node* if_false_;
};

struct V8_EXPORT_PRIVATE DiamondMatcher
    : public NON_EXPORTED_BASE(NodeMatcher) {
  explicit DiamondMatcher(Node* merge);

  bool Matched() const { return branch_; }
  bool IfProjectionsAreOwned() const {
    return if_true_->OwnedBy(node()) && if_false_->OwnedBy(node());
  }

  Node* Branch() const { return branch_; }
  Node* IfTrue() const { return if_true_; }
  Node* IfFalse() const { return if_false_; }
  Node* Merge() const { return node(); }

  Node* TrueInputOf(Node* phi) const {
    DCHECK(IrOpcode::IsPhiOpcode(phi->opcode()));
    DCHECK_EQ(3, phi->InputCount());
    DCHECK_EQ(Merge(), phi->InputAt(2));
    return phi->InputAt(if_true_ == Merge()->InputAt(0) ? 0 : 1);
  }

  Node* FalseInputOf(Node* phi) const {
    DCHECK(IrOpcode::IsPhiOpcode(phi->opcode()));
    DCHECK_EQ(3, phi->InputCount());
    DCHECK_EQ(Merge(), phi->InputAt(2));
    return phi->InputAt(if_true_ == Merge()->InputAt(0) ? 1 : 0);
  }

 private:
  Node* branch_;
  Node* if_true_;
  Node* if_false_;
};
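
// Usage sketch (illustrative only; {merge_node} and {phi} are placeholders):
// DiamondMatcher is constructed on a Merge node and, when it matches, exposes
// the Branch/IfTrue/IfFalse control nodes of the diamond and the phi input
// belonging to each arm.
//
//   DiamondMatcher m(merge_node);
//   if (m.Matched() && m.IfProjectionsAreOwned()) {
//     Node* condition = m.Branch()->InputAt(0);
//     Node* true_value = m.TrueInputOf(phi);    // phi must use Merge() as control
//     Node* false_value = m.FalseInputOf(phi);
//   }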

struct LoadTransformMatcher
    : ValueMatcher<LoadTransformParameters, IrOpcode::kLoadTransform> {
  explicit LoadTransformMatcher(Node* node) : ValueMatcher(node) {}
  bool Is(LoadTransformation t) {
    return HasResolvedValue() && ResolvedValue().transformation == t;
  }
};

}  // namespace compiler
}  // namespace internal
}  // namespace v8

#endif  // V8_COMPILER_NODE_MATCHERS_H_