// Copyright 2015 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "test/unittests/interpreter/interpreter-assembler-unittest.h"

#include "src/code-factory.h"
#include "src/compiler/graph.h"
#include "src/compiler/node.h"
#include "src/interface-descriptors.h"
#include "src/isolate.h"
#include "test/unittests/compiler/compiler-test-utils.h"
#include "test/unittests/compiler/node-test-utils.h"

using ::testing::_;

namespace v8 {
namespace internal {

using namespace compiler;

namespace interpreter {

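// Table of every bytecode, so each test below can be run against all
// bytecode handlers via TRACED_FOREACH.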
const interpreter::Bytecode kBytecodes[] = {
#define DEFINE_BYTECODE(Name, ...) interpreter::Bytecode::k##Name,
    BYTECODE_LIST(DEFINE_BYTECODE)
#undef DEFINE_BYTECODE
};

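// The helpers below select the 32-bit or 64-bit matcher that corresponds to
// the target's pointer width (kPointerSize).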
Matcher<Node*> IsIntPtrConstant(const intptr_t value) {
  return kPointerSize == 8 ? IsInt64Constant(static_cast<int64_t>(value))
                           : IsInt32Constant(static_cast<int32_t>(value));
}

Matcher<Node*> IsIntPtrAdd(const Matcher<Node*>& lhs_matcher,
                           const Matcher<Node*>& rhs_matcher) {
  return kPointerSize == 8 ? IsInt64Add(lhs_matcher, rhs_matcher)
                           : IsInt32Add(lhs_matcher, rhs_matcher);
}

Matcher<Node*> IsIntPtrSub(const Matcher<Node*>& lhs_matcher,
                           const Matcher<Node*>& rhs_matcher) {
  return kPointerSize == 8 ? IsInt64Sub(lhs_matcher, rhs_matcher)
                           : IsInt32Sub(lhs_matcher, rhs_matcher);
}

Matcher<Node*> IsWordShl(const Matcher<Node*>& lhs_matcher,
                         const Matcher<Node*>& rhs_matcher) {
  return kPointerSize == 8 ? IsWord64Shl(lhs_matcher, rhs_matcher)
                           : IsWord32Shl(lhs_matcher, rhs_matcher);
}

Matcher<Node*> IsWordSar(const Matcher<Node*>& lhs_matcher,
                         const Matcher<Node*>& rhs_matcher) {
  return kPointerSize == 8 ? IsWord64Sar(lhs_matcher, rhs_matcher)
                           : IsWord32Sar(lhs_matcher, rhs_matcher);
}

Matcher<Node*> IsWordOr(const Matcher<Node*>& lhs_matcher,
                        const Matcher<Node*>& rhs_matcher) {
  return kPointerSize == 8 ? IsWord64Or(lhs_matcher, rhs_matcher)
                           : IsWord32Or(lhs_matcher, rhs_matcher);
}

InterpreterAssemblerTest::InterpreterAssemblerForTest::
    ~InterpreterAssemblerForTest() {
  // Tests don't necessarily read and write the accumulator, but
  // InterpreterAssembler checks accumulator uses.
  if (Bytecodes::ReadsAccumulator(bytecode())) {
    GetAccumulator();
  }
  if (Bytecodes::WritesAccumulator(bytecode())) {
    SetAccumulator(nullptr);
  }
}

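// IsLoad/IsStore wrap the compiler-level matchers, ignoring the trailing
// effect and control inputs, which these tests don't care about.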
Matcher<Node*> InterpreterAssemblerTest::InterpreterAssemblerForTest::IsLoad(
    const Matcher<LoadRepresentation>& rep_matcher,
    const Matcher<Node*>& base_matcher, const Matcher<Node*>& index_matcher) {
  return ::i::compiler::IsLoad(rep_matcher, base_matcher, index_matcher, _, _);
}

Matcher<Node*> InterpreterAssemblerTest::InterpreterAssemblerForTest::IsStore(
    const Matcher<StoreRepresentation>& rep_matcher,
    const Matcher<Node*>& base_matcher, const Matcher<Node*>& index_matcher,
    const Matcher<Node*>& value_matcher) {
  return ::i::compiler::IsStore(rep_matcher, base_matcher, index_matcher,
                                value_matcher, _, _);
}

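// The Is*Operand matchers describe the expected load of a bytecode operand
// located 'offset' bytes past the current bytecode offset in the bytecode
// array.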
Matcher<Node*>
InterpreterAssemblerTest::InterpreterAssemblerForTest::IsUnsignedByteOperand(
    int offset) {
  return IsLoad(
      MachineType::Uint8(),
      IsParameter(InterpreterDispatchDescriptor::kBytecodeArrayParameter),
      IsIntPtrAdd(
          IsParameter(InterpreterDispatchDescriptor::kBytecodeOffsetParameter),
          IsIntPtrConstant(offset)));
}

Matcher<Node*>
InterpreterAssemblerTest::InterpreterAssemblerForTest::IsSignedByteOperand(
    int offset) {
  Matcher<Node*> load_matcher = IsLoad(
      MachineType::Int8(),
      IsParameter(InterpreterDispatchDescriptor::kBytecodeArrayParameter),
      IsIntPtrAdd(
          IsParameter(InterpreterDispatchDescriptor::kBytecodeOffsetParameter),
          IsIntPtrConstant(offset)));
  if (kPointerSize == 8) {
    load_matcher = IsChangeInt32ToInt64(load_matcher);
  }
  return load_matcher;
}

Matcher<Node*>
InterpreterAssemblerTest::InterpreterAssemblerForTest::IsUnsignedShortOperand(
    int offset) {
  if (TargetSupportsUnalignedAccess()) {
    return IsLoad(
        MachineType::Uint16(),
        IsParameter(InterpreterDispatchDescriptor::kBytecodeArrayParameter),
        IsIntPtrAdd(
            IsParameter(
                InterpreterDispatchDescriptor::kBytecodeOffsetParameter),
            IsIntPtrConstant(offset)));
  } else {
#if V8_TARGET_LITTLE_ENDIAN
    const int kStep = -1;
    const int kMsbOffset = 1;
#elif V8_TARGET_BIG_ENDIAN
    const int kStep = 1;
    const int kMsbOffset = 0;
#else
#error "Unknown Architecture"
#endif
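    // Without unaligned access, the operand is assembled from single-byte
    // loads, most significant byte first.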
    Matcher<Node*> bytes[2];
    for (int i = 0; i < static_cast<int>(arraysize(bytes)); i++) {
      bytes[i] = IsLoad(
          MachineType::Uint8(),
          IsParameter(InterpreterDispatchDescriptor::kBytecodeArrayParameter),
          IsIntPtrAdd(
              IsParameter(
                  InterpreterDispatchDescriptor::kBytecodeOffsetParameter),
              IsIntPtrConstant(offset + kMsbOffset + kStep * i)));
    }
    return IsWord32Or(IsWord32Shl(bytes[0], IsInt32Constant(kBitsPerByte)),
                      bytes[1]);
  }
}

Matcher<Node*>
InterpreterAssemblerTest::InterpreterAssemblerForTest::IsSignedShortOperand(
    int offset) {
  Matcher<Node*> load_matcher;
  if (TargetSupportsUnalignedAccess()) {
    load_matcher = IsLoad(
        MachineType::Int16(),
        IsParameter(InterpreterDispatchDescriptor::kBytecodeArrayParameter),
        IsIntPtrAdd(
            IsParameter(
                InterpreterDispatchDescriptor::kBytecodeOffsetParameter),
            IsIntPtrConstant(offset)));
  } else {
#if V8_TARGET_LITTLE_ENDIAN
    const int kStep = -1;
    const int kMsbOffset = 1;
#elif V8_TARGET_BIG_ENDIAN
    const int kStep = 1;
    const int kMsbOffset = 0;
#else
#error "Unknown Architecture"
#endif
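    // Only the most significant byte is loaded signed, so its sign bit
    // propagates through the shift and the or.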
    Matcher<Node*> bytes[2];
    for (int i = 0; i < static_cast<int>(arraysize(bytes)); i++) {
      bytes[i] = IsLoad(
          (i == 0) ? MachineType::Int8() : MachineType::Uint8(),
          IsParameter(InterpreterDispatchDescriptor::kBytecodeArrayParameter),
          IsIntPtrAdd(
              IsParameter(
                  InterpreterDispatchDescriptor::kBytecodeOffsetParameter),
              IsIntPtrConstant(offset + kMsbOffset + kStep * i)));
    }
    load_matcher = IsWord32Or(
        IsWord32Shl(bytes[0], IsInt32Constant(kBitsPerByte)), bytes[1]);
  }

  if (kPointerSize == 8) {
    load_matcher = IsChangeInt32ToInt64(load_matcher);
  }
  return load_matcher;
}

Matcher<Node*>
InterpreterAssemblerTest::InterpreterAssemblerForTest::IsUnsignedQuadOperand(
    int offset) {
  if (TargetSupportsUnalignedAccess()) {
    return IsLoad(
        MachineType::Uint32(),
        IsParameter(InterpreterDispatchDescriptor::kBytecodeArrayParameter),
        IsIntPtrAdd(
            IsParameter(
                InterpreterDispatchDescriptor::kBytecodeOffsetParameter),
            IsIntPtrConstant(offset)));
  } else {
#if V8_TARGET_LITTLE_ENDIAN
    const int kStep = -1;
    const int kMsbOffset = 3;
#elif V8_TARGET_BIG_ENDIAN
    const int kStep = 1;
    const int kMsbOffset = 0;
#else
#error "Unknown Architecture"
#endif
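    // A 32-bit operand is likewise assembled from four byte loads.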
    Matcher<Node*> bytes[4];
    for (int i = 0; i < static_cast<int>(arraysize(bytes)); i++) {
      bytes[i] = IsLoad(
          MachineType::Uint8(),
          IsParameter(InterpreterDispatchDescriptor::kBytecodeArrayParameter),
          IsIntPtrAdd(
              IsParameter(
                  InterpreterDispatchDescriptor::kBytecodeOffsetParameter),
              IsIntPtrConstant(offset + kMsbOffset + kStep * i)));
    }
    return IsWord32Or(
        IsWord32Shl(bytes[0], IsInt32Constant(3 * kBitsPerByte)),
        IsWord32Or(
            IsWord32Shl(bytes[1], IsInt32Constant(2 * kBitsPerByte)),
            IsWord32Or(IsWord32Shl(bytes[2], IsInt32Constant(1 * kBitsPerByte)),
                       bytes[3])));
  }
}

Matcher<Node*>
InterpreterAssemblerTest::InterpreterAssemblerForTest::IsSignedQuadOperand(
    int offset) {
  Matcher<Node*> load_matcher;
  if (TargetSupportsUnalignedAccess()) {
    load_matcher = IsLoad(
        MachineType::Int32(),
        IsParameter(InterpreterDispatchDescriptor::kBytecodeArrayParameter),
        IsIntPtrAdd(
            IsParameter(
                InterpreterDispatchDescriptor::kBytecodeOffsetParameter),
            IsIntPtrConstant(offset)));
  } else {
#if V8_TARGET_LITTLE_ENDIAN
    const int kStep = -1;
    const int kMsbOffset = 3;
#elif V8_TARGET_BIG_ENDIAN
    const int kStep = 1;
    const int kMsbOffset = 0;
#else
#error "Unknown Architecture"
#endif
    Matcher<Node*> bytes[4];
    for (int i = 0; i < static_cast<int>(arraysize(bytes)); i++) {
      bytes[i] = IsLoad(
          (i == 0) ? MachineType::Int8() : MachineType::Uint8(),
          IsParameter(InterpreterDispatchDescriptor::kBytecodeArrayParameter),
          IsIntPtrAdd(
              IsParameter(
                  InterpreterDispatchDescriptor::kBytecodeOffsetParameter),
              IsIntPtrConstant(offset + kMsbOffset + kStep * i)));
    }
    load_matcher = IsWord32Or(
        IsWord32Shl(bytes[0], IsInt32Constant(3 * kBitsPerByte)),
        IsWord32Or(
            IsWord32Shl(bytes[1], IsInt32Constant(2 * kBitsPerByte)),
            IsWord32Or(IsWord32Shl(bytes[2], IsInt32Constant(1 * kBitsPerByte)),
                       bytes[3])));
  }

  if (kPointerSize == 8) {
    load_matcher = IsChangeInt32ToInt64(load_matcher);
  }
  return load_matcher;
}

Matcher<Node*>
InterpreterAssemblerTest::InterpreterAssemblerForTest::IsSignedOperand(
    int offset, OperandSize operand_size) {
  switch (operand_size) {
    case OperandSize::kByte:
      return IsSignedByteOperand(offset);
    case OperandSize::kShort:
      return IsSignedShortOperand(offset);
    case OperandSize::kQuad:
      return IsSignedQuadOperand(offset);
    case OperandSize::kNone:
      UNREACHABLE();
  }
  return nullptr;
}

Matcher<Node*>
InterpreterAssemblerTest::InterpreterAssemblerForTest::IsUnsignedOperand(
    int offset, OperandSize operand_size) {
  switch (operand_size) {
    case OperandSize::kByte:
      return IsUnsignedByteOperand(offset);
    case OperandSize::kShort:
      return IsUnsignedShortOperand(offset);
    case OperandSize::kQuad:
      return IsUnsignedQuadOperand(offset);
    case OperandSize::kNone:
      UNREACHABLE();
  }
  return nullptr;
}

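// Dispatch should advance the bytecode offset, load the next bytecode, index
// into the dispatch table, and tail-call the target handler with the
// dispatch parameters.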
TARGET_TEST_F(InterpreterAssemblerTest, Dispatch) {
  TRACED_FOREACH(interpreter::Bytecode, bytecode, kBytecodes) {
    InterpreterAssemblerForTest m(this, bytecode);
    Node* tail_call_node = m.Dispatch();

    OperandScale operand_scale = OperandScale::kSingle;
    Matcher<Node*> next_bytecode_offset_matcher = IsIntPtrAdd(
        IsParameter(InterpreterDispatchDescriptor::kBytecodeOffsetParameter),
        IsIntPtrConstant(
            interpreter::Bytecodes::Size(bytecode, operand_scale)));
    Matcher<Node*> target_bytecode_matcher = m.IsLoad(
        MachineType::Uint8(),
        IsParameter(InterpreterDispatchDescriptor::kBytecodeArrayParameter),
        next_bytecode_offset_matcher);
    if (kPointerSize == 8) {
      target_bytecode_matcher = IsChangeUint32ToUint64(target_bytecode_matcher);
    }
    Matcher<Node*> code_target_matcher = m.IsLoad(
        MachineType::Pointer(),
        IsParameter(InterpreterDispatchDescriptor::kDispatchTableParameter),
        IsWordShl(target_bytecode_matcher, IsIntPtrConstant(kPointerSizeLog2)));

    EXPECT_THAT(
        tail_call_node,
        IsTailCall(
            _, code_target_matcher,
            IsParameter(InterpreterDispatchDescriptor::kAccumulatorParameter),
            next_bytecode_offset_matcher,
            IsParameter(InterpreterDispatchDescriptor::kBytecodeArrayParameter),
            IsParameter(InterpreterDispatchDescriptor::kDispatchTableParameter),
            _, _));
  }
}

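// Jump dispatches like Dispatch, but to the bytecode at the given relative
// offset rather than to the next bytecode in sequence.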
TARGET_TEST_F(InterpreterAssemblerTest, Jump) {
  // If debug code is enabled, we emit extra code in Jump.
  if (FLAG_debug_code) return;

  int jump_offsets[] = {-9710, -77, 0, +3, +97109};
  TRACED_FOREACH(int, jump_offset, jump_offsets) {
    TRACED_FOREACH(interpreter::Bytecode, bytecode, kBytecodes) {
      InterpreterAssemblerForTest m(this, bytecode);
      Node* tail_call_node = m.Jump(m.IntPtrConstant(jump_offset));

      Matcher<Node*> next_bytecode_offset_matcher = IsIntPtrAdd(
          IsParameter(InterpreterDispatchDescriptor::kBytecodeOffsetParameter),
          IsIntPtrConstant(jump_offset));
      Matcher<Node*> target_bytecode_matcher =
          m.IsLoad(MachineType::Uint8(), _, next_bytecode_offset_matcher);
      if (kPointerSize == 8) {
        target_bytecode_matcher =
            IsChangeUint32ToUint64(target_bytecode_matcher);
      }
      Matcher<Node*> code_target_matcher = m.IsLoad(
          MachineType::Pointer(),
          IsParameter(InterpreterDispatchDescriptor::kDispatchTableParameter),
          IsWordShl(target_bytecode_matcher,
                    IsIntPtrConstant(kPointerSizeLog2)));

      EXPECT_THAT(
          tail_call_node,
          IsTailCall(
              _, code_target_matcher,
              IsParameter(InterpreterDispatchDescriptor::kAccumulatorParameter),
              next_bytecode_offset_matcher, _,
              IsParameter(
                  InterpreterDispatchDescriptor::kDispatchTableParameter),
              _, _));
    }
  }
}

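// Each operand accessor should emit the signed or unsigned load that matches
// the operand's type, size, and offset, at every operand scale.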
TARGET_TEST_F(InterpreterAssemblerTest, BytecodeOperand) {
  static const OperandScale kOperandScales[] = {
      OperandScale::kSingle, OperandScale::kDouble, OperandScale::kQuadruple};
  TRACED_FOREACH(interpreter::Bytecode, bytecode, kBytecodes) {
    TRACED_FOREACH(interpreter::OperandScale, operand_scale, kOperandScales) {
      InterpreterAssemblerForTest m(this, bytecode, operand_scale);
      int number_of_operands =
          interpreter::Bytecodes::NumberOfOperands(bytecode);
      for (int i = 0; i < number_of_operands; i++) {
        int offset = interpreter::Bytecodes::GetOperandOffset(bytecode, i,
                                                              operand_scale);
        OperandType operand_type =
            interpreter::Bytecodes::GetOperandType(bytecode, i);
        OperandSize operand_size =
            Bytecodes::SizeOfOperand(operand_type, operand_scale);
        switch (operand_type) {
          case interpreter::OperandType::kRegCount:
            EXPECT_THAT(m.BytecodeOperandCount(i),
                        m.IsUnsignedOperand(offset, operand_size));
            break;
          case interpreter::OperandType::kFlag8:
            EXPECT_THAT(m.BytecodeOperandFlag(i),
                        m.IsUnsignedOperand(offset, operand_size));
            break;
          case interpreter::OperandType::kIdx:
            EXPECT_THAT(m.BytecodeOperandIdx(i),
                        m.IsUnsignedOperand(offset, operand_size));
            break;
          case interpreter::OperandType::kImm: {
            EXPECT_THAT(m.BytecodeOperandImm(i),
                        m.IsSignedOperand(offset, operand_size));
            break;
          }
          case interpreter::OperandType::kMaybeReg:
          case interpreter::OperandType::kReg:
          case interpreter::OperandType::kRegOut:
          case interpreter::OperandType::kRegOutPair:
          case interpreter::OperandType::kRegOutTriple:
          case interpreter::OperandType::kRegPair:
            EXPECT_THAT(m.BytecodeOperandReg(i),
                        m.IsSignedOperand(offset, operand_size));
            break;
          case interpreter::OperandType::kRuntimeId:
            EXPECT_THAT(m.BytecodeOperandRuntimeId(i),
                        m.IsUnsignedOperand(offset, operand_size));
            break;
          case interpreter::OperandType::kIntrinsicId:
            EXPECT_THAT(m.BytecodeOperandIntrinsicId(i),
                        m.IsUnsignedOperand(offset, operand_size));
            break;
          case interpreter::OperandType::kNone:
            UNREACHABLE();
            break;
        }
      }
    }
  }
}

TARGET_TEST_F(InterpreterAssemblerTest, GetSetAccumulator) {
  TRACED_FOREACH(interpreter::Bytecode, bytecode, kBytecodes) {
    if (!interpreter::Bytecodes::ReadsAccumulator(bytecode) ||
        !interpreter::Bytecodes::WritesAccumulator(bytecode)) {
      continue;
    }

    InterpreterAssemblerForTest m(this, bytecode);
    // Should be the incoming accumulator if not explicitly set.
    EXPECT_THAT(
        m.GetAccumulator(),
        IsParameter(InterpreterDispatchDescriptor::kAccumulatorParameter));
    // Should be set by SetAccumulator.
    Node* accumulator_value_1 = m.Int32Constant(0xdeadbeef);
    m.SetAccumulator(accumulator_value_1);
    EXPECT_THAT(m.GetAccumulator(), accumulator_value_1);
    Node* accumulator_value_2 = m.Int32Constant(42);
    m.SetAccumulator(accumulator_value_2);
    EXPECT_THAT(m.GetAccumulator(), accumulator_value_2);

    // Should be passed to the next bytecode handler on dispatch.
    Node* tail_call_node = m.Dispatch();

    EXPECT_THAT(tail_call_node,
                IsTailCall(_, _, accumulator_value_2, _, _, _, _));
  }
}

TARGET_TEST_F(InterpreterAssemblerTest, GetContext) {
  TRACED_FOREACH(interpreter::Bytecode, bytecode, kBytecodes) {
    InterpreterAssemblerForTest m(this, bytecode);
    EXPECT_THAT(
        m.GetContext(),
        m.IsLoad(MachineType::AnyTagged(), IsLoadParentFramePointer(),
                 IsIntPtrConstant(Register::current_context().ToOperand()
                                  << kPointerSizeLog2)));
  }
}

TARGET_TEST_F(InterpreterAssemblerTest, RegisterLocation) {
  TRACED_FOREACH(interpreter::Bytecode, bytecode, kBytecodes) {
    InterpreterAssemblerForTest m(this, bytecode);
    Node* reg_index_node = m.IntPtrConstant(44);
    Node* reg_location_node = m.RegisterLocation(reg_index_node);
    EXPECT_THAT(reg_location_node,
                IsIntPtrAdd(IsLoadParentFramePointer(),
                            IsWordShl(reg_index_node,
                                      IsIntPtrConstant(kPointerSizeLog2))));
  }
}

TARGET_TEST_F(InterpreterAssemblerTest, LoadRegister) {
  TRACED_FOREACH(interpreter::Bytecode, bytecode, kBytecodes) {
    InterpreterAssemblerForTest m(this, bytecode);
    Node* reg_index_node = m.IntPtrConstant(44);
    Node* load_reg_node = m.LoadRegister(reg_index_node);
    EXPECT_THAT(load_reg_node,
                m.IsLoad(MachineType::AnyTagged(), IsLoadParentFramePointer(),
                         IsWordShl(reg_index_node,
                                   IsIntPtrConstant(kPointerSizeLog2))));
  }
}

TARGET_TEST_F(InterpreterAssemblerTest, StoreRegister) {
  TRACED_FOREACH(interpreter::Bytecode, bytecode, kBytecodes) {
    InterpreterAssemblerForTest m(this, bytecode);
    Node* store_value = m.Int32Constant(0xdeadbeef);
    Node* reg_index_node = m.IntPtrConstant(44);
    Node* store_reg_node = m.StoreRegister(store_value, reg_index_node);
    EXPECT_THAT(
        store_reg_node,
        m.IsStore(StoreRepresentation(MachineRepresentation::kTagged,
                                      kNoWriteBarrier),
                  IsLoadParentFramePointer(),
                  IsWordShl(reg_index_node, IsIntPtrConstant(kPointerSizeLog2)),
                  store_value));
  }
}

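// SmiTag of a constant is expected to fold to an intptr constant; SmiUntag
// should emit an arithmetic right shift by the tag and shift size.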
TARGET_TEST_F(InterpreterAssemblerTest, SmiTag) {
  TRACED_FOREACH(interpreter::Bytecode, bytecode, kBytecodes) {
    InterpreterAssemblerForTest m(this, bytecode);
    Node* value = m.Int32Constant(44);
    EXPECT_THAT(m.SmiTag(value),
                IsIntPtrConstant(static_cast<intptr_t>(44)
                                 << (kSmiShiftSize + kSmiTagSize)));
    EXPECT_THAT(
        m.SmiUntag(value),
        IsWordSar(value, IsIntPtrConstant(kSmiShiftSize + kSmiTagSize)));
  }
}

TARGET_TEST_F(InterpreterAssemblerTest, IntPtrAdd) {
  TRACED_FOREACH(interpreter::Bytecode, bytecode, kBytecodes) {
    InterpreterAssemblerForTest m(this, bytecode);
    Node* a = m.Int32Constant(0);
    Node* b = m.Int32Constant(1);
    Node* add = m.IntPtrAdd(a, b);
    EXPECT_THAT(add, IsIntPtrAdd(a, b));
  }
}

TARGET_TEST_F(InterpreterAssemblerTest, IntPtrSub) {
  TRACED_FOREACH(interpreter::Bytecode, bytecode, kBytecodes) {
    InterpreterAssemblerForTest m(this, bytecode);
    Node* a = m.Int32Constant(0);
    Node* b = m.Int32Constant(1);
    Node* sub = m.IntPtrSub(a, b);
    EXPECT_THAT(sub, IsIntPtrSub(a, b));
  }
}

TARGET_TEST_F(InterpreterAssemblerTest, WordShl) {
  TRACED_FOREACH(interpreter::Bytecode, bytecode, kBytecodes) {
    InterpreterAssemblerForTest m(this, bytecode);
    Node* a = m.IntPtrConstant(0);
    Node* shl = m.WordShl(a, 10);
    EXPECT_THAT(shl, IsWordShl(a, IsIntPtrConstant(10)));
  }
}

TARGET_TEST_F(InterpreterAssemblerTest, LoadConstantPoolEntry) {
  TRACED_FOREACH(interpreter::Bytecode, bytecode, kBytecodes) {
    InterpreterAssemblerForTest m(this, bytecode);
    Node* index = m.IntPtrConstant(2);
    Node* load_constant = m.LoadConstantPoolEntry(index);
    Matcher<Node*> constant_pool_matcher = m.IsLoad(
        MachineType::AnyTagged(),
        IsParameter(InterpreterDispatchDescriptor::kBytecodeArrayParameter),
        IsIntPtrConstant(BytecodeArray::kConstantPoolOffset - kHeapObjectTag));
    EXPECT_THAT(
        load_constant,
        m.IsLoad(MachineType::AnyTagged(), constant_pool_matcher,
                 IsIntPtrAdd(
                     IsIntPtrConstant(FixedArray::kHeaderSize - kHeapObjectTag),
                     IsWordShl(index, IsIntPtrConstant(kPointerSizeLog2)))));
  }
}

TARGET_TEST_F(InterpreterAssemblerTest, LoadObjectField) {
  TRACED_FOREACH(interpreter::Bytecode, bytecode, kBytecodes) {
    InterpreterAssemblerForTest m(this, bytecode);
    Node* object = m.IntPtrConstant(0xdeadbeef);
    int offset = 16;
    Node* load_field = m.LoadObjectField(object, offset);
    EXPECT_THAT(load_field,
                m.IsLoad(MachineType::AnyTagged(), object,
                         IsIntPtrConstant(offset - kHeapObjectTag)));
  }
}

TARGET_TEST_F(InterpreterAssemblerTest, LoadContextSlot) {
  TRACED_FOREACH(interpreter::Bytecode, bytecode, kBytecodes) {
    InterpreterAssemblerForTest m(this, bytecode);
    Node* context = m.IntPtrConstant(1);
    Node* slot_index = m.IntPtrConstant(22);
    Node* load_context_slot = m.LoadContextSlot(context, slot_index);

    Matcher<Node*> offset =
        IsIntPtrAdd(IsWordShl(slot_index, IsIntPtrConstant(kPointerSizeLog2)),
                    IsIntPtrConstant(Context::kHeaderSize - kHeapObjectTag));
    EXPECT_THAT(load_context_slot,
                m.IsLoad(MachineType::AnyTagged(), context, offset));
  }
}

TARGET_TEST_F(InterpreterAssemblerTest, StoreContextSlot) {
  TRACED_FOREACH(interpreter::Bytecode, bytecode, kBytecodes) {
    InterpreterAssemblerForTest m(this, bytecode);
    Node* context = m.IntPtrConstant(1);
    Node* slot_index = m.IntPtrConstant(22);
    Node* value = m.SmiConstant(Smi::FromInt(100));
    Node* store_context_slot = m.StoreContextSlot(context, slot_index, value);

    Matcher<Node*> offset =
        IsIntPtrAdd(IsWordShl(slot_index, IsIntPtrConstant(kPointerSizeLog2)),
                    IsIntPtrConstant(Context::kHeaderSize - kHeapObjectTag));
    EXPECT_THAT(store_context_slot,
                m.IsStore(StoreRepresentation(MachineRepresentation::kTagged,
                                              kFullWriteBarrier),
                          context, offset, value));
  }
}

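// A fixed-arity runtime call should receive both arguments, the argument
// count (2), and the context among the call inputs.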
TARGET_TEST_F(InterpreterAssemblerTest, CallRuntime2) {
  TRACED_FOREACH(interpreter::Bytecode, bytecode, kBytecodes) {
    InterpreterAssemblerForTest m(this, bytecode);
    Node* arg1 = m.Int32Constant(2);
    Node* arg2 = m.Int32Constant(3);
    Node* context = m.Int32Constant(4);
    Node* call_runtime = m.CallRuntime(Runtime::kAdd, context, arg1, arg2);
    EXPECT_THAT(call_runtime,
                IsCall(_, _, arg1, arg2, _, IsInt32Constant(2), context, _, _));
  }
}

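// CallRuntimeN should look up the Runtime::Function entry in the runtime
// function table and call it through the InterpreterCEntry stub.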
TARGET_TEST_F(InterpreterAssemblerTest, CallRuntime) {
  const int kResultSizes[] = {1, 2};
  TRACED_FOREACH(interpreter::Bytecode, bytecode, kBytecodes) {
    TRACED_FOREACH(int, result_size, kResultSizes) {
      InterpreterAssemblerForTest m(this, bytecode);
      Callable builtin = CodeFactory::InterpreterCEntry(isolate(), result_size);

      Node* function_id = m.Int32Constant(0);
      Node* first_arg = m.Int32Constant(1);
      Node* arg_count = m.Int32Constant(2);
      Node* context = m.Int32Constant(4);

      Matcher<Node*> function_table = IsExternalConstant(
          ExternalReference::runtime_function_table_address(isolate()));
      Matcher<Node*> function = IsIntPtrAdd(
          function_table,
          IsInt32Mul(function_id, IsInt32Constant(sizeof(Runtime::Function))));
      Matcher<Node*> function_entry =
          m.IsLoad(MachineType::Pointer(), function,
                   IsIntPtrConstant(offsetof(Runtime::Function, entry)));

      Node* call_runtime = m.CallRuntimeN(function_id, context, first_arg,
                                          arg_count, result_size);
      EXPECT_THAT(call_runtime,
                  IsCall(_, IsHeapConstant(builtin.code()), arg_count,
                         first_arg, function_entry, context, _, _));
    }
  }
}

TARGET_TEST_F(InterpreterAssemblerTest, CallJS) {
  TailCallMode tail_call_modes[] = {TailCallMode::kDisallow,
                                    TailCallMode::kAllow};
  TRACED_FOREACH(TailCallMode, tail_call_mode, tail_call_modes) {
    TRACED_FOREACH(interpreter::Bytecode, bytecode, kBytecodes) {
      InterpreterAssemblerForTest m(this, bytecode);
      Callable builtin =
          CodeFactory::InterpreterPushArgsAndCall(isolate(), tail_call_mode);
      Node* function = m.Int32Constant(0);
      Node* first_arg = m.Int32Constant(1);
      Node* arg_count = m.Int32Constant(2);
      Node* context = m.Int32Constant(3);
      Node* call_js =
          m.CallJS(function, context, first_arg, arg_count, tail_call_mode);
      EXPECT_THAT(call_js, IsCall(_, IsHeapConstant(builtin.code()), arg_count,
                                  first_arg, function, context, _, _));
    }
  }
}

TARGET_TEST_F(InterpreterAssemblerTest, LoadTypeFeedbackVector) {
  TRACED_FOREACH(interpreter::Bytecode, bytecode, kBytecodes) {
    InterpreterAssemblerForTest m(this, bytecode);
    Node* feedback_vector = m.LoadTypeFeedbackVector();

    Matcher<Node*> load_function_matcher =
        m.IsLoad(MachineType::AnyTagged(), IsLoadParentFramePointer(),
                 IsIntPtrConstant(Register::function_closure().ToOperand()
                                  << kPointerSizeLog2));
    Matcher<Node*> load_literals_matcher = m.IsLoad(
        MachineType::AnyTagged(), load_function_matcher,
        IsIntPtrConstant(JSFunction::kLiteralsOffset - kHeapObjectTag));

    EXPECT_THAT(feedback_vector,
                m.IsLoad(MachineType::AnyTagged(), load_literals_matcher,
                         IsIntPtrConstant(LiteralsArray::kFeedbackVectorOffset -
                                          kHeapObjectTag)));
  }
}

}  // namespace interpreter
}  // namespace internal
}  // namespace v8