1 // Copyright 2021 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4 
5 // TODO(v8:11421): Remove #if once baseline compiler is ported to other
6 // architectures.
7 #include "src/flags/flags.h"
8 #if ENABLE_SPARKPLUG
9 
10 #include <algorithm>
11 #include <type_traits>
12 
13 #include "src/base/bits.h"
14 #include "src/baseline/baseline-assembler-inl.h"
15 #include "src/baseline/baseline-assembler.h"
16 #include "src/baseline/baseline-compiler.h"
17 #include "src/builtins/builtins-constructor.h"
18 #include "src/builtins/builtins-descriptors.h"
19 #include "src/builtins/builtins.h"
20 #include "src/codegen/assembler.h"
21 #include "src/codegen/compiler.h"
22 #include "src/codegen/interface-descriptors-inl.h"
23 #include "src/codegen/machine-type.h"
24 #include "src/codegen/macro-assembler-inl.h"
25 #include "src/common/globals.h"
26 #include "src/execution/frame-constants.h"
27 #include "src/heap/local-factory-inl.h"
28 #include "src/interpreter/bytecode-array-iterator.h"
29 #include "src/interpreter/bytecode-flags.h"
30 #include "src/logging/runtime-call-stats-scope.h"
31 #include "src/objects/code.h"
32 #include "src/objects/heap-object.h"
33 #include "src/objects/instance-type.h"
34 #include "src/objects/literal-objects-inl.h"
35 #include "src/objects/shared-function-info-inl.h"
36 #include "src/roots/roots.h"
37 
38 #if V8_TARGET_ARCH_X64
39 #include "src/baseline/x64/baseline-compiler-x64-inl.h"
40 #elif V8_TARGET_ARCH_ARM64
41 #include "src/baseline/arm64/baseline-compiler-arm64-inl.h"
42 #elif V8_TARGET_ARCH_IA32
43 #include "src/baseline/ia32/baseline-compiler-ia32-inl.h"
44 #elif V8_TARGET_ARCH_ARM
45 #include "src/baseline/arm/baseline-compiler-arm-inl.h"
46 #elif V8_TARGET_ARCH_PPC64
47 #include "src/baseline/ppc/baseline-compiler-ppc-inl.h"
48 #elif V8_TARGET_ARCH_S390X
49 #include "src/baseline/s390/baseline-compiler-s390-inl.h"
50 #elif V8_TARGET_ARCH_RISCV64
51 #include "src/baseline/riscv64/baseline-compiler-riscv64-inl.h"
52 #elif V8_TARGET_ARCH_MIPS64
53 #include "src/baseline/mips64/baseline-compiler-mips64-inl.h"
54 #elif V8_TARGET_ARCH_MIPS
55 #include "src/baseline/mips/baseline-compiler-mips-inl.h"
56 #elif V8_TARGET_ARCH_LOONG64
57 #include "src/baseline/loong64/baseline-compiler-loong64-inl.h"
58 #else
59 #error Unsupported target architecture.
60 #endif
61 
62 namespace v8 {
63 namespace internal {
64 namespace baseline {
65 
66 template <typename IsolateT>
67 Handle<ByteArray> BytecodeOffsetTableBuilder::ToBytecodeOffsetTable(
68     IsolateT* isolate) {
69   if (bytes_.empty()) return isolate->factory()->empty_byte_array();
70   Handle<ByteArray> table = isolate->factory()->NewByteArray(
71       static_cast<int>(bytes_.size()), AllocationType::kOld);
72   MemCopy(table->GetDataStartAddress(), bytes_.data(), bytes_.size());
73   return table;
74 }
75 
76 namespace detail {
77 
78 #ifdef DEBUG
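// Debug-only helpers: Clobbers(target, arg) reports whether moving a value
// into |target| would overwrite |arg| before it has been consumed. Only a
// plain Register aliasing the target can be clobbered; immediates, handles
// and interpreter registers are read from memory or encoded inline.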
79 bool Clobbers(Register target, Register reg) { return target == reg; }
80 bool Clobbers(Register target, Handle<Object> handle) { return false; }
81 bool Clobbers(Register target, Smi smi) { return false; }
82 bool Clobbers(Register target, TaggedIndex index) { return false; }
83 bool Clobbers(Register target, int32_t imm) { return false; }
84 bool Clobbers(Register target, RootIndex index) { return false; }
85 bool Clobbers(Register target, interpreter::Register reg) { return false; }
86 bool Clobbers(Register target, interpreter::RegisterList list) { return false; }
87 
88 // We don't know what's inside machine registers or operands, so assume they
89 // match.
90 bool MachineTypeMatches(MachineType type, Register reg) { return true; }
91 bool MachineTypeMatches(MachineType type, MemOperand reg) { return true; }
92 bool MachineTypeMatches(MachineType type, Handle<HeapObject> handle) {
93   return type.IsTagged() && !type.IsTaggedSigned();
94 }
95 bool MachineTypeMatches(MachineType type, Smi handle) {
96   return type.IsTagged() && !type.IsTaggedPointer();
97 }
98 bool MachineTypeMatches(MachineType type, TaggedIndex handle) {
99   // TaggedIndex doesn't have a separate type, so check for the same type as for
100   // Smis.
101   return type.IsTagged() && !type.IsTaggedPointer();
102 }
103 bool MachineTypeMatches(MachineType type, int32_t imm) {
104   // 32-bit immediates can be used for 64-bit params -- they'll be
105   // zero-extended.
106   return type.representation() == MachineRepresentation::kWord32 ||
107          type.representation() == MachineRepresentation::kWord64;
108 }
109 bool MachineTypeMatches(MachineType type, RootIndex index) {
110   return type.IsTagged() && !type.IsTaggedSigned();
111 }
112 bool MachineTypeMatches(MachineType type, interpreter::Register reg) {
113   return type.IsTagged();
114 }
115 
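// CheckArgsHelper recursively walks the C++ argument pack of a builtin call
// and verifies (in debug builds) that each argument is compatible with the
// MachineType the builtin's descriptor declares for that parameter;
// RegisterLists are expanded one interpreter register at a time.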
116 template <typename Descriptor, typename... Args>
117 struct CheckArgsHelper;
118 
119 template <typename Descriptor>
120 struct CheckArgsHelper<Descriptor> {
121   static void Check(BaselineAssembler* masm, int i) {
122     if (Descriptor::AllowVarArgs()) {
123       CHECK_GE(i, Descriptor::GetParameterCount());
124     } else {
125       CHECK_EQ(i, Descriptor::GetParameterCount());
126     }
127   }
128 };
129 
130 template <typename Descriptor, typename Arg, typename... Args>
131 struct CheckArgsHelper<Descriptor, Arg, Args...> {
132   static void Check(BaselineAssembler* masm, int i, Arg arg, Args... args) {
133     if (i >= Descriptor::GetParameterCount()) {
134       CHECK(Descriptor::AllowVarArgs());
135       return;
136     }
137     CHECK(MachineTypeMatches(Descriptor().GetParameterType(i), arg));
138     CheckArgsHelper<Descriptor, Args...>::Check(masm, i + 1, args...);
139   }
140 };
141 
142 template <typename Descriptor, typename... Args>
143 struct CheckArgsHelper<Descriptor, interpreter::RegisterList, Args...> {
144   static void Check(BaselineAssembler* masm, int i,
145                     interpreter::RegisterList list, Args... args) {
146     for (int reg_index = 0; reg_index < list.register_count();
147          ++reg_index, ++i) {
148       if (i >= Descriptor::GetParameterCount()) {
149         CHECK(Descriptor::AllowVarArgs());
150         return;
151       }
152       CHECK(MachineTypeMatches(Descriptor().GetParameterType(i),
153                                list[reg_index]));
154     }
155     CheckArgsHelper<Descriptor, Args...>::Check(masm, i, args...);
156   }
157 };
158 
159 template <typename Descriptor, typename... Args>
160 void CheckArgs(BaselineAssembler* masm, Args... args) {
161   CheckArgsHelper<Descriptor, Args...>::Check(masm, 0, args...);
162 }
163 
164 void CheckSettingDoesntClobber(Register target) {}
165 template <typename Arg, typename... Args>
166 void CheckSettingDoesntClobber(Register target, Arg arg, Args... args) {
167   DCHECK(!Clobbers(target, arg));
168   CheckSettingDoesntClobber(target, args...);
169 }
170 
171 #else  // DEBUG
172 
173 template <typename Descriptor, typename... Args>
174 void CheckArgs(Args... args) {}
175 
176 template <typename... Args>
177 void CheckSettingDoesntClobber(Register target, Args... args) {}
178 
179 #endif  // DEBUG
180 
181 template <typename Descriptor, int ArgIndex, bool kIsRegister, typename... Args>
182 struct ArgumentSettingHelper;
183 
184 template <typename Descriptor, int ArgIndex, bool kIsRegister>
185 struct ArgumentSettingHelper<Descriptor, ArgIndex, kIsRegister> {
186   static void Set(BaselineAssembler* masm) {
187     // Should only ever be called for the end of register arguments.
188     STATIC_ASSERT(ArgIndex == Descriptor::GetRegisterParameterCount());
189   }
190 };
191 
192 template <typename Descriptor, int ArgIndex, typename Arg, typename... Args>
193 struct ArgumentSettingHelper<Descriptor, ArgIndex, true, Arg, Args...> {
194   static void Set(BaselineAssembler* masm, Arg arg, Args... args) {
195     STATIC_ASSERT(ArgIndex < Descriptor::GetRegisterParameterCount());
196     Register target = Descriptor::GetRegisterParameter(ArgIndex);
197     CheckSettingDoesntClobber(target, args...);
198     masm->Move(target, arg);
199     ArgumentSettingHelper<Descriptor, ArgIndex + 1,
200                           (ArgIndex + 1 <
201                            Descriptor::GetRegisterParameterCount()),
202                           Args...>::Set(masm, args...);
203   }
204 };
205 
206 template <typename Descriptor, int ArgIndex>
207 struct ArgumentSettingHelper<Descriptor, ArgIndex, true,
208                              interpreter::RegisterList> {
209   static void Set(BaselineAssembler* masm, interpreter::RegisterList list) {
210     STATIC_ASSERT(ArgIndex < Descriptor::GetRegisterParameterCount());
211     DCHECK_EQ(ArgIndex + list.register_count(),
212               Descriptor::GetRegisterParameterCount());
213     for (int i = 0; ArgIndex + i < Descriptor::GetRegisterParameterCount();
214          ++i) {
215       Register target = Descriptor::GetRegisterParameter(ArgIndex + i);
216       masm->Move(target, masm->RegisterFrameOperand(list[i]));
217     }
218   }
219 };
220 
221 template <typename Descriptor, int ArgIndex, typename Arg, typename... Args>
222 struct ArgumentSettingHelper<Descriptor, ArgIndex, false, Arg, Args...> {
223   static void Set(BaselineAssembler* masm, Arg arg, Args... args) {
224     if (Descriptor::kStackArgumentOrder == StackArgumentOrder::kDefault) {
225       masm->Push(arg, args...);
226     } else {
227       masm->PushReverse(arg, args...);
228     }
229   }
230 };
231 
232 template <Builtin kBuiltin, typename... Args>
233 void MoveArgumentsForBuiltin(BaselineAssembler* masm, Args... args) {
234   using Descriptor = typename CallInterfaceDescriptorFor<kBuiltin>::type;
235   CheckArgs<Descriptor>(masm, args...);
236   ArgumentSettingHelper<Descriptor, 0,
237                         (0 < Descriptor::GetRegisterParameterCount()),
238                         Args...>::Set(masm, args...);
239   if (Descriptor::HasContextParameter()) {
240     masm->LoadContext(Descriptor::ContextRegister());
241   }
242 }
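// Illustrative sketch (not part of the original source): for a hypothetical
// descriptor with two register parameters and trailing stack parameters, a
// call such as
//
//   detail::MoveArgumentsForBuiltin<kSomeBuiltin>(masm, a, b, c);
//
// roughly expands to
//
//   masm->Move(Descriptor::GetRegisterParameter(0), a);
//   masm->Move(Descriptor::GetRegisterParameter(1), b);
//   masm->Push(c);  // or PushReverse, depending on kStackArgumentOrder
//   masm->LoadContext(Descriptor::ContextRegister());  // if it has a context
//
// kSomeBuiltin is a placeholder; the exact register/stack split is determined
// by the builtin's CallInterfaceDescriptor, and the Clobbers /
// MachineTypeMatches checks above validate the arguments in debug builds.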
243 
244 }  // namespace detail
245 
246 namespace {
247 // Rough upper-bound estimate. Copying the data is most likely more expensive
248 // than pre-allocating a large enough buffer.
249 #ifdef V8_TARGET_ARCH_IA32
250 const int kAverageBytecodeToInstructionRatio = 5;
251 #else
252 const int kAverageBytecodeToInstructionRatio = 7;
253 #endif
254 std::unique_ptr<AssemblerBuffer> AllocateBuffer(
255     Handle<BytecodeArray> bytecodes) {
256   int estimated_size;
257   {
258     DisallowHeapAllocation no_gc;
259     estimated_size = BaselineCompiler::EstimateInstructionSize(*bytecodes);
260   }
261   return NewAssemblerBuffer(RoundUp(estimated_size, 4 * KB));
262 }
263 }  // namespace
264 
265 BaselineCompiler::BaselineCompiler(
266     LocalIsolate* local_isolate,
267     Handle<SharedFunctionInfo> shared_function_info,
268     Handle<BytecodeArray> bytecode)
269     : local_isolate_(local_isolate),
270       stats_(local_isolate->runtime_call_stats()),
271       shared_function_info_(shared_function_info),
272       bytecode_(bytecode),
273       masm_(local_isolate->GetMainThreadIsolateUnsafe(),
274             CodeObjectRequired::kNo, AllocateBuffer(bytecode)),
275       basm_(&masm_),
276       iterator_(bytecode_),
277       zone_(local_isolate->allocator(), ZONE_NAME),
278       labels_(zone_.NewArray<BaselineLabels*>(bytecode_->length())) {
279   MemsetPointer(labels_, nullptr, bytecode_->length());
280 
281   // Empirically determined expected size of the offset table at the 95th %ile,
282   // based on the size of the bytecode, to be:
283   //
284   //   16 + (bytecode size) / 4
285   bytecode_offset_table_builder_.Reserve(
286       base::bits::RoundUpToPowerOfTwo(16 + bytecode_->Size() / 4));
287 }
288 
289 #define __ basm_.
290 
291 #define RCS_BASELINE_SCOPE(rcs)                               \
292   RCS_SCOPE(stats_,                                           \
293             local_isolate_->is_main_thread()                  \
294                 ? RuntimeCallCounterId::kCompileBaseline##rcs \
295                 : RuntimeCallCounterId::kCompileBackgroundBaseline##rcs)
296 
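// Code generation makes two passes over the bytecode: PreVisitSingleBytecode
// records loop jump targets and the maximum call argument count, then the
// main pass emits baseline code for each bytecode via VisitSingleBytecode.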
297 void BaselineCompiler::GenerateCode() {
298   {
299     RCS_BASELINE_SCOPE(PreVisit);
300     for (; !iterator_.done(); iterator_.Advance()) {
301       PreVisitSingleBytecode();
302     }
303     iterator_.Reset();
304   }
305 
306   // No code generated yet.
307   DCHECK_EQ(__ pc_offset(), 0);
308   __ CodeEntry();
309 
310   {
311     RCS_BASELINE_SCOPE(Visit);
312     Prologue();
313     AddPosition();
314     for (; !iterator_.done(); iterator_.Advance()) {
315       VisitSingleBytecode();
316       AddPosition();
317     }
318   }
319 }
320 
321 MaybeHandle<Code> BaselineCompiler::Build(LocalIsolate* local_isolate) {
322   CodeDesc desc;
323   __ GetCode(local_isolate->GetMainThreadIsolateUnsafe(), &desc);
324 
325   // Allocate the bytecode offset table.
326   Handle<ByteArray> bytecode_offset_table =
327       bytecode_offset_table_builder_.ToBytecodeOffsetTable(local_isolate);
328 
329   Factory::CodeBuilder code_builder(local_isolate, desc, CodeKind::BASELINE);
330   code_builder.set_bytecode_offset_table(bytecode_offset_table);
331   if (shared_function_info_->HasInterpreterData()) {
332     code_builder.set_interpreter_data(
333         handle(shared_function_info_->interpreter_data(), local_isolate));
334   } else {
335     code_builder.set_interpreter_data(bytecode_);
336   }
337   return code_builder.TryBuild();
338 }
339 
340 int BaselineCompiler::EstimateInstructionSize(BytecodeArray bytecode) {
341   return bytecode.length() * kAverageBytecodeToInstructionRatio;
342 }
343 
344 interpreter::Register BaselineCompiler::RegisterOperand(int operand_index) {
345   return iterator().GetRegisterOperand(operand_index);
346 }
347 
348 void BaselineCompiler::LoadRegister(Register output, int operand_index) {
349   __ LoadRegister(output, RegisterOperand(operand_index));
350 }
351 
352 void BaselineCompiler::StoreRegister(int operand_index, Register value) {
353   __ Move(RegisterOperand(operand_index), value);
354 }
355 
356 void BaselineCompiler::StoreRegisterPair(int operand_index, Register val0,
357                                          Register val1) {
358   interpreter::Register reg0, reg1;
359   std::tie(reg0, reg1) = iterator().GetRegisterPairOperand(operand_index);
360   __ StoreRegister(reg0, val0);
361   __ StoreRegister(reg1, val1);
362 }
363 template <typename Type>
364 Handle<Type> BaselineCompiler::Constant(int operand_index) {
365   return Handle<Type>::cast(
366       iterator().GetConstantForIndexOperand(operand_index, local_isolate_));
367 }
368 Smi BaselineCompiler::ConstantSmi(int operand_index) {
369   return iterator().GetConstantAtIndexAsSmi(operand_index);
370 }
371 template <typename Type>
372 void BaselineCompiler::LoadConstant(Register output, int operand_index) {
373   __ Move(output, Constant<Type>(operand_index));
374 }
375 uint32_t BaselineCompiler::Uint(int operand_index) {
376   return iterator().GetUnsignedImmediateOperand(operand_index);
377 }
378 int32_t BaselineCompiler::Int(int operand_index) {
379   return iterator().GetImmediateOperand(operand_index);
380 }
381 uint32_t BaselineCompiler::Index(int operand_index) {
382   return iterator().GetIndexOperand(operand_index);
383 }
384 uint32_t BaselineCompiler::Flag(int operand_index) {
385   return iterator().GetFlagOperand(operand_index);
386 }
387 uint32_t BaselineCompiler::RegisterCount(int operand_index) {
388   return iterator().GetRegisterCountOperand(operand_index);
389 }
390 TaggedIndex BaselineCompiler::IndexAsTagged(int operand_index) {
391   return TaggedIndex::FromIntptr(Index(operand_index));
392 }
393 TaggedIndex BaselineCompiler::UintAsTagged(int operand_index) {
394   return TaggedIndex::FromIntptr(Uint(operand_index));
395 }
396 Smi BaselineCompiler::IndexAsSmi(int operand_index) {
397   return Smi::FromInt(Index(operand_index));
398 }
399 Smi BaselineCompiler::IntAsSmi(int operand_index) {
400   return Smi::FromInt(Int(operand_index));
401 }
402 Smi BaselineCompiler::FlagAsSmi(int operand_index) {
403   return Smi::FromInt(Flag(operand_index));
404 }
405 
406 MemOperand BaselineCompiler::FeedbackVector() {
407   return __ FeedbackVectorOperand();
408 }
409 
410 void BaselineCompiler::LoadFeedbackVector(Register output) {
411   ASM_CODE_COMMENT(&masm_);
412   __ Move(output, __ FeedbackVectorOperand());
413 }
414 
415 void BaselineCompiler::LoadClosureFeedbackArray(Register output) {
416   LoadFeedbackVector(output);
417   __ LoadTaggedPointerField(output, output,
418                             FeedbackVector::kClosureFeedbackCellArrayOffset);
419 }
420 
421 void BaselineCompiler::SelectBooleanConstant(
422     Register output, std::function<void(Label*, Label::Distance)> jump_func) {
423   Label done, set_true;
424   jump_func(&set_true, Label::kNear);
425   __ LoadRoot(output, RootIndex::kFalseValue);
426   __ Jump(&done, Label::kNear);
427   __ Bind(&set_true);
428   __ LoadRoot(output, RootIndex::kTrueValue);
429   __ Bind(&done);
430 }
431 
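// Records the current code offset in the bytecode offset table, so that each
// bytecode can later be mapped to the baseline code emitted for it.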
432 void BaselineCompiler::AddPosition() {
433   bytecode_offset_table_builder_.AddPosition(__ pc_offset());
434 }
435 
436 void BaselineCompiler::PreVisitSingleBytecode() {
437   switch (iterator().current_bytecode()) {
438     case interpreter::Bytecode::kJumpLoop:
439       EnsureLabels(iterator().GetJumpTargetOffset());
440       break;
441 
442     // TODO(leszeks): Update the max_call_args as part of the main bytecode
443     // visit loop, by patching the value passed to the prologue.
444     case interpreter::Bytecode::kCallProperty:
445     case interpreter::Bytecode::kCallAnyReceiver:
446     case interpreter::Bytecode::kCallWithSpread:
447     case interpreter::Bytecode::kConstruct:
448     case interpreter::Bytecode::kConstructWithSpread:
449       return UpdateMaxCallArgs(
450           iterator().GetRegisterListOperand(1).register_count());
451     case interpreter::Bytecode::kCallUndefinedReceiver:
452       return UpdateMaxCallArgs(
453           iterator().GetRegisterListOperand(1).register_count() + 1);
454     case interpreter::Bytecode::kCallProperty0:
455     case interpreter::Bytecode::kCallUndefinedReceiver0:
456       return UpdateMaxCallArgs(1);
457     case interpreter::Bytecode::kCallProperty1:
458     case interpreter::Bytecode::kCallUndefinedReceiver1:
459       return UpdateMaxCallArgs(2);
460     case interpreter::Bytecode::kCallProperty2:
461     case interpreter::Bytecode::kCallUndefinedReceiver2:
462       return UpdateMaxCallArgs(3);
463 
464     default:
465       break;
466   }
467 }
468 
469 void BaselineCompiler::VisitSingleBytecode() {
470   int offset = iterator().current_offset();
471   if (labels_[offset]) {
472     // Bind labels for this offset that have already been linked to a
473     // jump (i.e. forward jumps, excluding jump tables).
474     for (auto&& label : labels_[offset]->linked) {
475       __ BindWithoutJumpTarget(&label->label);
476     }
477 #ifdef DEBUG
478     labels_[offset]->linked.Clear();
479 #endif
480     __ BindWithoutJumpTarget(&labels_[offset]->unlinked);
481   }
482 
483   // Mark position as valid jump target. This is required for the deoptimizer
484   // and exception handling, when CFI is enabled.
485   __ JumpTarget();
486 
487 #ifdef V8_CODE_COMMENTS
488   std::ostringstream str;
489   if (FLAG_code_comments) {
490     iterator().PrintTo(str);
491   }
492   ASM_CODE_COMMENT_STRING(&masm_, str.str());
493 #endif
494 
495   VerifyFrame();
496 
497 #ifdef V8_TRACE_UNOPTIMIZED
498   TraceBytecode(Runtime::kTraceUnoptimizedBytecodeEntry);
499 #endif
500 
501   {
502     interpreter::Bytecode bytecode = iterator().current_bytecode();
503 
504 #ifdef DEBUG
505     base::Optional<EnsureAccumulatorPreservedScope> accumulator_preserved_scope;
506     // We should make sure to preserve the accumulator whenever the bytecode
507     // isn't registered as writing to it. We can't do this for jumps or switches
508     // though, since the control flow would not match the control flow of this
509     // scope.
510     if (FLAG_debug_code &&
511         !interpreter::Bytecodes::WritesAccumulator(bytecode) &&
512         !interpreter::Bytecodes::IsJump(bytecode) &&
513         !interpreter::Bytecodes::IsSwitch(bytecode)) {
514       accumulator_preserved_scope.emplace(&basm_);
515     }
516 #endif  // DEBUG
517 
518     switch (bytecode) {
519 #define BYTECODE_CASE(name, ...)       \
520   case interpreter::Bytecode::k##name: \
521     Visit##name();                     \
522     break;
523       BYTECODE_LIST(BYTECODE_CASE)
524 #undef BYTECODE_CASE
525     }
526   }
527 
528 #ifdef V8_TRACE_UNOPTIMIZED
529   TraceBytecode(Runtime::kTraceUnoptimizedBytecodeExit);
530 #endif
531 }
532 
533 void BaselineCompiler::VerifyFrame() {
534   if (FLAG_debug_code) {
535     ASM_CODE_COMMENT(&masm_);
536     __ RecordComment(" -- Verify frame size");
537     VerifyFrameSize();
538 
539     __ RecordComment(" -- Verify feedback vector");
540     {
541       BaselineAssembler::ScratchRegisterScope temps(&basm_);
542       Register scratch = temps.AcquireScratch();
543       __ Move(scratch, __ FeedbackVectorOperand());
544       Label is_smi, is_ok;
545       __ JumpIfSmi(scratch, &is_smi);
546       __ JumpIfObjectType(Condition::kEqual, scratch, FEEDBACK_VECTOR_TYPE,
547                           scratch, &is_ok);
548       __ Bind(&is_smi);
549       __ masm()->Abort(AbortReason::kExpectedFeedbackVector);
550       __ Bind(&is_ok);
551     }
552 
553     // TODO(leszeks): More verification.
554   }
555 }
556 
557 #ifdef V8_TRACE_UNOPTIMIZED
558 void BaselineCompiler::TraceBytecode(Runtime::FunctionId function_id) {
559   if (!FLAG_trace_baseline_exec) return;
560   ASM_CODE_COMMENT_STRING(&masm_,
561                           function_id == Runtime::kTraceUnoptimizedBytecodeEntry
562                               ? "Trace bytecode entry"
563                               : "Trace bytecode exit");
564   SaveAccumulatorScope accumulator_scope(&basm_);
565   CallRuntime(function_id, bytecode_,
566               Smi::FromInt(BytecodeArray::kHeaderSize - kHeapObjectTag +
567                            iterator().current_offset()),
568               kInterpreterAccumulatorRegister);
569 }
570 #endif
571 
572 #define DECLARE_VISITOR(name, ...) void Visit##name();
573 BYTECODE_LIST(DECLARE_VISITOR)
574 #undef DECLARE_VISITOR
575 
576 #define DECLARE_VISITOR(name, ...) \
577   void VisitIntrinsic##name(interpreter::RegisterList args);
578 INTRINSICS_LIST(DECLARE_VISITOR)
579 #undef DECLARE_VISITOR
580 
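// Adjusts the interrupt budget by |weight|; the budget interrupt runtime call
// (which also performs a stack check) is only emitted for negative weights,
// i.e. for backwards jumps that close a loop.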
581 void BaselineCompiler::UpdateInterruptBudgetAndJumpToLabel(
582     int weight, Label* label, Label* skip_interrupt_label) {
583   if (weight != 0) {
584     ASM_CODE_COMMENT(&masm_);
585     __ AddToInterruptBudgetAndJumpIfNotExceeded(weight, skip_interrupt_label);
586 
587     if (weight < 0) {
588       SaveAccumulatorScope accumulator_scope(&basm_);
589       CallRuntime(Runtime::kBytecodeBudgetInterruptWithStackCheck,
590                   __ FunctionOperand());
591     }
592   }
593   if (label) __ Jump(label);
594 }
595 
596 void BaselineCompiler::UpdateInterruptBudgetAndDoInterpreterJump() {
597   int weight = iterator().GetRelativeJumpTargetOffset() -
598                iterator().current_bytecode_size_without_prefix();
599   UpdateInterruptBudgetAndJumpToLabel(weight, BuildForwardJumpLabel(), nullptr);
600 }
601 
602 void BaselineCompiler::UpdateInterruptBudgetAndDoInterpreterJumpIfRoot(
603     RootIndex root) {
604   Label dont_jump;
605   __ JumpIfNotRoot(kInterpreterAccumulatorRegister, root, &dont_jump,
606                    Label::kNear);
607   UpdateInterruptBudgetAndDoInterpreterJump();
608   __ Bind(&dont_jump);
609 }
610 
611 void BaselineCompiler::UpdateInterruptBudgetAndDoInterpreterJumpIfNotRoot(
612     RootIndex root) {
613   Label dont_jump;
614   __ JumpIfRoot(kInterpreterAccumulatorRegister, root, &dont_jump,
615                 Label::kNear);
616   UpdateInterruptBudgetAndDoInterpreterJump();
617   __ Bind(&dont_jump);
618 }
619 
620 Label* BaselineCompiler::BuildForwardJumpLabel() {
621   int target_offset = iterator().GetJumpTargetOffset();
622   ThreadedLabel* threaded_label = zone_.New<ThreadedLabel>();
623   EnsureLabels(target_offset)->linked.Add(threaded_label);
624   return &threaded_label->label;
625 }
626 
627 template <Builtin kBuiltin, typename... Args>
628 void BaselineCompiler::CallBuiltin(Args... args) {
629   ASM_CODE_COMMENT(&masm_);
630   detail::MoveArgumentsForBuiltin<kBuiltin>(&basm_, args...);
631   __ CallBuiltin(kBuiltin);
632 }
633 
634 template <Builtin kBuiltin, typename... Args>
635 void BaselineCompiler::TailCallBuiltin(Args... args) {
636   detail::MoveArgumentsForBuiltin<kBuiltin>(&basm_, args...);
637   __ TailCallBuiltin(kBuiltin);
638 }
639 
640 template <typename... Args>
641 void BaselineCompiler::CallRuntime(Runtime::FunctionId function, Args... args) {
642   __ LoadContext(kContextRegister);
643   int nargs = __ Push(args...);
644   __ CallRuntime(function, nargs);
645 }
646 
647 // Returns into kInterpreterAccumulatorRegister
648 void BaselineCompiler::JumpIfToBoolean(bool do_jump_if_true, Label* label,
649                                        Label::Distance distance) {
650   CallBuiltin<Builtin::kToBooleanForBaselineJump>(
651       kInterpreterAccumulatorRegister);
652   // ToBooleanForBaselineJump returns the ToBoolean value into return reg 1, and
653   // the original value into kInterpreterAccumulatorRegister, so we don't have
654   // to worry about it getting clobbered.
655   STATIC_ASSERT(kReturnRegister0 == kInterpreterAccumulatorRegister);
656   __ JumpIfSmi(do_jump_if_true ? Condition::kNotEqual : Condition::kEqual,
657                kReturnRegister1, Smi::FromInt(0), label, distance);
658 }
659 
660 void BaselineCompiler::VisitLdaZero() {
661   __ Move(kInterpreterAccumulatorRegister, Smi::FromInt(0));
662 }
663 
664 void BaselineCompiler::VisitLdaSmi() {
665   Smi constant = Smi::FromInt(iterator().GetImmediateOperand(0));
666   __ Move(kInterpreterAccumulatorRegister, constant);
667 }
668 
669 void BaselineCompiler::VisitLdaUndefined() {
670   __ LoadRoot(kInterpreterAccumulatorRegister, RootIndex::kUndefinedValue);
671 }
672 
673 void BaselineCompiler::VisitLdaNull() {
674   __ LoadRoot(kInterpreterAccumulatorRegister, RootIndex::kNullValue);
675 }
676 
677 void BaselineCompiler::VisitLdaTheHole() {
678   __ LoadRoot(kInterpreterAccumulatorRegister, RootIndex::kTheHoleValue);
679 }
680 
681 void BaselineCompiler::VisitLdaTrue() {
682   __ LoadRoot(kInterpreterAccumulatorRegister, RootIndex::kTrueValue);
683 }
684 
685 void BaselineCompiler::VisitLdaFalse() {
686   __ LoadRoot(kInterpreterAccumulatorRegister, RootIndex::kFalseValue);
687 }
688 
689 void BaselineCompiler::VisitLdaConstant() {
690   LoadConstant<HeapObject>(kInterpreterAccumulatorRegister, 0);
691 }
692 
693 void BaselineCompiler::VisitLdaGlobal() {
694   CallBuiltin<Builtin::kLoadGlobalICBaseline>(Constant<Name>(0),  // name
695                                               IndexAsTagged(1));  // slot
696 }
697 
698 void BaselineCompiler::VisitLdaGlobalInsideTypeof() {
699   CallBuiltin<Builtin::kLoadGlobalICInsideTypeofBaseline>(
700       Constant<Name>(0),  // name
701       IndexAsTagged(1));  // slot
702 }
703 
704 void BaselineCompiler::VisitStaGlobal() {
705   CallBuiltin<Builtin::kStoreGlobalICBaseline>(
706       Constant<Name>(0),                // name
707       kInterpreterAccumulatorRegister,  // value
708       IndexAsTagged(1));                // slot
709 }
710 
711 void BaselineCompiler::VisitPushContext() {
712   BaselineAssembler::ScratchRegisterScope scratch_scope(&basm_);
713   Register context = scratch_scope.AcquireScratch();
714   __ LoadContext(context);
715   __ StoreContext(kInterpreterAccumulatorRegister);
716   StoreRegister(0, context);
717 }
718 
719 void BaselineCompiler::VisitPopContext() {
720   BaselineAssembler::ScratchRegisterScope scratch_scope(&basm_);
721   Register context = scratch_scope.AcquireScratch();
722   LoadRegister(context, 0);
723   __ StoreContext(context);
724 }
725 
726 void BaselineCompiler::VisitLdaContextSlot() {
727   BaselineAssembler::ScratchRegisterScope scratch_scope(&basm_);
728   Register context = scratch_scope.AcquireScratch();
729   LoadRegister(context, 0);
730   int depth = Uint(2);
731   for (; depth > 0; --depth) {
732     __ LoadTaggedPointerField(context, context, Context::kPreviousOffset);
733   }
734   __ LoadTaggedAnyField(kInterpreterAccumulatorRegister, context,
735                         Context::OffsetOfElementAt(Index(1)));
736 }
737 
738 void BaselineCompiler::VisitLdaImmutableContextSlot() { VisitLdaContextSlot(); }
739 
740 void BaselineCompiler::VisitLdaCurrentContextSlot() {
741   BaselineAssembler::ScratchRegisterScope scratch_scope(&basm_);
742   Register context = scratch_scope.AcquireScratch();
743   __ LoadContext(context);
744   __ LoadTaggedAnyField(kInterpreterAccumulatorRegister, context,
745                         Context::OffsetOfElementAt(Index(0)));
746 }
747 
748 void BaselineCompiler::VisitLdaImmutableCurrentContextSlot() {
749   VisitLdaCurrentContextSlot();
750 }
751 
752 void BaselineCompiler::VisitStaContextSlot() {
753   Register value = WriteBarrierDescriptor::ValueRegister();
754   Register context = WriteBarrierDescriptor::ObjectRegister();
755   DCHECK(!AreAliased(value, context, kInterpreterAccumulatorRegister));
756   __ Move(value, kInterpreterAccumulatorRegister);
757   LoadRegister(context, 0);
758   int depth = Uint(2);
759   for (; depth > 0; --depth) {
760     __ LoadTaggedPointerField(context, context, Context::kPreviousOffset);
761   }
762   __ StoreTaggedFieldWithWriteBarrier(
763       context, Context::OffsetOfElementAt(iterator().GetIndexOperand(1)),
764       value);
765 }
766 
767 void BaselineCompiler::VisitStaCurrentContextSlot() {
768   Register value = WriteBarrierDescriptor::ValueRegister();
769   Register context = WriteBarrierDescriptor::ObjectRegister();
770   DCHECK(!AreAliased(value, context, kInterpreterAccumulatorRegister));
771   __ Move(value, kInterpreterAccumulatorRegister);
772   __ LoadContext(context);
773   __ StoreTaggedFieldWithWriteBarrier(
774       context, Context::OffsetOfElementAt(Index(0)), value);
775 }
776 
777 void BaselineCompiler::VisitLdaLookupSlot() {
778   CallRuntime(Runtime::kLoadLookupSlot, Constant<Name>(0));
779 }
780 
781 void BaselineCompiler::VisitLdaLookupContextSlot() {
782   CallBuiltin<Builtin::kLookupContextBaseline>(
783       Constant<Name>(0), UintAsTagged(2), IndexAsTagged(1));
784 }
785 
786 void BaselineCompiler::VisitLdaLookupGlobalSlot() {
787   CallBuiltin<Builtin::kLookupGlobalICBaseline>(
788       Constant<Name>(0), UintAsTagged(2), IndexAsTagged(1));
789 }
790 
791 void BaselineCompiler::VisitLdaLookupSlotInsideTypeof() {
792   CallRuntime(Runtime::kLoadLookupSlotInsideTypeof, Constant<Name>(0));
793 }
794 
795 void BaselineCompiler::VisitLdaLookupContextSlotInsideTypeof() {
796   CallBuiltin<Builtin::kLookupContextInsideTypeofBaseline>(
797       Constant<Name>(0), UintAsTagged(2), IndexAsTagged(1));
798 }
799 
800 void BaselineCompiler::VisitLdaLookupGlobalSlotInsideTypeof() {
801   CallBuiltin<Builtin::kLookupGlobalICInsideTypeofBaseline>(
802       Constant<Name>(0), UintAsTagged(2), IndexAsTagged(1));
803 }
804 
805 void BaselineCompiler::VisitStaLookupSlot() {
806   uint32_t flags = Flag(1);
807   Runtime::FunctionId function_id;
808   if (flags & interpreter::StoreLookupSlotFlags::LanguageModeBit::kMask) {
809     function_id = Runtime::kStoreLookupSlot_Strict;
810   } else if (flags &
811              interpreter::StoreLookupSlotFlags::LookupHoistingModeBit::kMask) {
812     function_id = Runtime::kStoreLookupSlot_SloppyHoisting;
813   } else {
814     function_id = Runtime::kStoreLookupSlot_Sloppy;
815   }
816   CallRuntime(function_id, Constant<Name>(0),    // name
817               kInterpreterAccumulatorRegister);  // value
818 }
819 
820 void BaselineCompiler::VisitLdar() {
821   LoadRegister(kInterpreterAccumulatorRegister, 0);
822 }
823 
824 void BaselineCompiler::VisitStar() {
825   StoreRegister(0, kInterpreterAccumulatorRegister);
826 }
827 
828 #define SHORT_STAR_VISITOR(Name, ...)                                         \
829   void BaselineCompiler::Visit##Name() {                                      \
830     __ StoreRegister(                                                         \
831         interpreter::Register::FromShortStar(interpreter::Bytecode::k##Name), \
832         kInterpreterAccumulatorRegister);                                     \
833   }
834 SHORT_STAR_BYTECODE_LIST(SHORT_STAR_VISITOR)
835 #undef SHORT_STAR_VISITOR
836 
837 void BaselineCompiler::VisitMov() {
838   BaselineAssembler::ScratchRegisterScope scratch_scope(&basm_);
839   Register scratch = scratch_scope.AcquireScratch();
840   LoadRegister(scratch, 0);
841   StoreRegister(1, scratch);
842 }
843 
844 void BaselineCompiler::VisitGetNamedProperty() {
845   CallBuiltin<Builtin::kLoadICBaseline>(RegisterOperand(0),  // object
846                                         Constant<Name>(1),   // name
847                                         IndexAsTagged(2));   // slot
848 }
849 
850 void BaselineCompiler::VisitGetNamedPropertyFromSuper() {
851   __ LoadPrototype(
852       LoadWithReceiverAndVectorDescriptor::LookupStartObjectRegister(),
853       kInterpreterAccumulatorRegister);
854 
855   CallBuiltin<Builtin::kLoadSuperICBaseline>(
856       RegisterOperand(0),  // object
857       LoadWithReceiverAndVectorDescriptor::
858           LookupStartObjectRegister(),  // lookup start
859       Constant<Name>(1),                // name
860       IndexAsTagged(2));                // slot
861 }
862 
863 void BaselineCompiler::VisitGetKeyedProperty() {
864   CallBuiltin<Builtin::kKeyedLoadICBaseline>(
865       RegisterOperand(0),               // object
866       kInterpreterAccumulatorRegister,  // key
867       IndexAsTagged(1));                // slot
868 }
869 
870 void BaselineCompiler::VisitLdaModuleVariable() {
871   BaselineAssembler::ScratchRegisterScope scratch_scope(&basm_);
872   Register scratch = scratch_scope.AcquireScratch();
873   __ LoadContext(scratch);
874   int depth = Uint(1);
875   for (; depth > 0; --depth) {
876     __ LoadTaggedPointerField(scratch, scratch, Context::kPreviousOffset);
877   }
878   __ LoadTaggedPointerField(scratch, scratch, Context::kExtensionOffset);
879   int cell_index = Int(0);
880   if (cell_index > 0) {
881     __ LoadTaggedPointerField(scratch, scratch,
882                               SourceTextModule::kRegularExportsOffset);
883     // The actual array index is (cell_index - 1).
884     cell_index -= 1;
885   } else {
886     __ LoadTaggedPointerField(scratch, scratch,
887                               SourceTextModule::kRegularImportsOffset);
888     // The actual array index is (-cell_index - 1).
889     cell_index = -cell_index - 1;
890   }
891   __ LoadFixedArrayElement(scratch, scratch, cell_index);
892   __ LoadTaggedAnyField(kInterpreterAccumulatorRegister, scratch,
893                         Cell::kValueOffset);
894 }
895 
896 void BaselineCompiler::VisitStaModuleVariable() {
897   int cell_index = Int(0);
898   if (V8_UNLIKELY(cell_index < 0)) {
899     // Not supported (probably never).
900     CallRuntime(Runtime::kAbort,
901                 Smi::FromInt(static_cast<int>(
902                     AbortReason::kUnsupportedModuleOperation)));
903     __ Trap();
904   }
905   Register value = WriteBarrierDescriptor::ValueRegister();
906   Register scratch = WriteBarrierDescriptor::ObjectRegister();
907   DCHECK(!AreAliased(value, scratch, kInterpreterAccumulatorRegister));
908   __ Move(value, kInterpreterAccumulatorRegister);
909   __ LoadContext(scratch);
910   int depth = Uint(1);
911   for (; depth > 0; --depth) {
912     __ LoadTaggedPointerField(scratch, scratch, Context::kPreviousOffset);
913   }
914   __ LoadTaggedPointerField(scratch, scratch, Context::kExtensionOffset);
915   __ LoadTaggedPointerField(scratch, scratch,
916                             SourceTextModule::kRegularExportsOffset);
917 
918   // The actual array index is (cell_index - 1).
919   cell_index -= 1;
920   __ LoadFixedArrayElement(scratch, scratch, cell_index);
921   __ StoreTaggedFieldWithWriteBarrier(scratch, Cell::kValueOffset, value);
922 }
923 
924 void BaselineCompiler::VisitSetNamedProperty() {
925   // StoreIC is currently a base class for multiple property store operations
926   // and contains mixed logic for named and keyed, set and define operations,
927   // the paths are controlled by feedback.
928   // TODO(v8:12548): refactor SetNamedIC as a subclass of StoreIC, which can be
929   // called here.
930   CallBuiltin<Builtin::kStoreICBaseline>(
931       RegisterOperand(0),               // object
932       Constant<Name>(1),                // name
933       kInterpreterAccumulatorRegister,  // value
934       IndexAsTagged(2));                // slot
935 }
936 
937 void BaselineCompiler::VisitDefineNamedOwnProperty() {
938   CallBuiltin<Builtin::kDefineNamedOwnICBaseline>(
939       RegisterOperand(0),               // object
940       Constant<Name>(1),                // name
941       kInterpreterAccumulatorRegister,  // value
942       IndexAsTagged(2));                // slot
943 }
944 
945 void BaselineCompiler::VisitSetKeyedProperty() {
946   // KeyedStoreIC is currently a base class for multiple keyed property store
947   // operations and contains mixed logic for set and define operations,
948   // the paths are controlled by feedback.
949   // TODO(v8:12548): refactor SetKeyedIC as a subclass of KeyedStoreIC, which
950   // can be called here.
951   CallBuiltin<Builtin::kKeyedStoreICBaseline>(
952       RegisterOperand(0),               // object
953       RegisterOperand(1),               // key
954       kInterpreterAccumulatorRegister,  // value
955       IndexAsTagged(2));                // slot
956 }
957 
958 void BaselineCompiler::VisitDefineKeyedOwnProperty() {
959   CallBuiltin<Builtin::kDefineKeyedOwnICBaseline>(
960       RegisterOperand(0),               // object
961       RegisterOperand(1),               // key
962       kInterpreterAccumulatorRegister,  // value
963       IndexAsTagged(2));                // slot
964 }
965 
966 void BaselineCompiler::VisitStaInArrayLiteral() {
967   CallBuiltin<Builtin::kStoreInArrayLiteralICBaseline>(
968       RegisterOperand(0),               // object
969       RegisterOperand(1),               // name
970       kInterpreterAccumulatorRegister,  // value
971       IndexAsTagged(2));                // slot
972 }
973 
974 void BaselineCompiler::VisitDefineKeyedOwnPropertyInLiteral() {
975   // Here we should save the accumulator, since
976   // DefineKeyedOwnPropertyInLiteral doesn't write the accumulator, but
977   // Runtime::kDefineKeyedOwnPropertyInLiteral returns the value that we got
978   // from the accumulator so this still works.
979   CallRuntime(Runtime::kDefineKeyedOwnPropertyInLiteral,
980               RegisterOperand(0),               // object
981               RegisterOperand(1),               // name
982               kInterpreterAccumulatorRegister,  // value
983               FlagAsSmi(2),                     // flags
984               FeedbackVector(),                 // feedback vector
985               IndexAsTagged(3));                // slot
986 }
987 
988 void BaselineCompiler::VisitCollectTypeProfile() {
989   SaveAccumulatorScope accumulator_scope(&basm_);
990   CallRuntime(Runtime::kCollectTypeProfile,
991               IntAsSmi(0),                      // position
992               kInterpreterAccumulatorRegister,  // value
993               FeedbackVector());                // feedback vector
994 }
995 
996 void BaselineCompiler::VisitAdd() {
997   CallBuiltin<Builtin::kAdd_Baseline>(
998       RegisterOperand(0), kInterpreterAccumulatorRegister, Index(1));
999 }
1000 
1001 void BaselineCompiler::VisitSub() {
1002   CallBuiltin<Builtin::kSubtract_Baseline>(
1003       RegisterOperand(0), kInterpreterAccumulatorRegister, Index(1));
1004 }
1005 
1006 void BaselineCompiler::VisitMul() {
1007   CallBuiltin<Builtin::kMultiply_Baseline>(
1008       RegisterOperand(0), kInterpreterAccumulatorRegister, Index(1));
1009 }
1010 
1011 void BaselineCompiler::VisitDiv() {
1012   CallBuiltin<Builtin::kDivide_Baseline>(
1013       RegisterOperand(0), kInterpreterAccumulatorRegister, Index(1));
1014 }
1015 
1016 void BaselineCompiler::VisitMod() {
1017   CallBuiltin<Builtin::kModulus_Baseline>(
1018       RegisterOperand(0), kInterpreterAccumulatorRegister, Index(1));
1019 }
1020 
1021 void BaselineCompiler::VisitExp() {
1022   CallBuiltin<Builtin::kExponentiate_Baseline>(
1023       RegisterOperand(0), kInterpreterAccumulatorRegister, Index(1));
1024 }
1025 
1026 void BaselineCompiler::VisitBitwiseOr() {
1027   CallBuiltin<Builtin::kBitwiseOr_Baseline>(
1028       RegisterOperand(0), kInterpreterAccumulatorRegister, Index(1));
1029 }
1030 
1031 void BaselineCompiler::VisitBitwiseXor() {
1032   CallBuiltin<Builtin::kBitwiseXor_Baseline>(
1033       RegisterOperand(0), kInterpreterAccumulatorRegister, Index(1));
1034 }
1035 
1036 void BaselineCompiler::VisitBitwiseAnd() {
1037   CallBuiltin<Builtin::kBitwiseAnd_Baseline>(
1038       RegisterOperand(0), kInterpreterAccumulatorRegister, Index(1));
1039 }
1040 
1041 void BaselineCompiler::VisitShiftLeft() {
1042   CallBuiltin<Builtin::kShiftLeft_Baseline>(
1043       RegisterOperand(0), kInterpreterAccumulatorRegister, Index(1));
1044 }
1045 
1046 void BaselineCompiler::VisitShiftRight() {
1047   CallBuiltin<Builtin::kShiftRight_Baseline>(
1048       RegisterOperand(0), kInterpreterAccumulatorRegister, Index(1));
1049 }
1050 
1051 void BaselineCompiler::VisitShiftRightLogical() {
1052   CallBuiltin<Builtin::kShiftRightLogical_Baseline>(
1053       RegisterOperand(0), kInterpreterAccumulatorRegister, Index(1));
1054 }
1055 
1056 void BaselineCompiler::VisitAddSmi() {
1057   CallBuiltin<Builtin::kAddSmi_Baseline>(kInterpreterAccumulatorRegister,
1058                                          IntAsSmi(0), Index(1));
1059 }
1060 
1061 void BaselineCompiler::VisitSubSmi() {
1062   CallBuiltin<Builtin::kSubtractSmi_Baseline>(kInterpreterAccumulatorRegister,
1063                                               IntAsSmi(0), Index(1));
1064 }
1065 
1066 void BaselineCompiler::VisitMulSmi() {
1067   CallBuiltin<Builtin::kMultiplySmi_Baseline>(kInterpreterAccumulatorRegister,
1068                                               IntAsSmi(0), Index(1));
1069 }
1070 
1071 void BaselineCompiler::VisitDivSmi() {
1072   CallBuiltin<Builtin::kDivideSmi_Baseline>(kInterpreterAccumulatorRegister,
1073                                             IntAsSmi(0), Index(1));
1074 }
1075 
1076 void BaselineCompiler::VisitModSmi() {
1077   CallBuiltin<Builtin::kModulusSmi_Baseline>(kInterpreterAccumulatorRegister,
1078                                              IntAsSmi(0), Index(1));
1079 }
1080 
1081 void BaselineCompiler::VisitExpSmi() {
1082   CallBuiltin<Builtin::kExponentiateSmi_Baseline>(
1083       kInterpreterAccumulatorRegister, IntAsSmi(0), Index(1));
1084 }
1085 
1086 void BaselineCompiler::VisitBitwiseOrSmi() {
1087   CallBuiltin<Builtin::kBitwiseOrSmi_Baseline>(kInterpreterAccumulatorRegister,
1088                                                IntAsSmi(0), Index(1));
1089 }
1090 
1091 void BaselineCompiler::VisitBitwiseXorSmi() {
1092   CallBuiltin<Builtin::kBitwiseXorSmi_Baseline>(kInterpreterAccumulatorRegister,
1093                                                 IntAsSmi(0), Index(1));
1094 }
1095 
1096 void BaselineCompiler::VisitBitwiseAndSmi() {
1097   CallBuiltin<Builtin::kBitwiseAndSmi_Baseline>(kInterpreterAccumulatorRegister,
1098                                                 IntAsSmi(0), Index(1));
1099 }
1100 
1101 void BaselineCompiler::VisitShiftLeftSmi() {
1102   CallBuiltin<Builtin::kShiftLeftSmi_Baseline>(kInterpreterAccumulatorRegister,
1103                                                IntAsSmi(0), Index(1));
1104 }
1105 
1106 void BaselineCompiler::VisitShiftRightSmi() {
1107   CallBuiltin<Builtin::kShiftRightSmi_Baseline>(kInterpreterAccumulatorRegister,
1108                                                 IntAsSmi(0), Index(1));
1109 }
1110 
1111 void BaselineCompiler::VisitShiftRightLogicalSmi() {
1112   CallBuiltin<Builtin::kShiftRightLogicalSmi_Baseline>(
1113       kInterpreterAccumulatorRegister, IntAsSmi(0), Index(1));
1114 }
1115 
1116 void BaselineCompiler::VisitInc() {
1117   CallBuiltin<Builtin::kIncrement_Baseline>(kInterpreterAccumulatorRegister,
1118                                             Index(0));
1119 }
1120 
1121 void BaselineCompiler::VisitDec() {
1122   CallBuiltin<Builtin::kDecrement_Baseline>(kInterpreterAccumulatorRegister,
1123                                             Index(0));
1124 }
1125 
1126 void BaselineCompiler::VisitNegate() {
1127   CallBuiltin<Builtin::kNegate_Baseline>(kInterpreterAccumulatorRegister,
1128                                          Index(0));
1129 }
1130 
1131 void BaselineCompiler::VisitBitwiseNot() {
1132   CallBuiltin<Builtin::kBitwiseNot_Baseline>(kInterpreterAccumulatorRegister,
1133                                              Index(0));
1134 }
1135 
1136 void BaselineCompiler::VisitToBooleanLogicalNot() {
1137   SelectBooleanConstant(kInterpreterAccumulatorRegister,
1138                         [&](Label* if_true, Label::Distance distance) {
1139                           JumpIfToBoolean(false, if_true, distance);
1140                         });
1141 }
1142 
1143 void BaselineCompiler::VisitLogicalNot() {
1144   SelectBooleanConstant(kInterpreterAccumulatorRegister,
1145                         [&](Label* if_true, Label::Distance distance) {
1146                           __ JumpIfRoot(kInterpreterAccumulatorRegister,
1147                                         RootIndex::kFalseValue, if_true,
1148                                         distance);
1149                         });
1150 }
1151 
1152 void BaselineCompiler::VisitTypeOf() {
1153   CallBuiltin<Builtin::kTypeof>(kInterpreterAccumulatorRegister);
1154 }
1155 
1156 void BaselineCompiler::VisitDeletePropertyStrict() {
1157   BaselineAssembler::ScratchRegisterScope scratch_scope(&basm_);
1158   Register scratch = scratch_scope.AcquireScratch();
1159   __ Move(scratch, kInterpreterAccumulatorRegister);
1160   CallBuiltin<Builtin::kDeleteProperty>(RegisterOperand(0), scratch,
1161                                         Smi::FromEnum(LanguageMode::kStrict));
1162 }
1163 
1164 void BaselineCompiler::VisitDeletePropertySloppy() {
1165   BaselineAssembler::ScratchRegisterScope scratch_scope(&basm_);
1166   Register scratch = scratch_scope.AcquireScratch();
1167   __ Move(scratch, kInterpreterAccumulatorRegister);
1168   CallBuiltin<Builtin::kDeleteProperty>(RegisterOperand(0), scratch,
1169                                         Smi::FromEnum(LanguageMode::kSloppy));
1170 }
1171 
1172 void BaselineCompiler::VisitGetSuperConstructor() {
1173   BaselineAssembler::ScratchRegisterScope scratch_scope(&basm_);
1174   Register prototype = scratch_scope.AcquireScratch();
1175   __ LoadPrototype(prototype, kInterpreterAccumulatorRegister);
1176   StoreRegister(0, prototype);
1177 }
1178 
1179 namespace {
1180 constexpr Builtin ConvertReceiverModeToCompactBuiltin(
1181     ConvertReceiverMode mode) {
1182   switch (mode) {
1183     case ConvertReceiverMode::kAny:
1184       return Builtin::kCall_ReceiverIsAny_Baseline_Compact;
1185     case ConvertReceiverMode::kNullOrUndefined:
1186       return Builtin::kCall_ReceiverIsNullOrUndefined_Baseline_Compact;
1187     case ConvertReceiverMode::kNotNullOrUndefined:
1188       return Builtin::kCall_ReceiverIsNotNullOrUndefined_Baseline_Compact;
1189   }
1190 }
1191 constexpr Builtin ConvertReceiverModeToBuiltin(ConvertReceiverMode mode) {
1192   switch (mode) {
1193     case ConvertReceiverMode::kAny:
1194       return Builtin::kCall_ReceiverIsAny_Baseline;
1195     case ConvertReceiverMode::kNullOrUndefined:
1196       return Builtin::kCall_ReceiverIsNullOrUndefined_Baseline;
1197     case ConvertReceiverMode::kNotNullOrUndefined:
1198       return Builtin::kCall_ReceiverIsNotNullOrUndefined_Baseline;
1199   }
1200 }
1201 }  // namespace
1202 
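// BuildCall prefers the compact Call_*_Baseline_Compact builtins, which pack
// the argument count and feedback slot into a single bitfield; when the
// values do not fit that encoding, the regular builtin taking separate
// argument-count and slot parameters is used instead.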
1203 template <ConvertReceiverMode kMode, typename... Args>
1204 void BaselineCompiler::BuildCall(uint32_t slot, uint32_t arg_count,
1205                                  Args... args) {
1206   uint32_t bitfield;
1207   if (CallTrampoline_Baseline_CompactDescriptor::EncodeBitField(arg_count, slot,
1208                                                                 &bitfield)) {
1209     CallBuiltin<ConvertReceiverModeToCompactBuiltin(kMode)>(
1210         RegisterOperand(0),  // kFunction
1211         bitfield,            // kActualArgumentsCount | kSlot
1212         args...);            // Arguments
1213   } else {
1214     CallBuiltin<ConvertReceiverModeToBuiltin(kMode)>(
1215         RegisterOperand(0),  // kFunction
1216         arg_count,           // kActualArgumentsCount
1217         slot,                // kSlot
1218         args...);            // Arguments
1219   }
1220 }
1221 
1222 void BaselineCompiler::VisitCallAnyReceiver() {
1223   interpreter::RegisterList args = iterator().GetRegisterListOperand(1);
1224   uint32_t arg_count = args.register_count();
1225   BuildCall<ConvertReceiverMode::kAny>(Index(3), arg_count, args);
1226 }
1227 
1228 void BaselineCompiler::VisitCallProperty() {
1229   interpreter::RegisterList args = iterator().GetRegisterListOperand(1);
1230   uint32_t arg_count = args.register_count();
1231   BuildCall<ConvertReceiverMode::kNotNullOrUndefined>(Index(3), arg_count,
1232                                                       args);
1233 }
1234 
1235 void BaselineCompiler::VisitCallProperty0() {
1236   BuildCall<ConvertReceiverMode::kNotNullOrUndefined>(
1237       Index(2), JSParameterCount(0), RegisterOperand(1));
1238 }
1239 
1240 void BaselineCompiler::VisitCallProperty1() {
1241   BuildCall<ConvertReceiverMode::kNotNullOrUndefined>(
1242       Index(3), JSParameterCount(1), RegisterOperand(1), RegisterOperand(2));
1243 }
1244 
1245 void BaselineCompiler::VisitCallProperty2() {
1246   BuildCall<ConvertReceiverMode::kNotNullOrUndefined>(
1247       Index(4), JSParameterCount(2), RegisterOperand(1), RegisterOperand(2),
1248       RegisterOperand(3));
1249 }
1250 
1251 void BaselineCompiler::VisitCallUndefinedReceiver() {
1252   interpreter::RegisterList args = iterator().GetRegisterListOperand(1);
1253   uint32_t arg_count = JSParameterCount(args.register_count());
1254   BuildCall<ConvertReceiverMode::kNullOrUndefined>(
1255       Index(3), arg_count, RootIndex::kUndefinedValue, args);
1256 }
1257 
1258 void BaselineCompiler::VisitCallUndefinedReceiver0() {
1259   BuildCall<ConvertReceiverMode::kNullOrUndefined>(
1260       Index(1), JSParameterCount(0), RootIndex::kUndefinedValue);
1261 }
1262 
1263 void BaselineCompiler::VisitCallUndefinedReceiver1() {
1264   BuildCall<ConvertReceiverMode::kNullOrUndefined>(
1265       Index(2), JSParameterCount(1), RootIndex::kUndefinedValue,
1266       RegisterOperand(1));
1267 }
1268 
1269 void BaselineCompiler::VisitCallUndefinedReceiver2() {
1270   BuildCall<ConvertReceiverMode::kNullOrUndefined>(
1271       Index(3), JSParameterCount(2), RootIndex::kUndefinedValue,
1272       RegisterOperand(1), RegisterOperand(2));
1273 }
1274 
1275 void BaselineCompiler::VisitCallWithSpread() {
1276   interpreter::RegisterList args = iterator().GetRegisterListOperand(1);
1277 
1278   // Do not push the spread argument
1279   interpreter::Register spread_register = args.last_register();
1280   args = args.Truncate(args.register_count() - 1);
1281 
1282   uint32_t arg_count = args.register_count();
1283 
1284   CallBuiltin<Builtin::kCallWithSpread_Baseline>(
1285       RegisterOperand(0),  // kFunction
1286       arg_count,           // kActualArgumentsCount
1287       spread_register,     // kSpread
1288       Index(3),            // kSlot
1289       args);
1290 }
1291 
1292 void BaselineCompiler::VisitCallRuntime() {
1293   CallRuntime(iterator().GetRuntimeIdOperand(0),
1294               iterator().GetRegisterListOperand(1));
1295 }
1296 
1297 void BaselineCompiler::VisitCallRuntimeForPair() {
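  // The runtime function returns a pair; both return registers are written
  // back into the register pair named by operand 3, while the accumulator is
  // preserved across the call.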
1298   SaveAccumulatorScope accumulator_scope(&basm_);
1299   CallRuntime(iterator().GetRuntimeIdOperand(0),
1300               iterator().GetRegisterListOperand(1));
1301   StoreRegisterPair(3, kReturnRegister0, kReturnRegister1);
1302 }
1303 
1304 void BaselineCompiler::VisitCallJSRuntime() {
1305   interpreter::RegisterList args = iterator().GetRegisterListOperand(1);
1306   uint32_t arg_count = JSParameterCount(args.register_count());
1307 
1308   // Load context for LoadNativeContextSlot.
1309   __ LoadContext(kContextRegister);
1310   __ LoadNativeContextSlot(kJavaScriptCallTargetRegister,
1311                            iterator().GetNativeContextIndexOperand(0));
1312   CallBuiltin<Builtin::kCall_ReceiverIsNullOrUndefined>(
1313       kJavaScriptCallTargetRegister,  // kFunction
1314       arg_count,                      // kActualArgumentsCount
1315       RootIndex::kUndefinedValue,     // kReceiver
1316       args);
1317 }
1318 
1319 void BaselineCompiler::VisitInvokeIntrinsic() {
1320   Runtime::FunctionId intrinsic_id = iterator().GetIntrinsicIdOperand(0);
1321   interpreter::RegisterList args = iterator().GetRegisterListOperand(1);
1322   switch (intrinsic_id) {
1323 #define CASE(Name, ...)         \
1324   case Runtime::kInline##Name:  \
1325     VisitIntrinsic##Name(args); \
1326     break;
1327     INTRINSICS_LIST(CASE)
1328 #undef CASE
1329 
1330     default:
1331       UNREACHABLE();
1332   }
1333 }
1334 
1335 void BaselineCompiler::VisitIntrinsicCopyDataProperties(
1336     interpreter::RegisterList args) {
1337   CallBuiltin<Builtin::kCopyDataProperties>(args);
1338 }
1339 
1340 void BaselineCompiler::
1341     VisitIntrinsicCopyDataPropertiesWithExcludedPropertiesOnStack(
1342         interpreter::RegisterList args) {
1343   BaselineAssembler::ScratchRegisterScope scratch_scope(&basm_);
1344   Register rscratch = scratch_scope.AcquireScratch();
1345   // Use an offset from args[0] instead of args[1] to pass a valid "end of"
1346   // pointer in the case where args.register_count() == 1.
1347   basm_.RegisterFrameAddress(interpreter::Register(args[0].index() + 1),
1348                              rscratch);
1349   CallBuiltin<Builtin::kCopyDataPropertiesWithExcludedPropertiesOnStack>(
1350       args[0], args.register_count() - 1, rscratch);
1351 }
1352 
1353 void BaselineCompiler::VisitIntrinsicCreateIterResultObject(
1354     interpreter::RegisterList args) {
1355   CallBuiltin<Builtin::kCreateIterResultObject>(args);
1356 }
1357 
1358 void BaselineCompiler::VisitIntrinsicCreateAsyncFromSyncIterator(
1359     interpreter::RegisterList args) {
1360   CallBuiltin<Builtin::kCreateAsyncFromSyncIteratorBaseline>(args[0]);
1361 }
1362 
1363 void BaselineCompiler::VisitIntrinsicCreateJSGeneratorObject(
1364     interpreter::RegisterList args) {
1365   CallBuiltin<Builtin::kCreateGeneratorObject>(args);
1366 }
1367 
1368 void BaselineCompiler::VisitIntrinsicGeneratorGetResumeMode(
1369     interpreter::RegisterList args) {
1370   __ LoadRegister(kInterpreterAccumulatorRegister, args[0]);
1371   __ LoadTaggedAnyField(kInterpreterAccumulatorRegister,
1372                         kInterpreterAccumulatorRegister,
1373                         JSGeneratorObject::kResumeModeOffset);
1374 }
1375 
1376 void BaselineCompiler::VisitIntrinsicGeneratorClose(
1377     interpreter::RegisterList args) {
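  // Close the generator: store the kGeneratorClosed sentinel into its
  // continuation field, then return undefined in the accumulator.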
1378   __ LoadRegister(kInterpreterAccumulatorRegister, args[0]);
1379   __ StoreTaggedSignedField(kInterpreterAccumulatorRegister,
1380                             JSGeneratorObject::kContinuationOffset,
1381                             Smi::FromInt(JSGeneratorObject::kGeneratorClosed));
1382   __ LoadRoot(kInterpreterAccumulatorRegister, RootIndex::kUndefinedValue);
1383 }
1384 
1385 void BaselineCompiler::VisitIntrinsicGetImportMetaObject(
1386     interpreter::RegisterList args) {
1387   CallBuiltin<Builtin::kGetImportMetaObjectBaseline>();
1388 }
1389 
1390 void BaselineCompiler::VisitIntrinsicAsyncFunctionAwaitCaught(
1391     interpreter::RegisterList args) {
1392   CallBuiltin<Builtin::kAsyncFunctionAwaitCaught>(args);
1393 }
1394 
1395 void BaselineCompiler::VisitIntrinsicAsyncFunctionAwaitUncaught(
1396     interpreter::RegisterList args) {
1397   CallBuiltin<Builtin::kAsyncFunctionAwaitUncaught>(args);
1398 }
1399 
1400 void BaselineCompiler::VisitIntrinsicAsyncFunctionEnter(
1401     interpreter::RegisterList args) {
1402   CallBuiltin<Builtin::kAsyncFunctionEnter>(args);
1403 }
1404 
1405 void BaselineCompiler::VisitIntrinsicAsyncFunctionReject(
1406     interpreter::RegisterList args) {
1407   CallBuiltin<Builtin::kAsyncFunctionReject>(args);
1408 }
1409 
1410 void BaselineCompiler::VisitIntrinsicAsyncFunctionResolve(
1411     interpreter::RegisterList args) {
1412   CallBuiltin<Builtin::kAsyncFunctionResolve>(args);
1413 }
1414 
1415 void BaselineCompiler::VisitIntrinsicAsyncGeneratorAwaitCaught(
1416     interpreter::RegisterList args) {
1417   CallBuiltin<Builtin::kAsyncGeneratorAwaitCaught>(args);
1418 }
1419 
1420 void BaselineCompiler::VisitIntrinsicAsyncGeneratorAwaitUncaught(
1421     interpreter::RegisterList args) {
1422   CallBuiltin<Builtin::kAsyncGeneratorAwaitUncaught>(args);
1423 }
1424 
1425 void BaselineCompiler::VisitIntrinsicAsyncGeneratorReject(
1426     interpreter::RegisterList args) {
1427   CallBuiltin<Builtin::kAsyncGeneratorReject>(args);
1428 }
1429 
1430 void BaselineCompiler::VisitIntrinsicAsyncGeneratorResolve(
1431     interpreter::RegisterList args) {
1432   CallBuiltin<Builtin::kAsyncGeneratorResolve>(args);
1433 }
1434 
1435 void BaselineCompiler::VisitIntrinsicAsyncGeneratorYield(
1436     interpreter::RegisterList args) {
1437   CallBuiltin<Builtin::kAsyncGeneratorYield>(args);
1438 }
1439 
1440 void BaselineCompiler::VisitConstruct() {
1441   interpreter::RegisterList args = iterator().GetRegisterListOperand(1);
1442   uint32_t arg_count = JSParameterCount(args.register_count());
1443   CallBuiltin<Builtin::kConstruct_Baseline>(
1444       RegisterOperand(0),               // kFunction
1445       kInterpreterAccumulatorRegister,  // kNewTarget
1446       arg_count,                        // kActualArgumentsCount
1447       Index(3),                         // kSlot
1448       RootIndex::kUndefinedValue,       // kReceiver
1449       args);
1450 }
1451 
1452 void BaselineCompiler::VisitConstructWithSpread() {
1453   interpreter::RegisterList args = iterator().GetRegisterListOperand(1);
1454 
1455   // Do not push the spread argument
1456   interpreter::Register spread_register = args.last_register();
1457   args = args.Truncate(args.register_count() - 1);
1458 
1459   uint32_t arg_count = JSParameterCount(args.register_count());
1460 
1461   using Descriptor =
1462       CallInterfaceDescriptorFor<Builtin::kConstructWithSpread_Baseline>::type;
1463   Register new_target =
1464       Descriptor::GetRegisterParameter(Descriptor::kNewTarget);
1465   __ Move(new_target, kInterpreterAccumulatorRegister);
1466 
1467   CallBuiltin<Builtin::kConstructWithSpread_Baseline>(
1468       RegisterOperand(0),          // kFunction
1469       new_target,                  // kNewTarget
1470       arg_count,                   // kActualArgumentsCount
1471       Index(3),                    // kSlot
1472       spread_register,             // kSpread
1473       RootIndex::kUndefinedValue,  // kReceiver
1474       args);
1475 }
1476 
1477 void BaselineCompiler::VisitTestEqual() {
1478   CallBuiltin<Builtin::kEqual_Baseline>(
1479       RegisterOperand(0), kInterpreterAccumulatorRegister, Index(1));
1480 }
1481 
1482 void BaselineCompiler::VisitTestEqualStrict() {
1483   CallBuiltin<Builtin::kStrictEqual_Baseline>(
1484       RegisterOperand(0), kInterpreterAccumulatorRegister, Index(1));
1485 }
1486 
1487 void BaselineCompiler::VisitTestLessThan() {
1488   CallBuiltin<Builtin::kLessThan_Baseline>(
1489       RegisterOperand(0), kInterpreterAccumulatorRegister, Index(1));
1490 }
1491 
1492 void BaselineCompiler::VisitTestGreaterThan() {
1493   CallBuiltin<Builtin::kGreaterThan_Baseline>(
1494       RegisterOperand(0), kInterpreterAccumulatorRegister, Index(1));
1495 }
1496 
1497 void BaselineCompiler::VisitTestLessThanOrEqual() {
1498   CallBuiltin<Builtin::kLessThanOrEqual_Baseline>(
1499       RegisterOperand(0), kInterpreterAccumulatorRegister, Index(1));
1500 }
1501 
1502 void BaselineCompiler::VisitTestGreaterThanOrEqual() {
1503   CallBuiltin<Builtin::kGreaterThanOrEqual_Baseline>(
1504       RegisterOperand(0), kInterpreterAccumulatorRegister, Index(1));
1505 }
1506 
1507 void BaselineCompiler::VisitTestReferenceEqual() {
1508   SelectBooleanConstant(
1509       kInterpreterAccumulatorRegister,
1510       [&](Label* is_true, Label::Distance distance) {
1511         __ JumpIfTagged(Condition::kEqual,
1512                         __ RegisterFrameOperand(RegisterOperand(0)),
1513                         kInterpreterAccumulatorRegister, is_true, distance);
1514       });
1515 }
1516 
1517 void BaselineCompiler::VisitTestInstanceOf() {
1518   using Descriptor =
1519       CallInterfaceDescriptorFor<Builtin::kInstanceOf_Baseline>::type;
1520   Register callable = Descriptor::GetRegisterParameter(Descriptor::kRight);
1521   __ Move(callable, kInterpreterAccumulatorRegister);
1522 
1523   CallBuiltin<Builtin::kInstanceOf_Baseline>(RegisterOperand(0),  // object
1524                                              callable,            // callable
1525                                              Index(1));           // slot
1526 }
1527 
1528 void BaselineCompiler::VisitTestIn() {
1529   CallBuiltin<Builtin::kKeyedHasICBaseline>(
1530       kInterpreterAccumulatorRegister,  // object
1531       RegisterOperand(0),               // name
1532       IndexAsTagged(1));                // slot
1533 }
1534 
1535 void BaselineCompiler::VisitTestUndetectable() {
1536   Label done, is_smi, not_undetectable;
1537   __ JumpIfSmi(kInterpreterAccumulatorRegister, &is_smi, Label::kNear);
1538 
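  // Smis are never undetectable; for heap objects, test the IsUndetectable
  // bit in the map's bit field.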
1539   Register map_bit_field = kInterpreterAccumulatorRegister;
1540   __ LoadMap(map_bit_field, kInterpreterAccumulatorRegister);
1541   __ LoadWord8Field(map_bit_field, map_bit_field, Map::kBitFieldOffset);
1542   __ TestAndBranch(map_bit_field, Map::Bits1::IsUndetectableBit::kMask,
1543                    Condition::kZero, &not_undetectable, Label::kNear);
1544 
1545   __ LoadRoot(kInterpreterAccumulatorRegister, RootIndex::kTrueValue);
1546   __ Jump(&done, Label::kNear);
1547 
1548   __ Bind(&is_smi);
1549   __ Bind(&not_undetectable);
1550   __ LoadRoot(kInterpreterAccumulatorRegister, RootIndex::kFalseValue);
1551   __ Bind(&done);
1552 }
1553 
1554 void BaselineCompiler::VisitTestNull() {
1555   SelectBooleanConstant(kInterpreterAccumulatorRegister,
1556                         [&](Label* is_true, Label::Distance distance) {
1557                           __ JumpIfRoot(kInterpreterAccumulatorRegister,
1558                                         RootIndex::kNullValue, is_true,
1559                                         distance);
1560                         });
1561 }
1562 
1563 void BaselineCompiler::VisitTestUndefined() {
1564   SelectBooleanConstant(kInterpreterAccumulatorRegister,
1565                         [&](Label* is_true, Label::Distance distance) {
1566                           __ JumpIfRoot(kInterpreterAccumulatorRegister,
1567                                         RootIndex::kUndefinedValue, is_true,
1568                                         distance);
1569                         });
1570 }
1571 
1572 void BaselineCompiler::VisitTestTypeOf() {
1573   BaselineAssembler::ScratchRegisterScope scratch_scope(&basm_);
1574 
1575   auto literal_flag =
1576       static_cast<interpreter::TestTypeOfFlags::LiteralFlag>(Flag(0));
1577 
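  // Each typeof literal is handled inline: the accumulator is type-checked
  // and the true/false root is materialized directly into the accumulator.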
1578   Label done;
1579   switch (literal_flag) {
1580     case interpreter::TestTypeOfFlags::LiteralFlag::kNumber: {
1581       Label is_smi, is_heap_number;
1582       __ JumpIfSmi(kInterpreterAccumulatorRegister, &is_smi, Label::kNear);
1583       __ JumpIfObjectType(Condition::kEqual, kInterpreterAccumulatorRegister,
1584                           HEAP_NUMBER_TYPE, scratch_scope.AcquireScratch(),
1585                           &is_heap_number, Label::kNear);
1586 
1587       __ LoadRoot(kInterpreterAccumulatorRegister, RootIndex::kFalseValue);
1588       __ Jump(&done, Label::kNear);
1589 
1590       __ Bind(&is_smi);
1591       __ Bind(&is_heap_number);
1592       __ LoadRoot(kInterpreterAccumulatorRegister, RootIndex::kTrueValue);
1593       break;
1594     }
1595     case interpreter::TestTypeOfFlags::LiteralFlag::kString: {
1596       Label is_smi, bad_instance_type;
1597       __ JumpIfSmi(kInterpreterAccumulatorRegister, &is_smi, Label::kNear);
1598       STATIC_ASSERT(INTERNALIZED_STRING_TYPE == FIRST_TYPE);
1599       __ JumpIfObjectType(Condition::kGreaterThanEqual,
1600                           kInterpreterAccumulatorRegister, FIRST_NONSTRING_TYPE,
1601                           scratch_scope.AcquireScratch(), &bad_instance_type,
1602                           Label::kNear);
1603 
1604       __ LoadRoot(kInterpreterAccumulatorRegister, RootIndex::kTrueValue);
1605       __ Jump(&done, Label::kNear);
1606 
1607       __ Bind(&is_smi);
1608       __ Bind(&bad_instance_type);
1609       __ LoadRoot(kInterpreterAccumulatorRegister, RootIndex::kFalseValue);
1610       break;
1611     }
1612     case interpreter::TestTypeOfFlags::LiteralFlag::kSymbol: {
1613       Label is_smi, bad_instance_type;
1614       __ JumpIfSmi(kInterpreterAccumulatorRegister, &is_smi, Label::kNear);
1615       __ JumpIfObjectType(Condition::kNotEqual, kInterpreterAccumulatorRegister,
1616                           SYMBOL_TYPE, scratch_scope.AcquireScratch(),
1617                           &bad_instance_type, Label::kNear);
1618 
1619       __ LoadRoot(kInterpreterAccumulatorRegister, RootIndex::kTrueValue);
1620       __ Jump(&done, Label::kNear);
1621 
1622       __ Bind(&is_smi);
1623       __ Bind(&bad_instance_type);
1624       __ LoadRoot(kInterpreterAccumulatorRegister, RootIndex::kFalseValue);
1625       break;
1626     }
1627     case interpreter::TestTypeOfFlags::LiteralFlag::kBoolean: {
1628       Label is_true, is_false;
1629       __ JumpIfRoot(kInterpreterAccumulatorRegister, RootIndex::kTrueValue,
1630                     &is_true, Label::kNear);
1631       __ JumpIfRoot(kInterpreterAccumulatorRegister, RootIndex::kFalseValue,
1632                     &is_false, Label::kNear);
1633 
1634       __ LoadRoot(kInterpreterAccumulatorRegister, RootIndex::kFalseValue);
1635       __ Jump(&done, Label::kNear);
1636 
1637       __ Bind(&is_true);
1638       __ Bind(&is_false);
1639       __ LoadRoot(kInterpreterAccumulatorRegister, RootIndex::kTrueValue);
1640       break;
1641     }
1642     case interpreter::TestTypeOfFlags::LiteralFlag::kBigInt: {
1643       Label is_smi, bad_instance_type;
1644       __ JumpIfSmi(kInterpreterAccumulatorRegister, &is_smi, Label::kNear);
1645       __ JumpIfObjectType(Condition::kNotEqual, kInterpreterAccumulatorRegister,
1646                           BIGINT_TYPE, scratch_scope.AcquireScratch(),
1647                           &bad_instance_type, Label::kNear);
1648 
1649       __ LoadRoot(kInterpreterAccumulatorRegister, RootIndex::kTrueValue);
1650       __ Jump(&done, Label::kNear);
1651 
1652       __ Bind(&is_smi);
1653       __ Bind(&bad_instance_type);
1654       __ LoadRoot(kInterpreterAccumulatorRegister, RootIndex::kFalseValue);
1655       break;
1656     }
1657     case interpreter::TestTypeOfFlags::LiteralFlag::kUndefined: {
1658       Label is_smi, is_null, not_undetectable;
1659       __ JumpIfSmi(kInterpreterAccumulatorRegister, &is_smi, Label::kNear);
1660 
1661       // null is undetectable, so test it explicitly, and return false.
1662       __ JumpIfRoot(kInterpreterAccumulatorRegister, RootIndex::kNullValue,
1663                     &is_null, Label::kNear);
1664 
1665       // All other undetectable maps are typeof undefined.
1666       Register map_bit_field = kInterpreterAccumulatorRegister;
1667       __ LoadMap(map_bit_field, kInterpreterAccumulatorRegister);
1668       __ LoadWord8Field(map_bit_field, map_bit_field, Map::kBitFieldOffset);
1669       __ TestAndBranch(map_bit_field, Map::Bits1::IsUndetectableBit::kMask,
1670                        Condition::kZero, &not_undetectable, Label::kNear);
1671 
1672       __ LoadRoot(kInterpreterAccumulatorRegister, RootIndex::kTrueValue);
1673       __ Jump(&done, Label::kNear);
1674 
1675       __ Bind(&is_smi);
1676       __ Bind(&is_null);
1677       __ Bind(&not_undetectable);
1678       __ LoadRoot(kInterpreterAccumulatorRegister, RootIndex::kFalseValue);
1679       break;
1680     }
1681     case interpreter::TestTypeOfFlags::LiteralFlag::kFunction: {
1682       Label is_smi, not_callable, undetectable;
1683       __ JumpIfSmi(kInterpreterAccumulatorRegister, &is_smi, Label::kNear);
1684 
1685       // Check if the map is callable but not undetectable.
1686       Register map_bit_field = kInterpreterAccumulatorRegister;
1687       __ LoadMap(map_bit_field, kInterpreterAccumulatorRegister);
1688       __ LoadWord8Field(map_bit_field, map_bit_field, Map::kBitFieldOffset);
1689       __ TestAndBranch(map_bit_field, Map::Bits1::IsCallableBit::kMask,
1690                        Condition::kZero, &not_callable, Label::kNear);
1691       __ TestAndBranch(map_bit_field, Map::Bits1::IsUndetectableBit::kMask,
1692                        Condition::kNotZero, &undetectable, Label::kNear);
1693 
1694       __ LoadRoot(kInterpreterAccumulatorRegister, RootIndex::kTrueValue);
1695       __ Jump(&done, Label::kNear);
1696 
1697       __ Bind(&is_smi);
1698       __ Bind(&not_callable);
1699       __ Bind(&undetectable);
1700       __ LoadRoot(kInterpreterAccumulatorRegister, RootIndex::kFalseValue);
1701       break;
1702     }
1703     case interpreter::TestTypeOfFlags::LiteralFlag::kObject: {
1704       Label is_smi, is_null, bad_instance_type, undetectable_or_callable;
1705       __ JumpIfSmi(kInterpreterAccumulatorRegister, &is_smi, Label::kNear);
1706 
1707       // If the object is null, return true.
1708       __ JumpIfRoot(kInterpreterAccumulatorRegister, RootIndex::kNullValue,
1709                     &is_null, Label::kNear);
1710 
1711       // If the object's instance type isn't within the range, return false.
1712       STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
1713       Register map = scratch_scope.AcquireScratch();
1714       __ JumpIfObjectType(Condition::kLessThan, kInterpreterAccumulatorRegister,
1715                           FIRST_JS_RECEIVER_TYPE, map, &bad_instance_type,
1716                           Label::kNear);
1717 
1718       // If the map is undetectable or callable, return false.
1719       Register map_bit_field = kInterpreterAccumulatorRegister;
1720       __ LoadWord8Field(map_bit_field, map, Map::kBitFieldOffset);
1721       __ TestAndBranch(map_bit_field,
1722                        Map::Bits1::IsUndetectableBit::kMask |
1723                            Map::Bits1::IsCallableBit::kMask,
1724                        Condition::kNotZero, &undetectable_or_callable,
1725                        Label::kNear);
1726 
1727       __ Bind(&is_null);
1728       __ LoadRoot(kInterpreterAccumulatorRegister, RootIndex::kTrueValue);
1729       __ Jump(&done, Label::kNear);
1730 
1731       __ Bind(&is_smi);
1732       __ Bind(&bad_instance_type);
1733       __ Bind(&undetectable_or_callable);
1734       __ LoadRoot(kInterpreterAccumulatorRegister, RootIndex::kFalseValue);
1735       break;
1736     }
1737     case interpreter::TestTypeOfFlags::LiteralFlag::kOther:
1738     default:
1739       UNREACHABLE();
1740   }
1741   __ Bind(&done);
1742 }
1743 
1744 void BaselineCompiler::VisitToName() {
1745   SaveAccumulatorScope save_accumulator(&basm_);
1746   CallBuiltin<Builtin::kToName>(kInterpreterAccumulatorRegister);
1747   StoreRegister(0, kInterpreterAccumulatorRegister);
1748 }
1749 
1750 void BaselineCompiler::VisitToNumber() {
1751   CallBuiltin<Builtin::kToNumber_Baseline>(kInterpreterAccumulatorRegister,
1752                                            Index(0));
1753 }
1754 
1755 void BaselineCompiler::VisitToNumeric() {
1756   CallBuiltin<Builtin::kToNumeric_Baseline>(kInterpreterAccumulatorRegister,
1757                                             Index(0));
1758 }
1759 
1760 void BaselineCompiler::VisitToObject() {
1761   SaveAccumulatorScope save_accumulator(&basm_);
1762   CallBuiltin<Builtin::kToObject>(kInterpreterAccumulatorRegister);
1763   StoreRegister(0, kInterpreterAccumulatorRegister);
1764 }
1765 
1766 void BaselineCompiler::VisitToString() {
1767   CallBuiltin<Builtin::kToString>(kInterpreterAccumulatorRegister);
1768 }
1769 
1770 void BaselineCompiler::VisitCreateRegExpLiteral() {
1771   CallBuiltin<Builtin::kCreateRegExpLiteral>(
1772       FeedbackVector(),         // feedback vector
1773       IndexAsTagged(1),         // slot
1774       Constant<HeapObject>(0),  // pattern
1775       FlagAsSmi(2));            // flags
1776 }
1777 
1778 void BaselineCompiler::VisitCreateArrayLiteral() {
1779   uint32_t flags = Flag(2);
1780   int32_t flags_raw = static_cast<int32_t>(
1781       interpreter::CreateArrayLiteralFlags::FlagsBits::decode(flags));
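  // Literals that support fast cloning use the shallow-clone builtin;
  // otherwise fall back to the runtime.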
1782   if (flags &
1783       interpreter::CreateArrayLiteralFlags::FastCloneSupportedBit::kMask) {
1784     CallBuiltin<Builtin::kCreateShallowArrayLiteral>(
1785         FeedbackVector(),          // feedback vector
1786         IndexAsTagged(1),          // slot
1787         Constant<HeapObject>(0),   // constant elements
1788         Smi::FromInt(flags_raw));  // flags
1789   } else {
1790     CallRuntime(Runtime::kCreateArrayLiteral,
1791                 FeedbackVector(),          // feedback vector
1792                 IndexAsTagged(1),          // slot
1793                 Constant<HeapObject>(0),   // constant elements
1794                 Smi::FromInt(flags_raw));  // flags
1795   }
1796 }
1797 
1798 void BaselineCompiler::VisitCreateArrayFromIterable() {
1799   CallBuiltin<Builtin::kIterableToListWithSymbolLookup>(
1800       kInterpreterAccumulatorRegister);  // iterable
1801 }
1802 
1803 void BaselineCompiler::VisitCreateEmptyArrayLiteral() {
1804   CallBuiltin<Builtin::kCreateEmptyArrayLiteral>(FeedbackVector(),
1805                                                  IndexAsTagged(0));
1806 }
1807 
1808 void BaselineCompiler::VisitCreateObjectLiteral() {
1809   uint32_t flags = Flag(2);
1810   int32_t flags_raw = static_cast<int32_t>(
1811       interpreter::CreateObjectLiteralFlags::FlagsBits::decode(flags));
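  // As for array literals, prefer the shallow-clone builtin when the flags
  // indicate that fast cloning is supported.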
1812   if (flags &
1813       interpreter::CreateObjectLiteralFlags::FastCloneSupportedBit::kMask) {
1814     CallBuiltin<Builtin::kCreateShallowObjectLiteral>(
1815         FeedbackVector(),                           // feedback vector
1816         IndexAsTagged(1),                           // slot
1817         Constant<ObjectBoilerplateDescription>(0),  // boilerplate
1818         Smi::FromInt(flags_raw));                   // flags
1819   } else {
1820     CallRuntime(Runtime::kCreateObjectLiteral,
1821                 FeedbackVector(),                           // feedback vector
1822                 IndexAsTagged(1),                           // slot
1823                 Constant<ObjectBoilerplateDescription>(0),  // boilerplate
1824                 Smi::FromInt(flags_raw));                   // flags
1825   }
1826 }
1827 
1828 void BaselineCompiler::VisitCreateEmptyObjectLiteral() {
1829   CallBuiltin<Builtin::kCreateEmptyLiteralObject>();
1830 }
1831 
1832 void BaselineCompiler::VisitCloneObject() {
1833   uint32_t flags = Flag(1);
1834   int32_t raw_flags =
1835       interpreter::CreateObjectLiteralFlags::FlagsBits::decode(flags);
1836   CallBuiltin<Builtin::kCloneObjectICBaseline>(
1837       RegisterOperand(0),       // source
1838       Smi::FromInt(raw_flags),  // flags
1839       IndexAsTagged(2));        // slot
1840 }
1841 
1842 void BaselineCompiler::VisitGetTemplateObject() {
1843   BaselineAssembler::ScratchRegisterScope scratch_scope(&basm_);
1844   CallBuiltin<Builtin::kGetTemplateObject>(
1845       shared_function_info_,    // shared function info
1846       Constant<HeapObject>(0),  // description
1847       Index(1),                 // slot
1848       FeedbackVector());        // feedback_vector
1849 }
1850 
1851 void BaselineCompiler::VisitCreateClosure() {
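  // The feedback cell for the new closure is loaded from the closure feedback
  // cell array at the index given by operand 1.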
1852   Register feedback_cell =
1853       FastNewClosureBaselineDescriptor::GetRegisterParameter(
1854           FastNewClosureBaselineDescriptor::kFeedbackCell);
1855   LoadClosureFeedbackArray(feedback_cell);
1856   __ LoadFixedArrayElement(feedback_cell, feedback_cell, Index(1));
1857 
1858   uint32_t flags = Flag(2);
1859   if (interpreter::CreateClosureFlags::FastNewClosureBit::decode(flags)) {
1860     CallBuiltin<Builtin::kFastNewClosureBaseline>(
1861         Constant<SharedFunctionInfo>(0), feedback_cell);
1862   } else {
1863     Runtime::FunctionId function_id =
1864         interpreter::CreateClosureFlags::PretenuredBit::decode(flags)
1865             ? Runtime::kNewClosure_Tenured
1866             : Runtime::kNewClosure;
1867     CallRuntime(function_id, Constant<SharedFunctionInfo>(0), feedback_cell);
1868   }
1869 }
1870 
1871 void BaselineCompiler::VisitCreateBlockContext() {
1872   CallRuntime(Runtime::kPushBlockContext, Constant<ScopeInfo>(0));
1873 }
1874 
1875 void BaselineCompiler::VisitCreateCatchContext() {
1876   CallRuntime(Runtime::kPushCatchContext,
1877               RegisterOperand(0),  // exception
1878               Constant<ScopeInfo>(1));
1879 }
1880 
1881 void BaselineCompiler::VisitCreateFunctionContext() {
1882   Handle<ScopeInfo> info = Constant<ScopeInfo>(0);
1883   uint32_t slot_count = Uint(1);
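  // Contexts with fewer slots than the stub supports are allocated by the
  // fast builtin; larger ones go through the runtime.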
1884   if (slot_count < static_cast<uint32_t>(
1885                        ConstructorBuiltins::MaximumFunctionContextSlots())) {
1886     DCHECK_EQ(info->scope_type(), ScopeType::FUNCTION_SCOPE);
1887     CallBuiltin<Builtin::kFastNewFunctionContextFunction>(info, slot_count);
1888   } else {
1889     CallRuntime(Runtime::kNewFunctionContext, Constant<ScopeInfo>(0));
1890   }
1891 }
1892 
1893 void BaselineCompiler::VisitCreateEvalContext() {
1894   Handle<ScopeInfo> info = Constant<ScopeInfo>(0);
1895   uint32_t slot_count = Uint(1);
1896   if (slot_count < static_cast<uint32_t>(
1897                        ConstructorBuiltins::MaximumFunctionContextSlots())) {
1898     DCHECK_EQ(info->scope_type(), ScopeType::EVAL_SCOPE);
1899     CallBuiltin<Builtin::kFastNewFunctionContextEval>(info, slot_count);
1900   } else {
1901     CallRuntime(Runtime::kNewFunctionContext, Constant<ScopeInfo>(0));
1902   }
1903 }
1904 
1905 void BaselineCompiler::VisitCreateWithContext() {
1906   CallRuntime(Runtime::kPushWithContext,
1907               RegisterOperand(0),  // object
1908               Constant<ScopeInfo>(1));
1909 }
1910 
1911 void BaselineCompiler::VisitCreateMappedArguments() {
1912   if (shared_function_info_->has_duplicate_parameters()) {
1913     CallRuntime(Runtime::kNewSloppyArguments, __ FunctionOperand());
1914   } else {
1915     CallBuiltin<Builtin::kFastNewSloppyArguments>(__ FunctionOperand());
1916   }
1917 }
1918 
1919 void BaselineCompiler::VisitCreateUnmappedArguments() {
1920   CallBuiltin<Builtin::kFastNewStrictArguments>(__ FunctionOperand());
1921 }
1922 
1923 void BaselineCompiler::VisitCreateRestParameter() {
1924   CallBuiltin<Builtin::kFastNewRestArguments>(__ FunctionOperand());
1925 }
1926 
1927 void BaselineCompiler::VisitJumpLoop() {
1928   Label osr_not_armed, osr;
1929   {
1930     BaselineAssembler::ScratchRegisterScope scope(&basm_);
1931     Register osr_urgency_and_install_target = scope.AcquireScratch();
1932 
1933     ASM_CODE_COMMENT_STRING(&masm_, "OSR Check Armed");
1934     __ LoadRegister(osr_urgency_and_install_target,
1935                     interpreter::Register::bytecode_array());
1936     __ LoadWord16FieldZeroExtend(
1937         osr_urgency_and_install_target, osr_urgency_and_install_target,
1938         BytecodeArray::kOsrUrgencyAndInstallTargetOffset);
1939     int loop_depth = iterator().GetImmediateOperand(1);
1940     __ JumpIfImmediate(Condition::kUnsignedLessThanEqual,
1941                        osr_urgency_and_install_target, loop_depth,
1942                        &osr_not_armed, Label::kNear);
1943 
1944     // TODO(jgruber): Move the extended checks into the
1945     // BaselineOnStackReplacement builtin.
1946 
1947     // OSR based on urgency, i.e. is the OSR urgency greater than the current
1948     // loop depth?
1949     STATIC_ASSERT(BytecodeArray::OsrUrgencyBits::kShift == 0);
1950     Register scratch2 = scope.AcquireScratch();
1951     __ Word32And(scratch2, osr_urgency_and_install_target,
1952                  BytecodeArray::OsrUrgencyBits::kMask);
1953     __ JumpIfImmediate(Condition::kUnsignedGreaterThan, scratch2, loop_depth,
1954                        &osr, Label::kNear);
1955 
1956     // OSR based on the install target offset, i.e. does the current bytecode
1957     // offset match the install target offset?
1958     static constexpr int kShift = BytecodeArray::OsrInstallTargetBits::kShift;
1959     static constexpr int kMask = BytecodeArray::OsrInstallTargetBits::kMask;
1960     const int encoded_current_offset =
1961         BytecodeArray::OsrInstallTargetFor(
1962             BytecodeOffset{iterator().current_offset()})
1963         << kShift;
1964     __ Word32And(scratch2, osr_urgency_and_install_target, kMask);
1965     __ JumpIfImmediate(Condition::kNotEqual, scratch2, encoded_current_offset,
1966                        &osr_not_armed, Label::kNear);
1967   }
1968 
1969   __ Bind(&osr);
1970   CallBuiltin<Builtin::kBaselineOnStackReplacement>();
1971 
1972   __ Bind(&osr_not_armed);
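  // For a back edge the relative jump offset is negative, so the weight
  // computed below (offset minus bytecode size) is negative as well.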
1973   Label* label = &labels_[iterator().GetJumpTargetOffset()]->unlinked;
1974   int weight = iterator().GetRelativeJumpTargetOffset() -
1975                iterator().current_bytecode_size_without_prefix();
1976   // We can pass in the same label twice since it's a back edge and thus already
1977   // bound.
1978   DCHECK(label->is_bound());
1979   UpdateInterruptBudgetAndJumpToLabel(weight, label, label);
1980 }
1981 
1982 void BaselineCompiler::VisitJump() {
1983   UpdateInterruptBudgetAndDoInterpreterJump();
1984 }
1985 
1986 void BaselineCompiler::VisitJumpConstant() { VisitJump(); }
1987 
1988 void BaselineCompiler::VisitJumpIfNullConstant() { VisitJumpIfNull(); }
1989 
1990 void BaselineCompiler::VisitJumpIfNotNullConstant() { VisitJumpIfNotNull(); }
1991 
1992 void BaselineCompiler::VisitJumpIfUndefinedConstant() {
1993   VisitJumpIfUndefined();
1994 }
1995 
1996 void BaselineCompiler::VisitJumpIfNotUndefinedConstant() {
1997   VisitJumpIfNotUndefined();
1998 }
1999 
2000 void BaselineCompiler::VisitJumpIfUndefinedOrNullConstant() {
2001   VisitJumpIfUndefinedOrNull();
2002 }
2003 
2004 void BaselineCompiler::VisitJumpIfTrueConstant() { VisitJumpIfTrue(); }
2005 
2006 void BaselineCompiler::VisitJumpIfFalseConstant() { VisitJumpIfFalse(); }
2007 
2008 void BaselineCompiler::VisitJumpIfJSReceiverConstant() {
2009   VisitJumpIfJSReceiver();
2010 }
2011 
2012 void BaselineCompiler::VisitJumpIfToBooleanTrueConstant() {
2013   VisitJumpIfToBooleanTrue();
2014 }
2015 
2016 void BaselineCompiler::VisitJumpIfToBooleanFalseConstant() {
2017   VisitJumpIfToBooleanFalse();
2018 }
2019 
2020 void BaselineCompiler::VisitJumpIfToBooleanTrue() {
2021   Label dont_jump;
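  // Only fall through to the interpreter jump when ToBoolean(accumulator) is
  // true; otherwise branch to dont_jump and skip it.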
2022   JumpIfToBoolean(false, &dont_jump, Label::kNear);
2023   UpdateInterruptBudgetAndDoInterpreterJump();
2024   __ Bind(&dont_jump);
2025 }
2026 
2027 void BaselineCompiler::VisitJumpIfToBooleanFalse() {
2028   Label dont_jump;
2029   JumpIfToBoolean(true, &dont_jump, Label::kNear);
2030   UpdateInterruptBudgetAndDoInterpreterJump();
2031   __ Bind(&dont_jump);
2032 }
2033 
2034 void BaselineCompiler::VisitJumpIfTrue() {
2035   UpdateInterruptBudgetAndDoInterpreterJumpIfRoot(RootIndex::kTrueValue);
2036 }
2037 
2038 void BaselineCompiler::VisitJumpIfFalse() {
2039   UpdateInterruptBudgetAndDoInterpreterJumpIfRoot(RootIndex::kFalseValue);
2040 }
2041 
2042 void BaselineCompiler::VisitJumpIfNull() {
2043   UpdateInterruptBudgetAndDoInterpreterJumpIfRoot(RootIndex::kNullValue);
2044 }
2045 
2046 void BaselineCompiler::VisitJumpIfNotNull() {
2047   UpdateInterruptBudgetAndDoInterpreterJumpIfNotRoot(RootIndex::kNullValue);
2048 }
2049 
2050 void BaselineCompiler::VisitJumpIfUndefined() {
2051   UpdateInterruptBudgetAndDoInterpreterJumpIfRoot(RootIndex::kUndefinedValue);
2052 }
2053 
2054 void BaselineCompiler::VisitJumpIfNotUndefined() {
2055   UpdateInterruptBudgetAndDoInterpreterJumpIfNotRoot(
2056       RootIndex::kUndefinedValue);
2057 }
2058 
2059 void BaselineCompiler::VisitJumpIfUndefinedOrNull() {
2060   Label do_jump, dont_jump;
2061   __ JumpIfRoot(kInterpreterAccumulatorRegister, RootIndex::kUndefinedValue,
2062                 &do_jump);
2063   __ JumpIfNotRoot(kInterpreterAccumulatorRegister, RootIndex::kNullValue,
2064                    &dont_jump, Label::kNear);
2065   __ Bind(&do_jump);
2066   UpdateInterruptBudgetAndDoInterpreterJump();
2067   __ Bind(&dont_jump);
2068 }
2069 
2070 void BaselineCompiler::VisitJumpIfJSReceiver() {
2071   BaselineAssembler::ScratchRegisterScope scratch_scope(&basm_);
2072 
2073   Label is_smi, dont_jump;
2074   __ JumpIfSmi(kInterpreterAccumulatorRegister, &is_smi, Label::kNear);
2075 
2076   __ JumpIfObjectType(Condition::kLessThan, kInterpreterAccumulatorRegister,
2077                       FIRST_JS_RECEIVER_TYPE, scratch_scope.AcquireScratch(),
2078                       &dont_jump);
2079   UpdateInterruptBudgetAndDoInterpreterJump();
2080 
2081   __ Bind(&is_smi);
2082   __ Bind(&dont_jump);
2083 }
2084 
2085 void BaselineCompiler::VisitSwitchOnSmiNoFeedback() {
2086   BaselineAssembler::ScratchRegisterScope scratch_scope(&basm_);
2087   interpreter::JumpTableTargetOffsets offsets =
2088       iterator().GetJumpTableTargetOffsets();
2089 
2090   if (offsets.size() == 0) return;
2091 
2092   int case_value_base = (*offsets.begin()).case_value;
2093 
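  // Build a dense table of jump targets indexed by (case value -
  // case_value_base), then dispatch on the untagged accumulator.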
2094   std::unique_ptr<Label*[]> labels = std::make_unique<Label*[]>(offsets.size());
2095   for (interpreter::JumpTableTargetOffset offset : offsets) {
2096     labels[offset.case_value - case_value_base] =
2097         &EnsureLabels(offset.target_offset)->unlinked;
2098   }
2099   Register case_value = scratch_scope.AcquireScratch();
2100   __ SmiUntag(case_value, kInterpreterAccumulatorRegister);
2101   __ Switch(case_value, case_value_base, labels.get(), offsets.size());
2102 }
2103 
2104 void BaselineCompiler::VisitForInEnumerate() {
2105   CallBuiltin<Builtin::kForInEnumerate>(RegisterOperand(0));
2106 }
2107 
2108 void BaselineCompiler::VisitForInPrepare() {
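  // Operand 0 names the output register triple: the value in the accumulator
  // (the enumerator produced by the preceding ForInEnumerate) is stored into
  // its first register, and the builtin's two return values fill the second
  // and third.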
2109   StoreRegister(0, kInterpreterAccumulatorRegister);
2110   CallBuiltin<Builtin::kForInPrepare>(kInterpreterAccumulatorRegister,
2111                                       IndexAsTagged(1), FeedbackVector());
2112   interpreter::Register first = iterator().GetRegisterOperand(0);
2113   interpreter::Register second(first.index() + 1);
2114   interpreter::Register third(first.index() + 2);
2115   __ StoreRegister(second, kReturnRegister0);
2116   __ StoreRegister(third, kReturnRegister1);
2117 }
2118 
2119 void BaselineCompiler::VisitForInContinue() {
2120   SelectBooleanConstant(kInterpreterAccumulatorRegister,
2121                         [&](Label* is_true, Label::Distance distance) {
2122                           LoadRegister(kInterpreterAccumulatorRegister, 0);
2123                           __ JumpIfTagged(
2124                               Condition::kNotEqual,
2125                               kInterpreterAccumulatorRegister,
2126                               __ RegisterFrameOperand(RegisterOperand(1)),
2127                               is_true, distance);
2128                         });
2129 }
2130 
2131 void BaselineCompiler::VisitForInNext() {
2132   interpreter::Register cache_type, cache_array;
2133   std::tie(cache_type, cache_array) = iterator().GetRegisterPairOperand(2);
2134   CallBuiltin<Builtin::kForInNext>(Index(3),            // vector slot
2135                                    RegisterOperand(0),  // object
2136                                    cache_array,         // cache array
2137                                    cache_type,          // cache type
2138                                    RegisterOperand(1),  // index
2139                                    FeedbackVector());   // feedback vector
2140 }
2141 
2142 void BaselineCompiler::VisitForInStep() {
2143   LoadRegister(kInterpreterAccumulatorRegister, 0);
2144   __ AddSmi(kInterpreterAccumulatorRegister, Smi::FromInt(1));
2145 }
2146 
2147 void BaselineCompiler::VisitSetPendingMessage() {
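  // Swap the accumulator with the isolate's pending message: the previous
  // message ends up in the accumulator and the old accumulator value becomes
  // the new pending message.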
2148   BaselineAssembler::ScratchRegisterScope scratch_scope(&basm_);
2149   Register pending_message = scratch_scope.AcquireScratch();
2150   __ Move(pending_message,
2151           ExternalReference::address_of_pending_message(local_isolate_));
2152   Register tmp = scratch_scope.AcquireScratch();
2153   __ Move(tmp, kInterpreterAccumulatorRegister);
2154   __ Move(kInterpreterAccumulatorRegister, MemOperand(pending_message, 0));
2155   __ Move(MemOperand(pending_message, 0), tmp);
2156 }
2157 
2158 void BaselineCompiler::VisitThrow() {
2159   CallRuntime(Runtime::kThrow, kInterpreterAccumulatorRegister);
2160   __ Trap();
2161 }
2162 
2163 void BaselineCompiler::VisitReThrow() {
2164   CallRuntime(Runtime::kReThrow, kInterpreterAccumulatorRegister);
2165   __ Trap();
2166 }
2167 
2168 void BaselineCompiler::VisitReturn() {
2169   ASM_CODE_COMMENT_STRING(&masm_, "Return");
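  // The profiling weight is the current bytecode offset plus the size of this
  // Return; it is passed negated to the BaselineLeaveFrame builtin.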
2170   int profiling_weight = iterator().current_offset() +
2171                          iterator().current_bytecode_size_without_prefix();
2172   int parameter_count = bytecode_->parameter_count();
2173 
2174   TailCallBuiltin<Builtin::kBaselineLeaveFrame>(parameter_count,
2175                                                 -profiling_weight);
2176 }
2177 
2178 void BaselineCompiler::VisitThrowReferenceErrorIfHole() {
2179   Label done;
2180   __ JumpIfNotRoot(kInterpreterAccumulatorRegister, RootIndex::kTheHoleValue,
2181                    &done);
2182   CallRuntime(Runtime::kThrowAccessedUninitializedVariable, Constant<Name>(0));
2183   // Unreachable.
2184   __ Trap();
2185   __ Bind(&done);
2186 }
2187 
2188 void BaselineCompiler::VisitThrowSuperNotCalledIfHole() {
2189   Label done;
2190   __ JumpIfNotRoot(kInterpreterAccumulatorRegister, RootIndex::kTheHoleValue,
2191                    &done);
2192   CallRuntime(Runtime::kThrowSuperNotCalled);
2193   // Unreachable.
2194   __ Trap();
2195   __ Bind(&done);
2196 }
2197 
2198 void BaselineCompiler::VisitThrowSuperAlreadyCalledIfNotHole() {
2199   Label done;
2200   __ JumpIfRoot(kInterpreterAccumulatorRegister, RootIndex::kTheHoleValue,
2201                 &done);
2202   CallRuntime(Runtime::kThrowSuperAlreadyCalledError);
2203   // Unreachable.
2204   __ Trap();
2205   __ Bind(&done);
2206 }
2207 
2208 void BaselineCompiler::VisitThrowIfNotSuperConstructor() {
2209   Label done;
2210 
2211   BaselineAssembler::ScratchRegisterScope scratch_scope(&basm_);
2212   Register reg = scratch_scope.AcquireScratch();
2213   LoadRegister(reg, 0);
2214   Register map_bit_field = scratch_scope.AcquireScratch();
2215   __ LoadMap(map_bit_field, reg);
2216   __ LoadWord8Field(map_bit_field, map_bit_field, Map::kBitFieldOffset);
2217   __ TestAndBranch(map_bit_field, Map::Bits1::IsConstructorBit::kMask,
2218                    Condition::kNotZero, &done, Label::kNear);
2219 
2220   CallRuntime(Runtime::kThrowNotSuperConstructor, reg, __ FunctionOperand());
2221 
2222   __ Bind(&done);
2223 }
2224 
2225 void BaselineCompiler::VisitSwitchOnGeneratorState() {
2226   BaselineAssembler::ScratchRegisterScope scratch_scope(&basm_);
2227 
2228   Label fallthrough;
2229 
2230   Register generator_object = scratch_scope.AcquireScratch();
2231   LoadRegister(generator_object, 0);
2232   __ JumpIfRoot(generator_object, RootIndex::kUndefinedValue, &fallthrough);
2233 
2234   Register continuation = scratch_scope.AcquireScratch();
2235   __ LoadTaggedAnyField(continuation, generator_object,
2236                         JSGeneratorObject::kContinuationOffset);
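  // Mark the generator as executing by overwriting the continuation with the
  // kGeneratorExecuting sentinel.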
2237   __ StoreTaggedSignedField(
2238       generator_object, JSGeneratorObject::kContinuationOffset,
2239       Smi::FromInt(JSGeneratorObject::kGeneratorExecuting));
2240 
2241   Register context = scratch_scope.AcquireScratch();
2242   __ LoadTaggedAnyField(context, generator_object,
2243                         JSGeneratorObject::kContextOffset);
2244   __ StoreContext(context);
2245 
2246   interpreter::JumpTableTargetOffsets offsets =
2247       iterator().GetJumpTableTargetOffsets();
2248 
2249   if (0 < offsets.size()) {
2250     DCHECK_EQ(0, (*offsets.begin()).case_value);
2251 
2252     std::unique_ptr<Label*[]> labels =
2253         std::make_unique<Label*[]>(offsets.size());
2254     for (interpreter::JumpTableTargetOffset offset : offsets) {
2255       labels[offset.case_value] = &EnsureLabels(offset.target_offset)->unlinked;
2256     }
2257     __ SmiUntag(continuation);
2258     __ Switch(continuation, 0, labels.get(), offsets.size());
2259     // We should never fall through this switch.
2260     // TODO(v8:11429,leszeks): Maybe remove the fallthrough check in the Switch?
2261     __ Trap();
2262   }
2263 
2264   __ Bind(&fallthrough);
2265 }
2266 
2267 void BaselineCompiler::VisitSuspendGenerator() {
2268   DCHECK_EQ(iterator().GetRegisterOperand(1), interpreter::Register(0));
2269   BaselineAssembler::ScratchRegisterScope scratch_scope(&basm_);
2270   Register generator_object = scratch_scope.AcquireScratch();
2271   LoadRegister(generator_object, 0);
2272   {
2273     SaveAccumulatorScope accumulator_scope(&basm_);
2274 
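    // Pass the suspend id, the bytecode offset of this suspend (biased by the
    // bytecode array header), and the number of live registers to the suspend
    // builtin.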
2275     int bytecode_offset =
2276         BytecodeArray::kHeaderSize + iterator().current_offset();
2277     CallBuiltin<Builtin::kSuspendGeneratorBaseline>(
2278         generator_object,
2279         static_cast<int>(Uint(3)),  // suspend_id
2280         bytecode_offset,
2281         static_cast<int>(RegisterCount(2)));  // register_count
2282   }
2283   VisitReturn();
2284 }
2285 
2286 void BaselineCompiler::VisitResumeGenerator() {
2287   DCHECK_EQ(iterator().GetRegisterOperand(1), interpreter::Register(0));
2288   BaselineAssembler::ScratchRegisterScope scratch_scope(&basm_);
2289   Register generator_object = scratch_scope.AcquireScratch();
2290   LoadRegister(generator_object, 0);
2291   CallBuiltin<Builtin::kResumeGeneratorBaseline>(
2292       generator_object,
2293       static_cast<int>(RegisterCount(2)));  // register_count
2294 }
2295 
2296 void BaselineCompiler::VisitGetIterator() {
2297   CallBuiltin<Builtin::kGetIteratorBaseline>(RegisterOperand(0),  // receiver
2298                                              IndexAsTagged(1),    // load_slot
2299                                              IndexAsTagged(2));   // call_slot
2300 }
2301 
2302 void BaselineCompiler::VisitDebugger() {
2303   SaveAccumulatorScope accumulator_scope(&basm_);
2304   CallRuntime(Runtime::kHandleDebuggerStatement);
2305 }
2306 
2307 void BaselineCompiler::VisitIncBlockCounter() {
2308   SaveAccumulatorScope accumulator_scope(&basm_);
2309   CallBuiltin<Builtin::kIncBlockCounter>(__ FunctionOperand(),
2310                                          IndexAsSmi(0));  // coverage array slot
2311 }
2312 
2313 void BaselineCompiler::VisitAbort() {
2314   CallRuntime(Runtime::kAbort, Smi::FromInt(Index(0)));
2315   __ Trap();
2316 }
2317 
2318 void BaselineCompiler::VisitWide() {
2319   // Consumed by the BytecodeArrayIterator.
2320   UNREACHABLE();
2321 }
2322 
2323 void BaselineCompiler::VisitExtraWide() {
2324   // Consumed by the BytecodeArrayIterator.
2325   UNREACHABLE();
2326 }
2327 
2328 void BaselineCompiler::VisitIllegal() {
2329   // Not emitted in valid bytecode.
2330   UNREACHABLE();
2331 }
2332 #define DEBUG_BREAK(Name, ...) \
2333   void BaselineCompiler::Visit##Name() { UNREACHABLE(); }
2334 DEBUG_BREAK_BYTECODE_LIST(DEBUG_BREAK)
2335 #undef DEBUG_BREAK
2336 
2337 }  // namespace baseline
2338 }  // namespace internal
2339 }  // namespace v8
2340 
2341 #endif  // ENABLE_SPARKPLUG
2342