// Copyright 2021 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef V8_BASELINE_IA32_BASELINE_COMPILER_IA32_INL_H_
#define V8_BASELINE_IA32_BASELINE_COMPILER_IA32_INL_H_

#include "src/base/macros.h"
#include "src/baseline/baseline-compiler.h"
#include "src/codegen/interface-descriptors.h"

namespace v8 {
namespace internal {
namespace baseline {

#define __ basm_.

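// Emits the baseline prologue: calls the out-of-line prologue builtin to set
// up the frame, then fills the interpreter register frame inline.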
void BaselineCompiler::Prologue() {
  DCHECK_EQ(kJSFunctionRegister, kJavaScriptCallTargetRegister);
  int max_frame_size =
      bytecode_->frame_size() + max_call_args_ * kSystemPointerSize;
  CallBuiltin<Builtin::kBaselineOutOfLinePrologue>(
      kContextRegister, kJSFunctionRegister, kJavaScriptCallArgCountRegister,
      max_frame_size, kJavaScriptCallNewTargetRegister, bytecode_);

  PrologueFillFrame();
}

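// Fills the interpreter register frame with the undefined value held in the
// accumulator at this point, placing new_target (or the generator object) in
// its designated slot when present.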
void BaselineCompiler::PrologueFillFrame() {
  ASM_CODE_COMMENT(&masm_);
  // Inlined register frame fill
  interpreter::Register new_target_or_generator_register =
      bytecode_->incoming_new_target_or_generator_register();
  if (FLAG_debug_code) {
    __ masm()->CompareRoot(kInterpreterAccumulatorRegister,
                           RootIndex::kUndefinedValue);
    __ masm()->Assert(equal, AbortReason::kUnexpectedValue);
  }
  int register_count = bytecode_->register_count();
  // Magic value: unroll factor for the register frame fill loop below.
  const int kLoopUnrollSize = 8;
  const int new_target_index = new_target_or_generator_register.index();
  const bool has_new_target = new_target_index != kMaxInt;
  if (has_new_target) {
    DCHECK_LE(new_target_index, register_count);
    for (int i = 0; i < new_target_index; i++) {
      __ Push(kInterpreterAccumulatorRegister);
    }
    // Push new_target_or_generator.
    __ Push(kJavaScriptCallNewTargetRegister);
    register_count -= new_target_index + 1;
  }
  if (register_count < 2 * kLoopUnrollSize) {
    // If the frame is small enough, just unroll the frame fill completely.
    for (int i = 0; i < register_count; ++i) {
      __ Push(kInterpreterAccumulatorRegister);
    }
  } else {
    // Extract the first few registers to round to the unroll size.
    int first_registers = register_count % kLoopUnrollSize;
    for (int i = 0; i < first_registers; ++i) {
      __ Push(kInterpreterAccumulatorRegister);
    }
    BaselineAssembler::ScratchRegisterScope scope(&basm_);
    Register scratch = scope.AcquireScratch();
    __ Move(scratch, register_count / kLoopUnrollSize);
    // We enter the loop unconditionally, so make sure we need to loop at least
    // once.
    DCHECK_GT(register_count / kLoopUnrollSize, 0);
    Label loop;
    __ Bind(&loop);
    for (int i = 0; i < kLoopUnrollSize; ++i) {
      __ Push(kInterpreterAccumulatorRegister);
    }
    __ masm()->dec(scratch);
    __ masm()->j(greater, &loop);
  }
}

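// Debug check that the stack pointer matches the frame size recorded for the
// bytecode; eax is preserved in xmm0 across the check since it is used as a
// scratch register here.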
void BaselineCompiler::VerifyFrameSize() {
  __ masm()->movd(xmm0, eax);
  __ Move(eax, esp);
  __ masm()->add(eax,
                 Immediate(InterpreterFrameConstants::kFixedFrameSizeFromFp +
                           bytecode_->frame_size()));
  __ masm()->cmp(eax, ebp);
  __ masm()->Assert(equal, AbortReason::kUnexpectedStackPointer);
  __ masm()->movd(eax, xmm0);
}

#undef __

}  // namespace baseline
}  // namespace internal
}  // namespace v8

#endif  // V8_BASELINE_IA32_BASELINE_COMPILER_IA32_INL_H_