• Home
  • Line#
  • Scopes#
  • Navigate#
  • Raw
  • Download
// Copyright 2021 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef V8_BASELINE_BASELINE_ASSEMBLER_H_
#define V8_BASELINE_BASELINE_ASSEMBLER_H_

// TODO(v8:11421): Remove #if once baseline compiler is ported to other
// architectures.
#include "src/flags/flags.h"
#if ENABLE_SPARKPLUG

#include "src/codegen/macro-assembler.h"
#include "src/objects/tagged-index.h"

namespace v8 {
namespace internal {
namespace baseline {

enum class Condition : uint32_t;

22 class BaselineAssembler {
23  public:
24   class ScratchRegisterScope;
25 
BaselineAssembler(MacroAssembler * masm)26   explicit BaselineAssembler(MacroAssembler* masm) : masm_(masm) {}
27   inline static MemOperand RegisterFrameOperand(
28       interpreter::Register interpreter_register);
29   inline void RegisterFrameAddress(interpreter::Register interpreter_register,
30                                    Register rscratch);
31   inline MemOperand ContextOperand();
32   inline MemOperand FunctionOperand();
33   inline MemOperand FeedbackVectorOperand();
34 
35   inline void GetCode(Isolate* isolate, CodeDesc* desc);
36   inline int pc_offset() const;
37   inline void CodeEntry() const;
38   inline void ExceptionHandler() const;
39   V8_INLINE void RecordComment(const char* string);
40   inline void Trap();
41   inline void DebugBreak();
42 
43   inline void Bind(Label* label);
44   // Binds the label without marking it as a valid jump target.
45   // This is only useful, when the position is already marked as a valid jump
46   // target (i.e. at the beginning of the bytecode).
47   inline void BindWithoutJumpTarget(Label* label);
48   // Marks the current position as a valid jump target on CFI enabled
49   // architectures.
50   inline void JumpTarget();
51   inline void Jump(Label* target, Label::Distance distance = Label::kFar);
52   inline void JumpIfRoot(Register value, RootIndex index, Label* target,
53                          Label::Distance distance = Label::kFar);
54   inline void JumpIfNotRoot(Register value, RootIndex index, Label* target,
55                             Label ::Distance distance = Label::kFar);
56   inline void JumpIfSmi(Register value, Label* target,
57                         Label::Distance distance = Label::kFar);
58   inline void JumpIfNotSmi(Register value, Label* target,
59                            Label::Distance distance = Label::kFar);
60 
61   inline void TestAndBranch(Register value, int mask, Condition cc,
62                             Label* target,
63                             Label::Distance distance = Label::kFar);
64 
65   inline void JumpIf(Condition cc, Register lhs, const Operand& rhs,
66                      Label* target, Label::Distance distance = Label::kFar);
67   inline void JumpIfObjectType(Condition cc, Register object,
68                                InstanceType instance_type, Register map,
69                                Label* target,
70                                Label::Distance distance = Label::kFar);
71   inline void JumpIfInstanceType(Condition cc, Register map,
72                                  InstanceType instance_type, Label* target,
73                                  Label::Distance distance = Label::kFar);
74   inline void JumpIfPointer(Condition cc, Register value, MemOperand operand,
75                             Label* target,
76                             Label::Distance distance = Label::kFar);
77   inline Condition CheckSmi(Register value);
78   inline void JumpIfSmi(Condition cc, Register value, Smi smi, Label* target,
79                         Label::Distance distance = Label::kFar);
80   inline void JumpIfSmi(Condition cc, Register lhs, Register rhs, Label* target,
81                         Label::Distance distance = Label::kFar);
82   inline void JumpIfImmediate(Condition cc, Register left, int right,
83                               Label* target,
84                               Label::Distance distance = Label::kFar);
85   inline void JumpIfTagged(Condition cc, Register value, MemOperand operand,
86                            Label* target,
87                            Label::Distance distance = Label::kFar);
88   inline void JumpIfTagged(Condition cc, MemOperand operand, Register value,
89                            Label* target,
90                            Label::Distance distance = Label::kFar);
91   inline void JumpIfByte(Condition cc, Register value, int32_t byte,
92                          Label* target, Label::Distance distance = Label::kFar);
93 
94   inline void LoadMap(Register output, Register value);
95   inline void LoadRoot(Register output, RootIndex index);
96   inline void LoadNativeContextSlot(Register output, uint32_t index);
97 
98   inline void Move(Register output, Register source);
99   inline void Move(Register output, MemOperand operand);
100   inline void Move(Register output, Smi value);
101   inline void Move(Register output, TaggedIndex value);
102   inline void Move(Register output, interpreter::Register source);
103   inline void Move(interpreter::Register output, Register source);
104   inline void Move(Register output, RootIndex source);
105   inline void Move(MemOperand output, Register source);
106   inline void Move(Register output, ExternalReference reference);
107   inline void Move(Register output, Handle<HeapObject> value);
108   inline void Move(Register output, int32_t immediate);
109   inline void MoveMaybeSmi(Register output, Register source);
110   inline void MoveSmi(Register output, Register source);
111 
112   // Push the given values, in the given order. If the stack needs alignment
113   // (looking at you Arm64), the stack is padded from the front (i.e. before the
114   // first value is pushed).
115   //
116   // This supports pushing a RegisterList as the last value -- the list is
117   // iterated and each interpreter Register is pushed.
118   //
119   // The total number of values pushed is returned. Note that this might be
120   // different from sizeof(T...), specifically if there was a RegisterList.
121   template <typename... T>
122   inline int Push(T... vals);
123 
124   // Like Push(vals...), but pushes in reverse order, to support our reversed
125   // order argument JS calling convention. Doesn't return the number of
126   // arguments pushed though.
127   //
128   // Note that padding is still inserted before the first pushed value (i.e. the
129   // last value).
130   template <typename... T>
131   inline void PushReverse(T... vals);
132 
133   // Pop values off the stack into the given registers.
134   //
135   // Note that this inserts into registers in the given order, i.e. in reverse
136   // order if the registers were pushed. This means that to spill registers,
137   // push and pop have to be in reverse order, e.g.
138   //
139   //     Push(r1, r2, ..., rN);
140   //     ClobberRegisters();
141   //     Pop(rN, ..., r2, r1);
142   //
143   // On stack-alignment architectures, any padding is popped off after the last
144   // register. This the behaviour of Push, which means that the above code still
145   // works even if the number of registers doesn't match stack alignment.
146   template <typename... T>
147   inline void Pop(T... registers);
148 
149   inline void CallBuiltin(Builtin builtin);
150   inline void TailCallBuiltin(Builtin builtin);
151   inline void CallRuntime(Runtime::FunctionId function, int nargs);
152 
153   inline void LoadTaggedPointerField(Register output, Register source,
154                                      int offset);
155   inline void LoadTaggedSignedField(Register output, Register source,
156                                     int offset);
157   inline void LoadTaggedAnyField(Register output, Register source, int offset);
158   inline void LoadWord16FieldZeroExtend(Register output, Register source,
159                                         int offset);
160   inline void LoadWord8Field(Register output, Register source, int offset);
161   inline void StoreTaggedSignedField(Register target, int offset, Smi value);
162   inline void StoreTaggedFieldWithWriteBarrier(Register target, int offset,
163                                                Register value);
164   inline void StoreTaggedFieldNoWriteBarrier(Register target, int offset,
165                                              Register value);
166   inline void LoadFixedArrayElement(Register output, Register array,
167                                     int32_t index);
168   inline void LoadPrototype(Register prototype, Register object);
169 
170   // Loads the feedback cell from the function, and sets flags on add so that
171   // we can compare afterward.
172   inline void AddToInterruptBudgetAndJumpIfNotExceeded(
173       int32_t weight, Label* skip_interrupt_label);
174   inline void AddToInterruptBudgetAndJumpIfNotExceeded(
175       Register weight, Label* skip_interrupt_label);
176 
177   inline void AddSmi(Register lhs, Smi rhs);
178   inline void SmiUntag(Register value);
179   inline void SmiUntag(Register output, Register value);
180 
181   inline void Word32And(Register output, Register lhs, int rhs);
182 
183   inline void Switch(Register reg, int case_value_base, Label** labels,
184                      int num_labels);
185 
186   // Register operands.
187   inline void LoadRegister(Register output, interpreter::Register source);
188   inline void StoreRegister(interpreter::Register output, Register value);
189 
190   // Frame values
191   inline void LoadFunction(Register output);
192   inline void LoadContext(Register output);
193   inline void StoreContext(Register context);
194 
195   inline static void EmitReturn(MacroAssembler* masm);
196 
masm()197   MacroAssembler* masm() { return masm_; }
198 
199  private:
200   MacroAssembler* masm_;
201   ScratchRegisterScope* scratch_register_scope_ = nullptr;
202 };
203 
204 class SaveAccumulatorScope final {
205  public:
206   inline explicit SaveAccumulatorScope(BaselineAssembler* assembler);
207 
208   inline ~SaveAccumulatorScope();
209 
210  private:
211   BaselineAssembler* assembler_;
212 };
213 
214 class EnsureAccumulatorPreservedScope final {
215  public:
216   inline explicit EnsureAccumulatorPreservedScope(BaselineAssembler* assembler);
217 
218   inline ~EnsureAccumulatorPreservedScope();
219 
220  private:
221   inline void AssertEqualToAccumulator(Register reg);
222 
223   BaselineAssembler* assembler_;
224 #ifdef V8_CODE_COMMENTS
225   Assembler::CodeComment comment_;
226 #endif
227 };

}  // namespace baseline
}  // namespace internal
}  // namespace v8

#endif

#endif  // V8_BASELINE_BASELINE_ASSEMBLER_H_