• Home
  • Line#
  • Scopes#
  • Navigate#
  • Raw
  • Download
// Copyright 2021 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
4 
5 #ifndef V8_BASELINE_IA32_BASELINE_ASSEMBLER_IA32_INL_H_
6 #define V8_BASELINE_IA32_BASELINE_ASSEMBLER_IA32_INL_H_
7 
8 #include "src/baseline/baseline-assembler.h"
9 #include "src/codegen/ia32/register-ia32.h"
10 #include "src/codegen/interface-descriptors.h"
11 
12 namespace v8 {
13 namespace internal {
14 namespace baseline {
15 
namespace detail {

// Registers handed out by ScratchRegisterScope::AcquireScratch(), in
// allocation order. Note eax/ebx are not in this list.
static constexpr Register kScratchRegisters[] = {ecx, edx, esi, edi};
static constexpr int kNumScratchRegisters = arraysize(kScratchRegisters);

}  // namespace detail
22 
23 class BaselineAssembler::ScratchRegisterScope {
24  public:
ScratchRegisterScope(BaselineAssembler * assembler)25   explicit ScratchRegisterScope(BaselineAssembler* assembler)
26       : assembler_(assembler),
27         prev_scope_(assembler->scratch_register_scope_),
28         registers_used_(prev_scope_ == nullptr ? 0
29                                                : prev_scope_->registers_used_) {
30     assembler_->scratch_register_scope_ = this;
31   }
~ScratchRegisterScope()32   ~ScratchRegisterScope() { assembler_->scratch_register_scope_ = prev_scope_; }
33 
AcquireScratch()34   Register AcquireScratch() {
35     DCHECK_LT(registers_used_, detail::kNumScratchRegisters);
36     return detail::kScratchRegisters[registers_used_++];
37   }
38 
39  private:
40   BaselineAssembler* assembler_;
41   ScratchRegisterScope* prev_scope_;
42   int registers_used_;
43 };
44 
// TODO(v8:11461): Unify condition names in the MacroAssembler.
// Platform-independent condition names. Each enumerator aliases the ia32
// assembler condition with the same meaning, so AsMasmCondition below can be
// a plain value-preserving cast.
enum class Condition : uint32_t {
  kEqual = equal,
  kNotEqual = not_equal,

  // Signed comparisons.
  kLessThan = less,
  kGreaterThan = greater,
  kLessThanEqual = less_equal,
  kGreaterThanEqual = greater_equal,

  // Unsigned comparisons.
  kUnsignedLessThan = below,
  kUnsignedGreaterThan = above,
  kUnsignedLessThanEqual = below_equal,
  kUnsignedGreaterThanEqual = above_equal,

  kOverflow = overflow,
  kNoOverflow = no_overflow,

  kZero = zero,
  kNotZero = not_zero,
};
66 
// Converts a baseline Condition to the ia32 assembler condition. The
// enumerator values above are chosen to match, so this is just a cast.
inline internal::Condition AsMasmCondition(Condition cond) {
  return static_cast<internal::Condition>(cond);
}
70 
namespace detail {

#define __ masm_->

#ifdef DEBUG
// Debug-only helper: true when |op| is a register operand referring to
// |target|, i.e. writing |target| would clobber the value |op| names.
inline bool Clobbers(Register target, MemOperand op) {
  return op.is_reg(target);
}
#endif

}  // namespace detail
82 
// Returns the ebp-relative operand for an interpreter register's slot in the
// baseline frame (one system pointer per register slot).
MemOperand BaselineAssembler::RegisterFrameOperand(
    interpreter::Register interpreter_register) {
  return MemOperand(ebp, interpreter_register.ToOperand() * kSystemPointerSize);
}
RegisterFrameAddress(interpreter::Register interpreter_register,Register rscratch)87 void BaselineAssembler::RegisterFrameAddress(
88     interpreter::Register interpreter_register, Register rscratch) {
89   return __ lea(rscratch, MemOperand(ebp, interpreter_register.ToOperand() *
90                                               kSystemPointerSize));
91 }
// Frame slot holding the feedback vector in baseline frames.
MemOperand BaselineAssembler::FeedbackVectorOperand() {
  return MemOperand(ebp, BaselineFrameConstants::kFeedbackVectorFromFp);
}

void BaselineAssembler::Bind(Label* label) { __ bind(label); }
// ia32 has no separate jump-target marking (see JumpTarget below), so this is
// identical to Bind.
void BaselineAssembler::BindWithoutJumpTarget(Label* label) { __ bind(label); }
98 
void BaselineAssembler::JumpTarget() {
  // NOP on ia32.
}

// Unconditional jump to |target|.
void BaselineAssembler::Jump(Label* target, Label::Distance distance) {
  __ jmp(target, distance);
}

// Jumps to |target| when |value| is the root object at |index|.
void BaselineAssembler::JumpIfRoot(Register value, RootIndex index,
                                   Label* target, Label::Distance distance) {
  __ JumpIfRoot(value, index, target, distance);
}

// Jumps to |target| when |value| is not the root object at |index|.
void BaselineAssembler::JumpIfNotRoot(Register value, RootIndex index,
                                      Label* target, Label::Distance distance) {
  __ JumpIfNotRoot(value, index, target, distance);
}

// Jumps to |target| when |value| carries a Smi tag.
void BaselineAssembler::JumpIfSmi(Register value, Label* target,
                                  Label::Distance distance) {
  __ JumpIfSmi(value, target, distance);
}
121 
// Compares |left| against the immediate |right| and jumps to |target| when
// |cc| holds for (left <cc> right).
void BaselineAssembler::JumpIfImmediate(Condition cc, Register left, int right,
                                        Label* target,
                                        Label::Distance distance) {
  __ cmp(left, Immediate(right));
  __ j(AsMasmCondition(cc), target, distance);
}

// Jumps to |target| when |value| does not carry a Smi tag.
void BaselineAssembler::JumpIfNotSmi(Register value, Label* target,
                                     Label::Distance distance) {
  __ JumpIfNotSmi(value, target, distance);
}
133 
// Calls |builtin| through its off-heap builtin entry table slot.
void BaselineAssembler::CallBuiltin(Builtin builtin) {
  ASM_CODE_COMMENT_STRING(masm_,
                          __ CommentForOffHeapTrampoline("call", builtin));
  __ Call(__ EntryFromBuiltinAsOperand(builtin));
}

// Tail-calls |builtin| through its off-heap builtin entry table slot.
void BaselineAssembler::TailCallBuiltin(Builtin builtin) {
  ASM_CODE_COMMENT_STRING(masm_,
                          __ CommentForOffHeapTrampoline("tail call", builtin));
  __ jmp(__ EntryFromBuiltinAsOperand(builtin));
}
145 
// ANDs |value| with |mask| (discarding the result) and branches on the flag
// outcome. Masks that fit in one byte use the shorter test_b encoding.
void BaselineAssembler::TestAndBranch(Register value, int mask, Condition cc,
                                      Label* target, Label::Distance distance) {
  if ((mask & 0xff) == mask) {
    __ test_b(value, Immediate(mask));
  } else {
    __ test(value, Immediate(mask));
  }
  __ j(AsMasmCondition(cc), target, distance);
}

// Compares |lhs| against |rhs| and jumps to |target| when |cc| holds.
void BaselineAssembler::JumpIf(Condition cc, Register lhs, const Operand& rhs,
                               Label* target, Label::Distance distance) {
  __ cmp(lhs, rhs);
  __ j(AsMasmCondition(cc), target, distance);
}
// Loads |object|'s map into |map|, compares its instance type against
// |instance_type|, and jumps to |target| when |cc| holds. |object| must not
// be a Smi.
void BaselineAssembler::JumpIfObjectType(Condition cc, Register object,
                                         InstanceType instance_type,
                                         Register map, Label* target,
                                         Label::Distance distance) {
  __ AssertNotSmi(object);
  __ CmpObjectType(object, instance_type, map);
  __ j(AsMasmCondition(cc), target, distance);
}

// Compares the instance type stored in the map in |map| against
// |instance_type| and jumps to |target| when |cc| holds.
void BaselineAssembler::JumpIfInstanceType(Condition cc, Register map,
                                           InstanceType instance_type,
                                           Label* target,
                                           Label::Distance distance) {
  if (FLAG_debug_code) {
    // Debug-only: verify that |map| actually holds a Map. CmpObjectType needs
    // a GPR scratch, so eax is preserved in xmm0 around the check to keep the
    // verification free of observable side effects.
    __ movd(xmm0, eax);
    __ AssertNotSmi(map);
    __ CmpObjectType(map, MAP_TYPE, eax);
    __ Assert(equal, AbortReason::kUnexpectedValue);
    __ movd(eax, xmm0);
  }
  __ CmpInstanceType(map, instance_type);
  __ j(AsMasmCondition(cc), target, distance);
}
// Full-word comparison of |value| against the word at |operand|.
void BaselineAssembler::JumpIfPointer(Condition cc, Register value,
                                      MemOperand operand, Label* target,
                                      Label::Distance distance) {
  JumpIf(cc, value, operand, target, distance);
}

// Compares |value| against the Smi constant |smi|. Comparing against zero
// uses the shorter `test value, value` encoding.
void BaselineAssembler::JumpIfSmi(Condition cc, Register value, Smi smi,
                                  Label* target, Label::Distance distance) {
  if (smi.value() == 0) {
    __ test(value, value);
  } else {
    __ cmp(value, Immediate(smi));
  }
  __ j(AsMasmCondition(cc), target, distance);
}

// Compares two registers that are both expected to hold Smis.
void BaselineAssembler::JumpIfSmi(Condition cc, Register lhs, Register rhs,
                                  Label* target, Label::Distance distance) {
  __ AssertSmi(lhs);
  __ AssertSmi(rhs);
  __ cmp(lhs, rhs);
  __ j(AsMasmCondition(cc), target, distance);
}
JumpIfTagged(Condition cc,Register value,MemOperand operand,Label * target,Label::Distance distance)204 void BaselineAssembler::JumpIfTagged(Condition cc, Register value,
205                                      MemOperand operand, Label* target,
206                                      Label::Distance distance) {
207   __ cmp(operand, value);
208   __ j(AsMasmCondition(cc), target, distance);
209 }
// Compares the tagged word at |operand| against |value| and jumps to |target|
// when |cc| holds for (operand <cc> value); the cmp operand order matches the
// argument order of this overload.
void BaselineAssembler::JumpIfTagged(Condition cc, MemOperand operand,
                                     Register value, Label* target,
                                     Label::Distance distance) {
  __ cmp(operand, value);
  __ j(AsMasmCondition(cc), target, distance);
}

// Byte-sized compare of |value|'s low byte against |byte|.
void BaselineAssembler::JumpIfByte(Condition cc, Register value, int32_t byte,
                                   Label* target, Label::Distance distance) {
  __ cmpb(value, Immediate(byte));
  __ j(AsMasmCondition(cc), target, distance);
}
// Stores |source| into the frame slot of interpreter register |output|.
void BaselineAssembler::Move(interpreter::Register output, Register source) {
  return __ mov(RegisterFrameOperand(output), source);
}
// Loads the raw bit pattern of the TaggedIndex |value| into |output|.
void BaselineAssembler::Move(Register output, TaggedIndex value) {
  __ Move(output, Immediate(value.ptr()));
}
void BaselineAssembler::Move(MemOperand output, Register source) {
  __ mov(output, source);
}
void BaselineAssembler::Move(Register output, ExternalReference reference) {
  __ Move(output, Immediate(reference));
}
void BaselineAssembler::Move(Register output, Handle<HeapObject> value) {
  __ Move(output, value);
}
void BaselineAssembler::Move(Register output, int32_t value) {
  __ Move(output, Immediate(value));
}
// On ia32 a tagged value occupies a full machine word, so moving a
// maybe-Smi or known-Smi is the same plain mov as any register move.
void BaselineAssembler::MoveMaybeSmi(Register output, Register source) {
  __ mov(output, source);
}
void BaselineAssembler::MoveSmi(Register output, Register source) {
  __ mov(output, source);
}
245 
246 namespace detail {
// PushSingle overloads give BaselineAssembler::Push a uniform interface over
// roots, registers, immediates, handles, memory operands and interpreter
// registers. Each pushes exactly one stack slot.
inline void PushSingle(MacroAssembler* masm, RootIndex source) {
  masm->PushRoot(source);
}
inline void PushSingle(MacroAssembler* masm, Register reg) { masm->Push(reg); }
inline void PushSingle(MacroAssembler* masm, TaggedIndex value) {
  masm->Push(Immediate(value.ptr()));
}
inline void PushSingle(MacroAssembler* masm, Smi value) { masm->Push(value); }
inline void PushSingle(MacroAssembler* masm, Handle<HeapObject> object) {
  masm->Push(object);
}
inline void PushSingle(MacroAssembler* masm, int32_t immediate) {
  masm->Push(Immediate(immediate));
}
inline void PushSingle(MacroAssembler* masm, MemOperand operand) {
  masm->Push(operand);
}
// An interpreter register is pushed via its baseline-frame slot.
inline void PushSingle(MacroAssembler* masm, interpreter::Register source) {
  return PushSingle(masm, BaselineAssembler::RegisterFrameOperand(source));
}
267 
// Pushes a single argument and reports how many stack slots were used.
template <typename Arg>
struct PushHelper {
  static int Push(BaselineAssembler* basm, Arg arg) {
    PushSingle(basm->masm(), arg);
    return 1;
  }
  // A single value has no intrinsic order, so reverse == forward.
  static int PushReverse(BaselineAssembler* basm, Arg arg) {
    return Push(basm, arg);
  }
};

// A RegisterList expands to one push per register: in list order for Push,
// back-to-front for PushReverse.
template <>
struct PushHelper<interpreter::RegisterList> {
  static int Push(BaselineAssembler* basm, interpreter::RegisterList list) {
    for (int reg_index = 0; reg_index < list.register_count(); ++reg_index) {
      PushSingle(basm->masm(), list[reg_index]);
    }
    return list.register_count();
  }
  static int PushReverse(BaselineAssembler* basm,
                         interpreter::RegisterList list) {
    for (int reg_index = list.register_count() - 1; reg_index >= 0;
         --reg_index) {
      PushSingle(basm->masm(), list[reg_index]);
    }
    return list.register_count();
  }
};

// Recursively pushes a parameter pack and returns the total slot count.
// Push emits arguments first-to-last; PushReverse emits them last-to-first.
template <typename... Args>
struct PushAllHelper;
template <>
struct PushAllHelper<> {
  static int Push(BaselineAssembler* masm) { return 0; }
  static int PushReverse(BaselineAssembler* masm) { return 0; }
};
template <typename Arg, typename... Args>
struct PushAllHelper<Arg, Args...> {
  static int Push(BaselineAssembler* masm, Arg arg, Args... args) {
    int nargs = PushHelper<Arg>::Push(masm, arg);
    return nargs + PushAllHelper<Args...>::Push(masm, args...);
  }
  static int PushReverse(BaselineAssembler* masm, Arg arg, Args... args) {
    int nargs = PushAllHelper<Args...>::PushReverse(masm, args...);
    return nargs + PushHelper<Arg>::PushReverse(masm, arg);
  }
};
315 
316 }  // namespace detail
317 
// Pushes |vals| first-to-last; returns the number of slots pushed.
template <typename... T>
int BaselineAssembler::Push(T... vals) {
  return detail::PushAllHelper<T...>::Push(this, vals...);
}

// Pushes |vals| last-to-first, so they end up in argument order in memory.
template <typename... T>
void BaselineAssembler::PushReverse(T... vals) {
  detail::PushAllHelper<T...>::PushReverse(this, vals...);
}

// Pops into each of |registers| left-to-right (fold expression).
template <typename... T>
void BaselineAssembler::Pop(T... registers) {
  (__ Pop(registers), ...);
}
332 
// On ia32 a tagged field is a full machine word, so the three tagged-field
// loads below are identical plain movs (no pointer decompression needed).
void BaselineAssembler::LoadTaggedPointerField(Register output, Register source,
                                               int offset) {
  __ mov(output, FieldOperand(source, offset));
}

void BaselineAssembler::LoadTaggedSignedField(Register output, Register source,
                                              int offset) {
  __ mov(output, FieldOperand(source, offset));
}

void BaselineAssembler::LoadTaggedAnyField(Register output, Register source,
                                           int offset) {
  __ mov(output, FieldOperand(source, offset));
}

// Loads 16 bits and zero-extends into the full register.
void BaselineAssembler::LoadWord16FieldZeroExtend(Register output,
                                                  Register source, int offset) {
  __ movzx_w(output, FieldOperand(source, offset));
}

// Byte load via mov_b: writes only the low byte of |output|, with no
// zero-extension of the upper bits (contrast LoadWord16FieldZeroExtend).
void BaselineAssembler::LoadWord8Field(Register output, Register source,
                                       int offset) {
  __ mov_b(output, FieldOperand(source, offset));
}
357 
// Stores the Smi constant |value|; a Smi store never needs a write barrier.
void BaselineAssembler::StoreTaggedSignedField(Register target, int offset,
                                               Smi value) {
  __ mov(FieldOperand(target, offset), Immediate(value));
}

// Stores |value| into |target|+|offset| and emits the write barrier
// (RecordWriteField) for it; needs one scratch register.
void BaselineAssembler::StoreTaggedFieldWithWriteBarrier(Register target,
                                                         int offset,
                                                         Register value) {
  ASM_CODE_COMMENT(masm_);
  BaselineAssembler::ScratchRegisterScope scratch_scope(this);
  Register scratch = scratch_scope.AcquireScratch();
  DCHECK(!AreAliased(scratch, target, value));
  __ mov(FieldOperand(target, offset), value);
  __ RecordWriteField(target, offset, value, scratch, SaveFPRegsMode::kIgnore);
}

// Plain store; the caller guarantees no write barrier is required for this
// field/value combination.
void BaselineAssembler::StoreTaggedFieldNoWriteBarrier(Register target,
                                                       int offset,
                                                       Register value) {
  DCHECK(!AreAliased(target, value));
  __ mov(FieldOperand(target, offset), value);
}
380 
// Adds the constant |weight| to the interrupt budget stored in the function's
// FeedbackCell. When |skip_interrupt_label| is given, |weight| must be
// negative; the `add` leaves the sign of the updated budget in the flags, so
// we branch over the interrupt path while the budget is still >= 0.
void BaselineAssembler::AddToInterruptBudgetAndJumpIfNotExceeded(
    int32_t weight, Label* skip_interrupt_label) {
  ASM_CODE_COMMENT(masm_);
  ScratchRegisterScope scratch_scope(this);
  Register feedback_cell = scratch_scope.AcquireScratch();
  LoadFunction(feedback_cell);
  LoadTaggedPointerField(feedback_cell, feedback_cell,
                         JSFunction::kFeedbackCellOffset);
  __ add(FieldOperand(feedback_cell, FeedbackCell::kInterruptBudgetOffset),
         Immediate(weight));
  if (skip_interrupt_label) {
    DCHECK_LT(weight, 0);
    __ j(greater_equal, skip_interrupt_label);
  }
}

// Same as above but with the weight in a register; the weight's sign cannot
// be DCHECKed here, so the caller is responsible for it when passing
// |skip_interrupt_label|.
void BaselineAssembler::AddToInterruptBudgetAndJumpIfNotExceeded(
    Register weight, Label* skip_interrupt_label) {
  ASM_CODE_COMMENT(masm_);
  ScratchRegisterScope scratch_scope(this);
  Register feedback_cell = scratch_scope.AcquireScratch();
  DCHECK(!AreAliased(feedback_cell, weight));
  LoadFunction(feedback_cell);
  LoadTaggedPointerField(feedback_cell, feedback_cell,
                         JSFunction::kFeedbackCellOffset);
  __ add(FieldOperand(feedback_cell, FeedbackCell::kInterruptBudgetOffset),
         weight);
  if (skip_interrupt_label) __ j(greater_equal, skip_interrupt_label);
}
410 
411 void BaselineAssembler::AddSmi(Register lhs, Smi rhs) {
412   if (rhs.value() == 0) return;
413   __ add(lhs, Immediate(rhs));
414 }
415 
// output = lhs & rhs. The copy comes first because ia32 `and` is a
// two-operand (destructive) instruction.
void BaselineAssembler::Word32And(Register output, Register lhs, int rhs) {
  Move(output, lhs);
  __ and_(output, Immediate(rhs));
}
420 
// Dispatches through an inline jump table: jumps to
// labels[reg - case_value_base], or falls through when the adjusted value is
// outside [0, num_labels). Note |reg| is clobbered by the bias subtraction.
void BaselineAssembler::Switch(Register reg, int case_value_base,
                               Label** labels, int num_labels) {
  ASM_CODE_COMMENT(masm_);
  ScratchRegisterScope scope(this);
  Register table = scope.AcquireScratch();
  DCHECK(!AreAliased(reg, table));
  Label fallthrough, jump_table;
  if (case_value_base != 0) {
    __ sub(reg, Immediate(case_value_base));
  }
  // Unsigned compare: above_equal also catches values that went negative.
  __ cmp(reg, Immediate(num_labels));
  __ j(above_equal, &fallthrough);
  __ lea(table, MemOperand(&jump_table));
  __ jmp(Operand(table, reg, times_system_pointer_size, 0));
  // Emit the jump table inline, under the assumption that it's not too big.
  __ Align(kSystemPointerSize);
  __ bind(&jump_table);
  for (int i = 0; i < num_labels; ++i) {
    __ dd(labels[i]);
  }
  __ bind(&fallthrough);
}
443 
444 #undef __
445 #define __ basm.
446 
// Shared baseline epilogue: applies the interrupt-budget weight, calls the
// budget-interrupt runtime function if the budget was exhausted, then tears
// down the frame and drops max(params_size, actual argument count) arguments
// before returning.
void BaselineAssembler::EmitReturn(MacroAssembler* masm) {
  ASM_CODE_COMMENT(masm);
  BaselineAssembler basm(masm);

  Register weight = BaselineLeaveFrameDescriptor::WeightRegister();
  Register params_size = BaselineLeaveFrameDescriptor::ParamsSizeRegister();
  {
    ASM_CODE_COMMENT_STRING(masm, "Update Interrupt Budget");

    Label skip_interrupt_label;
    __ AddToInterruptBudgetAndJumpIfNotExceeded(weight, &skip_interrupt_label);
    // Budget exhausted: call the runtime. The accumulator and params_size are
    // still needed afterwards, so preserve them across the call (params_size
    // travels as a Smi since it is pushed as a tagged value).
    __ masm()->SmiTag(params_size);
    __ Push(params_size, kInterpreterAccumulatorRegister);

    __ LoadContext(kContextRegister);
    __ Push(MemOperand(ebp, InterpreterFrameConstants::kFunctionOffset));
    __ CallRuntime(Runtime::kBytecodeBudgetInterrupt, 1);

    __ Pop(kInterpreterAccumulatorRegister, params_size);
    __ masm()->SmiUntag(params_size);

    __ Bind(&skip_interrupt_label);
  }

  BaselineAssembler::ScratchRegisterScope scope(&basm);
  Register scratch = scope.AcquireScratch();
  DCHECK(!AreAliased(weight, params_size, scratch));

  Register actual_params_size = scratch;
  // Compute the size of the actual parameters + receiver (in bytes).
  __ masm()->mov(actual_params_size,
                 MemOperand(ebp, StandardFrameConstants::kArgCOffset));

  // If actual is bigger than formal, then we should use it to free up the stack
  // arguments.
  Label corrected_args_count;
  __ masm()->cmp(params_size, actual_params_size);
  __ masm()->j(greater_equal, &corrected_args_count);
  __ masm()->mov(params_size, actual_params_size);
  __ Bind(&corrected_args_count);

  // Leave the frame (also dropping the register file).
  __ masm()->LeaveFrame(StackFrame::BASELINE);

  // Drop receiver + arguments.
  __ masm()->DropArguments(params_size, scratch,
                           TurboAssembler::kCountIsInteger,
                           TurboAssembler::kCountIncludesReceiver);
  __ masm()->Ret();
}
497 
498 #undef __
499 
// Debug check that |reg| still holds the same value as the interpreter
// accumulator register; aborts with kUnexpectedValue otherwise.
inline void EnsureAccumulatorPreservedScope::AssertEqualToAccumulator(
    Register reg) {
  assembler_->masm()->cmp(reg, kInterpreterAccumulatorRegister);
  assembler_->masm()->Assert(equal, AbortReason::kUnexpectedValue);
}
505 
506 }  // namespace baseline
507 }  // namespace internal
508 }  // namespace v8
509 
510 #endif  // V8_BASELINE_IA32_BASELINE_ASSEMBLER_IA32_INL_H_
511