// Copyright 2021 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef V8_BASELINE_X64_BASELINE_ASSEMBLER_X64_INL_H_
#define V8_BASELINE_X64_BASELINE_ASSEMBLER_X64_INL_H_

#include "src/base/macros.h"
#include "src/baseline/baseline-assembler.h"
#include "src/codegen/x64/register-x64.h"

namespace v8 {
namespace internal {
namespace baseline {

namespace detail {

// Avoid using kScratchRegister (== r10), since the macro-assembler doesn't
// participate in this scope and its uses of r10 would conflict.
static constexpr Register kScratchRegisters[] = {r8, r9, r11, r12, r15};
static constexpr int kNumScratchRegisters = arraysize(kScratchRegisters);

}  // namespace detail

class BaselineAssembler::ScratchRegisterScope {
 public:
  explicit ScratchRegisterScope(BaselineAssembler* assembler)
      : assembler_(assembler),
        prev_scope_(assembler->scratch_register_scope_),
        registers_used_(prev_scope_ == nullptr ? 0
                                               : prev_scope_->registers_used_) {
    assembler_->scratch_register_scope_ = this;
  }
  ~ScratchRegisterScope() { assembler_->scratch_register_scope_ = prev_scope_; }

  Register AcquireScratch() {
    DCHECK_LT(registers_used_, detail::kNumScratchRegisters);
    return detail::kScratchRegisters[registers_used_++];
  }

 private:
  BaselineAssembler* assembler_;
  ScratchRegisterScope* prev_scope_;
  int registers_used_;
};
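
// A nesting sketch (illustrative only; |basm| stands for any
// BaselineAssembler):
//
//   BaselineAssembler::ScratchRegisterScope outer(&basm);
//   Register a = outer.AcquireScratch();  // r8
//   {
//     BaselineAssembler::ScratchRegisterScope inner(&basm);
//     Register b = inner.AcquireScratch();  // r9; never aliases |a|
//   }  // registers acquired in |inner| become available again here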

// TODO(v8:11461): Unify condition names in the MacroAssembler.
enum class Condition : uint32_t {
  kEqual = equal,
  kNotEqual = not_equal,

  kLessThan = less,
  kGreaterThan = greater,
  kLessThanEqual = less_equal,
  kGreaterThanEqual = greater_equal,

  kUnsignedLessThan = below,
  kUnsignedGreaterThan = above,
  kUnsignedLessThanEqual = below_equal,
  kUnsignedGreaterThanEqual = above_equal,

  kOverflow = overflow,
  kNoOverflow = no_overflow,

  kZero = zero,
  kNotZero = not_zero,
};

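// The enumerators above are defined in terms of the corresponding x64
// condition codes, so mapping back to the MacroAssembler's Condition is a
// value-preserving cast.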
inline internal::Condition AsMasmCondition(Condition cond) {
  return static_cast<internal::Condition>(cond);
}

namespace detail {

#define __ masm_->

#ifdef DEBUG
inline bool Clobbers(Register target, MemOperand op) {
  return op.AddressUsesRegister(target);
}
#endif

}  // namespace detail

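// An interpreter register lives in a slot of the baseline frame; ToOperand()
// gives its signed, frame-pointer-relative slot index, so scaling by
// kSystemPointerSize yields the rbp-relative byte offset of the slot.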
MemOperand BaselineAssembler::RegisterFrameOperand(
    interpreter::Register interpreter_register) {
  return MemOperand(rbp, interpreter_register.ToOperand() * kSystemPointerSize);
}
void BaselineAssembler::RegisterFrameAddress(
    interpreter::Register interpreter_register, Register rscratch) {
  return __ leaq(rscratch, MemOperand(rbp, interpreter_register.ToOperand() *
                                               kSystemPointerSize));
}
MemOperand BaselineAssembler::FeedbackVectorOperand() {
  return MemOperand(rbp, BaselineFrameConstants::kFeedbackVectorFromFp);
}

void BaselineAssembler::Bind(Label* label) { __ bind(label); }
void BaselineAssembler::BindWithoutJumpTarget(Label* label) { __ bind(label); }

void BaselineAssembler::JumpTarget() {
  // NOP on x64.
}

void BaselineAssembler::Jump(Label* target, Label::Distance distance) {
  __ jmp(target, distance);
}
void BaselineAssembler::JumpIfRoot(Register value, RootIndex index,
                                   Label* target, Label::Distance distance) {
  __ JumpIfRoot(value, index, target, distance);
}
void BaselineAssembler::JumpIfNotRoot(Register value, RootIndex index,
                                      Label* target, Label::Distance distance) {
  __ JumpIfNotRoot(value, index, target, distance);
}
void BaselineAssembler::JumpIfSmi(Register value, Label* target,
                                  Label::Distance distance) {
  __ JumpIfSmi(value, target, distance);
}
void BaselineAssembler::JumpIfNotSmi(Register value, Label* target,
                                     Label::Distance distance) {
  __ JumpIfNotSmi(value, target, distance);
}

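// With short builtin calls enabled, builtins are mapped close enough to the
// generated code for a pc-relative call; otherwise the call goes through the
// builtin's entry slot, via EntryFromBuiltinAsOperand.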
void BaselineAssembler::CallBuiltin(Builtin builtin) {
  if (masm()->options().short_builtin_calls) {
    // Generate pc-relative call.
    __ CallBuiltin(builtin);
  } else {
    ASM_CODE_COMMENT_STRING(masm_,
                            __ CommentForOffHeapTrampoline("call", builtin));
    __ Call(__ EntryFromBuiltinAsOperand(builtin));
  }
}

void BaselineAssembler::TailCallBuiltin(Builtin builtin) {
  if (masm()->options().short_builtin_calls) {
    // Generate pc-relative jump.
    __ TailCallBuiltin(builtin);
  } else {
    ASM_CODE_COMMENT_STRING(
        masm_, __ CommentForOffHeapTrampoline("tail call", builtin));
    __ Jump(__ EntryFromBuiltinAsOperand(builtin));
  }
}

void BaselineAssembler::TestAndBranch(Register value, int mask, Condition cc,
                                      Label* target, Label::Distance distance) {
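  // A byte-sized test has a shorter encoding, so use it whenever the mask
  // fits in the low eight bits.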
  if ((mask & 0xff) == mask) {
    __ testb(value, Immediate(mask));
  } else {
    __ testl(value, Immediate(mask));
  }
  __ j(AsMasmCondition(cc), target, distance);
}

void BaselineAssembler::JumpIf(Condition cc, Register lhs, const Operand& rhs,
                               Label* target, Label::Distance distance) {
  __ cmpq(lhs, rhs);
  __ j(AsMasmCondition(cc), target, distance);
}
void BaselineAssembler::JumpIfObjectType(Condition cc, Register object,
                                         InstanceType instance_type,
                                         Register map, Label* target,
                                         Label::Distance distance) {
  __ AssertNotSmi(object);
  __ CmpObjectType(object, instance_type, map);
  __ j(AsMasmCondition(cc), target, distance);
}
void BaselineAssembler::JumpIfInstanceType(Condition cc, Register map,
                                           InstanceType instance_type,
                                           Label* target,
                                           Label::Distance distance) {
  if (FLAG_debug_code) {
    __ AssertNotSmi(map);
    __ CmpObjectType(map, MAP_TYPE, kScratchRegister);
    __ Assert(equal, AbortReason::kUnexpectedValue);
  }
  __ CmpInstanceType(map, instance_type);
  __ j(AsMasmCondition(cc), target, distance);
}
void BaselineAssembler::JumpIfPointer(Condition cc, Register value,
                                      MemOperand operand, Label* target,
                                      Label::Distance distance) {
  __ cmpq(value, operand);
  __ j(AsMasmCondition(cc), target, distance);
}
void BaselineAssembler::JumpIfSmi(Condition cc, Register lhs, Smi smi,
                                  Label* target, Label::Distance distance) {
  __ SmiCompare(lhs, smi);
  __ j(AsMasmCondition(cc), target, distance);
}
void BaselineAssembler::JumpIfSmi(Condition cc, Register lhs, Register rhs,
                                  Label* target, Label::Distance distance) {
  __ SmiCompare(lhs, rhs);
  __ j(AsMasmCondition(cc), target, distance);
}

void BaselineAssembler::JumpIfImmediate(Condition cc, Register left, int right,
                                        Label* target,
                                        Label::Distance distance) {
  __ cmpq(left, Immediate(right));
  __ j(AsMasmCondition(cc), target, distance);
}

// These two variants compare tagged (possibly compressed) values, hence
// cmp_tagged rather than a full 64-bit cmpq.
void BaselineAssembler::JumpIfTagged(Condition cc, Register value,
                                     MemOperand operand, Label* target,
                                     Label::Distance distance) {
  __ cmp_tagged(value, operand);
  __ j(AsMasmCondition(cc), target, distance);
}
void BaselineAssembler::JumpIfTagged(Condition cc, MemOperand operand,
                                     Register value, Label* target,
                                     Label::Distance distance) {
  __ cmp_tagged(operand, value);
  __ j(AsMasmCondition(cc), target, distance);
}
void BaselineAssembler::JumpIfByte(Condition cc, Register value, int32_t byte,
                                   Label* target, Label::Distance distance) {
  __ cmpb(value, Immediate(byte));
  __ j(AsMasmCondition(cc), target, distance);
}

void BaselineAssembler::Move(interpreter::Register output, Register source) {
  return __ movq(RegisterFrameOperand(output), source);
}
void BaselineAssembler::Move(Register output, TaggedIndex value) {
  __ Move(output, value);
}
void BaselineAssembler::Move(MemOperand output, Register source) {
  __ movq(output, source);
}
void BaselineAssembler::Move(Register output, ExternalReference reference) {
  __ Move(output, reference);
}
void BaselineAssembler::Move(Register output, Handle<HeapObject> value) {
  __ Move(output, value);
}
void BaselineAssembler::Move(Register output, int32_t value) {
  __ Move(output, value);
}
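// Under pointer compression a tagged value occupies only the low 32 bits, so
// these two helpers use mov_tagged rather than a full 64-bit movq.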
void BaselineAssembler::MoveMaybeSmi(Register output, Register source) {
  __ mov_tagged(output, source);
}
void BaselineAssembler::MoveSmi(Register output, Register source) {
  __ mov_tagged(output, source);
}

namespace detail {
inline void PushSingle(MacroAssembler* masm, RootIndex source) {
  masm->PushRoot(source);
}
inline void PushSingle(MacroAssembler* masm, Register reg) { masm->Push(reg); }
inline void PushSingle(MacroAssembler* masm, TaggedIndex value) {
  masm->Push(value);
}
inline void PushSingle(MacroAssembler* masm, Smi value) { masm->Push(value); }
inline void PushSingle(MacroAssembler* masm, Handle<HeapObject> object) {
  masm->Push(object);
}
inline void PushSingle(MacroAssembler* masm, int32_t immediate) {
  masm->Push(Immediate(immediate));
}
inline void PushSingle(MacroAssembler* masm, MemOperand operand) {
  masm->Push(operand);
}
inline void PushSingle(MacroAssembler* masm, interpreter::Register source) {
  return PushSingle(masm, BaselineAssembler::RegisterFrameOperand(source));
}

template <typename Arg>
struct PushHelper {
  static int Push(BaselineAssembler* basm, Arg arg) {
    PushSingle(basm->masm(), arg);
    return 1;
  }
  static int PushReverse(BaselineAssembler* basm, Arg arg) {
    return Push(basm, arg);
  }
};

template <>
struct PushHelper<interpreter::RegisterList> {
  static int Push(BaselineAssembler* basm, interpreter::RegisterList list) {
    for (int reg_index = 0; reg_index < list.register_count(); ++reg_index) {
      PushSingle(basm->masm(), list[reg_index]);
    }
    return list.register_count();
  }
  static int PushReverse(BaselineAssembler* basm,
                         interpreter::RegisterList list) {
    for (int reg_index = list.register_count() - 1; reg_index >= 0;
         --reg_index) {
      PushSingle(basm->masm(), list[reg_index]);
    }
    return list.register_count();
  }
};

template <typename... Args>
struct PushAllHelper;
template <>
struct PushAllHelper<> {
  static int Push(BaselineAssembler* masm) { return 0; }
  static int PushReverse(BaselineAssembler* masm) { return 0; }
};
template <typename Arg, typename... Args>
struct PushAllHelper<Arg, Args...> {
  static int Push(BaselineAssembler* masm, Arg arg, Args... args) {
    int nargs = PushHelper<Arg>::Push(masm, arg);
    return nargs + PushAllHelper<Args...>::Push(masm, args...);
  }
  static int PushReverse(BaselineAssembler* masm, Arg arg, Args... args) {
    int nargs = PushAllHelper<Args...>::PushReverse(masm, args...);
    return nargs + PushHelper<Arg>::PushReverse(masm, arg);
  }
};

}  // namespace detail

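// Push emits |vals| left-to-right, so the first value ends up deepest on the
// stack, and returns the number of slots pushed; PushReverse emits them
// right-to-left. Illustrative only (|r1| and |r2| stand for any pushable
// arguments):
//
//   basm.Push(r1, r2);         // stack, top to bottom: r2, r1
//   basm.PushReverse(r1, r2);  // stack, top to bottom: r1, r2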
template <typename... T>
int BaselineAssembler::Push(T... vals) {
  return detail::PushAllHelper<T...>::Push(this, vals...);
}

template <typename... T>
void BaselineAssembler::PushReverse(T... vals) {
  detail::PushAllHelper<T...>::PushReverse(this, vals...);
}

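// Pop expands into one Pop per argument via a comma-operator fold, popping
// left-to-right; values saved with Push(a, b) are restored with Pop(b, a).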
template <typename... T>
void BaselineAssembler::Pop(T... registers) {
  (__ Pop(registers), ...);
}

void BaselineAssembler::LoadTaggedPointerField(Register output, Register source,
                                               int offset) {
  __ LoadTaggedPointerField(output, FieldOperand(source, offset));
}
void BaselineAssembler::LoadTaggedSignedField(Register output, Register source,
                                              int offset) {
  __ LoadTaggedSignedField(output, FieldOperand(source, offset));
}
void BaselineAssembler::LoadTaggedAnyField(Register output, Register source,
                                           int offset) {
  __ LoadAnyTaggedField(output, FieldOperand(source, offset));
}
void BaselineAssembler::LoadWord16FieldZeroExtend(Register output,
                                                  Register source, int offset) {
  __ movzxwq(output, FieldOperand(source, offset));
}
void BaselineAssembler::LoadWord8Field(Register output, Register source,
                                       int offset) {
  __ movb(output, FieldOperand(source, offset));
}
void BaselineAssembler::StoreTaggedSignedField(Register target, int offset,
                                               Smi value) {
  __ StoreTaggedSignedField(FieldOperand(target, offset), value);
}
void BaselineAssembler::StoreTaggedFieldWithWriteBarrier(Register target,
                                                         int offset,
                                                         Register value) {
  ASM_CODE_COMMENT(masm_);
  Register scratch = WriteBarrierDescriptor::SlotAddressRegister();
  DCHECK(!AreAliased(target, value, scratch));
  __ StoreTaggedField(FieldOperand(target, offset), value);
  __ RecordWriteField(target, offset, value, scratch, SaveFPRegsMode::kIgnore);
}
void BaselineAssembler::StoreTaggedFieldNoWriteBarrier(Register target,
                                                       int offset,
                                                       Register value) {
  __ StoreTaggedField(FieldOperand(target, offset), value);
}

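// The interrupt budget is a 32-bit field in the function's FeedbackCell. addl
// updates it in place and sets the flags, so a subsequent greater_equal branch
// is taken exactly when the new budget is still non-negative and the interrupt
// can be skipped.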
void BaselineAssembler::AddToInterruptBudgetAndJumpIfNotExceeded(
    int32_t weight, Label* skip_interrupt_label) {
  ASM_CODE_COMMENT(masm_);
  ScratchRegisterScope scratch_scope(this);
  Register feedback_cell = scratch_scope.AcquireScratch();
  LoadFunction(feedback_cell);
  LoadTaggedPointerField(feedback_cell, feedback_cell,
                         JSFunction::kFeedbackCellOffset);
  __ addl(FieldOperand(feedback_cell, FeedbackCell::kInterruptBudgetOffset),
          Immediate(weight));
  if (skip_interrupt_label) {
    DCHECK_LT(weight, 0);
    __ j(greater_equal, skip_interrupt_label);
  }
}

void BaselineAssembler::AddToInterruptBudgetAndJumpIfNotExceeded(
    Register weight, Label* skip_interrupt_label) {
  ASM_CODE_COMMENT(masm_);
  ScratchRegisterScope scratch_scope(this);
  Register feedback_cell = scratch_scope.AcquireScratch();
  LoadFunction(feedback_cell);
  LoadTaggedPointerField(feedback_cell, feedback_cell,
                         JSFunction::kFeedbackCellOffset);
  __ addl(FieldOperand(feedback_cell, FeedbackCell::kInterruptBudgetOffset),
          weight);
  if (skip_interrupt_label) __ j(greater_equal, skip_interrupt_label);
}

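// With 31-bit Smis (the pointer-compression configuration) the tagged value
// fits in 32 bits and a single addl suffices; with 32-bit Smis the 64-bit
// tagged value must first be materialized in a scratch register.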
void BaselineAssembler::AddSmi(Register lhs, Smi rhs) {
  if (rhs.value() == 0) return;
  if (SmiValuesAre31Bits()) {
    __ addl(lhs, Immediate(rhs));
  } else {
    ScratchRegisterScope scratch_scope(this);
    Register rhs_reg = scratch_scope.AcquireScratch();
    __ Move(rhs_reg, rhs);
    __ addq(lhs, rhs_reg);
  }
}

void BaselineAssembler::Word32And(Register output, Register lhs, int rhs) {
  Move(output, lhs);
  __ andq(output, Immediate(rhs));
}

void BaselineAssembler::Switch(Register reg, int case_value_base,
                               Label** labels, int num_labels) {
  ASM_CODE_COMMENT(masm_);
  ScratchRegisterScope scope(this);
  Register table = scope.AcquireScratch();
  Label fallthrough, jump_table;
  if (case_value_base != 0) {
    __ subq(reg, Immediate(case_value_base));
  }
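  // The unsigned above_equal check sends both out-of-range and (after the
  // base subtraction) negative case values to the fallthrough label.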
  __ cmpq(reg, Immediate(num_labels));
  __ j(above_equal, &fallthrough);
  __ leaq(table, MemOperand(&jump_table));
  __ jmp(MemOperand(table, reg, times_8, 0));
  // Emit the jump table inline, under the assumption that it's not too big.
  __ Align(kSystemPointerSize);
  __ bind(&jump_table);
  for (int i = 0; i < num_labels; ++i) {
    __ dq(labels[i]);
  }
  __ bind(&fallthrough);
}

#undef __
#define __ basm.

void BaselineAssembler::EmitReturn(MacroAssembler* masm) {
  ASM_CODE_COMMENT(masm);
  BaselineAssembler basm(masm);

  Register weight = BaselineLeaveFrameDescriptor::WeightRegister();
  Register params_size = BaselineLeaveFrameDescriptor::ParamsSizeRegister();

  {
    ASM_CODE_COMMENT_STRING(masm, "Update Interrupt Budget");

    Label skip_interrupt_label;
    __ AddToInterruptBudgetAndJumpIfNotExceeded(weight, &skip_interrupt_label);
    {
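      // Smi-tag the parameter count and preserve it, together with the
      // accumulator, across the runtime call; both live on the stack, where
      // the GC expects valid tagged values.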
      __ masm()->SmiTag(params_size);
      __ Push(params_size, kInterpreterAccumulatorRegister);

      __ LoadContext(kContextRegister);
      __ Push(MemOperand(rbp, InterpreterFrameConstants::kFunctionOffset));
      __ CallRuntime(Runtime::kBytecodeBudgetInterrupt, 1);

      __ Pop(kInterpreterAccumulatorRegister, params_size);
      __ masm()->SmiUntag(params_size);
    }
    __ Bind(&skip_interrupt_label);
  }

  BaselineAssembler::ScratchRegisterScope scope(&basm);
  Register scratch = scope.AcquireScratch();

  Register actual_params_size = scratch;
  // Load the actual parameter count (including the receiver).
  __ masm()->movq(actual_params_size,
                  MemOperand(rbp, StandardFrameConstants::kArgCOffset));

  // If the actual count is bigger than the formal parameter count, use it so
  // that all stack arguments are freed.
  Label corrected_args_count;
  __ masm()->cmpq(params_size, actual_params_size);
  __ masm()->j(greater_equal, &corrected_args_count);
  __ masm()->movq(params_size, actual_params_size);
  __ Bind(&corrected_args_count);

  // Leave the frame (also dropping the register file).
  __ masm()->LeaveFrame(StackFrame::BASELINE);

  // Drop receiver + arguments.
  __ masm()->DropArguments(params_size, scratch,
                           TurboAssembler::kCountIsInteger,
                           TurboAssembler::kCountIncludesReceiver);
  __ masm()->Ret();
}

#undef __

inline void EnsureAccumulatorPreservedScope::AssertEqualToAccumulator(
    Register reg) {
  assembler_->masm()->cmp_tagged(reg, kInterpreterAccumulatorRegister);
  assembler_->masm()->Assert(equal, AbortReason::kUnexpectedValue);
}

}  // namespace baseline
}  // namespace internal
}  // namespace v8

#endif  // V8_BASELINE_X64_BASELINE_ASSEMBLER_X64_INL_H_