1 // Copyright 2021 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4
5 #ifndef V8_BASELINE_LOONG64_BASELINE_ASSEMBLER_LOONG64_INL_H_
6 #define V8_BASELINE_LOONG64_BASELINE_ASSEMBLER_LOONG64_INL_H_
7
8 #include "src/baseline/baseline-assembler.h"
9 #include "src/codegen/interface-descriptors.h"
10 #include "src/codegen/loong64/assembler-loong64-inl.h"
11
12 namespace v8 {
13 namespace internal {
14 namespace baseline {
15
16 class BaselineAssembler::ScratchRegisterScope {
17 public:
ScratchRegisterScope(BaselineAssembler * assembler)18 explicit ScratchRegisterScope(BaselineAssembler* assembler)
19 : assembler_(assembler),
20 prev_scope_(assembler->scratch_register_scope_),
21 wrapped_scope_(assembler->masm()) {
22 if (!assembler_->scratch_register_scope_) {
23 // If we haven't opened a scratch scope yet, for the first one add a
24 // couple of extra registers.
25 wrapped_scope_.Include({t0, t1, t2, t3});
26 }
27 assembler_->scratch_register_scope_ = this;
28 }
~ScratchRegisterScope()29 ~ScratchRegisterScope() { assembler_->scratch_register_scope_ = prev_scope_; }
30
AcquireScratch()31 Register AcquireScratch() { return wrapped_scope_.Acquire(); }
32
33 private:
34 BaselineAssembler* assembler_;
35 ScratchRegisterScope* prev_scope_;
36 UseScratchRegisterScope wrapped_scope_;
37 };
38
39 enum class Condition : uint32_t {
40 kEqual = eq,
41 kNotEqual = ne,
42
43 kLessThan = lt,
44 kGreaterThan = gt,
45 kLessThanEqual = le,
46 kGreaterThanEqual = ge,
47
48 kUnsignedLessThan = Uless,
49 kUnsignedGreaterThan = Ugreater,
50 kUnsignedLessThanEqual = Uless_equal,
51 kUnsignedGreaterThanEqual = Ugreater_equal,
52
53 kOverflow = overflow,
54 kNoOverflow = no_overflow,
55
56 kZero = eq,
57 kNotZero = ne,
58 };
59
AsMasmCondition(Condition cond)60 inline internal::Condition AsMasmCondition(Condition cond) {
61 STATIC_ASSERT(sizeof(internal::Condition) == sizeof(Condition));
62 return static_cast<internal::Condition>(cond);
63 }
64
namespace detail {

#ifdef DEBUG
// Debug-only helper: returns true if writing to |target| would corrupt the
// base or index register of memory operand |op|.
inline bool Clobbers(Register target, MemOperand op) {
  return op.base() == target || op.index() == target;
}
#endif

}  // namespace detail
74
75 #define __ masm_->
76
RegisterFrameOperand(interpreter::Register interpreter_register)77 MemOperand BaselineAssembler::RegisterFrameOperand(
78 interpreter::Register interpreter_register) {
79 return MemOperand(fp, interpreter_register.ToOperand() * kSystemPointerSize);
80 }
RegisterFrameAddress(interpreter::Register interpreter_register,Register rscratch)81 void BaselineAssembler::RegisterFrameAddress(
82 interpreter::Register interpreter_register, Register rscratch) {
83 return __ Add_d(rscratch, fp,
84 interpreter_register.ToOperand() * kSystemPointerSize);
85 }
FeedbackVectorOperand()86 MemOperand BaselineAssembler::FeedbackVectorOperand() {
87 return MemOperand(fp, BaselineFrameConstants::kFeedbackVectorFromFp);
88 }
89
Bind(Label * label)90 void BaselineAssembler::Bind(Label* label) { __ bind(label); }
91
BindWithoutJumpTarget(Label * label)92 void BaselineAssembler::BindWithoutJumpTarget(Label* label) { __ bind(label); }
93
JumpTarget()94 void BaselineAssembler::JumpTarget() {
95 // NOP.
96 }
Jump(Label * target,Label::Distance distance)97 void BaselineAssembler::Jump(Label* target, Label::Distance distance) {
98 __ Branch(target);
99 }
JumpIfRoot(Register value,RootIndex index,Label * target,Label::Distance)100 void BaselineAssembler::JumpIfRoot(Register value, RootIndex index,
101 Label* target, Label::Distance) {
102 __ JumpIfRoot(value, index, target);
103 }
JumpIfNotRoot(Register value,RootIndex index,Label * target,Label::Distance)104 void BaselineAssembler::JumpIfNotRoot(Register value, RootIndex index,
105 Label* target, Label::Distance) {
106 __ JumpIfNotRoot(value, index, target);
107 }
JumpIfSmi(Register value,Label * target,Label::Distance)108 void BaselineAssembler::JumpIfSmi(Register value, Label* target,
109 Label::Distance) {
110 __ JumpIfSmi(value, target);
111 }
JumpIfNotSmi(Register value,Label * target,Label::Distance)112 void BaselineAssembler::JumpIfNotSmi(Register value, Label* target,
113 Label::Distance) {
114 __ JumpIfNotSmi(value, target);
115 }
JumpIfImmediate(Condition cc,Register left,int right,Label * target,Label::Distance distance)116 void BaselineAssembler::JumpIfImmediate(Condition cc, Register left, int right,
117 Label* target,
118 Label::Distance distance) {
119 JumpIf(cc, left, Operand(right), target, distance);
120 }
121
CallBuiltin(Builtin builtin)122 void BaselineAssembler::CallBuiltin(Builtin builtin) {
123 ASM_CODE_COMMENT_STRING(masm_,
124 __ CommentForOffHeapTrampoline("call", builtin));
125 Register temp = t7;
126 __ LoadEntryFromBuiltin(builtin, temp);
127 __ Call(temp);
128 }
129
TailCallBuiltin(Builtin builtin)130 void BaselineAssembler::TailCallBuiltin(Builtin builtin) {
131 ASM_CODE_COMMENT_STRING(masm_,
132 __ CommentForOffHeapTrampoline("tail call", builtin));
133 Register temp = t7;
134 __ LoadEntryFromBuiltin(builtin, temp);
135 __ Jump(temp);
136 }
137
TestAndBranch(Register value,int mask,Condition cc,Label * target,Label::Distance)138 void BaselineAssembler::TestAndBranch(Register value, int mask, Condition cc,
139 Label* target, Label::Distance) {
140 ScratchRegisterScope temps(this);
141 Register scratch = temps.AcquireScratch();
142 __ And(scratch, value, Operand(mask));
143 __ Branch(target, AsMasmCondition(cc), scratch, Operand(zero_reg));
144 }
145
JumpIf(Condition cc,Register lhs,const Operand & rhs,Label * target,Label::Distance)146 void BaselineAssembler::JumpIf(Condition cc, Register lhs, const Operand& rhs,
147 Label* target, Label::Distance) {
148 __ Branch(target, AsMasmCondition(cc), lhs, Operand(rhs));
149 }
JumpIfObjectType(Condition cc,Register object,InstanceType instance_type,Register map,Label * target,Label::Distance)150 void BaselineAssembler::JumpIfObjectType(Condition cc, Register object,
151 InstanceType instance_type,
152 Register map, Label* target,
153 Label::Distance) {
154 ScratchRegisterScope temps(this);
155 Register type = temps.AcquireScratch();
156 __ GetObjectType(object, map, type);
157 __ Branch(target, AsMasmCondition(cc), type, Operand(instance_type));
158 }
JumpIfInstanceType(Condition cc,Register map,InstanceType instance_type,Label * target,Label::Distance)159 void BaselineAssembler::JumpIfInstanceType(Condition cc, Register map,
160 InstanceType instance_type,
161 Label* target, Label::Distance) {
162 ScratchRegisterScope temps(this);
163 Register type = temps.AcquireScratch();
164 if (FLAG_debug_code) {
165 __ AssertNotSmi(map);
166 __ GetObjectType(map, type, type);
167 __ Assert(eq, AbortReason::kUnexpectedValue, type, Operand(MAP_TYPE));
168 }
169 __ Ld_d(type, FieldMemOperand(map, Map::kInstanceTypeOffset));
170 __ Branch(target, AsMasmCondition(cc), type, Operand(instance_type));
171 }
JumpIfSmi(Condition cc,Register value,Smi smi,Label * target,Label::Distance)172 void BaselineAssembler::JumpIfSmi(Condition cc, Register value, Smi smi,
173 Label* target, Label::Distance) {
174 ScratchRegisterScope temps(this);
175 Register scratch = temps.AcquireScratch();
176 __ li(scratch, Operand(smi));
177 __ SmiUntag(scratch);
178 __ Branch(target, AsMasmCondition(cc), value, Operand(scratch));
179 }
JumpIfSmi(Condition cc,Register lhs,Register rhs,Label * target,Label::Distance)180 void BaselineAssembler::JumpIfSmi(Condition cc, Register lhs, Register rhs,
181 Label* target, Label::Distance) {
182 __ AssertSmi(lhs);
183 __ AssertSmi(rhs);
184 __ Branch(target, AsMasmCondition(cc), lhs, Operand(rhs));
185 }
JumpIfTagged(Condition cc,Register value,MemOperand operand,Label * target,Label::Distance)186 void BaselineAssembler::JumpIfTagged(Condition cc, Register value,
187 MemOperand operand, Label* target,
188 Label::Distance) {
189 ScratchRegisterScope temps(this);
190 Register scratch = temps.AcquireScratch();
191 __ Ld_d(scratch, operand);
192 __ Branch(target, AsMasmCondition(cc), value, Operand(scratch));
193 }
JumpIfTagged(Condition cc,MemOperand operand,Register value,Label * target,Label::Distance)194 void BaselineAssembler::JumpIfTagged(Condition cc, MemOperand operand,
195 Register value, Label* target,
196 Label::Distance) {
197 ScratchRegisterScope temps(this);
198 Register scratch = temps.AcquireScratch();
199 __ Ld_d(scratch, operand);
200 __ Branch(target, AsMasmCondition(cc), scratch, Operand(value));
201 }
JumpIfByte(Condition cc,Register value,int32_t byte,Label * target,Label::Distance)202 void BaselineAssembler::JumpIfByte(Condition cc, Register value, int32_t byte,
203 Label* target, Label::Distance) {
204 __ Branch(target, AsMasmCondition(cc), value, Operand(byte));
205 }
Move(interpreter::Register output,Register source)206 void BaselineAssembler::Move(interpreter::Register output, Register source) {
207 Move(RegisterFrameOperand(output), source);
208 }
Move(Register output,TaggedIndex value)209 void BaselineAssembler::Move(Register output, TaggedIndex value) {
210 __ li(output, Operand(value.ptr()));
211 }
Move(MemOperand output,Register source)212 void BaselineAssembler::Move(MemOperand output, Register source) {
213 __ St_d(source, output);
214 }
Move(Register output,ExternalReference reference)215 void BaselineAssembler::Move(Register output, ExternalReference reference) {
216 __ li(output, Operand(reference));
217 }
Move(Register output,Handle<HeapObject> value)218 void BaselineAssembler::Move(Register output, Handle<HeapObject> value) {
219 __ li(output, Operand(value));
220 }
Move(Register output,int32_t value)221 void BaselineAssembler::Move(Register output, int32_t value) {
222 __ li(output, Operand(value));
223 }
MoveMaybeSmi(Register output,Register source)224 void BaselineAssembler::MoveMaybeSmi(Register output, Register source) {
225 __ Move(output, source);
226 }
MoveSmi(Register output,Register source)227 void BaselineAssembler::MoveSmi(Register output, Register source) {
228 __ Move(output, source);
229 }
230
231 namespace detail {
232
233 template <typename Arg>
ToRegister(BaselineAssembler * basm,BaselineAssembler::ScratchRegisterScope * scope,Arg arg)234 inline Register ToRegister(BaselineAssembler* basm,
235 BaselineAssembler::ScratchRegisterScope* scope,
236 Arg arg) {
237 Register reg = scope->AcquireScratch();
238 basm->Move(reg, arg);
239 return reg;
240 }
ToRegister(BaselineAssembler * basm,BaselineAssembler::ScratchRegisterScope * scope,Register reg)241 inline Register ToRegister(BaselineAssembler* basm,
242 BaselineAssembler::ScratchRegisterScope* scope,
243 Register reg) {
244 return reg;
245 }
246
247 template <typename... Args>
248 struct PushAllHelper;
249 template <>
250 struct PushAllHelper<> {
251 static int Push(BaselineAssembler* basm) { return 0; }
252 static int PushReverse(BaselineAssembler* basm) { return 0; }
253 };
254 // TODO(ishell): try to pack sequence of pushes into one instruction by
255 // looking at regiser codes. For example, Push(r1, r2, r5, r0, r3, r4)
256 // could be generated as two pushes: Push(r1, r2, r5) and Push(r0, r3, r4).
257 template <typename Arg>
258 struct PushAllHelper<Arg> {
259 static int Push(BaselineAssembler* basm, Arg arg) {
260 BaselineAssembler::ScratchRegisterScope scope(basm);
261 basm->masm()->Push(ToRegister(basm, &scope, arg));
262 return 1;
263 }
264 static int PushReverse(BaselineAssembler* basm, Arg arg) {
265 return Push(basm, arg);
266 }
267 };
268 // TODO(ishell): try to pack sequence of pushes into one instruction by
269 // looking at regiser codes. For example, Push(r1, r2, r5, r0, r3, r4)
270 // could be generated as two pushes: Push(r1, r2, r5) and Push(r0, r3, r4).
271 template <typename Arg, typename... Args>
272 struct PushAllHelper<Arg, Args...> {
273 static int Push(BaselineAssembler* basm, Arg arg, Args... args) {
274 PushAllHelper<Arg>::Push(basm, arg);
275 return 1 + PushAllHelper<Args...>::Push(basm, args...);
276 }
277 static int PushReverse(BaselineAssembler* basm, Arg arg, Args... args) {
278 int nargs = PushAllHelper<Args...>::PushReverse(basm, args...);
279 PushAllHelper<Arg>::Push(basm, arg);
280 return nargs + 1;
281 }
282 };
283
284 template <>
285 struct PushAllHelper<interpreter::RegisterList> {
286 static int Push(BaselineAssembler* basm, interpreter::RegisterList list) {
287 for (int reg_index = 0; reg_index < list.register_count(); ++reg_index) {
288 PushAllHelper<interpreter::Register>::Push(basm, list[reg_index]);
289 }
290 return list.register_count();
291 }
292 static int PushReverse(BaselineAssembler* basm,
293 interpreter::RegisterList list) {
294 for (int reg_index = list.register_count() - 1; reg_index >= 0;
295 --reg_index) {
296 PushAllHelper<interpreter::Register>::Push(basm, list[reg_index]);
297 }
298 return list.register_count();
299 }
300 };
301
302 template <typename... T>
303 struct PopAllHelper;
304 template <>
305 struct PopAllHelper<> {
306 static void Pop(BaselineAssembler* basm) {}
307 };
308 // TODO(ishell): try to pack sequence of pops into one instruction by
309 // looking at regiser codes. For example, Pop(r1, r2, r5, r0, r3, r4)
310 // could be generated as two pops: Pop(r1, r2, r5) and Pop(r0, r3, r4).
311 template <>
312 struct PopAllHelper<Register> {
313 static void Pop(BaselineAssembler* basm, Register reg) {
314 basm->masm()->Pop(reg);
315 }
316 };
317 template <typename... T>
318 struct PopAllHelper<Register, T...> {
319 static void Pop(BaselineAssembler* basm, Register reg, T... tail) {
320 PopAllHelper<Register>::Pop(basm, reg);
321 PopAllHelper<T...>::Pop(basm, tail...);
322 }
323 };
324
325 } // namespace detail
326
327 template <typename... T>
328 int BaselineAssembler::Push(T... vals) {
329 return detail::PushAllHelper<T...>::Push(this, vals...);
330 }
331
332 template <typename... T>
333 void BaselineAssembler::PushReverse(T... vals) {
334 detail::PushAllHelper<T...>::PushReverse(this, vals...);
335 }
336
337 template <typename... T>
338 void BaselineAssembler::Pop(T... registers) {
339 detail::PopAllHelper<T...>::Pop(this, registers...);
340 }
341
342 void BaselineAssembler::LoadTaggedPointerField(Register output, Register source,
343 int offset) {
344 __ Ld_d(output, FieldMemOperand(source, offset));
345 }
346 void BaselineAssembler::LoadTaggedSignedField(Register output, Register source,
347 int offset) {
348 __ Ld_d(output, FieldMemOperand(source, offset));
349 }
350 void BaselineAssembler::LoadTaggedAnyField(Register output, Register source,
351 int offset) {
352 __ Ld_d(output, FieldMemOperand(source, offset));
353 }
354 void BaselineAssembler::LoadWord16FieldZeroExtend(Register output,
355 Register source, int offset) {
356 __ Ld_hu(output, FieldMemOperand(source, offset));
357 }
358 void BaselineAssembler::LoadWord8Field(Register output, Register source,
359 int offset) {
360 __ Ld_b(output, FieldMemOperand(source, offset));
361 }
362 void BaselineAssembler::StoreTaggedSignedField(Register target, int offset,
363 Smi value) {
364 ASM_CODE_COMMENT(masm_);
365 ScratchRegisterScope temps(this);
366 Register scratch = temps.AcquireScratch();
367 __ li(scratch, Operand(value));
368 __ St_d(scratch, FieldMemOperand(target, offset));
369 }
370 void BaselineAssembler::StoreTaggedFieldWithWriteBarrier(Register target,
371 int offset,
372 Register value) {
373 ASM_CODE_COMMENT(masm_);
374 __ St_d(value, FieldMemOperand(target, offset));
375 ScratchRegisterScope temps(this);
376 __ RecordWriteField(target, offset, value, kRAHasNotBeenSaved,
377 SaveFPRegsMode::kIgnore);
378 }
379 void BaselineAssembler::StoreTaggedFieldNoWriteBarrier(Register target,
380 int offset,
381 Register value) {
382 __ St_d(value, FieldMemOperand(target, offset));
383 }
384 void BaselineAssembler::AddToInterruptBudgetAndJumpIfNotExceeded(
385 int32_t weight, Label* skip_interrupt_label) {
386 ASM_CODE_COMMENT(masm_);
387 ScratchRegisterScope scratch_scope(this);
388 Register feedback_cell = scratch_scope.AcquireScratch();
389 LoadFunction(feedback_cell);
390 LoadTaggedPointerField(feedback_cell, feedback_cell,
391 JSFunction::kFeedbackCellOffset);
392
393 Register interrupt_budget = scratch_scope.AcquireScratch();
394 __ Ld_w(interrupt_budget,
395 FieldMemOperand(feedback_cell, FeedbackCell::kInterruptBudgetOffset));
396 __ Add_w(interrupt_budget, interrupt_budget, weight);
397 __ St_w(interrupt_budget,
398 FieldMemOperand(feedback_cell, FeedbackCell::kInterruptBudgetOffset));
399 if (skip_interrupt_label) {
400 DCHECK_LT(weight, 0);
401 __ Branch(skip_interrupt_label, ge, interrupt_budget, Operand(zero_reg));
402 }
403 }
404 void BaselineAssembler::AddToInterruptBudgetAndJumpIfNotExceeded(
405 Register weight, Label* skip_interrupt_label) {
406 ASM_CODE_COMMENT(masm_);
407 ScratchRegisterScope scratch_scope(this);
408 Register feedback_cell = scratch_scope.AcquireScratch();
409 LoadFunction(feedback_cell);
410 LoadTaggedPointerField(feedback_cell, feedback_cell,
411 JSFunction::kFeedbackCellOffset);
412
413 Register interrupt_budget = scratch_scope.AcquireScratch();
414 __ Ld_w(interrupt_budget,
415 FieldMemOperand(feedback_cell, FeedbackCell::kInterruptBudgetOffset));
416 __ Add_w(interrupt_budget, interrupt_budget, weight);
417 __ St_w(interrupt_budget,
418 FieldMemOperand(feedback_cell, FeedbackCell::kInterruptBudgetOffset));
419 if (skip_interrupt_label)
420 __ Branch(skip_interrupt_label, ge, interrupt_budget, Operand(zero_reg));
421 }
422
423 void BaselineAssembler::AddSmi(Register lhs, Smi rhs) {
424 __ Add_d(lhs, lhs, Operand(rhs));
425 }
426
427 void BaselineAssembler::Word32And(Register output, Register lhs, int rhs) {
428 __ And(output, lhs, Operand(rhs));
429 }
430
431 void BaselineAssembler::Switch(Register reg, int case_value_base,
432 Label** labels, int num_labels) {
433 ASM_CODE_COMMENT(masm_);
434 Label fallthrough;
435 if (case_value_base != 0) {
436 __ Sub_d(reg, reg, Operand(case_value_base));
437 }
438
439 __ Branch(&fallthrough, AsMasmCondition(Condition::kUnsignedGreaterThanEqual),
440 reg, Operand(num_labels));
441
442 __ GenerateSwitchTable(reg, num_labels,
443 [labels](size_t i) { return labels[i]; });
444
445 __ bind(&fallthrough);
446 }
447
448 #undef __
449
450 #define __ basm.
451
452 void BaselineAssembler::EmitReturn(MacroAssembler* masm) {
453 ASM_CODE_COMMENT(masm);
454 BaselineAssembler basm(masm);
455
456 Register weight = BaselineLeaveFrameDescriptor::WeightRegister();
457 Register params_size = BaselineLeaveFrameDescriptor::ParamsSizeRegister();
458
459 {
460 ASM_CODE_COMMENT_STRING(masm, "Update Interrupt Budget");
461
462 Label skip_interrupt_label;
463 __ AddToInterruptBudgetAndJumpIfNotExceeded(weight, &skip_interrupt_label);
464 __ masm()->SmiTag(params_size);
465 __ masm()->Push(params_size, kInterpreterAccumulatorRegister);
466
467 __ LoadContext(kContextRegister);
468 __ LoadFunction(kJSFunctionRegister);
469 __ masm()->Push(kJSFunctionRegister);
470 __ CallRuntime(Runtime::kBytecodeBudgetInterrupt, 1);
471
472 __ masm()->Pop(params_size, kInterpreterAccumulatorRegister);
473 __ masm()->SmiUntag(params_size);
474 __ Bind(&skip_interrupt_label);
475 }
476
477 BaselineAssembler::ScratchRegisterScope temps(&basm);
478 Register actual_params_size = temps.AcquireScratch();
479 // Compute the size of the actual parameters + receiver (in bytes).
480 __ Move(actual_params_size,
481 MemOperand(fp, StandardFrameConstants::kArgCOffset));
482
483 // If actual is bigger than formal, then we should use it to free up the stack
484 // arguments.
485 Label corrected_args_count;
486 __ masm()->Branch(&corrected_args_count, ge, params_size,
487 Operand(actual_params_size));
488 __ masm()->Move(params_size, actual_params_size);
489 __ Bind(&corrected_args_count);
490
491 // Leave the frame (also dropping the register file).
492 __ masm()->LeaveFrame(StackFrame::BASELINE);
493
494 // Drop receiver + arguments.
495 __ masm()->DropArguments(params_size, TurboAssembler::kCountIsInteger,
496 TurboAssembler::kCountIncludesReceiver);
497 __ masm()->Ret();
498 }
499
500 #undef __
501
502 inline void EnsureAccumulatorPreservedScope::AssertEqualToAccumulator(
503 Register reg) {
504 assembler_->masm()->Assert(eq, AbortReason::kUnexpectedValue, reg,
505 Operand(kInterpreterAccumulatorRegister));
506 }
507
508 } // namespace baseline
509 } // namespace internal
510 } // namespace v8
511
512 #endif // V8_BASELINE_LOONG64_BASELINE_ASSEMBLER_LOONG64_INL_H_
513