// Copyright 2021 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef V8_BASELINE_S390_BASELINE_ASSEMBLER_S390_INL_H_
#define V8_BASELINE_S390_BASELINE_ASSEMBLER_S390_INL_H_
#include "src/baseline/baseline-assembler.h"
#include "src/codegen/interface-descriptors.h"
#include "src/codegen/s390/assembler-s390-inl.h"

namespace v8 {
namespace internal {
namespace baseline {

namespace detail {

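// The pool of registers that ScratchRegisterScope below hands out, in
// allocation order.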
static constexpr Register kScratchRegisters[] = {r8, r9, ip, r1};
static constexpr int kNumScratchRegisters = arraysize(kScratchRegisters);

#ifdef DEBUG
inline bool Clobbers(Register target, MemOperand op) {
  return op.rb() == target || op.rx() == target;
}
#endif
}  // namespace detail

class BaselineAssembler::ScratchRegisterScope {
 public:
  explicit ScratchRegisterScope(BaselineAssembler* assembler)
      : assembler_(assembler),
        prev_scope_(assembler->scratch_register_scope_),
        registers_used_(prev_scope_ == nullptr ? 0
                                               : prev_scope_->registers_used_) {
    assembler_->scratch_register_scope_ = this;
  }
  ~ScratchRegisterScope() { assembler_->scratch_register_scope_ = prev_scope_; }

  Register AcquireScratch() {
    DCHECK_LT(registers_used_, detail::kNumScratchRegisters);
    return detail::kScratchRegisters[registers_used_++];
  }

 private:
  BaselineAssembler* assembler_;
  ScratchRegisterScope* prev_scope_;
  int registers_used_;
};
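// A minimal usage sketch (hypothetical code): scopes nest, and registers
// acquired by an inner scope are returned to the pool when it dies, because
// each nested scope simply resumes its parent's allocation counter.
//
//   BaselineAssembler::ScratchRegisterScope temps(&basm);
//   Register tmp = temps.AcquireScratch();  // e.g. r8
//   {
//     BaselineAssembler::ScratchRegisterScope inner(&basm);
//     Register tmp2 = inner.AcquireScratch();  // e.g. r9
//   }  // r9 is available again from here on.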

// TODO(v8:11429,leszeks): Unify condition names in the MacroAssembler.
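// Platform-independent condition names used by the baseline compiler;
// AsMasmCondition() below maps them onto the MacroAssembler's conditions.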
enum class Condition : uint32_t {
  kEqual,
  kNotEqual,

  kLessThan,
  kGreaterThan,
  kLessThanEqual,
  kGreaterThanEqual,

  kUnsignedLessThan,
  kUnsignedGreaterThan,
  kUnsignedLessThanEqual,
  kUnsignedGreaterThanEqual,

  kOverflow,
  kNoOverflow,

  kZero,
  kNotZero
};

inline internal::Condition AsMasmCondition(Condition cond) {
  STATIC_ASSERT(sizeof(internal::Condition) == sizeof(Condition));
  switch (cond) {
    case Condition::kEqual:
      return eq;
    case Condition::kNotEqual:
      return ne;
    case Condition::kLessThan:
      return lt;
    case Condition::kGreaterThan:
      return gt;
    case Condition::kLessThanEqual:
      return le;
    case Condition::kGreaterThanEqual:
      return ge;

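    // On s390 the signedness of a comparison is encoded in the compare
    // instruction itself (CmpS64 vs. CmpU64), not in the branch mask, so the
    // unsigned conditions map onto the same branch conditions as their
    // signed counterparts.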
    case Condition::kUnsignedLessThan:
      return lt;
    case Condition::kUnsignedGreaterThan:
      return gt;
    case Condition::kUnsignedLessThanEqual:
      return le;
    case Condition::kUnsignedGreaterThanEqual:
      return ge;

    case Condition::kOverflow:
      return overflow;
    case Condition::kNoOverflow:
      return nooverflow;

    case Condition::kZero:
      return eq;
    case Condition::kNotZero:
      return ne;
    default:
      UNREACHABLE();
  }
}

inline bool IsSignedCondition(Condition cond) {
  switch (cond) {
    case Condition::kEqual:
    case Condition::kNotEqual:
    case Condition::kLessThan:
    case Condition::kGreaterThan:
    case Condition::kLessThanEqual:
    case Condition::kGreaterThanEqual:
    case Condition::kOverflow:
    case Condition::kNoOverflow:
    case Condition::kZero:
    case Condition::kNotZero:
      return true;

    case Condition::kUnsignedLessThan:
    case Condition::kUnsignedGreaterThan:
    case Condition::kUnsignedLessThanEqual:
    case Condition::kUnsignedGreaterThanEqual:
      return false;

    default:
      UNREACHABLE();
  }
}

#define __ assm->
// s390x helper: emits the signed or unsigned compare matching |cc|, then a
// conditional branch to |target|.
static void JumpIfHelper(MacroAssembler* assm, Condition cc, Register lhs,
                         Register rhs, Label* target) {
  if (IsSignedCondition(cc)) {
    __ CmpS64(lhs, rhs);
  } else {
    __ CmpU64(lhs, rhs);
  }
  __ b(AsMasmCondition(cc), target);
}

#undef __

#define __ masm_->

MemOperand BaselineAssembler::RegisterFrameOperand(
    interpreter::Register interpreter_register) {
  return MemOperand(fp, interpreter_register.ToOperand() * kSystemPointerSize);
}
void BaselineAssembler::RegisterFrameAddress(
    interpreter::Register interpreter_register, Register rscratch) {
  __ AddS64(rscratch, fp,
            interpreter_register.ToOperand() * kSystemPointerSize);
}
MemOperand BaselineAssembler::FeedbackVectorOperand() {
  return MemOperand(fp, BaselineFrameConstants::kFeedbackVectorFromFp);
}

void BaselineAssembler::Bind(Label* label) { __ bind(label); }
void BaselineAssembler::BindWithoutJumpTarget(Label* label) { __ bind(label); }

void BaselineAssembler::JumpTarget() {
  // NOP on s390.
}

void BaselineAssembler::Jump(Label* target, Label::Distance distance) {
  ASM_CODE_COMMENT(masm_);
  __ b(target);
}

void BaselineAssembler::JumpIfRoot(Register value, RootIndex index,
                                   Label* target, Label::Distance) {
  ASM_CODE_COMMENT(masm_);
  __ JumpIfRoot(value, index, target);
}

void BaselineAssembler::JumpIfNotRoot(Register value, RootIndex index,
                                      Label* target, Label::Distance) {
  ASM_CODE_COMMENT(masm_);
  __ JumpIfNotRoot(value, index, target);
}

void BaselineAssembler::JumpIfSmi(Register value, Label* target,
                                  Label::Distance) {
  ASM_CODE_COMMENT(masm_);
  __ JumpIfSmi(value, target);
}

void BaselineAssembler::JumpIfImmediate(Condition cc, Register left, int right,
                                        Label* target,
                                        Label::Distance distance) {
  ASM_CODE_COMMENT(masm_);
  JumpIf(cc, left, Operand(right), target, distance);
}

void BaselineAssembler::JumpIfNotSmi(Register value, Label* target,
                                     Label::Distance) {
  ASM_CODE_COMMENT(masm_);
  __ JumpIfNotSmi(value, target);
}

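// When short builtin calls are enabled, builtins are mapped close enough to
// the code range that a pc-relative call reaches them; otherwise the entry
// point is loaded from the builtin entry table and called indirectly through
// a scratch register.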
void BaselineAssembler::CallBuiltin(Builtin builtin) {
  ASM_CODE_COMMENT_STRING(masm_,
                          __ CommentForOffHeapTrampoline("call", builtin));
  if (masm()->options().short_builtin_calls) {
    // Generate pc-relative call.
    __ CallBuiltin(builtin);
  } else {
    ScratchRegisterScope temps(this);
    Register temp = temps.AcquireScratch();
    __ LoadEntryFromBuiltin(builtin, temp);
    __ Call(temp);
  }
}

void BaselineAssembler::TailCallBuiltin(Builtin builtin) {
  ASM_CODE_COMMENT_STRING(masm_,
                          __ CommentForOffHeapTrampoline("tail call", builtin));
  if (masm()->options().short_builtin_calls) {
    // Generate pc-relative jump.
    __ TailCallBuiltin(builtin);
  } else {
    ScratchRegisterScope temps(this);
    Register temp = temps.AcquireScratch();
    __ LoadEntryFromBuiltin(builtin, temp);
    __ Jump(temp);
  }
}

void BaselineAssembler::TestAndBranch(Register value, int mask, Condition cc,
                                      Label* target, Label::Distance) {
  ASM_CODE_COMMENT(masm_);
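  // AndP sets the condition code as a side effect; the branch below consumes
  // it, and r0 only receives the discarded result of the mask.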
  __ AndP(r0, value, Operand(mask));
  __ b(AsMasmCondition(cc), target);
}

void BaselineAssembler::JumpIf(Condition cc, Register lhs, const Operand& rhs,
                               Label* target, Label::Distance) {
  ASM_CODE_COMMENT(masm_);
  if (IsSignedCondition(cc)) {
    __ CmpS64(lhs, rhs);
  } else {
    __ CmpU64(lhs, rhs);
  }
  __ b(AsMasmCondition(cc), target);
}

void BaselineAssembler::JumpIfObjectType(Condition cc, Register object,
                                         InstanceType instance_type,
                                         Register map, Label* target,
                                         Label::Distance) {
  ASM_CODE_COMMENT(masm_);
  ScratchRegisterScope temps(this);
  Register type = temps.AcquireScratch();
  __ LoadMap(map, object);
  __ LoadU16(type, FieldMemOperand(map, Map::kInstanceTypeOffset));
  JumpIf(cc, type, Operand(instance_type), target);
}

void BaselineAssembler::JumpIfInstanceType(Condition cc, Register map,
                                           InstanceType instance_type,
                                           Label* target, Label::Distance) {
  ASM_CODE_COMMENT(masm_);
  ScratchRegisterScope temps(this);
  Register type = temps.AcquireScratch();
  if (FLAG_debug_code) {
    __ AssertNotSmi(map);
    __ CompareObjectType(map, type, type, MAP_TYPE);
    __ Assert(eq, AbortReason::kUnexpectedValue);
  }
  __ LoadU16(type, FieldMemOperand(map, Map::kInstanceTypeOffset));
  JumpIf(cc, type, Operand(instance_type), target);
}

void BaselineAssembler::JumpIfPointer(Condition cc, Register value,
                                      MemOperand operand, Label* target,
                                      Label::Distance) {
  ASM_CODE_COMMENT(masm_);
  ScratchRegisterScope temps(this);
  Register tmp = temps.AcquireScratch();
  __ LoadU64(tmp, operand);
  JumpIfHelper(masm_, cc, value, tmp, target);
}

void BaselineAssembler::JumpIfSmi(Condition cc, Register value, Smi smi,
                                  Label* target, Label::Distance) {
  ASM_CODE_COMMENT(masm_);
  __ AssertSmi(value);
  __ LoadSmiLiteral(r0, smi);
  JumpIfHelper(masm_, cc, value, r0, target);
}

void BaselineAssembler::JumpIfSmi(Condition cc, Register lhs, Register rhs,
                                  Label* target, Label::Distance) {
  ASM_CODE_COMMENT(masm_);
  __ AssertSmi(lhs);
  __ AssertSmi(rhs);
  JumpIfHelper(masm_, cc, lhs, rhs, target);
}

void BaselineAssembler::JumpIfTagged(Condition cc, Register value,
                                     MemOperand operand, Label* target,
                                     Label::Distance) {
  ASM_CODE_COMMENT(masm_);
  __ LoadU64(r0, operand);
  JumpIfHelper(masm_, cc, value, r0, target);
}

void BaselineAssembler::JumpIfTagged(Condition cc, MemOperand operand,
                                     Register value, Label* target,
                                     Label::Distance) {
  ASM_CODE_COMMENT(masm_);
  __ LoadU64(r0, operand);
  JumpIfHelper(masm_, cc, r0, value, target);
}
void BaselineAssembler::JumpIfByte(Condition cc, Register value, int32_t byte,
                                   Label* target, Label::Distance) {
  ASM_CODE_COMMENT(masm_);
  JumpIf(cc, value, Operand(byte), target);
}

void BaselineAssembler::Move(interpreter::Register output, Register source) {
  Move(RegisterFrameOperand(output), source);
}

void BaselineAssembler::Move(Register output, TaggedIndex value) {
  ASM_CODE_COMMENT(masm_);
  __ mov(output, Operand(value.ptr()));
}

void BaselineAssembler::Move(MemOperand output, Register source) {
  ASM_CODE_COMMENT(masm_);
  __ StoreU64(source, output);
}

void BaselineAssembler::Move(Register output, ExternalReference reference) {
  ASM_CODE_COMMENT(masm_);
  __ Move(output, reference);
}

void BaselineAssembler::Move(Register output, Handle<HeapObject> value) {
  ASM_CODE_COMMENT(masm_);
  __ Move(output, value);
}

void BaselineAssembler::Move(Register output, int32_t value) {
  ASM_CODE_COMMENT(masm_);
  __ mov(output, Operand(value));
}

void BaselineAssembler::MoveMaybeSmi(Register output, Register source) {
  ASM_CODE_COMMENT(masm_);
  __ mov(output, source);
}

void BaselineAssembler::MoveSmi(Register output, Register source) {
  ASM_CODE_COMMENT(masm_);
  __ mov(output, source);
}

namespace detail {

template <typename Arg>
inline Register ToRegister(BaselineAssembler* basm,
                           BaselineAssembler::ScratchRegisterScope* scope,
                           Arg arg) {
  Register reg = scope->AcquireScratch();
  basm->Move(reg, arg);
  return reg;
}
inline Register ToRegister(BaselineAssembler* basm,
                           BaselineAssembler::ScratchRegisterScope* scope,
                           Register reg) {
  return reg;
}

template <typename... Args>
struct PushAllHelper;
template <>
struct PushAllHelper<> {
  static int Push(BaselineAssembler* basm) { return 0; }
  static int PushReverse(BaselineAssembler* basm) { return 0; }
};
// TODO(ishell): try to pack a sequence of pushes into one instruction by
// looking at register codes. For example, Push(r1, r2, r5, r0, r3, r4)
// could be generated as two pushes: Push(r1, r2, r5) and Push(r0, r3, r4).
template <typename Arg>
struct PushAllHelper<Arg> {
  static int Push(BaselineAssembler* basm, Arg arg) {
    BaselineAssembler::ScratchRegisterScope scope(basm);
    basm->masm()->Push(ToRegister(basm, &scope, arg));
    return 1;
  }
  static int PushReverse(BaselineAssembler* basm, Arg arg) {
    return Push(basm, arg);
  }
};
// TODO(ishell): try to pack a sequence of pushes into one instruction by
// looking at register codes. For example, Push(r1, r2, r5, r0, r3, r4)
// could be generated as two pushes: Push(r1, r2, r5) and Push(r0, r3, r4).
template <typename Arg, typename... Args>
struct PushAllHelper<Arg, Args...> {
  static int Push(BaselineAssembler* basm, Arg arg, Args... args) {
    PushAllHelper<Arg>::Push(basm, arg);
    return 1 + PushAllHelper<Args...>::Push(basm, args...);
  }
  static int PushReverse(BaselineAssembler* basm, Arg arg, Args... args) {
    int nargs = PushAllHelper<Args...>::PushReverse(basm, args...);
    PushAllHelper<Arg>::Push(basm, arg);
    return nargs + 1;
  }
};
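// A usage sketch (hypothetical operands): basm.Push(r3, 42, r4) expands into
// three single pushes, left to right, and returns the number of slots pushed
// (3 here); PushReverse emits the same pushes right to left.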
template <>
struct PushAllHelper<interpreter::RegisterList> {
  static int Push(BaselineAssembler* basm, interpreter::RegisterList list) {
    for (int reg_index = 0; reg_index < list.register_count(); ++reg_index) {
      PushAllHelper<interpreter::Register>::Push(basm, list[reg_index]);
    }
    return list.register_count();
  }
  static int PushReverse(BaselineAssembler* basm,
                         interpreter::RegisterList list) {
    for (int reg_index = list.register_count() - 1; reg_index >= 0;
         --reg_index) {
      PushAllHelper<interpreter::Register>::Push(basm, list[reg_index]);
    }
    return list.register_count();
  }
};

template <typename... T>
struct PopAllHelper;
template <>
struct PopAllHelper<> {
  static void Pop(BaselineAssembler* basm) {}
};
// TODO(ishell): try to pack a sequence of pops into one instruction by
// looking at register codes. For example, Pop(r1, r2, r5, r0, r3, r4)
// could be generated as two pops: Pop(r1, r2, r5) and Pop(r0, r3, r4).
template <>
struct PopAllHelper<Register> {
  static void Pop(BaselineAssembler* basm, Register reg) {
    basm->masm()->Pop(reg);
  }
};
template <typename... T>
struct PopAllHelper<Register, T...> {
  static void Pop(BaselineAssembler* basm, Register reg, T... tail) {
    PopAllHelper<Register>::Pop(basm, reg);
    PopAllHelper<T...>::Pop(basm, tail...);
  }
};

}  // namespace detail

template <typename... T>
int BaselineAssembler::Push(T... vals) {
  return detail::PushAllHelper<T...>::Push(this, vals...);
}

template <typename... T>
void BaselineAssembler::PushReverse(T... vals) {
  detail::PushAllHelper<T...>::PushReverse(this, vals...);
}

template <typename... T>
void BaselineAssembler::Pop(T... registers) {
  detail::PopAllHelper<T...>::Pop(this, registers...);
}

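// The tagged-field accessors below pass r0 as the scratch register that the
// macro assembler may need, e.g. when decompressing tagged values on
// pointer-compression builds.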
void BaselineAssembler::LoadTaggedPointerField(Register output, Register source,
                                               int offset) {
  ASM_CODE_COMMENT(masm_);
  __ LoadTaggedPointerField(output, FieldMemOperand(source, offset), r0);
}

void BaselineAssembler::LoadTaggedSignedField(Register output, Register source,
                                              int offset) {
  ASM_CODE_COMMENT(masm_);
  __ LoadTaggedSignedField(output, FieldMemOperand(source, offset));
}

void BaselineAssembler::LoadTaggedAnyField(Register output, Register source,
                                           int offset) {
  ASM_CODE_COMMENT(masm_);
  __ LoadAnyTaggedField(output, FieldMemOperand(source, offset), r0);
}

void BaselineAssembler::LoadWord16FieldZeroExtend(Register output,
                                                  Register source, int offset) {
  ASM_CODE_COMMENT(masm_);
  __ LoadU16(output, FieldMemOperand(source, offset));
}

void BaselineAssembler::LoadWord8Field(Register output, Register source,
                                       int offset) {
  ASM_CODE_COMMENT(masm_);
  __ LoadU8(output, FieldMemOperand(source, offset));
}

void BaselineAssembler::StoreTaggedSignedField(Register target, int offset,
                                               Smi value) {
  ASM_CODE_COMMENT(masm_);
  ScratchRegisterScope temps(this);
  Register tmp = temps.AcquireScratch();
  __ LoadSmiLiteral(tmp, value);
  __ StoreTaggedField(tmp, FieldMemOperand(target, offset), r0);
}

void BaselineAssembler::StoreTaggedFieldWithWriteBarrier(Register target,
                                                         int offset,
                                                         Register value) {
  ASM_CODE_COMMENT(masm_);
  Register scratch = WriteBarrierDescriptor::SlotAddressRegister();
  DCHECK(!AreAliased(target, value, scratch));
  __ StoreTaggedField(value, FieldMemOperand(target, offset), r0);
  __ RecordWriteField(target, offset, value, scratch, kLRHasNotBeenSaved,
                      SaveFPRegsMode::kIgnore);
}

void BaselineAssembler::StoreTaggedFieldNoWriteBarrier(Register target,
                                                       int offset,
                                                       Register value) {
  __ StoreTaggedField(value, FieldMemOperand(target, offset), r0);
}

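// The interrupt budget is stored in the function's feedback cell. The weight
// is added to it (setting the condition code as a side effect), and a
// non-negative result means the budget is not yet exhausted, so the
// interrupt can be skipped.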
void BaselineAssembler::AddToInterruptBudgetAndJumpIfNotExceeded(
    int32_t weight, Label* skip_interrupt_label) {
  ASM_CODE_COMMENT(masm_);
  ScratchRegisterScope scratch_scope(this);
  Register feedback_cell = scratch_scope.AcquireScratch();
  LoadFunction(feedback_cell);
  LoadTaggedPointerField(feedback_cell, feedback_cell,
                         JSFunction::kFeedbackCellOffset);

  Register interrupt_budget = scratch_scope.AcquireScratch();
  __ LoadU32(
      interrupt_budget,
      FieldMemOperand(feedback_cell, FeedbackCell::kInterruptBudgetOffset));
  // The add below must set the condition code: the conditional branch that
  // follows the store depends on it.
  __ AddS32(interrupt_budget, Operand(weight));
  __ StoreU32(
      interrupt_budget,
      FieldMemOperand(feedback_cell, FeedbackCell::kInterruptBudgetOffset), r0);
  if (skip_interrupt_label) {
    // Use the condition code set by the add above.
    DCHECK_LT(weight, 0);
    __ b(ge, skip_interrupt_label);
  }
}

void BaselineAssembler::AddToInterruptBudgetAndJumpIfNotExceeded(
    Register weight, Label* skip_interrupt_label) {
  ASM_CODE_COMMENT(masm_);
  ScratchRegisterScope scratch_scope(this);
  Register feedback_cell = scratch_scope.AcquireScratch();
  LoadFunction(feedback_cell);
  LoadTaggedPointerField(feedback_cell, feedback_cell,
                         JSFunction::kFeedbackCellOffset);

  Register interrupt_budget = scratch_scope.AcquireScratch();
  __ LoadU32(
      interrupt_budget,
      FieldMemOperand(feedback_cell, FeedbackCell::kInterruptBudgetOffset));
  // The add below must set the condition code: the conditional branch at the
  // end depends on it.
  __ AddS32(interrupt_budget, interrupt_budget, weight);
  __ StoreU32(
      interrupt_budget,
      FieldMemOperand(feedback_cell, FeedbackCell::kInterruptBudgetOffset));
  if (skip_interrupt_label) __ b(ge, skip_interrupt_label);
}

void BaselineAssembler::AddSmi(Register lhs, Smi rhs) {
  if (rhs.value() == 0) return;
  __ LoadSmiLiteral(r0, rhs);
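  // With 31-bit smis the payload occupies only the low 32 bits of the tagged
  // value, so a 32-bit add suffices; with 32-bit smis the tagged value is a
  // full 64-bit word.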
  if (SmiValuesAre31Bits()) {
    __ AddS32(lhs, lhs, r0);
  } else {
    __ AddS64(lhs, lhs, r0);
  }
}

void BaselineAssembler::Word32And(Register output, Register lhs, int rhs) {
  __ AndP(output, lhs, Operand(rhs));
}

void BaselineAssembler::Switch(Register reg, int case_value_base,
                               Label** labels, int num_labels) {
  ASM_CODE_COMMENT(masm_);
  Label fallthrough, jump_table;
  if (case_value_base != 0) {
    __ AddS64(reg, Operand(-case_value_base));
  }

  // Mostly copied from code-generator-arm.cc
  ScratchRegisterScope scope(this);
  JumpIf(Condition::kUnsignedGreaterThanEqual, reg, Operand(num_labels),
         &fallthrough);
  // Each jump-table entry is 8 bytes: a 6-byte far branch (brcl) plus a
  // 2-byte nop for padding, hence the shift by 3 below.
  int entry_size_log2 = 3;
  __ ShiftLeftU32(reg, reg, Operand(entry_size_log2));
  __ larl(r1, &jump_table);
  __ lay(reg, MemOperand(reg, r1));
  __ b(reg);
  __ b(&fallthrough);
  __ bind(&jump_table);
  for (int i = 0; i < num_labels; ++i) {
    __ b(labels[i], Label::kFar);
    __ nop();
  }
  __ bind(&fallthrough);
}

#undef __

#define __ basm.

void BaselineAssembler::EmitReturn(MacroAssembler* masm) {
  ASM_CODE_COMMENT(masm);
  BaselineAssembler basm(masm);

  Register weight = BaselineLeaveFrameDescriptor::WeightRegister();
  Register params_size = BaselineLeaveFrameDescriptor::ParamsSizeRegister();

  {
    ASM_CODE_COMMENT_STRING(masm, "Update Interrupt Budget");

    Label skip_interrupt_label;
    __ AddToInterruptBudgetAndJumpIfNotExceeded(weight, &skip_interrupt_label);
    {
      __ masm()->SmiTag(params_size);
      __ Push(params_size, kInterpreterAccumulatorRegister);

      __ LoadContext(kContextRegister);
      __ LoadFunction(kJSFunctionRegister);
      __ Push(kJSFunctionRegister);
      __ CallRuntime(Runtime::kBytecodeBudgetInterrupt, 1);

      __ Pop(kInterpreterAccumulatorRegister, params_size);
      __ masm()->SmiUntag(params_size);
    }

    __ Bind(&skip_interrupt_label);
  }

  BaselineAssembler::ScratchRegisterScope temps(&basm);
  Register actual_params_size = temps.AcquireScratch();
  // Compute the size of the actual parameters + receiver (in bytes).
  __ Move(actual_params_size,
          MemOperand(fp, StandardFrameConstants::kArgCOffset));

  // If the actual argument count is bigger than the formal parameter count,
  // use it when dropping the stack arguments.
  Label corrected_args_count;
  JumpIfHelper(__ masm(), Condition::kGreaterThanEqual, params_size,
               actual_params_size, &corrected_args_count);
  __ masm()->mov(params_size, actual_params_size);
  __ Bind(&corrected_args_count);

  // Leave the frame (also dropping the register file).
  __ masm()->LeaveFrame(StackFrame::BASELINE);

  // Drop receiver + arguments.
  __ masm()->DropArguments(params_size, TurboAssembler::kCountIsInteger,
                           TurboAssembler::kCountIncludesReceiver);
  __ masm()->Ret();
}

#undef __

inline void EnsureAccumulatorPreservedScope::AssertEqualToAccumulator(
    Register reg) {
  assembler_->masm()->CmpU64(reg, kInterpreterAccumulatorRegister);
  assembler_->masm()->Assert(eq, AbortReason::kUnexpectedValue);
}

}  // namespace baseline
}  // namespace internal
}  // namespace v8

#endif  // V8_BASELINE_S390_BASELINE_ASSEMBLER_S390_INL_H_