1 // Copyright 2021 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4
5 #ifndef V8_BASELINE_PPC_BASELINE_ASSEMBLER_PPC_INL_H_
6 #define V8_BASELINE_PPC_BASELINE_ASSEMBLER_PPC_INL_H_
7
8 #include "src/baseline/baseline-assembler.h"
9 #include "src/codegen/ppc/assembler-ppc-inl.h"
10 #include "src/codegen/interface-descriptors.h"
11
12 namespace v8 {
13 namespace internal {
14 namespace baseline {
15
namespace detail {

// Registers handed out by BaselineAssembler::ScratchRegisterScope, in
// allocation order. Note ip is also used as a temporary by some macro
// instructions emitted in this file (e.g. TestAndBranch).
static constexpr Register kScratchRegisters[] = {r9, r10, ip};
static constexpr int kNumScratchRegisters = arraysize(kScratchRegisters);

#ifdef DEBUG
// Returns true if writing to |target| would change the address computed by
// the memory operand |op| (i.e. |target| is op's base or index register).
inline bool Clobbers(Register target, MemOperand op) {
  return op.rb() == target || op.ra() == target;
}
#endif
}  // namespace detail
27
28 class BaselineAssembler::ScratchRegisterScope {
29 public:
ScratchRegisterScope(BaselineAssembler * assembler)30 explicit ScratchRegisterScope(BaselineAssembler* assembler)
31 : assembler_(assembler),
32 prev_scope_(assembler->scratch_register_scope_),
33 registers_used_(prev_scope_ == nullptr ? 0
34 : prev_scope_->registers_used_) {
35 assembler_->scratch_register_scope_ = this;
36 }
~ScratchRegisterScope()37 ~ScratchRegisterScope() { assembler_->scratch_register_scope_ = prev_scope_; }
38
AcquireScratch()39 Register AcquireScratch() {
40 DCHECK_LT(registers_used_, detail::kNumScratchRegisters);
41 return detail::kScratchRegisters[registers_used_++];
42 }
43
44 private:
45 BaselineAssembler* assembler_;
46 ScratchRegisterScope* prev_scope_;
47 int registers_used_;
48 };
49
// TODO(v8:11429,leszeks): Unify condition names in the MacroAssembler.
// Platform-independent condition codes used by the baseline compiler; they
// are lowered to PPC branch conditions by AsMasmCondition() below, with the
// signed/unsigned distinction handled by the compare instruction instead
// (see IsSignedCondition()).
enum class Condition : uint32_t {
  kEqual,
  kNotEqual,

  // Signed comparisons.
  kLessThan,
  kGreaterThan,
  kLessThanEqual,
  kGreaterThanEqual,

  // Unsigned comparisons.
  kUnsignedLessThan,
  kUnsignedGreaterThan,
  kUnsignedLessThanEqual,
  kUnsignedGreaterThanEqual,

  kOverflow,
  kNoOverflow,

  kZero,
  kNotZero
};
71
// Translates a baseline Condition into the PPC MacroAssembler condition.
// The unsigned conditions deliberately map to the same branch conditions as
// their signed counterparts: on PPC, signedness is selected by the compare
// instruction that precedes the branch (CmpS64 vs. CmpU64, chosen via
// IsSignedCondition()), not by the branch condition itself. Likewise
// kZero/kNotZero reuse eq/ne, relying on a preceding compare-against-zero
// or record-form (Rc) instruction.
inline internal::Condition AsMasmCondition(Condition cond) {
  STATIC_ASSERT(sizeof(internal::Condition) == sizeof(Condition));
  switch (cond) {
    case Condition::kEqual:
      return eq;
    case Condition::kNotEqual:
      return ne;
    case Condition::kLessThan:
      return lt;
    case Condition::kGreaterThan:
      return gt;
    case Condition::kLessThanEqual:
      return le;
    case Condition::kGreaterThanEqual:
      return ge;

    case Condition::kUnsignedLessThan:
      return lt;
    case Condition::kUnsignedGreaterThan:
      return gt;
    case Condition::kUnsignedLessThanEqual:
      return le;
    case Condition::kUnsignedGreaterThanEqual:
      return ge;

    case Condition::kOverflow:
      return overflow;
    case Condition::kNoOverflow:
      return nooverflow;

    case Condition::kZero:
      return eq;
    case Condition::kNotZero:
      return ne;
    default:
      UNREACHABLE();
  }
}
110
IsSignedCondition(Condition cond)111 inline bool IsSignedCondition(Condition cond) {
112 switch (cond) {
113 case Condition::kEqual:
114 case Condition::kNotEqual:
115 case Condition::kLessThan:
116 case Condition::kGreaterThan:
117 case Condition::kLessThanEqual:
118 case Condition::kGreaterThanEqual:
119 case Condition::kOverflow:
120 case Condition::kNoOverflow:
121 case Condition::kZero:
122 case Condition::kNotZero:
123 return true;
124
125 case Condition::kUnsignedLessThan:
126 case Condition::kUnsignedGreaterThan:
127 case Condition::kUnsignedLessThanEqual:
128 case Condition::kUnsignedGreaterThanEqual:
129 return false;
130
131 default:
132 UNREACHABLE();
133 }
134 }
135
#define __ assm->
// ppc helper: emits a 64-bit compare of |lhs| against |rhs| — signed or
// unsigned depending on |cc| — followed by a conditional branch to |target|.
static void JumpIfHelper(MacroAssembler* assm, Condition cc, Register lhs,
                         Register rhs, Label* target) {
  if (IsSignedCondition(cc)) {
    __ CmpS64(lhs, rhs);
  } else {
    __ CmpU64(lhs, rhs);
  }
  __ b(AsMasmCondition(cc), target);
}
#undef __
148
149 #define __ masm_->
150
// Returns the fp-relative frame slot operand for an interpreter register.
MemOperand BaselineAssembler::RegisterFrameOperand(
    interpreter::Register interpreter_register) {
  return MemOperand(fp, interpreter_register.ToOperand() * kSystemPointerSize);
}
RegisterFrameAddress(interpreter::Register interpreter_register,Register rscratch)155 void BaselineAssembler::RegisterFrameAddress(
156 interpreter::Register interpreter_register, Register rscratch) {
157 return __ AddS64(
158 rscratch, fp,
159 Operand(interpreter_register.ToOperand() * kSystemPointerSize));
160 }
// Returns the frame slot holding the feedback vector of the baseline frame.
MemOperand BaselineAssembler::FeedbackVectorOperand() {
  return MemOperand(fp, BaselineFrameConstants::kFeedbackVectorFromFp);
}
164
// Binds |label| to the current pc.
void BaselineAssembler::Bind(Label* label) { __ bind(label); }
// Identical to Bind on this port; no separate jump-target marking is emitted.
void BaselineAssembler::BindWithoutJumpTarget(Label* label) { __ bind(label); }
167
void BaselineAssembler::JumpTarget() {
  // NOP on ppc: no landing-pad / jump-target instruction is required.
}
171
// Unconditional branch to |target|; the distance hint is ignored on ppc.
void BaselineAssembler::Jump(Label* target, Label::Distance distance) {
  ASM_CODE_COMMENT(masm_);
  __ b(target);
}
176
// Branches to |target| if |value| is the root at |index|.
void BaselineAssembler::JumpIfRoot(Register value, RootIndex index,
                                   Label* target, Label::Distance) {
  ASM_CODE_COMMENT(masm_);
  __ JumpIfRoot(value, index, target);
}
182
// Branches to |target| if |value| is not the root at |index|.
void BaselineAssembler::JumpIfNotRoot(Register value, RootIndex index,
                                      Label* target, Label::Distance) {
  ASM_CODE_COMMENT(masm_);
  __ JumpIfNotRoot(value, index, target);
}
188
// Branches to |target| if |value| has a Smi tag.
void BaselineAssembler::JumpIfSmi(Register value, Label* target,
                                  Label::Distance) {
  ASM_CODE_COMMENT(masm_);
  __ JumpIfSmi(value, target);
}
194
// Branches to |target| if |left| compares |cc| against the immediate |right|.
void BaselineAssembler::JumpIfImmediate(Condition cc, Register left, int right,
                                        Label* target,
                                        Label::Distance distance) {
  ASM_CODE_COMMENT(masm_);
  JumpIf(cc, left, Operand(right), target, distance);
}
201
// Branches to |target| if |value| does not have a Smi tag.
void BaselineAssembler::JumpIfNotSmi(Register value, Label* target,
                                     Label::Distance) {
  ASM_CODE_COMMENT(masm_);
  __ JumpIfNotSmi(value, target);
}
207
CallBuiltin(Builtin builtin)208 void BaselineAssembler::CallBuiltin(Builtin builtin) {
209 ASM_CODE_COMMENT_STRING(masm_,
210 __ CommentForOffHeapTrampoline("call", builtin));
211 if (masm()->options().short_builtin_calls) {
212 // Generate pc-relative call.
213 __ CallBuiltin(builtin, al);
214 } else {
215 ScratchRegisterScope temps(this);
216 Register temp = temps.AcquireScratch();
217 __ LoadEntryFromBuiltin(builtin, temp);
218 __ Call(temp);
219 }
220 }
221
TailCallBuiltin(Builtin builtin)222 void BaselineAssembler::TailCallBuiltin(Builtin builtin) {
223 ASM_CODE_COMMENT_STRING(masm_,
224 __ CommentForOffHeapTrampoline("tail call", builtin));
225 if (masm()->options().short_builtin_calls) {
226 // Generate pc-relative call.
227 __ TailCallBuiltin(builtin);
228 } else {
229 ScratchRegisterScope temps(this);
230 Register temp = temps.AcquireScratch();
231 __ LoadEntryFromBuiltin(builtin, temp);
232 __ Jump(temp);
233 }
234 }
235
// ANDs |value| with the immediate |mask| and branches to |target| on |cc|.
void BaselineAssembler::TestAndBranch(Register value, int mask, Condition cc,
                                      Label* target, Label::Distance) {
  ASM_CODE_COMMENT(masm_);
  // Record form (SetRC) sets CR0 from the AND result; the result value in r0
  // is otherwise unused. ip serves as a scratch for materializing the mask.
  __ AndU64(r0, value, Operand(mask), ip, SetRC);
  // NOTE(review): the branch reads the assembler's default CR field —
  // confirm this matches the CR0 updated by SetRC above.
  __ b(AsMasmCondition(cc), target);
}
242
// Compares |lhs| against the operand |rhs| (signed or unsigned per |cc|,
// with r0 as a scratch for immediate materialization) and branches to
// |target| when the condition holds.
void BaselineAssembler::JumpIf(Condition cc, Register lhs, const Operand& rhs,
                               Label* target, Label::Distance) {
  ASM_CODE_COMMENT(masm_);
  if (IsSignedCondition(cc)) {
    __ CmpS64(lhs, rhs, r0);
  } else {
    __ CmpU64(lhs, rhs, r0);
  }
  __ b(AsMasmCondition(cc), target);
}
253
JumpIfObjectType(Condition cc,Register object,InstanceType instance_type,Register map,Label * target,Label::Distance)254 void BaselineAssembler::JumpIfObjectType(Condition cc, Register object,
255 InstanceType instance_type,
256 Register map, Label* target,
257 Label::Distance) {
258 ASM_CODE_COMMENT(masm_);
259 ScratchRegisterScope temps(this);
260 Register type = temps.AcquireScratch();
261 __ LoadMap(map, object);
262 __ LoadU16(type, FieldMemOperand(map, Map::kInstanceTypeOffset));
263 JumpIf(cc, type, Operand(instance_type), target);
264 }
265
// Branches to |target| if the instance type stored in |map| compares |cc|
// against |instance_type|. In debug builds, first verifies that |map| really
// holds a Map object.
void BaselineAssembler::JumpIfInstanceType(Condition cc, Register map,
                                           InstanceType instance_type,
                                           Label* target, Label::Distance) {
  ASM_CODE_COMMENT(masm_);
  ScratchRegisterScope temps(this);
  Register type = temps.AcquireScratch();
  if (FLAG_debug_code) {
    __ AssertNotSmi(map);
    // |type| is used as a scratch here; it is overwritten by LoadU16 below.
    __ CompareObjectType(map, type, type, MAP_TYPE);
    __ Assert(eq, AbortReason::kUnexpectedValue);
  }
  __ LoadU16(type, FieldMemOperand(map, Map::kInstanceTypeOffset));
  JumpIf(cc, type, Operand(instance_type), target);
}
280
JumpIfPointer(Condition cc,Register value,MemOperand operand,Label * target,Label::Distance)281 void BaselineAssembler::JumpIfPointer(Condition cc, Register value,
282 MemOperand operand, Label* target,
283 Label::Distance) {
284 ASM_CODE_COMMENT(masm_);
285 ScratchRegisterScope temps(this);
286 Register tmp = temps.AcquireScratch();
287 __ LoadU64(tmp, operand);
288 JumpIfHelper(masm_, cc, value, tmp, target);
289 }
290
// Branches to |target| when |value| compares |cc| against the Smi literal
// |smi| (materialized in r0). Asserts |value| is a Smi in debug builds.
void BaselineAssembler::JumpIfSmi(Condition cc, Register value, Smi smi,
                                  Label* target, Label::Distance) {
  ASM_CODE_COMMENT(masm_);
  __ AssertSmi(value);
  __ LoadSmiLiteral(r0, smi);
  JumpIfHelper(masm_, cc, value, r0, target);
}
298
// Branches to |target| when the Smi in |lhs| compares |cc| against the Smi
// in |rhs|. Asserts both are Smis in debug builds.
void BaselineAssembler::JumpIfSmi(Condition cc, Register lhs, Register rhs,
                                  Label* target, Label::Distance) {
  ASM_CODE_COMMENT(masm_);
  __ AssertSmi(lhs);
  __ AssertSmi(rhs);
  JumpIfHelper(masm_, cc, lhs, rhs, target);
}
306
// Branches to |target| when |value| compares |cc| against the tagged value
// at |operand| (loaded into r0).
void BaselineAssembler::JumpIfTagged(Condition cc, Register value,
                                     MemOperand operand, Label* target,
                                     Label::Distance) {
  ASM_CODE_COMMENT(masm_);
  __ LoadU64(r0, operand);
  JumpIfHelper(masm_, cc, value, r0, target);
}
314
// Branches to |target| when the tagged value at |operand| (loaded into r0)
// compares |cc| against |value|. Note the operand order: memory is the lhs.
void BaselineAssembler::JumpIfTagged(Condition cc, MemOperand operand,
                                     Register value, Label* target,
                                     Label::Distance) {
  ASM_CODE_COMMENT(masm_);
  __ LoadU64(r0, operand);
  JumpIfHelper(masm_, cc, r0, value, target);
}
322
// Branches to |target| when |value| compares |cc| against the byte |byte|.
void BaselineAssembler::JumpIfByte(Condition cc, Register value, int32_t byte,
                                   Label* target, Label::Distance) {
  ASM_CODE_COMMENT(masm_);
  JumpIf(cc, value, Operand(byte), target);
}
328
// Stores |source| into the frame slot of the interpreter register |output|.
void BaselineAssembler::Move(interpreter::Register output, Register source) {
  ASM_CODE_COMMENT(masm_);
  Move(RegisterFrameOperand(output), source);
}
333
// Loads the raw bit pattern of the TaggedIndex |value| into |output|.
void BaselineAssembler::Move(Register output, TaggedIndex value) {
  ASM_CODE_COMMENT(masm_);
  __ mov(output, Operand(value.ptr()));
}
338
// Stores |source| (full pointer width) to memory at |output|.
void BaselineAssembler::Move(MemOperand output, Register source) {
  ASM_CODE_COMMENT(masm_);
  __ StoreU64(source, output);
}
343
// Loads the external reference |reference| into |output|.
void BaselineAssembler::Move(Register output, ExternalReference reference) {
  ASM_CODE_COMMENT(masm_);
  __ Move(output, reference);
}
348
// Loads the heap object handle |value| into |output|.
void BaselineAssembler::Move(Register output, Handle<HeapObject> value) {
  ASM_CODE_COMMENT(masm_);
  __ Move(output, value);
}
353
// Loads the immediate |value| into |output|.
void BaselineAssembler::Move(Register output, int32_t value) {
  ASM_CODE_COMMENT(masm_);
  __ mov(output, Operand(value));
}
358
// Register-to-register move for a value that may or may not be a Smi.
// On ppc a plain mr suffices (no pointer-compression fixup required here).
void BaselineAssembler::MoveMaybeSmi(Register output, Register source) {
  ASM_CODE_COMMENT(masm_);
  __ mr(output, source);
}
363
// Register-to-register move of a known-Smi value; identical to a plain mr.
void BaselineAssembler::MoveSmi(Register output, Register source) {
  ASM_CODE_COMMENT(masm_);
  __ mr(output, source);
}
368
namespace detail {

// Moves |arg| into a freshly acquired scratch register and returns it.
template <typename Arg>
inline Register ToRegister(BaselineAssembler* basm,
                           BaselineAssembler::ScratchRegisterScope* scope,
                           Arg arg) {
  Register reg = scope->AcquireScratch();
  basm->Move(reg, arg);
  return reg;
}
// Overload for values already in a register: no scratch, no move.
inline Register ToRegister(BaselineAssembler* basm,
                           BaselineAssembler::ScratchRegisterScope* scope,
                           Register reg) {
  return reg;
}

// Recursive helper pushing a heterogeneous argument pack one value at a
// time. Push pushes in argument order, PushReverse in reverse order; both
// return the number of stack slots pushed.
template <typename... Args>
struct PushAllHelper;
template <>
struct PushAllHelper<> {
  static int Push(BaselineAssembler* basm) { return 0; }
  static int PushReverse(BaselineAssembler* basm) { return 0; }
};
// TODO(ishell): try to pack sequence of pushes into one instruction by
// looking at register codes. For example, Push(r1, r2, r5, r0, r3, r4)
// could be generated as two pushes: Push(r1, r2, r5) and Push(r0, r3, r4).
template <typename Arg>
struct PushAllHelper<Arg> {
  static int Push(BaselineAssembler* basm, Arg arg) {
    BaselineAssembler::ScratchRegisterScope scope(basm);
    basm->masm()->Push(ToRegister(basm, &scope, arg));
    return 1;
  }
  // A single value is its own reverse.
  static int PushReverse(BaselineAssembler* basm, Arg arg) {
    return Push(basm, arg);
  }
};
// TODO(ishell): try to pack sequence of pushes into one instruction by
// looking at register codes. For example, Push(r1, r2, r5, r0, r3, r4)
// could be generated as two pushes: Push(r1, r2, r5) and Push(r0, r3, r4).
template <typename Arg, typename... Args>
struct PushAllHelper<Arg, Args...> {
  static int Push(BaselineAssembler* basm, Arg arg, Args... args) {
    PushAllHelper<Arg>::Push(basm, arg);
    return 1 + PushAllHelper<Args...>::Push(basm, args...);
  }
  static int PushReverse(BaselineAssembler* basm, Arg arg, Args... args) {
    // Push the tail first, then this argument, to reverse overall order.
    int nargs = PushAllHelper<Args...>::PushReverse(basm, args...);
    PushAllHelper<Arg>::Push(basm, arg);
    return nargs + 1;
  }
};
// Specialization pushing every register of an interpreter RegisterList:
// lowest index first for Push, highest index first for PushReverse.
template <>
struct PushAllHelper<interpreter::RegisterList> {
  static int Push(BaselineAssembler* basm, interpreter::RegisterList list) {
    for (int reg_index = 0; reg_index < list.register_count(); ++reg_index) {
      PushAllHelper<interpreter::Register>::Push(basm, list[reg_index]);
    }
    return list.register_count();
  }
  static int PushReverse(BaselineAssembler* basm,
                         interpreter::RegisterList list) {
    for (int reg_index = list.register_count() - 1; reg_index >= 0;
         --reg_index) {
      PushAllHelper<interpreter::Register>::Push(basm, list[reg_index]);
    }
    return list.register_count();
  }
};

// Recursive helper popping into a pack of registers, first register first.
template <typename... T>
struct PopAllHelper;
template <>
struct PopAllHelper<> {
  static void Pop(BaselineAssembler* basm) {}
};
// TODO(ishell): try to pack sequence of pops into one instruction by
// looking at register codes. For example, Pop(r1, r2, r5, r0, r3, r4)
// could be generated as two pops: Pop(r1, r2, r5) and Pop(r0, r3, r4).
template <>
struct PopAllHelper<Register> {
  static void Pop(BaselineAssembler* basm, Register reg) {
    basm->masm()->Pop(reg);
  }
};
template <typename... T>
struct PopAllHelper<Register, T...> {
  static void Pop(BaselineAssembler* basm, Register reg, T... tail) {
    PopAllHelper<Register>::Pop(basm, reg);
    PopAllHelper<T...>::Pop(basm, tail...);
  }
};

}  // namespace detail
463
// Pushes |vals| in argument order; returns the number of slots pushed.
template <typename... T>
int BaselineAssembler::Push(T... vals) {
  return detail::PushAllHelper<T...>::Push(this, vals...);
}
468
// Pushes |vals| in reverse argument order.
template <typename... T>
void BaselineAssembler::PushReverse(T... vals) {
  detail::PushAllHelper<T...>::PushReverse(this, vals...);
}
473
// Pops into |registers|, first register receiving the top of the stack.
template <typename... T>
void BaselineAssembler::Pop(T... registers) {
  detail::PopAllHelper<T...>::Pop(this, registers...);
}
478
// Loads a tagged pointer field of |source| at |offset| into |output|
// (r0 as scratch).
void BaselineAssembler::LoadTaggedPointerField(Register output, Register source,
                                               int offset) {
  ASM_CODE_COMMENT(masm_);
  __ LoadTaggedPointerField(output, FieldMemOperand(source, offset), r0);
}
484
// Loads a tagged Smi field of |source| at |offset| into |output|
// (r0 as scratch).
void BaselineAssembler::LoadTaggedSignedField(Register output, Register source,
                                              int offset) {
  ASM_CODE_COMMENT(masm_);
  __ LoadTaggedSignedField(output, FieldMemOperand(source, offset), r0);
}
490
// Loads a tagged field (Smi or heap object) of |source| at |offset| into
// |output| (r0 as scratch).
void BaselineAssembler::LoadTaggedAnyField(Register output, Register source,
                                           int offset) {
  ASM_CODE_COMMENT(masm_);
  __ LoadAnyTaggedField(output, FieldMemOperand(source, offset), r0);
}
496
// Loads a 16-bit field of |source| at |offset| into |output|, zero-extended
// (r0 as scratch).
void BaselineAssembler::LoadWord16FieldZeroExtend(Register output,
                                                  Register source, int offset) {
  ASM_CODE_COMMENT(masm_);
  __ LoadU16(output, FieldMemOperand(source, offset), r0);
}
502
// Loads an 8-bit field of |source| at |offset| into |output|, zero-extended
// (r0 as scratch).
void BaselineAssembler::LoadWord8Field(Register output, Register source,
                                       int offset) {
  ASM_CODE_COMMENT(masm_);
  __ LoadU8(output, FieldMemOperand(source, offset), r0);
}
508
509 void BaselineAssembler::StoreTaggedSignedField(Register target, int offset,
510 Smi value) {
511 ASM_CODE_COMMENT(masm_);
512 ScratchRegisterScope temps(this);
513 Register tmp = temps.AcquireScratch();
514 __ LoadSmiLiteral(tmp, value);
515 __ StoreTaggedField(tmp, FieldMemOperand(target, offset), r0);
516 }
517
// Stores |value| into |target|'s field at |offset| and emits the write
// barrier. |value| must not alias the slot-address register reserved by the
// WriteBarrierDescriptor. FP registers are not saved; lr has not been saved
// by the caller.
void BaselineAssembler::StoreTaggedFieldWithWriteBarrier(Register target,
                                                         int offset,
                                                         Register value) {
  ASM_CODE_COMMENT(masm_);
  Register scratch = WriteBarrierDescriptor::SlotAddressRegister();
  DCHECK(!AreAliased(target, value, scratch));
  __ StoreTaggedField(value, FieldMemOperand(target, offset), r0);
  __ RecordWriteField(target, offset, value, scratch, kLRHasNotBeenSaved,
                      SaveFPRegsMode::kIgnore);
}
// Stores |value| into |target|'s field at |offset| without a write barrier;
// the caller guarantees no barrier is required (e.g. Smi value or young
// target known to the caller).
void BaselineAssembler::StoreTaggedFieldNoWriteBarrier(Register target,
                                                       int offset,
                                                       Register value) {
  ASM_CODE_COMMENT(masm_);
  __ StoreTaggedField(value, FieldMemOperand(target, offset), r0);
}
534
// Not yet implemented for the ppc port.
void BaselineAssembler::AddToInterruptBudgetAndJumpIfNotExceeded(
    int32_t weight, Label* skip_interrupt_label) {
  UNIMPLEMENTED();
}
539
// Not yet implemented for the ppc port.
void BaselineAssembler::AddToInterruptBudgetAndJumpIfNotExceeded(
    Register weight, Label* skip_interrupt_label) {
  UNIMPLEMENTED();
}
544
// Not yet implemented for the ppc port.
void BaselineAssembler::AddSmi(Register lhs, Smi rhs) { UNIMPLEMENTED(); }
546
// Not yet implemented for the ppc port.
void BaselineAssembler::Switch(Register reg, int case_value_base,
                               Label** labels, int num_labels) {
  UNIMPLEMENTED();
}
551
552 void BaselineAssembler::Word32And(Register output, Register lhs, int rhs) {
553 __ AndU32(output, lhs, Operand(rhs));
554 }
555
556 #undef __
557
#define __ basm.

// Not yet implemented for the ppc port.
void BaselineAssembler::EmitReturn(MacroAssembler* masm) { UNIMPLEMENTED(); }

#undef __
563
// Not yet implemented for the ppc port.
inline void EnsureAccumulatorPreservedScope::AssertEqualToAccumulator(
    Register reg) {
  UNIMPLEMENTED();
}
568
569 } // namespace baseline
570 } // namespace internal
571 } // namespace v8
572
573 #endif // V8_BASELINE_PPC_BASELINE_ASSEMBLER_PPC_INL_H_
574