• Home
  • Line#
  • Scopes#
  • Navigate#
  • Raw
  • Download
1 // Copyright 2021 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4 
5 #ifndef V8_BASELINE_RISCV64_BASELINE_ASSEMBLER_RISCV64_INL_H_
6 #define V8_BASELINE_RISCV64_BASELINE_ASSEMBLER_RISCV64_INL_H_
7 
8 #include "src/baseline/baseline-assembler.h"
9 #include "src/codegen/assembler-inl.h"
10 #include "src/codegen/interface-descriptors.h"
11 namespace v8 {
12 namespace internal {
13 namespace baseline {
14 
// RAII scope that hands out scratch registers for baseline code generation.
// Scopes nest: each instance records the previously active scope and
// restores it when destroyed.
class BaselineAssembler::ScratchRegisterScope {
 public:
  explicit ScratchRegisterScope(BaselineAssembler* assembler)
      : assembler_(assembler),
        prev_scope_(assembler->scratch_register_scope_),
        wrapped_scope_(assembler->masm()) {
    if (!assembler_->scratch_register_scope_) {
      // If we haven't opened a scratch scope yet, for the first one add a
      // couple of extra registers.
      wrapped_scope_.Include(kScratchReg, kScratchReg2);
    }
    assembler_->scratch_register_scope_ = this;
  }
  // Pop this scope; the previously active scope becomes current again.
  ~ScratchRegisterScope() { assembler_->scratch_register_scope_ = prev_scope_; }

  // Returns a register that is free for the lifetime of this scope.
  Register AcquireScratch() { return wrapped_scope_.Acquire(); }

 private:
  BaselineAssembler* assembler_;
  ScratchRegisterScope* prev_scope_;
  UseScratchRegisterScope wrapped_scope_;
};
37 
// Platform-independent condition codes used by the baseline compiler,
// mapped one-to-one onto the RISC-V macro-assembler's condition values
// (see AsMasmCondition below, which relies on this identity).
enum class Condition : uint32_t {
  kEqual = eq,
  kNotEqual = ne,

  kLessThan = lt,
  kGreaterThan = gt,
  kLessThanEqual = le,
  kGreaterThanEqual = ge,

  kUnsignedLessThan = Uless,
  kUnsignedGreaterThan = Ugreater,
  kUnsignedLessThanEqual = Uless_equal,
  kUnsignedGreaterThanEqual = Ugreater_equal,

  kOverflow = overflow,
  kNoOverflow = no_overflow,

  // Zero tests reuse the (not-)equal encodings; callers supply zero_reg as
  // the second comparison operand.
  kZero = eq,
  kNotZero = ne,
};
58 
// Converts a baseline Condition to the macro-assembler condition. The enum
// values above are defined to be identical, so a plain cast suffices.
inline internal::Condition AsMasmCondition(Condition cond) {
  return static_cast<internal::Condition>(cond);
}
62 
namespace detail {

#ifdef DEBUG
// Debug-only check: does writing to `target` invalidate `op`? True when the
// memory operand is register-based and uses `target` as its base register.
inline bool Clobbers(Register target, MemOperand op) {
  return op.is_reg() && op.rm() == target;
}
#endif

}  // namespace detail
72 
73 #define __ masm_->
74 
// Returns the fp-relative frame slot backing the given interpreter register.
MemOperand BaselineAssembler::RegisterFrameOperand(
    interpreter::Register interpreter_register) {
  return MemOperand(fp, interpreter_register.ToOperand() * kSystemPointerSize);
}
// Materializes the address of the interpreter register's frame slot into
// `rscratch`.
void BaselineAssembler::RegisterFrameAddress(
    interpreter::Register interpreter_register, Register rscratch) {
  return __ Add64(rscratch, fp,
                  interpreter_register.ToOperand() * kSystemPointerSize);
}
// Frame slot holding the feedback vector of the current baseline frame.
MemOperand BaselineAssembler::FeedbackVectorOperand() {
  return MemOperand(fp, BaselineFrameConstants::kFeedbackVectorFromFp);
}
87 
void BaselineAssembler::Bind(Label* label) { __ bind(label); }

// On this port a label bound without being a jump target needs no special
// marking, so this is identical to Bind.
void BaselineAssembler::BindWithoutJumpTarget(Label* label) { __ bind(label); }

// No landing-pad instruction is required on RISC-V.
void BaselineAssembler::JumpTarget() {
  // Nop
}
95 
// Unconditional jump; the distance hint is ignored on this port.
void BaselineAssembler::Jump(Label* target, Label::Distance distance) {
  __ jmp(target);
}
// Branches to `target` if `value` equals the root at `index`.
void BaselineAssembler::JumpIfRoot(Register value, RootIndex index,
                                   Label* target, Label::Distance) {
  __ JumpIfRoot(value, index, target);
}
// Branches to `target` if `value` differs from the root at `index`.
void BaselineAssembler::JumpIfNotRoot(Register value, RootIndex index,
                                      Label* target, Label::Distance) {
  __ JumpIfNotRoot(value, index, target);
}
// Branches to `target` if `value` is a Smi.
void BaselineAssembler::JumpIfSmi(Register value, Label* target,
                                  Label::Distance) {
  __ JumpIfSmi(value, target);
}
JumpIfNotSmi(Register value,Label * target,Label::Distance)111 void BaselineAssembler::JumpIfNotSmi(Register value, Label* target,
112                                      Label::Distance) {
113   __ JumpIfSmi(value, target);
114 }
// Compares `left` against the immediate `right` and branches on `cc`.
void BaselineAssembler::JumpIfImmediate(Condition cc, Register left, int right,
                                        Label* target,
                                        Label::Distance distance) {
  JumpIf(cc, left, Operand(right), target, distance);
}
// Calls an off-heap builtin: loads its entry address into t6 and calls
// through the register.
void BaselineAssembler::CallBuiltin(Builtin builtin) {
  ASM_CODE_COMMENT_STRING(masm_,
                          __ CommentForOffHeapTrampoline("call", builtin));
  Register temp = t6;
  __ LoadEntryFromBuiltin(builtin, temp);
  __ Call(temp);
}

// Tail-calls an off-heap builtin: jumps rather than calls, so the current
// frame is reused by the callee.
void BaselineAssembler::TailCallBuiltin(Builtin builtin) {
  ASM_CODE_COMMENT_STRING(masm_,
                          __ CommentForOffHeapTrampoline("tail call", builtin));
  Register temp = t6;
  __ LoadEntryFromBuiltin(builtin, temp);
  __ Jump(temp);
}
135 
// Branches to `target` when (value & mask), compared against zero,
// satisfies `cc`.
void BaselineAssembler::TestAndBranch(Register value, int mask, Condition cc,
                                      Label* target, Label::Distance) {
  ScratchRegisterScope temps(this);
  Register tmp = temps.AcquireScratch();
  __ And(tmp, value, Operand(mask));
  __ Branch(target, AsMasmCondition(cc), tmp, Operand(zero_reg));
}

// Generic compare-and-branch on a register/operand pair.
void BaselineAssembler::JumpIf(Condition cc, Register lhs, const Operand& rhs,
                               Label* target, Label::Distance) {
  __ Branch(target, AsMasmCondition(cc), lhs, Operand(rhs));
}
// Loads `object`'s map into `map` and its instance type into a scratch
// register, then branches on the comparison with `instance_type`.
void BaselineAssembler::JumpIfObjectType(Condition cc, Register object,
                                         InstanceType instance_type,
                                         Register map, Label* target,
                                         Label::Distance) {
  ScratchRegisterScope temps(this);
  Register type = temps.AcquireScratch();
  __ GetObjectType(object, map, type);
  __ Branch(target, AsMasmCondition(cc), type, Operand(instance_type));
}
JumpIfInstanceType(Condition cc,Register map,InstanceType instance_type,Label * target,Label::Distance)157 void BaselineAssembler::JumpIfInstanceType(Condition cc, Register map,
158                                            InstanceType instance_type,
159                                            Label* target, Label::Distance) {
160   ScratchRegisterScope temps(this);
161   Register type = temps.AcquireScratch();
162   if (FLAG_debug_code) {
163     __ AssertNotSmi(map);
164     __ GetObjectType(map, type, type);
165     __ Assert(eq, AbortReason::kUnexpectedValue, type, Operand(MAP_TYPE));
166   }
167   __ Ld(type, FieldMemOperand(map, Map::kInstanceTypeOffset));
168   __ Branch(target, AsMasmCondition(cc), type, Operand(instance_type));
169 }
// Compares `value` against a full pointer loaded from `operand` and
// branches on `cc`.
void BaselineAssembler::JumpIfPointer(Condition cc, Register value,
                                      MemOperand operand, Label* target,
                                      Label::Distance) {
  ScratchRegisterScope temps(this);
  Register temp = temps.AcquireScratch();
  __ Ld(temp, operand);
  __ Branch(target, AsMasmCondition(cc), value, Operand(temp));
}
// Compares `value` against the Smi constant `smi` and branches on `cc`.
// NOTE(review): the constant is untagged (SmiUntag) before the compare while
// nothing here untags `value` — confirm against callers that the operands
// end up in the same representation.
void BaselineAssembler::JumpIfSmi(Condition cc, Register value, Smi smi,
                                  Label* target, Label::Distance) {
  ScratchRegisterScope temps(this);
  Register temp = temps.AcquireScratch();
  __ li(temp, Operand(smi));
  __ SmiUntag(temp);
  __ Branch(target, AsMasmCondition(cc), value, Operand(temp));
}
// Compares two registers that are both asserted (debug-only) to hold Smis.
void BaselineAssembler::JumpIfSmi(Condition cc, Register lhs, Register rhs,
                                  Label* target, Label::Distance) {
  // todo: compress pointer
  __ AssertSmi(lhs);
  __ AssertSmi(rhs);
  __ Branch(target, AsMasmCondition(cc), lhs, Operand(rhs));
}
// Compares `value` against a tagged value loaded from `operand`.
void BaselineAssembler::JumpIfTagged(Condition cc, Register value,
                                     MemOperand operand, Label* target,
                                     Label::Distance) {
  // todo: compress pointer
  ScratchRegisterScope temps(this);
  Register scratch = temps.AcquireScratch();
  __ Ld(scratch, operand);
  __ Branch(target, AsMasmCondition(cc), value, Operand(scratch));
}
// Same as above with operands swapped: the memory value is on the left of
// the comparison (matters for asymmetric conditions like lt/gt).
void BaselineAssembler::JumpIfTagged(Condition cc, MemOperand operand,
                                     Register value, Label* target,
                                     Label::Distance) {
  // todo: compress pointer
  ScratchRegisterScope temps(this);
  Register scratch = temps.AcquireScratch();
  __ Ld(scratch, operand);
  __ Branch(target, AsMasmCondition(cc), scratch, Operand(value));
}
// Compares `value` against the byte-sized immediate `byte`.
void BaselineAssembler::JumpIfByte(Condition cc, Register value, int32_t byte,
                                   Label* target, Label::Distance) {
  __ Branch(target, AsMasmCondition(cc), value, Operand(byte));
}
215 
// Stores `source` into the frame slot of the given interpreter register.
void BaselineAssembler::Move(interpreter::Register output, Register source) {
  Move(RegisterFrameOperand(output), source);
}
// Loads the raw bit pattern of a TaggedIndex.
void BaselineAssembler::Move(Register output, TaggedIndex value) {
  __ li(output, Operand(value.ptr()));
}
// Stores a full (64-bit) register into memory.
void BaselineAssembler::Move(MemOperand output, Register source) {
  __ Sd(source, output);
}
void BaselineAssembler::Move(Register output, ExternalReference reference) {
  __ li(output, Operand(reference));
}
void BaselineAssembler::Move(Register output, Handle<HeapObject> value) {
  __ li(output, Operand(value));
}
void BaselineAssembler::Move(Register output, int32_t value) {
  __ li(output, Operand(value));
}
// Register-to-register moves: on this port Smis and heap references use the
// same full-width move.
void BaselineAssembler::MoveMaybeSmi(Register output, Register source) {
  __ Move(output, source);
}
void BaselineAssembler::MoveSmi(Register output, Register source) {
  __ Move(output, source);
}
240 
namespace detail {

// Materializes an arbitrary argument into a register: non-register
// arguments are moved into a freshly acquired scratch register.
template <typename Arg>
inline Register ToRegister(BaselineAssembler* basm,
                           BaselineAssembler::ScratchRegisterScope* scope,
                           Arg arg) {
  Register reg = scope->AcquireScratch();
  basm->Move(reg, arg);
  return reg;
}
// Overload: an argument that already is a register is used as-is.
inline Register ToRegister(BaselineAssembler* basm,
                           BaselineAssembler::ScratchRegisterScope* scope,
                           Register reg) {
  return reg;
}

// Recursively pushes a heterogeneous argument pack. Push/PushReverse return
// the number of stack slots pushed.
template <typename... Args>
struct PushAllHelper;
template <>
struct PushAllHelper<> {
  static int Push(BaselineAssembler* basm) { return 0; }
  static int PushReverse(BaselineAssembler* basm) { return 0; }
};
template <typename Arg>
struct PushAllHelper<Arg> {
  static int Push(BaselineAssembler* basm, Arg arg) {
    // The scratch scope lives only for this one push.
    BaselineAssembler::ScratchRegisterScope scope(basm);
    basm->masm()->Push(ToRegister(basm, &scope, arg));
    return 1;
  }
  static int PushReverse(BaselineAssembler* basm, Arg arg) {
    // A single argument is its own reverse.
    return Push(basm, arg);
  }
};
template <typename Arg, typename... Args>
struct PushAllHelper<Arg, Args...> {
  static int Push(BaselineAssembler* basm, Arg arg, Args... args) {
    PushAllHelper<Arg>::Push(basm, arg);
    return 1 + PushAllHelper<Args...>::Push(basm, args...);
  }
  static int PushReverse(BaselineAssembler* basm, Arg arg, Args... args) {
    // Push the tail first so the pack ends up reversed on the stack.
    int nargs = PushAllHelper<Args...>::PushReverse(basm, args...);
    PushAllHelper<Arg>::Push(basm, arg);
    return nargs + 1;
  }
};
// A RegisterList is one argument of the pack but pushes all its registers.
template <>
struct PushAllHelper<interpreter::RegisterList> {
  static int Push(BaselineAssembler* basm, interpreter::RegisterList list) {
    for (int reg_index = 0; reg_index < list.register_count(); ++reg_index) {
      PushAllHelper<interpreter::Register>::Push(basm, list[reg_index]);
    }
    return list.register_count();
  }
  static int PushReverse(BaselineAssembler* basm,
                         interpreter::RegisterList list) {
    for (int reg_index = list.register_count() - 1; reg_index >= 0;
         --reg_index) {
      PushAllHelper<interpreter::Register>::Push(basm, list[reg_index]);
    }
    return list.register_count();
  }
};

// Pops a pack of registers, first-to-last.
template <typename... T>
struct PopAllHelper;
template <>
struct PopAllHelper<> {
  static void Pop(BaselineAssembler* basm) {}
};
template <>
struct PopAllHelper<Register> {
  static void Pop(BaselineAssembler* basm, Register reg) {
    basm->masm()->Pop(reg);
  }
};
template <typename... T>
struct PopAllHelper<Register, T...> {
  static void Pop(BaselineAssembler* basm, Register reg, T... tail) {
    PopAllHelper<Register>::Pop(basm, reg);
    PopAllHelper<T...>::Pop(basm, tail...);
  }
};

}  // namespace detail
326 
// Pushes `vals` left-to-right; returns the number of stack slots used.
template <typename... T>
int BaselineAssembler::Push(T... vals) {
  return detail::PushAllHelper<T...>::Push(this, vals...);
}

// Pushes `vals` right-to-left (reverse order on the stack).
template <typename... T>
void BaselineAssembler::PushReverse(T... vals) {
  detail::PushAllHelper<T...>::PushReverse(this, vals...);
}

// Pops into `registers` in the order given.
template <typename... T>
void BaselineAssembler::Pop(T... registers) {
  detail::PopAllHelper<T...>::Pop(this, registers...);
}
341 
// Field loaders: `offset` is a field offset (FieldMemOperand subtracts the
// heap-object tag).
void BaselineAssembler::LoadTaggedPointerField(Register output, Register source,
                                               int offset) {
  __ LoadTaggedPointerField(output, FieldMemOperand(source, offset));
}
void BaselineAssembler::LoadTaggedSignedField(Register output, Register source,
                                              int offset) {
  __ LoadTaggedSignedField(output, FieldMemOperand(source, offset));
}
void BaselineAssembler::LoadTaggedAnyField(Register output, Register source,
                                           int offset) {
  __ LoadAnyTaggedField(output, FieldMemOperand(source, offset));
}
// Zero-extending 16-bit load.
void BaselineAssembler::LoadWord16FieldZeroExtend(Register output,
                                                  Register source, int offset) {
  __ Lhu(output, FieldMemOperand(source, offset));
}
// NOTE(review): Lb sign-extends the loaded byte — confirm callers expect a
// signed 8-bit field here (other widths above zero-extend).
void BaselineAssembler::LoadWord8Field(Register output, Register source,
                                       int offset) {
  __ Lb(output, FieldMemOperand(source, offset));
}
// Stores the Smi constant `value` into the field at `target + offset`.
// Smis never need a write barrier.
void BaselineAssembler::StoreTaggedSignedField(Register target, int offset,
                                               Smi value) {
  ASM_CODE_COMMENT(masm_);
  ScratchRegisterScope temps(this);
  Register tmp = temps.AcquireScratch();
  __ li(tmp, Operand(value));
  __ StoreTaggedField(tmp, FieldMemOperand(target, offset));
}
// Stores `value` and emits the write barrier for heap-pointer stores.
void BaselineAssembler::StoreTaggedFieldWithWriteBarrier(Register target,
                                                         int offset,
                                                         Register value) {
  ASM_CODE_COMMENT(masm_);
  __ StoreTaggedField(value, FieldMemOperand(target, offset));
  // ra has not been pushed by baseline code at this point.
  __ RecordWriteField(target, offset, value, kRAHasNotBeenSaved,
                      SaveFPRegsMode::kIgnore);
}
// Caller guarantees no write barrier is required for this store.
void BaselineAssembler::StoreTaggedFieldNoWriteBarrier(Register target,
                                                       int offset,
                                                       Register value) {
  __ StoreTaggedField(value, FieldMemOperand(target, offset));
}
383 
384 void BaselineAssembler::AddToInterruptBudgetAndJumpIfNotExceeded(
385     int32_t weight, Label* skip_interrupt_label) {
386   ASM_CODE_COMMENT(masm_);
387   ScratchRegisterScope scratch_scope(this);
388   Register feedback_cell = scratch_scope.AcquireScratch();
389   LoadFunction(feedback_cell);
390   LoadTaggedPointerField(feedback_cell, feedback_cell,
391                          JSFunction::kFeedbackCellOffset);
392 
393   Register interrupt_budget = scratch_scope.AcquireScratch();
394   __ Lw(interrupt_budget,
395         FieldMemOperand(feedback_cell, FeedbackCell::kInterruptBudgetOffset));
396   // Remember to set flags as part of the add!
397   __ Add32(interrupt_budget, interrupt_budget, weight);
398   __ Sw(interrupt_budget,
399         FieldMemOperand(feedback_cell, FeedbackCell::kInterruptBudgetOffset));
400   if (skip_interrupt_label) {
401     DCHECK_LT(weight, 0);
402     __ Branch(skip_interrupt_label, ge, interrupt_budget, Operand(weight));
403   }
404 }
405 
406 void BaselineAssembler::AddToInterruptBudgetAndJumpIfNotExceeded(
407     Register weight, Label* skip_interrupt_label) {
408   ASM_CODE_COMMENT(masm_);
409   ScratchRegisterScope scratch_scope(this);
410   Register feedback_cell = scratch_scope.AcquireScratch();
411   LoadFunction(feedback_cell);
412   LoadTaggedPointerField(feedback_cell, feedback_cell,
413                          JSFunction::kFeedbackCellOffset);
414 
415   Register interrupt_budget = scratch_scope.AcquireScratch();
416   __ Lw(interrupt_budget,
417         FieldMemOperand(feedback_cell, FeedbackCell::kInterruptBudgetOffset));
418   // Remember to set flags as part of the add!
419   __ Add32(interrupt_budget, interrupt_budget, weight);
420   __ Sw(interrupt_budget,
421         FieldMemOperand(feedback_cell, FeedbackCell::kInterruptBudgetOffset));
422   if (skip_interrupt_label)
423     __ Branch(skip_interrupt_label, ge, interrupt_budget, Operand(weight));
424 }
425 
// Adds the Smi constant `rhs` to the tagged value in `lhs` in place.
void BaselineAssembler::AddSmi(Register lhs, Smi rhs) {
  ASM_CODE_COMMENT(masm_);
  // With 31-bit Smis a 32-bit add covers the payload; otherwise operate on
  // the full 64-bit tagged value.
  if (SmiValuesAre31Bits()) {
    __ Add32(lhs, lhs, Operand(rhs));
  } else {
    __ Add64(lhs, lhs, Operand(rhs));
  }
}

// output = lhs & rhs (immediate mask).
void BaselineAssembler::Word32And(Register output, Register lhs, int rhs) {
  __ And(output, lhs, Operand(rhs));
}
438 
439 void BaselineAssembler::Switch(Register reg, int case_value_base,
440                                Label** labels, int num_labels) {
441   ASM_CODE_COMMENT(masm_);
442   Label fallthrough;
443   if (case_value_base != 0) {
444     __ Sub64(reg, reg, Operand(case_value_base));
445   }
446 
447   // Mostly copied from code-generator-riscv64.cc
448   ScratchRegisterScope scope(this);
449   Label table;
450   __ Branch(&fallthrough, AsMasmCondition(Condition::kUnsignedGreaterThanEqual),
451             reg, Operand(int64_t(num_labels)));
452   int64_t imm64;
453   imm64 = __ branch_long_offset(&table);
454   CHECK(is_int32(imm64 + 0x800));
455   int32_t Hi20 = (((int32_t)imm64 + 0x800) >> 12);
456   int32_t Lo12 = (int32_t)imm64 << 20 >> 20;
457   __ BlockTrampolinePoolFor(2);
458   __ auipc(t6, Hi20);  // Read PC + Hi20 into t6
459   __ addi(t6, t6, Lo12);  // jump PC + Hi20 + Lo12
460 
461   int entry_size_log2 = 3;
462   __ CalcScaledAddress(t6, t6, reg, entry_size_log2);
463   __ Jump(t6);
464   {
465     TurboAssembler::BlockTrampolinePoolScope(masm());
466     __ BlockTrampolinePoolFor(num_labels * kInstrSize * 2);
467     __ bind(&table);
468     for (int i = 0; i < num_labels; ++i) {
469       __ BranchLong(labels[i]);
470     }
471     DCHECK_EQ(num_labels * 2, __ InstructionsGeneratedSince(&table));
472     __ bind(&fallthrough);
473   }
474 }
475 
476 #undef __
477 
478 #define __ basm.
479 
// Emits the baseline function epilogue: update the interrupt budget (calling
// the runtime if it is exhausted), then tear down the frame and return,
// dropping receiver + arguments.
void BaselineAssembler::EmitReturn(MacroAssembler* masm) {
  ASM_CODE_COMMENT(masm);
  BaselineAssembler basm(masm);

  Register weight = BaselineLeaveFrameDescriptor::WeightRegister();
  Register params_size = BaselineLeaveFrameDescriptor::ParamsSizeRegister();

  {
    ASM_CODE_COMMENT_STRING(masm, "Update Interrupt Budget");

    Label skip_interrupt_label;
    __ AddToInterruptBudgetAndJumpIfNotExceeded(weight, &skip_interrupt_label);
    // Budget exhausted: call the runtime interrupt handler, preserving the
    // parameter count (as a Smi) and the accumulator across the call.
    __ masm()->SmiTag(params_size);
    __ masm()->Push(params_size, kInterpreterAccumulatorRegister);

    __ LoadContext(kContextRegister);
    __ LoadFunction(kJSFunctionRegister);
    __ masm()->Push(kJSFunctionRegister);
    __ CallRuntime(Runtime::kBytecodeBudgetInterrupt, 1);

    __ masm()->Pop(params_size, kInterpreterAccumulatorRegister);
    __ masm()->SmiUntag(params_size);

    __ Bind(&skip_interrupt_label);
  }

  BaselineAssembler::ScratchRegisterScope temps(&basm);
  Register actual_params_size = temps.AcquireScratch();
  // Compute the size of the actual parameters + receiver (in bytes).
  __ Move(actual_params_size,
          MemOperand(fp, StandardFrameConstants::kArgCOffset));

  // If actual is bigger than formal, then we should use it to free up the stack
  // arguments.
  Label corrected_args_count;
  __ masm()->Branch(&corrected_args_count, ge, params_size,
                    Operand(actual_params_size), Label::Distance::kNear);
  __ masm()->Move(params_size, actual_params_size);
  __ Bind(&corrected_args_count);

  // Leave the frame (also dropping the register file).
  __ masm()->LeaveFrame(StackFrame::BASELINE);

  // Drop receiver + arguments.
  __ masm()->DropArguments(params_size, MacroAssembler::kCountIsInteger,
                           MacroAssembler::kCountIncludesReceiver);
  __ masm()->Ret();
}
528 
529 #undef __
530 
// Debug helper: aborts unless `reg` holds the same value as the interpreter
// accumulator register.
inline void EnsureAccumulatorPreservedScope::AssertEqualToAccumulator(
    Register reg) {
  assembler_->masm()->Assert(eq, AbortReason::kUnexpectedValue, reg,
                             Operand(kInterpreterAccumulatorRegister));
}
536 }  // namespace baseline
537 }  // namespace internal
538 }  // namespace v8
539 
540 #endif  // V8_BASELINE_RISCV64_BASELINE_ASSEMBLER_RISCV64_INL_H_
541