• Home
  • Line#
  • Scopes#
  • Navigate#
  • Raw
  • Download
1 // Copyright 2017 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4 
5 #ifndef V8_WASM_BASELINE_MIPS_LIFTOFF_ASSEMBLER_MIPS_H_
6 #define V8_WASM_BASELINE_MIPS_LIFTOFF_ASSEMBLER_MIPS_H_
7 
8 #include "src/base/platform/wrappers.h"
9 #include "src/heap/memory-chunk.h"
10 #include "src/wasm/baseline/liftoff-assembler.h"
11 #include "src/wasm/wasm-objects.h"
12 
13 namespace v8 {
14 namespace internal {
15 namespace wasm {
16 
17 namespace liftoff {
18 
// Maps a platform-independent LiftoffCondition onto the corresponding MIPS
// condition code. The mapping is 1:1; signed and unsigned comparisons select
// the matching signed/unsigned MIPS condition. The switch is exhaustive, so
// control never falls off the end for a valid enum value.
inline constexpr Condition ToCondition(LiftoffCondition liftoff_cond) {
  switch (liftoff_cond) {
    case kEqual:
      return eq;
    case kUnequal:
      return ne;
    case kSignedLessThan:
      return lt;
    case kSignedLessEqual:
      return le;
    case kSignedGreaterThan:
      return gt;
    case kSignedGreaterEqual:
      return ge;
    case kUnsignedLessThan:
      return ult;
    case kUnsignedLessEqual:
      return ule;
    case kUnsignedGreaterThan:
      return ugt;
    case kUnsignedGreaterEqual:
      return uge;
  }
}
43 
44 //  half
45 //  slot        Frame
46 //  -----+--------------------+---------------------------
47 //  n+3  |   parameter n      |
48 //  ...  |       ...          |
49 //   4   |   parameter 1      | or parameter 2
50 //   3   |   parameter 0      | or parameter 1
51 //   2   |  (result address)  | or parameter 0
52 //  -----+--------------------+---------------------------
53 //   1   | return addr (ra)   |
54 //   0   | previous frame (fp)|
55 //  -----+--------------------+  <-- frame ptr (fp)
56 //  -1   | StackFrame::WASM   |
57 //  -2   |    instance        |
58 //  -3   |    feedback vector |
59 //  -4   |    tiering budget  |
60 //  -----+--------------------+---------------------------
61 //  -5   |    slot 0 (high)   |   ^
62 //  -6   |    slot 0 (low)    |   |
63 //  -7   |    slot 1 (high)   | Frame slots
64 //  -8   |    slot 1 (low)    |   |
65 //       |                    |   v
66 //  -----+--------------------+  <-- stack ptr (sp)
67 //
// Byte offsets of the low/high 32-bit word within an 8-byte i64 slot or
// memory pair. On big-endian targets the high word is stored first.
#if defined(V8_TARGET_BIG_ENDIAN)
constexpr int32_t kLowWordOffset = 4;
constexpr int32_t kHighWordOffset = 0;
#else
constexpr int32_t kLowWordOffset = 0;
constexpr int32_t kHighWordOffset = 4;
#endif

// fp-relative offsets (negated via GetStackSlot) of the fixed frame slots;
// see the frame layout diagram above.
constexpr int kInstanceOffset = 2 * kSystemPointerSize;
constexpr int kFeedbackVectorOffset = 3 * kSystemPointerSize;
constexpr int kTierupBudgetOffset = 4 * kSystemPointerSize;
79 
GetStackSlot(int offset)80 inline MemOperand GetStackSlot(int offset) { return MemOperand(fp, -offset); }
81 
// Returns the MemOperand for one 32-bit half of an i64 stack slot.
// kLowWord addresses the first half of the slot, kHighWord the second
// (a slot is kStackSlotSize bytes, i.e. two word-sized halves).
inline MemOperand GetHalfStackSlot(int offset, RegPairHalf half) {
  int32_t half_offset =
      half == kLowWord ? 0 : LiftoffAssembler::kStackSlotSize / 2;
  // Positive offsets are fp-relative frame slots; non-positive offsets are
  // presumably sp-relative (e.g. outgoing stack arguments) — note the sign
  // flip still applies in that case.
  return MemOperand(offset > 0 ? fp : sp, -offset + half_offset);
}
87 
GetInstanceOperand()88 inline MemOperand GetInstanceOperand() { return GetStackSlot(kInstanceOffset); }
89 
// Loads a value of |kind| from |base|+|offset| into |dst| using aligned
// loads (callers use this for aligned frame slots). i64 values are loaded
// as two 32-bit words placed according to target endianness.
inline void Load(LiftoffAssembler* assm, LiftoffRegister dst, Register base,
                 int32_t offset, ValueKind kind) {
  MemOperand src(base, offset);
  switch (kind) {
    case kI32:
    case kRef:
    case kOptRef:
    case kRtt:
      // Tagged values are pointer-sized (32 bit) on mips32.
      assm->lw(dst.gp(), src);
      break;
    case kI64:
      assm->lw(dst.low_gp(),
               MemOperand(base, offset + liftoff::kLowWordOffset));
      assm->lw(dst.high_gp(),
               MemOperand(base, offset + liftoff::kHighWordOffset));
      break;
    case kF32:
      assm->lwc1(dst.fp(), src);
      break;
    case kF64:
      assm->Ldc1(dst.fp(), src);
      break;
    default:
      UNREACHABLE();
  }
}
116 
// Stores |src| of |kind| to |base|+|offset|. Unlike Load() above, this uses
// the unaligned-store macros (Usw/Uswc1/Usdc1) — t8 serves as the scratch
// register for the FP variants.
inline void Store(LiftoffAssembler* assm, Register base, int32_t offset,
                  LiftoffRegister src, ValueKind kind) {
  MemOperand dst(base, offset);
  switch (kind) {
    case kI32:
    case kOptRef:
    case kRef:
    case kRtt:
      assm->Usw(src.gp(), dst);
      break;
    case kI64:
      // Two 32-bit stores, placed according to target endianness.
      assm->Usw(src.low_gp(),
                MemOperand(base, offset + liftoff::kLowWordOffset));
      assm->Usw(src.high_gp(),
                MemOperand(base, offset + liftoff::kHighWordOffset));
      break;
    case kF32:
      assm->Uswc1(src.fp(), dst, t8);
      break;
    case kF64:
      assm->Usdc1(src.fp(), dst, t8);
      break;
    default:
      UNREACHABLE();
  }
}
143 
push(LiftoffAssembler * assm,LiftoffRegister reg,ValueKind kind)144 inline void push(LiftoffAssembler* assm, LiftoffRegister reg, ValueKind kind) {
145   switch (kind) {
146     case kI32:
147     case kOptRef:
148     case kRef:
149     case kRtt:
150       assm->push(reg.gp());
151       break;
152     case kI64:
153       assm->Push(reg.high_gp(), reg.low_gp());
154       break;
155     case kF32:
156       assm->addiu(sp, sp, -sizeof(float));
157       assm->swc1(reg.fp(), MemOperand(sp, 0));
158       break;
159     case kF64:
160       assm->addiu(sp, sp, -sizeof(double));
161       assm->Sdc1(reg.fp(), MemOperand(sp, 0));
162       break;
163     default:
164       UNREACHABLE();
165   }
166 }
167 
// Returns |reg| if it does not alias either half of |must_not_alias|;
// otherwise copies |reg| into a scratch register and returns that.
inline Register EnsureNoAlias(Assembler* assm, Register reg,
                              LiftoffRegister must_not_alias,
                              UseScratchRegisterScope* temps) {
  if (reg != must_not_alias.low_gp() && reg != must_not_alias.high_gp())
    return reg;
  Register tmp = temps->Acquire();
  DCHECK_NE(must_not_alias.low_gp(), tmp);
  DCHECK_NE(must_not_alias.high_gp(), tmp);
  // movz(rd, rs, rt) moves rs to rd when rt == 0; with zero_reg as the
  // condition this is an unconditional register-to-register move.
  assm->movz(tmp, reg, zero_reg);
  return tmp;
}
179 
180 #if defined(V8_TARGET_BIG_ENDIAN)
// Byte-swaps |dst| after a memory load on big-endian targets so the value
// is in the little-endian order wasm requires. Float values are first
// reinterpreted into GP register(s), swapped there, and reinterpreted back.
// Sub-word i64 loads also re-establish the zero/sign extension of the high
// word after the swap.
inline void ChangeEndiannessLoad(LiftoffAssembler* assm, LiftoffRegister dst,
                                 LoadType type, LiftoffRegList pinned) {
  bool is_float = false;
  LiftoffRegister tmp = dst;
  switch (type.value()) {
    case LoadType::kI64Load8U:
    case LoadType::kI64Load8S:
    case LoadType::kI32Load8U:
    case LoadType::kI32Load8S:
      // No need to change endianness for byte size.
      return;
    case LoadType::kF32Load:
      is_float = true;
      tmp = assm->GetUnusedRegister(kGpReg, pinned);
      assm->emit_type_conversion(kExprI32ReinterpretF32, tmp, dst);
      V8_FALLTHROUGH;
    case LoadType::kI32Load:
      assm->TurboAssembler::ByteSwapSigned(tmp.gp(), tmp.gp(), 4);
      break;
    case LoadType::kI32Load16S:
      assm->TurboAssembler::ByteSwapSigned(tmp.gp(), tmp.gp(), 2);
      break;
    case LoadType::kI32Load16U:
      assm->TurboAssembler::ByteSwapUnsigned(tmp.gp(), tmp.gp(), 2);
      break;
    case LoadType::kF64Load:
      is_float = true;
      tmp = assm->GetUnusedRegister(kGpRegPair, pinned);
      assm->emit_type_conversion(kExprI64ReinterpretF64, tmp, dst);
      V8_FALLTHROUGH;
    case LoadType::kI64Load:
      // Swap the two words and byte-swap each one; kScratchReg keeps the
      // original low word alive across the first swap.
      assm->TurboAssembler::Move(kScratchReg, tmp.low_gp());
      assm->TurboAssembler::ByteSwapSigned(tmp.low_gp(), tmp.high_gp(), 4);
      assm->TurboAssembler::ByteSwapSigned(tmp.high_gp(), kScratchReg, 4);
      break;
    case LoadType::kI64Load16U:
      assm->TurboAssembler::ByteSwapUnsigned(tmp.low_gp(), tmp.low_gp(), 2);
      // Zero-extend into the high word.
      assm->TurboAssembler::Move(tmp.high_gp(), zero_reg);
      break;
    case LoadType::kI64Load16S:
      assm->TurboAssembler::ByteSwapSigned(tmp.low_gp(), tmp.low_gp(), 2);
      // Sign-extend into the high word.
      assm->sra(tmp.high_gp(), tmp.low_gp(), 31);
      break;
    case LoadType::kI64Load32U:
      assm->TurboAssembler::ByteSwapSigned(tmp.low_gp(), tmp.low_gp(), 4);
      assm->TurboAssembler::Move(tmp.high_gp(), zero_reg);
      break;
    case LoadType::kI64Load32S:
      assm->TurboAssembler::ByteSwapSigned(tmp.low_gp(), tmp.low_gp(), 4);
      assm->sra(tmp.high_gp(), tmp.low_gp(), 31);
      break;
    default:
      UNREACHABLE();
  }

  // Move the swapped bits back into the FP destination register.
  if (is_float) {
    switch (type.value()) {
      case LoadType::kF32Load:
        assm->emit_type_conversion(kExprF32ReinterpretI32, dst, tmp);
        break;
      case LoadType::kF64Load:
        assm->emit_type_conversion(kExprF64ReinterpretI64, dst, tmp);
        break;
      default:
        UNREACHABLE();
    }
  }
}
249 
// Byte-swaps |src| before a memory store on big-endian targets, mirroring
// ChangeEndiannessLoad above. For float/i32/i64 stores of narrower widths
// only the stored bytes need swapping; float values are reinterpreted
// through GP registers for the swap.
inline void ChangeEndiannessStore(LiftoffAssembler* assm, LiftoffRegister src,
                                  StoreType type, LiftoffRegList pinned) {
  bool is_float = false;
  LiftoffRegister tmp = src;
  switch (type.value()) {
    case StoreType::kI64Store8:
    case StoreType::kI32Store8:
      // No need to change endianness for byte size.
      return;
    case StoreType::kF32Store:
      is_float = true;
      tmp = assm->GetUnusedRegister(kGpReg, pinned);
      assm->emit_type_conversion(kExprI32ReinterpretF32, tmp, src);
      V8_FALLTHROUGH;
    case StoreType::kI32Store:
      assm->TurboAssembler::ByteSwapSigned(tmp.gp(), tmp.gp(), 4);
      break;
    case StoreType::kI32Store16:
      assm->TurboAssembler::ByteSwapSigned(tmp.gp(), tmp.gp(), 2);
      break;
    case StoreType::kF64Store:
      is_float = true;
      tmp = assm->GetUnusedRegister(kGpRegPair, pinned);
      assm->emit_type_conversion(kExprI64ReinterpretF64, tmp, src);
      V8_FALLTHROUGH;
    case StoreType::kI64Store:
      // Swap the two words and byte-swap each one; kScratchReg preserves
      // the original low word across the first swap.
      assm->TurboAssembler::Move(kScratchReg, tmp.low_gp());
      assm->TurboAssembler::ByteSwapSigned(tmp.low_gp(), tmp.high_gp(), 4);
      assm->TurboAssembler::ByteSwapSigned(tmp.high_gp(), kScratchReg, 4);
      break;
    case StoreType::kI64Store32:
      // Only the low word is stored, so only it needs swapping.
      assm->TurboAssembler::ByteSwapSigned(tmp.low_gp(), tmp.low_gp(), 4);
      break;
    case StoreType::kI64Store16:
      assm->TurboAssembler::ByteSwapSigned(tmp.low_gp(), tmp.low_gp(), 2);
      break;
    default:
      UNREACHABLE();
  }

  // Move the swapped bits back into the FP source register.
  if (is_float) {
    switch (type.value()) {
      case StoreType::kF32Store:
        assm->emit_type_conversion(kExprF32ReinterpretI32, src, tmp);
        break;
      case StoreType::kF64Store:
        assm->emit_type_conversion(kExprF64ReinterpretI64, src, tmp);
        break;
      default:
        UNREACHABLE();
    }
  }
}
303 #endif  // V8_TARGET_BIG_ENDIAN
304 
305 }  // namespace liftoff
306 
// Reserves a fixed-size patch site for the frame-allocation code and
// returns its pc offset. PatchPrepareStackFrame later overwrites these
// seven instruction slots: small frames need only the addiu, large frames
// need the full seven-instruction stack-check sequence.
int LiftoffAssembler::PrepareStackFrame() {
  int offset = pc_offset();
  // When the frame size is bigger than 4KB, we need seven instructions for
  // stack checking, so we reserve space for this case.
  addiu(sp, sp, 0);
  nop();
  nop();
  nop();
  nop();
  nop();
  nop();
  return offset;
}
320 
// Rearranges the current frame for a tail call: re-pushes the caller's
// return address and frame pointer, slides the outgoing stack parameters
// into the caller's frame, then restores ra/fp and adjusts sp so the callee
// sees a frame as if it had been called directly.
void LiftoffAssembler::PrepareTailCall(int num_callee_stack_params,
                                       int stack_param_delta) {
  UseScratchRegisterScope temps(this);
  Register scratch = temps.Acquire();

  // Push the return address and frame pointer to complete the stack frame.
  // (ra was saved at fp+4, the caller's fp at fp+0; see the frame diagram.)
  Lw(scratch, MemOperand(fp, 4));
  Push(scratch);
  Lw(scratch, MemOperand(fp, 0));
  Push(scratch);

  // Shift the whole frame upwards. The +2 covers the ra/fp pair pushed
  // above; iterate top-down so overlapping ranges copy correctly.
  int slot_count = num_callee_stack_params + 2;
  for (int i = slot_count - 1; i >= 0; --i) {
    Lw(scratch, MemOperand(sp, i * 4));
    Sw(scratch, MemOperand(fp, (i - stack_param_delta) * 4));
  }

  // Set the new stack and frame pointer.
  addiu(sp, fp, -stack_param_delta * 4);
  Pop(ra, fp);
}
343 
AlignFrameSize()344 void LiftoffAssembler::AlignFrameSize() {}
345 
// Patches the seven-instruction slot reserved by PrepareStackFrame. Small
// frames (< 4KB) get a single sp adjustment; larger frames get a jump to
// out-of-line code emitted here that checks the stack limit, throws on
// overflow, allocates the frame, and jumps back.
void LiftoffAssembler::PatchPrepareStackFrame(
    int offset, SafepointTableBuilder* safepoint_table_builder) {
  // The frame_size includes the frame marker and the instance slot. Both are
  // pushed as part of frame construction, so we don't need to allocate memory
  // for them anymore.
  int frame_size = GetTotalFrameSize() - 2 * kSystemPointerSize;

  // We can't run out of space, just pass anything big enough to not cause the
  // assembler to try to grow the buffer.
  constexpr int kAvailableSpace = 256;
  TurboAssembler patching_assembler(
      nullptr, AssemblerOptions{}, CodeObjectRequired::kNo,
      ExternalAssemblerBuffer(buffer_start_ + offset, kAvailableSpace));

  if (V8_LIKELY(frame_size < 4 * KB)) {
    // This is the standard case for small frames: just subtract from SP and be
    // done with it.
    patching_assembler.Addu(sp, sp, Operand(-frame_size));
    return;
  }

  // The frame size is bigger than 4KB, so we might overflow the available stack
  // space if we first allocate the frame and then do the stack check (we will
  // need some remaining stack space for throwing the exception). That's why we
  // check the available stack space before we allocate the frame. To do this we
  // replace the {__ Addu(sp, sp, -framesize)} with a jump to OOL code that does
  // this "extended stack check".
  //
  // The OOL code can simply be generated here with the normal assembler,
  // because all other code generation, including OOL code, has already finished
  // when {PatchPrepareStackFrame} is called. The function prologue then jumps
  // to the current {pc_offset()} to execute the OOL code for allocating the
  // large frame.
  // Emit the unconditional branch in the function prologue (from {offset} to
  // {pc_offset()}).

  // The 3 * kInstrSize corrects for the size of the branch sequence itself
  // (BranchLong targets are relative to the end of the emitted sequence).
  int imm32 = pc_offset() - offset - 3 * kInstrSize;
  patching_assembler.BranchLong(imm32);

  // If the frame is bigger than the stack, we throw the stack overflow
  // exception unconditionally. Thereby we can avoid the integer overflow
  // check in the condition code.
  RecordComment("OOL: stack check for large frame");
  Label continuation;
  if (frame_size < FLAG_stack_size * 1024) {
    Register stack_limit = kScratchReg;
    Lw(stack_limit,
       FieldMemOperand(kWasmInstanceRegister,
                       WasmInstanceObject::kRealStackLimitAddressOffset));
    Lw(stack_limit, MemOperand(stack_limit));
    // Continue if sp >= limit + frame_size, i.e. the frame still fits.
    Addu(stack_limit, stack_limit, Operand(frame_size));
    Branch(&continuation, uge, sp, Operand(stack_limit));
  }

  Call(wasm::WasmCode::kWasmStackOverflow, RelocInfo::WASM_STUB_CALL);
  // The call will not return; just define an empty safepoint.
  safepoint_table_builder->DefineSafepoint(this);
  if (FLAG_debug_code) stop();

  bind(&continuation);

  // Now allocate the stack space. Note that this might do more than just
  // decrementing the SP;
  Addu(sp, sp, Operand(-frame_size));

  // Jump back to the start of the function, from {pc_offset()} to
  // right after the reserved space for the {__ Addu(sp, sp, -framesize)} (which
  // is a jump now).
  int func_start_offset = offset + 7 * kInstrSize;
  imm32 = func_start_offset - pc_offset() - 3 * kInstrSize;
  BranchLong(imm32);
}
418 
FinishCode()419 void LiftoffAssembler::FinishCode() {}
420 
AbortCompilation()421 void LiftoffAssembler::AbortCompilation() {}
422 
423 // static
// Size of the fixed (value-stack-independent) part of the frame; the tiering
// budget slot is the last fixed slot, so its offset equals the fixed size.
constexpr int LiftoffAssembler::StaticStackFrameSize() {
  return liftoff::kTierupBudgetOffset;
}
427 
SlotSizeForType(ValueKind kind)428 int LiftoffAssembler::SlotSizeForType(ValueKind kind) {
429   switch (kind) {
430     case kS128:
431       return value_kind_size(kind);
432     default:
433       return kStackSlotSize;
434   }
435 }
436 
NeedsAlignment(ValueKind kind)437 bool LiftoffAssembler::NeedsAlignment(ValueKind kind) {
438   return kind == kS128 || is_reference(kind);
439 }
440 
// Materializes the constant |value| in |reg|. i64 constants are split into
// two 32-bit immediates; float constants are moved in as bit patterns.
// |rmode| (relocation info) is only meaningful for i32 constants.
void LiftoffAssembler::LoadConstant(LiftoffRegister reg, WasmValue value,
                                    RelocInfo::Mode rmode) {
  switch (value.type().kind()) {
    case kI32:
      TurboAssembler::li(reg.gp(), Operand(value.to_i32(), rmode));
      break;
    case kI64: {
      DCHECK(RelocInfo::IsNoInfo(rmode));
      // Deliberate truncation: low_word keeps bits 0..31.
      int32_t low_word = value.to_i64();
      int32_t high_word = value.to_i64() >> 32;
      TurboAssembler::li(reg.low_gp(), Operand(low_word));
      TurboAssembler::li(reg.high_gp(), Operand(high_word));
      break;
    }
    case kF32:
      TurboAssembler::Move(reg.fp(), value.to_f32_boxed().get_bits());
      break;
    case kF64:
      TurboAssembler::Move(reg.fp(), value.to_f64_boxed().get_bits());
      break;
    default:
      UNREACHABLE();
  }
}
465 
// Reloads the WasmInstanceObject from its spill slot in the current frame.
void LiftoffAssembler::LoadInstanceFromFrame(Register dst) {
  lw(dst, liftoff::GetInstanceOperand());
}
469 
// Loads a field of |size| bytes from the instance object at |offset|.
// Only 1- and 4-byte fields occur on mips32; other sizes are unimplemented.
void LiftoffAssembler::LoadFromInstance(Register dst, Register instance,
                                        int32_t offset, int size) {
  DCHECK_LE(0, offset);
  switch (size) {
    case 1:
      lb(dst, MemOperand(instance, offset));
      break;
    case 4:
      lw(dst, MemOperand(instance, offset));
      break;
    default:
      UNIMPLEMENTED();
  }
}
484 
// Loads a tagged pointer field from the instance; on mips32 a tagged value
// is a full (32-bit) pointer, so this is a plain word load.
void LiftoffAssembler::LoadTaggedPointerFromInstance(Register dst,
                                                     Register instance,
                                                     int32_t offset) {
  STATIC_ASSERT(kTaggedSize == kSystemPointerSize);
  lw(dst, MemOperand(instance, offset));
}
491 
// Writes the instance register back to its dedicated frame slot.
void LiftoffAssembler::SpillInstance(Register instance) {
  sw(instance, liftoff::GetInstanceOperand());
}
495 
ResetOSRTarget()496 void LiftoffAssembler::ResetOSRTarget() {}
497 
// Loads a tagged pointer from |src_addr|+|offset_reg|+|offset_imm|.
// Tagged values are 32 bit here, so this delegates to a plain i32 load.
void LiftoffAssembler::LoadTaggedPointer(Register dst, Register src_addr,
                                         Register offset_reg,
                                         int32_t offset_imm,
                                         LiftoffRegList pinned) {
  STATIC_ASSERT(kTaggedSize == kInt32Size);
  Load(LiftoffRegister(dst), src_addr, offset_reg,
       static_cast<uint32_t>(offset_imm), LoadType::kI32Load, pinned);
}
506 
// Loads an untagged full-width (32-bit) pointer from |src_addr|+|offset_imm|.
void LiftoffAssembler::LoadFullPointer(Register dst, Register src_addr,
                                       int32_t offset_imm) {
  MemOperand src_op = MemOperand(src_addr, offset_imm);
  lw(dst, src_op);
}
512 
// Stores the tagged value |src| to |dst_addr|+|offset_reg|+|offset_imm| and
// emits the generational write barrier unless the caller asserts it can be
// skipped (or write barriers are globally disabled).
void LiftoffAssembler::StoreTaggedPointer(Register dst_addr,
                                          Register offset_reg,
                                          int32_t offset_imm,
                                          LiftoffRegister src,
                                          LiftoffRegList pinned,
                                          SkipWriteBarrier skip_write_barrier) {
  STATIC_ASSERT(kTaggedSize == kInt32Size);
  Register dst = no_reg;
  if (offset_reg != no_reg) {
    // Fold the dynamic offset into a single base register first.
    dst = pinned.set(GetUnusedRegister(kGpReg, pinned)).gp();
    emit_ptrsize_add(dst, dst_addr, offset_reg);
  }
  MemOperand dst_op = (offset_reg != no_reg) ? MemOperand(dst, offset_imm)
                                             : MemOperand(dst_addr, offset_imm);
  Sw(src.gp(), dst_op);

  if (skip_write_barrier || FLAG_disable_write_barriers) return;

  // The write barrier: only record the write if the destination page is
  // marked as interesting, the value is not a Smi, and the value's page is
  // marked as interesting too.
  Label write_barrier;
  Label exit;
  Register scratch = pinned.set(GetUnusedRegister(kGpReg, pinned)).gp();
  CheckPageFlag(dst_addr, scratch,
                MemoryChunk::kPointersFromHereAreInterestingMask, ne,
                &write_barrier);
  Branch(&exit);
  bind(&write_barrier);
  JumpIfSmi(src.gp(), &exit);
  CheckPageFlag(src.gp(), scratch,
                MemoryChunk::kPointersToHereAreInterestingMask, eq, &exit);
  // Reconstruct the absolute slot address for the record-write stub.
  Addu(scratch, dst_op.rm(), dst_op.offset());
  CallRecordWriteStubSaveRegisters(
      dst_addr, scratch, RememberedSetAction::kEmit, SaveFPRegsMode::kSave,
      StubCallMode::kCallWasmRuntimeStub);
  bind(&exit);
}
549 
// Emits a (possibly unaligned) load of |type| from
// |src_addr|+|offset_reg|+|offset_imm| into |dst|. Sub-word i64 loads
// zero- or sign-extend into the high word. If |protected_load_pc| is given,
// the pc offset of the first (potentially faulting) load instruction is
// recorded — presumably for the out-of-bounds trap handler; on big-endian
// targets actual wasm memory loads are byte-swapped afterwards.
void LiftoffAssembler::Load(LiftoffRegister dst, Register src_addr,
                            Register offset_reg, uint32_t offset_imm,
                            LoadType type, LiftoffRegList pinned,
                            uint32_t* protected_load_pc, bool is_load_mem,
                            bool i64_offset) {
  Register src = no_reg;
  if (offset_reg != no_reg) {
    // Fold the dynamic offset into a single base register.
    src = GetUnusedRegister(kGpReg, pinned).gp();
    emit_ptrsize_add(src, src_addr, offset_reg);
  }
  MemOperand src_op = (offset_reg != no_reg) ? MemOperand(src, offset_imm)
                                             : MemOperand(src_addr, offset_imm);

  if (protected_load_pc) *protected_load_pc = pc_offset();
  switch (type.value()) {
    case LoadType::kI32Load8U:
      lbu(dst.gp(), src_op);
      break;
    case LoadType::kI64Load8U:
      lbu(dst.low_gp(), src_op);
      // Zero the high word (xor with itself).
      xor_(dst.high_gp(), dst.high_gp(), dst.high_gp());
      break;
    case LoadType::kI32Load8S:
      lb(dst.gp(), src_op);
      break;
    case LoadType::kI64Load8S:
      lb(dst.low_gp(), src_op);
      // Sign-extend the low word into the high word.
      TurboAssembler::Move(dst.high_gp(), dst.low_gp());
      sra(dst.high_gp(), dst.high_gp(), 31);
      break;
    case LoadType::kI32Load16U:
      TurboAssembler::Ulhu(dst.gp(), src_op);
      break;
    case LoadType::kI64Load16U:
      TurboAssembler::Ulhu(dst.low_gp(), src_op);
      xor_(dst.high_gp(), dst.high_gp(), dst.high_gp());
      break;
    case LoadType::kI32Load16S:
      TurboAssembler::Ulh(dst.gp(), src_op);
      break;
    case LoadType::kI64Load16S:
      TurboAssembler::Ulh(dst.low_gp(), src_op);
      TurboAssembler::Move(dst.high_gp(), dst.low_gp());
      sra(dst.high_gp(), dst.high_gp(), 31);
      break;
    case LoadType::kI32Load:
      TurboAssembler::Ulw(dst.gp(), src_op);
      break;
    case LoadType::kI64Load32U:
      TurboAssembler::Ulw(dst.low_gp(), src_op);
      xor_(dst.high_gp(), dst.high_gp(), dst.high_gp());
      break;
    case LoadType::kI64Load32S:
      TurboAssembler::Ulw(dst.low_gp(), src_op);
      TurboAssembler::Move(dst.high_gp(), dst.low_gp());
      sra(dst.high_gp(), dst.high_gp(), 31);
      break;
    case LoadType::kI64Load: {
      // Load both 32-bit halves, ordered by target endianness.
      MemOperand src_op =
          (offset_reg != no_reg)
              ? MemOperand(src, offset_imm + liftoff::kLowWordOffset)
              : MemOperand(src_addr, offset_imm + liftoff::kLowWordOffset);
      MemOperand src_op_upper =
          (offset_reg != no_reg)
              ? MemOperand(src, offset_imm + liftoff::kHighWordOffset)
              : MemOperand(src_addr, offset_imm + liftoff::kHighWordOffset);
      {
        UseScratchRegisterScope temps(this);
        // If dst.low aliases the base register, loading into it first would
        // corrupt the address for the second load; go through a temp.
        Register temp = dst.low_gp();
        if (dst.low_gp() == src_op_upper.rm()) temp = temps.Acquire();
        TurboAssembler::Ulw(temp, src_op);
        TurboAssembler::Ulw(dst.high_gp(), src_op_upper);
        if (dst.low_gp() == src_op_upper.rm()) mov(dst.low_gp(), temp);
      }
      break;
    }
    case LoadType::kF32Load:
      TurboAssembler::Ulwc1(dst.fp(), src_op, t8);
      break;
    case LoadType::kF64Load:
      TurboAssembler::Uldc1(dst.fp(), src_op, t8);
      break;
    default:
      UNREACHABLE();
  }

#if defined(V8_TARGET_BIG_ENDIAN)
  if (is_load_mem) {
    pinned.set(src_op.rm());
    liftoff::ChangeEndiannessLoad(this, dst, type, pinned);
  }
#endif
}
643 
// Emits a (possibly unaligned) store of |src| with |type| to
// |dst_addr|+|offset_reg|+|offset_imm|. Sub-word i64 stores only write the
// low half. On big-endian targets the value is copied into a temp register
// and byte-swapped there first, so |src| itself is not clobbered. If
// |protected_store_pc| is given, the pc offset of the first (potentially
// faulting) store instruction is recorded.
void LiftoffAssembler::Store(Register dst_addr, Register offset_reg,
                             uint32_t offset_imm, LiftoffRegister src,
                             StoreType type, LiftoffRegList pinned,
                             uint32_t* protected_store_pc, bool is_store_mem) {
  Register dst = no_reg;
  MemOperand dst_op = MemOperand(dst_addr, offset_imm);
  if (offset_reg != no_reg) {
    if (is_store_mem) {
      pinned.set(src);
    }
    // Fold the dynamic offset into a single base register.
    dst = GetUnusedRegister(kGpReg, pinned).gp();
    emit_ptrsize_add(dst, dst_addr, offset_reg);
    dst_op = MemOperand(dst, offset_imm);
  }

#if defined(V8_TARGET_BIG_ENDIAN)
  if (is_store_mem) {
    pinned = pinned | LiftoffRegList{dst_op.rm(), src};
    LiftoffRegister tmp = GetUnusedRegister(src.reg_class(), pinned);
    // Save original value.
    Move(tmp, src, type.value_type());

    src = tmp;
    pinned.set(tmp);
    liftoff::ChangeEndiannessStore(this, src, type, pinned);
  }
#endif

  if (protected_store_pc) *protected_store_pc = pc_offset();
  switch (type.value()) {
    case StoreType::kI64Store8:
      // Narrow i64 stores write only the low register half.
      src = src.low();
      V8_FALLTHROUGH;
    case StoreType::kI32Store8:
      sb(src.gp(), dst_op);
      break;
    case StoreType::kI64Store16:
      src = src.low();
      V8_FALLTHROUGH;
    case StoreType::kI32Store16:
      TurboAssembler::Ush(src.gp(), dst_op, t8);
      break;
    case StoreType::kI64Store32:
      src = src.low();
      V8_FALLTHROUGH;
    case StoreType::kI32Store:
      TurboAssembler::Usw(src.gp(), dst_op);
      break;
    case StoreType::kI64Store: {
      // Store both halves, ordered by target endianness.
      MemOperand dst_op_lower(dst_op.rm(),
                              offset_imm + liftoff::kLowWordOffset);
      MemOperand dst_op_upper(dst_op.rm(),
                              offset_imm + liftoff::kHighWordOffset);
      TurboAssembler::Usw(src.low_gp(), dst_op_lower);
      TurboAssembler::Usw(src.high_gp(), dst_op_upper);
      break;
    }
    case StoreType::kF32Store:
      TurboAssembler::Uswc1(src.fp(), dst_op, t8);
      break;
    case StoreType::kF64Store:
      TurboAssembler::Usdc1(src.fp(), dst_op, t8);
      break;
    default:
      UNREACHABLE();
  }
}
711 
// Atomic operations are not supported by Liftoff on mips32: bail out so the
// function is compiled by a different tier instead.
void LiftoffAssembler::AtomicLoad(LiftoffRegister dst, Register src_addr,
                                  Register offset_reg, uint32_t offset_imm,
                                  LoadType type, LiftoffRegList pinned) {
  bailout(kAtomics, "AtomicLoad");
}
717 
// Not supported by Liftoff on mips32; see AtomicLoad.
void LiftoffAssembler::AtomicStore(Register dst_addr, Register offset_reg,
                                   uint32_t offset_imm, LiftoffRegister src,
                                   StoreType type, LiftoffRegList pinned) {
  bailout(kAtomics, "AtomicStore");
}
723 
// Not supported by Liftoff on mips32; see AtomicLoad.
void LiftoffAssembler::AtomicAdd(Register dst_addr, Register offset_reg,
                                 uint32_t offset_imm, LiftoffRegister value,
                                 LiftoffRegister result, StoreType type) {
  bailout(kAtomics, "AtomicAdd");
}
729 
// Not supported by Liftoff on mips32; see AtomicLoad.
void LiftoffAssembler::AtomicSub(Register dst_addr, Register offset_reg,
                                 uint32_t offset_imm, LiftoffRegister value,
                                 LiftoffRegister result, StoreType type) {
  bailout(kAtomics, "AtomicSub");
}
735 
// Not supported by Liftoff on mips32; see AtomicLoad.
void LiftoffAssembler::AtomicAnd(Register dst_addr, Register offset_reg,
                                 uint32_t offset_imm, LiftoffRegister value,
                                 LiftoffRegister result, StoreType type) {
  bailout(kAtomics, "AtomicAnd");
}
741 
// Not supported by Liftoff on mips32; see AtomicLoad.
void LiftoffAssembler::AtomicOr(Register dst_addr, Register offset_reg,
                                uint32_t offset_imm, LiftoffRegister value,
                                LiftoffRegister result, StoreType type) {
  bailout(kAtomics, "AtomicOr");
}
747 
// Not supported by Liftoff on mips32; see AtomicLoad.
void LiftoffAssembler::AtomicXor(Register dst_addr, Register offset_reg,
                                 uint32_t offset_imm, LiftoffRegister value,
                                 LiftoffRegister result, StoreType type) {
  bailout(kAtomics, "AtomicXor");
}
753 
// Not supported by Liftoff on mips32; see AtomicLoad.
void LiftoffAssembler::AtomicExchange(Register dst_addr, Register offset_reg,
                                      uint32_t offset_imm,
                                      LiftoffRegister value,
                                      LiftoffRegister result, StoreType type) {
  bailout(kAtomics, "AtomicExchange");
}
760 
// Not supported by Liftoff on mips32; see AtomicLoad.
void LiftoffAssembler::AtomicCompareExchange(
    Register dst_addr, Register offset_reg, uint32_t offset_imm,
    LiftoffRegister expected, LiftoffRegister new_value, LiftoffRegister result,
    StoreType type) {
  bailout(kAtomics, "AtomicCompareExchange");
}
767 
// Wasm atomic.fence: emit a MIPS `sync` (full memory barrier) instruction.
void LiftoffAssembler::AtomicFence() { sync(); }
769 
LoadCallerFrameSlot(LiftoffRegister dst,uint32_t caller_slot_idx,ValueKind kind)770 void LiftoffAssembler::LoadCallerFrameSlot(LiftoffRegister dst,
771                                            uint32_t caller_slot_idx,
772                                            ValueKind kind) {
773   int32_t offset = kSystemPointerSize * (caller_slot_idx + 1);
774   liftoff::Load(this, dst, fp, offset, kind);
775 }
776 
StoreCallerFrameSlot(LiftoffRegister src,uint32_t caller_slot_idx,ValueKind kind)777 void LiftoffAssembler::StoreCallerFrameSlot(LiftoffRegister src,
778                                             uint32_t caller_slot_idx,
779                                             ValueKind kind) {
780   int32_t offset = kSystemPointerSize * (caller_slot_idx + 1);
781   liftoff::Store(this, fp, offset, src, kind);
782 }
783 
// Loads a return value that was placed on the stack, addressed relative to
// the stack pointer.
void LiftoffAssembler::LoadReturnStackSlot(LiftoffRegister dst, int offset,
                                           ValueKind kind) {
  liftoff::Load(this, dst, sp, offset, kind);
}
788 
MoveStackValue(uint32_t dst_offset,uint32_t src_offset,ValueKind kind)789 void LiftoffAssembler::MoveStackValue(uint32_t dst_offset, uint32_t src_offset,
790                                       ValueKind kind) {
791   DCHECK_NE(dst_offset, src_offset);
792   LiftoffRegister reg = GetUnusedRegister(reg_class_for(kind), {});
793   Fill(reg, src_offset, kind);
794   Spill(dst_offset, reg, kind);
795 }
796 
// Register-to-register move of a gp value. {kind} is unused here: every gp
// value handled by this overload fits in a single 32-bit register.
void LiftoffAssembler::Move(Register dst, Register src, ValueKind kind) {
  DCHECK_NE(dst, src);
  TurboAssembler::mov(dst, src);
}
801 
// Register-to-register move of an fp value; TurboAssembler::Move handles
// the FPU register copy.
void LiftoffAssembler::Move(DoubleRegister dst, DoubleRegister src,
                            ValueKind kind) {
  DCHECK_NE(dst, src);
  TurboAssembler::Move(dst, src);
}
807 
Spill(int offset,LiftoffRegister reg,ValueKind kind)808 void LiftoffAssembler::Spill(int offset, LiftoffRegister reg, ValueKind kind) {
809   RecordUsedSpillOffset(offset);
810   MemOperand dst = liftoff::GetStackSlot(offset);
811   switch (kind) {
812     case kI32:
813     case kRef:
814     case kOptRef:
815     case kRtt:
816       sw(reg.gp(), dst);
817       break;
818     case kI64:
819       sw(reg.low_gp(), liftoff::GetHalfStackSlot(offset, kLowWord));
820       sw(reg.high_gp(), liftoff::GetHalfStackSlot(offset, kHighWord));
821       break;
822     case kF32:
823       swc1(reg.fp(), dst);
824       break;
825     case kF64:
826       TurboAssembler::Sdc1(reg.fp(), dst);
827       break;
828     default:
829       UNREACHABLE();
830   }
831 }
832 
Spill(int offset,WasmValue value)833 void LiftoffAssembler::Spill(int offset, WasmValue value) {
834   RecordUsedSpillOffset(offset);
835   MemOperand dst = liftoff::GetStackSlot(offset);
836   switch (value.type().kind()) {
837     case kI32:
838     case kRef:
839     case kOptRef: {
840       LiftoffRegister tmp = GetUnusedRegister(kGpReg, {});
841       TurboAssembler::li(tmp.gp(), Operand(value.to_i32()));
842       sw(tmp.gp(), dst);
843       break;
844     }
845     case kI64: {
846       LiftoffRegister tmp = GetUnusedRegister(kGpRegPair, {});
847 
848       int32_t low_word = value.to_i64();
849       int32_t high_word = value.to_i64() >> 32;
850       TurboAssembler::li(tmp.low_gp(), Operand(low_word));
851       TurboAssembler::li(tmp.high_gp(), Operand(high_word));
852 
853       sw(tmp.low_gp(), liftoff::GetHalfStackSlot(offset, kLowWord));
854       sw(tmp.high_gp(), liftoff::GetHalfStackSlot(offset, kHighWord));
855       break;
856     }
857     default:
858       // kWasmF32 and kWasmF64 are unreachable, since those
859       // constants are not tracked.
860       UNREACHABLE();
861   }
862 }
863 
Fill(LiftoffRegister reg,int offset,ValueKind kind)864 void LiftoffAssembler::Fill(LiftoffRegister reg, int offset, ValueKind kind) {
865   MemOperand src = liftoff::GetStackSlot(offset);
866   switch (kind) {
867     case kI32:
868     case kRef:
869     case kOptRef:
870       lw(reg.gp(), src);
871       break;
872     case kI64:
873       lw(reg.low_gp(), liftoff::GetHalfStackSlot(offset, kLowWord));
874       lw(reg.high_gp(), liftoff::GetHalfStackSlot(offset, kHighWord));
875       break;
876     case kF32:
877       lwc1(reg.fp(), src);
878       break;
879     case kF64:
880       TurboAssembler::Ldc1(reg.fp(), src);
881       break;
882     default:
883       UNREACHABLE();
884   }
885 }
886 
// Reloads only one 32-bit half ({half} = low or high word) of a spilled i64.
void LiftoffAssembler::FillI64Half(Register reg, int offset, RegPairHalf half) {
  lw(reg, liftoff::GetHalfStackSlot(offset, half));
}
890 
// Zeroes {size} bytes of stack slots starting at frame offset {start}.
// {size} must be a positive multiple of the 4-byte word size.
void LiftoffAssembler::FillStackSlotsWithZero(int start, int size) {
  DCHECK_LT(0, size);
  DCHECK_EQ(0, size % 4);
  RecordUsedSpillOffset(start + size);

  if (size <= 48) {
    // Special straight-line code for up to 12 words. Generates one
    // instruction per word (<=12 instructions total).
    for (int offset = 4; offset <= size; offset += 4) {
      Sw(zero_reg, liftoff::GetStackSlot(start + offset));
    }
  } else {
    // General case for bigger counts (12 instructions).
    // Use a0 for start address (inclusive), a1 for end address (exclusive).
    // a0/a1 are saved/restored around the loop since they may hold live
    // values.
    Push(a1, a0);
    Addu(a0, fp, Operand(-start - size));
    Addu(a1, fp, Operand(-start));

    Label loop;
    bind(&loop);
    // Store one zero word and advance until the end address is reached.
    Sw(zero_reg, MemOperand(a0));
    addiu(a0, a0, kSystemPointerSize);
    BranchShort(&loop, ne, a0, Operand(a1));

    Pop(a1, a0);
  }
}
918 
// i32.mul: dst = lhs * rhs (low 32 bits).
void LiftoffAssembler::emit_i32_mul(Register dst, Register lhs, Register rhs) {
  TurboAssembler::Mul(dst, lhs, rhs);
}
922 
// i32.div_s with the two wasm-mandated traps: division by zero, and the
// unrepresentable quotient kMinInt / -1.
void LiftoffAssembler::emit_i32_divs(Register dst, Register lhs, Register rhs,
                                     Label* trap_div_by_zero,
                                     Label* trap_div_unrepresentable) {
  TurboAssembler::Branch(trap_div_by_zero, eq, rhs, Operand(zero_reg));

  // Check if lhs == kMinInt and rhs == -1, since this case is unrepresentable.
  // kScratchReg  <- (lhs == kMinInt) ? 0 : 1
  // kScratchReg2 <- (rhs == -1)      ? 0 : 1
  TurboAssembler::li(kScratchReg, 1);
  TurboAssembler::li(kScratchReg2, 1);
  TurboAssembler::LoadZeroOnCondition(kScratchReg, lhs, Operand(kMinInt), eq);
  TurboAssembler::LoadZeroOnCondition(kScratchReg2, rhs, Operand(-1), eq);
  // The sum is zero iff both conditions hold simultaneously.
  addu(kScratchReg, kScratchReg, kScratchReg2);
  TurboAssembler::Branch(trap_div_unrepresentable, eq, kScratchReg,
                         Operand(zero_reg));

  TurboAssembler::Div(dst, lhs, rhs);
}
939 
// i32.div_u: traps on division by zero; unsigned division cannot overflow.
void LiftoffAssembler::emit_i32_divu(Register dst, Register lhs, Register rhs,
                                     Label* trap_div_by_zero) {
  TurboAssembler::Branch(trap_div_by_zero, eq, rhs, Operand(zero_reg));
  TurboAssembler::Divu(dst, lhs, rhs);
}
945 
// i32.rem_s: traps on division by zero. No unrepresentable case:
// kMinInt % -1 is 0.
void LiftoffAssembler::emit_i32_rems(Register dst, Register lhs, Register rhs,
                                     Label* trap_div_by_zero) {
  TurboAssembler::Branch(trap_div_by_zero, eq, rhs, Operand(zero_reg));
  TurboAssembler::Mod(dst, lhs, rhs);
}
951 
// i32.rem_u: traps on division by zero.
void LiftoffAssembler::emit_i32_remu(Register dst, Register lhs, Register rhs,
                                     Label* trap_div_by_zero) {
  TurboAssembler::Branch(trap_div_by_zero, eq, rhs, Operand(zero_reg));
  TurboAssembler::Modu(dst, lhs, rhs);
}
957 
// Defines emit_i32_<name>(dst, lhs, rhs) as a single three-register
// assembler instruction/macro.
#define I32_BINOP(name, instruction)                                 \
  void LiftoffAssembler::emit_i32_##name(Register dst, Register lhs, \
                                         Register rhs) {             \
    instruction(dst, lhs, rhs);                                      \
  }

// clang-format off
I32_BINOP(add, addu)
I32_BINOP(sub, subu)
I32_BINOP(and, and_)
I32_BINOP(or, or_)
I32_BINOP(xor, xor_)
// clang-format on

#undef I32_BINOP

// Defines emit_i32_<name>i(dst, lhs, imm): the same binary operation with an
// immediate right-hand operand.
#define I32_BINOP_I(name, instruction)                                  \
  void LiftoffAssembler::emit_i32_##name##i(Register dst, Register lhs, \
                                            int32_t imm) {              \
    instruction(dst, lhs, Operand(imm));                                \
  }

// clang-format off
I32_BINOP_I(add, Addu)
I32_BINOP_I(sub, Subu)
I32_BINOP_I(and, And)
I32_BINOP_I(or, Or)
I32_BINOP_I(xor, Xor)
// clang-format on

#undef I32_BINOP_I
989 
// i32.clz: count leading zero bits.
void LiftoffAssembler::emit_i32_clz(Register dst, Register src) {
  TurboAssembler::Clz(dst, src);
}
993 
// i32.ctz: count trailing zero bits.
void LiftoffAssembler::emit_i32_ctz(Register dst, Register src) {
  TurboAssembler::Ctz(dst, src);
}
997 
// i32.popcnt: count set bits. Returns true to signal that the operation was
// emitted inline (no fallback needed).
bool LiftoffAssembler::emit_i32_popcnt(Register dst, Register src) {
  TurboAssembler::Popcnt(dst, src);
  return true;
}
1002 
// Defines emit_i32_<name>(dst, src, amount) for a register shift amount.
#define I32_SHIFTOP(name, instruction)                               \
  void LiftoffAssembler::emit_i32_##name(Register dst, Register src, \
                                         Register amount) {          \
    instruction(dst, src, amount);                                   \
  }
// Defines both variants: the register-amount form (MIPS "variable" shift,
// <instruction>v) and the immediate-amount form emit_i32_<name>i, which
// requires the amount to fit in the 5-bit shift field.
#define I32_SHIFTOP_I(name, instruction)                                \
  I32_SHIFTOP(name, instruction##v)                                     \
  void LiftoffAssembler::emit_i32_##name##i(Register dst, Register src, \
                                            int amount) {               \
    DCHECK(is_uint5(amount));                                           \
    instruction(dst, src, amount);                                      \
  }

I32_SHIFTOP_I(shl, sll)
I32_SHIFTOP_I(sar, sra)
I32_SHIFTOP_I(shr, srl)

#undef I32_SHIFTOP
#undef I32_SHIFTOP_I
1022 
1023 void LiftoffAssembler::emit_i64_addi(LiftoffRegister dst, LiftoffRegister lhs,
1024                                      int64_t imm) {
1025   LiftoffRegister imm_reg =
1026       GetUnusedRegister(kGpRegPair, LiftoffRegList{dst, lhs});
1027   int32_t imm_low_word = static_cast<int32_t>(imm);
1028   int32_t imm_high_word = static_cast<int32_t>(imm >> 32);
1029   TurboAssembler::li(imm_reg.low_gp(), imm_low_word);
1030   TurboAssembler::li(imm_reg.high_gp(), imm_high_word);
1031   TurboAssembler::AddPair(dst.low_gp(), dst.high_gp(), lhs.low_gp(),
1032                           lhs.high_gp(), imm_reg.low_gp(), imm_reg.high_gp(),
1033                           kScratchReg, kScratchReg2);
1034 }
1035 
// i64.mul on 32-bit registers: full 64x64->64 multiply of the two pairs.
void LiftoffAssembler::emit_i64_mul(LiftoffRegister dst, LiftoffRegister lhs,
                                    LiftoffRegister rhs) {
  TurboAssembler::MulPair(dst.low_gp(), dst.high_gp(), lhs.low_gp(),
                          lhs.high_gp(), rhs.low_gp(), rhs.high_gp(),
                          kScratchReg, kScratchReg2);
}
1042 
// i64.div_s: returns false to signal that no inline code is emitted for this
// operation on MIPS32; the caller must handle it another way.
bool LiftoffAssembler::emit_i64_divs(LiftoffRegister dst, LiftoffRegister lhs,
                                     LiftoffRegister rhs,
                                     Label* trap_div_by_zero,
                                     Label* trap_div_unrepresentable) {
  return false;
}
1049 
// i64.div_u: not emitted inline on MIPS32 (see emit_i64_divs).
bool LiftoffAssembler::emit_i64_divu(LiftoffRegister dst, LiftoffRegister lhs,
                                     LiftoffRegister rhs,
                                     Label* trap_div_by_zero) {
  return false;
}
1055 
// i64.rem_s: not emitted inline on MIPS32 (see emit_i64_divs).
bool LiftoffAssembler::emit_i64_rems(LiftoffRegister dst, LiftoffRegister lhs,
                                     LiftoffRegister rhs,
                                     Label* trap_div_by_zero) {
  return false;
}
1061 
// i64.rem_u: not emitted inline on MIPS32 (see emit_i64_divs).
bool LiftoffAssembler::emit_i64_remu(LiftoffRegister dst, LiftoffRegister lhs,
                                     LiftoffRegister rhs,
                                     Label* trap_div_by_zero) {
  return false;
}
1067 
// i64.add: pairwise 64-bit addition with carry between the halves.
void LiftoffAssembler::emit_i64_add(LiftoffRegister dst, LiftoffRegister lhs,
                                    LiftoffRegister rhs) {
  TurboAssembler::AddPair(dst.low_gp(), dst.high_gp(), lhs.low_gp(),
                          lhs.high_gp(), rhs.low_gp(), rhs.high_gp(),
                          kScratchReg, kScratchReg2);
}
1074 
// i64.sub: pairwise 64-bit subtraction with borrow between the halves.
void LiftoffAssembler::emit_i64_sub(LiftoffRegister dst, LiftoffRegister lhs,
                                    LiftoffRegister rhs) {
  TurboAssembler::SubPair(dst.low_gp(), dst.high_gp(), lhs.low_gp(),
                          lhs.high_gp(), rhs.low_gp(), rhs.high_gp(),
                          kScratchReg, kScratchReg2);
}
1081 
namespace liftoff {

// Returns true if {reg} is one of the two halves of the gp pair {pair}.
inline bool IsRegInRegPair(LiftoffRegister pair, Register reg) {
  DCHECK(pair.is_gp_pair());
  return pair.low_gp() == reg || pair.high_gp() == reg;
}

// Emits a 64-bit shift of {src} by the register {amount} into {dst}, using
// the given TurboAssembler pair-shift routine (ShlPair/SarPair/ShrPair).
// Handles the amount==0 case and dst/input aliasing explicitly.
inline void Emit64BitShiftOperation(
    LiftoffAssembler* assm, LiftoffRegister dst, LiftoffRegister src,
    Register amount,
    void (TurboAssembler::*emit_shift)(Register, Register, Register, Register,
                                       Register, Register, Register)) {
  Label move, done;
  LiftoffRegList pinned = {dst, src, amount};

  // If some of destination registers are in use, get another, unused pair.
  // That way we prevent overwriting some input registers while shifting.
  // Do this before any branch so that the cache state will be correct for
  // all conditions.
  LiftoffRegister tmp = assm->GetUnusedRegister(kGpRegPair, pinned);

  // If shift amount is 0, don't do the shifting.
  assm->TurboAssembler::Branch(&move, eq, amount, Operand(zero_reg));

  if (liftoff::IsRegInRegPair(dst, amount) || dst.overlaps(src)) {
    // {dst} aliases an input: do the actual shift into the temporary pair.
    (assm->*emit_shift)(tmp.low_gp(), tmp.high_gp(), src.low_gp(),
                        src.high_gp(), amount, kScratchReg, kScratchReg2);

    // Place result in destination register.
    assm->TurboAssembler::Move(dst.high_gp(), tmp.high_gp());
    assm->TurboAssembler::Move(dst.low_gp(), tmp.low_gp());
  } else {
    (assm->*emit_shift)(dst.low_gp(), dst.high_gp(), src.low_gp(),
                        src.high_gp(), amount, kScratchReg, kScratchReg2);
  }
  assm->TurboAssembler::Branch(&done);

  // If shift amount is 0, move src to dst.
  assm->bind(&move);
  assm->TurboAssembler::Move(dst.high_gp(), src.high_gp());
  assm->TurboAssembler::Move(dst.low_gp(), src.low_gp());

  assm->bind(&done);
}
}  // namespace liftoff
1128 
// i64.shl with a register amount; aliasing handled by the shared helper.
void LiftoffAssembler::emit_i64_shl(LiftoffRegister dst, LiftoffRegister src,
                                    Register amount) {
  liftoff::Emit64BitShiftOperation(this, dst, src, amount,
                                   &TurboAssembler::ShlPair);
}
1134 
// i64.shl with a constant amount. Copies aliased source halves aside first so
// ShlPair never reads a register it has already overwritten.
void LiftoffAssembler::emit_i64_shli(LiftoffRegister dst, LiftoffRegister src,
                                     int32_t amount) {
  UseScratchRegisterScope temps(this);
  // {src.low_gp()} will still be needed after writing {dst.high_gp()} and
  // {dst.low_gp()}.
  Register src_low = liftoff::EnsureNoAlias(this, src.low_gp(), dst, &temps);
  Register src_high = src.high_gp();
  // {src.high_gp()} will still be needed after writing {dst.high_gp()}.
  if (src_high == dst.high_gp()) {
    mov(kScratchReg, src_high);
    src_high = kScratchReg;
  }
  DCHECK_NE(dst.low_gp(), kScratchReg);
  DCHECK_NE(dst.high_gp(), kScratchReg);

  ShlPair(dst.low_gp(), dst.high_gp(), src_low, src_high, amount, kScratchReg);
}
1152 
// i64.shr_s (arithmetic right shift) with a register amount.
void LiftoffAssembler::emit_i64_sar(LiftoffRegister dst, LiftoffRegister src,
                                    Register amount) {
  liftoff::Emit64BitShiftOperation(this, dst, src, amount,
                                   &TurboAssembler::SarPair);
}
1158 
// i64.shr_s with a constant amount. Only the high source half can be
// clobbered before it is read, so only it needs un-aliasing.
void LiftoffAssembler::emit_i64_sari(LiftoffRegister dst, LiftoffRegister src,
                                     int32_t amount) {
  UseScratchRegisterScope temps(this);
  // {src.high_gp()} will still be needed after writing {dst.high_gp()} and
  // {dst.low_gp()}.
  Register src_high = liftoff::EnsureNoAlias(this, src.high_gp(), dst, &temps);
  DCHECK_NE(dst.low_gp(), kScratchReg);
  DCHECK_NE(dst.high_gp(), kScratchReg);

  SarPair(dst.low_gp(), dst.high_gp(), src.low_gp(), src_high, amount,
          kScratchReg);
}
1171 
// i64.shr_u (logical right shift) with a register amount.
void LiftoffAssembler::emit_i64_shr(LiftoffRegister dst, LiftoffRegister src,
                                    Register amount) {
  liftoff::Emit64BitShiftOperation(this, dst, src, amount,
                                   &TurboAssembler::ShrPair);
}
1177 
// i64.shr_u with a constant amount; mirrors emit_i64_sari.
void LiftoffAssembler::emit_i64_shri(LiftoffRegister dst, LiftoffRegister src,
                                     int32_t amount) {
  UseScratchRegisterScope temps(this);
  // {src.high_gp()} will still be needed after writing {dst.high_gp()} and
  // {dst.low_gp()}.
  Register src_high = liftoff::EnsureNoAlias(this, src.high_gp(), dst, &temps);
  DCHECK_NE(dst.low_gp(), kScratchReg);
  DCHECK_NE(dst.high_gp(), kScratchReg);

  ShrPair(dst.low_gp(), dst.high_gp(), src.low_gp(), src_high, amount,
          kScratchReg);
}
1190 
// i64.clz built from two 32-bit clz instructions.
void LiftoffAssembler::emit_i64_clz(LiftoffRegister dst, LiftoffRegister src) {
  // return high == 0 ? 32 + CLZ32(low) : CLZ32(high);
  Label done;
  Label high_is_zero;
  Branch(&high_is_zero, eq, src.high_gp(), Operand(zero_reg));

  clz(dst.low_gp(), src.high_gp());
  jmp(&done);

  bind(&high_is_zero);
  clz(dst.low_gp(), src.low_gp());
  Addu(dst.low_gp(), dst.low_gp(), Operand(32));

  bind(&done);
  // Cleared last: dst.high_gp() may alias one of the source registers.
  mov(dst.high_gp(), zero_reg);  // High word of result is always 0.
}
1207 
// i64.ctz built from two 32-bit ctz operations.
void LiftoffAssembler::emit_i64_ctz(LiftoffRegister dst, LiftoffRegister src) {
  // return low == 0 ? 32 + CTZ32(high) : CTZ32(low);
  Label done;
  Label low_is_zero;
  Branch(&low_is_zero, eq, src.low_gp(), Operand(zero_reg));

  Ctz(dst.low_gp(), src.low_gp());
  jmp(&done);

  bind(&low_is_zero);
  Ctz(dst.low_gp(), src.high_gp());
  Addu(dst.low_gp(), dst.low_gp(), Operand(32));

  bind(&done);
  // Cleared last: dst.high_gp() may alias one of the source registers.
  mov(dst.high_gp(), zero_reg);  // High word of result is always 0.
}
1224 
// i64.popcnt: sum of the popcnts of the two halves. Returns true (emitted
// inline).
bool LiftoffAssembler::emit_i64_popcnt(LiftoffRegister dst,
                                       LiftoffRegister src) {
  // Produce partial popcnts in the two dst registers.
  // If src.high_gp() aliases dst.low_gp(), count it first so that writing
  // dst.low_gp() does not clobber a source half before it is read.
  Register src1 = src.high_gp() == dst.low_gp() ? src.high_gp() : src.low_gp();
  Register src2 = src.high_gp() == dst.low_gp() ? src.low_gp() : src.high_gp();
  TurboAssembler::Popcnt(dst.low_gp(), src1);
  TurboAssembler::Popcnt(dst.high_gp(), src2);
  // Now add the two into the lower dst reg and clear the higher dst reg.
  addu(dst.low_gp(), dst.low_gp(), dst.high_gp());
  mov(dst.high_gp(), zero_reg);
  return true;
}
1237 
IncrementSmi(LiftoffRegister dst,int offset)1238 void LiftoffAssembler::IncrementSmi(LiftoffRegister dst, int offset) {
1239   UseScratchRegisterScope temps(this);
1240   Register scratch = temps.Acquire();
1241   lw(scratch, MemOperand(dst.gp(), offset));
1242   Addu(scratch, scratch, Operand(Smi::FromInt(1)));
1243   sw(scratch, MemOperand(dst.gp(), offset));
1244 }
1245 
// f32.neg via the macro-assembler's single-precision negate.
void LiftoffAssembler::emit_f32_neg(DoubleRegister dst, DoubleRegister src) {
  TurboAssembler::Neg_s(dst, src);
}
1249 
// f64.neg via the macro-assembler's double-precision negate.
void LiftoffAssembler::emit_f64_neg(DoubleRegister dst, DoubleRegister src) {
  TurboAssembler::Neg_d(dst, src);
}
1253 
emit_f32_min(DoubleRegister dst,DoubleRegister lhs,DoubleRegister rhs)1254 void LiftoffAssembler::emit_f32_min(DoubleRegister dst, DoubleRegister lhs,
1255                                     DoubleRegister rhs) {
1256   Label ool, done;
1257   TurboAssembler::Float32Min(dst, lhs, rhs, &ool);
1258   Branch(&done);
1259 
1260   bind(&ool);
1261   TurboAssembler::Float32MinOutOfLine(dst, lhs, rhs);
1262   bind(&done);
1263 }
1264 
emit_f32_max(DoubleRegister dst,DoubleRegister lhs,DoubleRegister rhs)1265 void LiftoffAssembler::emit_f32_max(DoubleRegister dst, DoubleRegister lhs,
1266                                     DoubleRegister rhs) {
1267   Label ool, done;
1268   TurboAssembler::Float32Max(dst, lhs, rhs, &ool);
1269   Branch(&done);
1270 
1271   bind(&ool);
1272   TurboAssembler::Float32MaxOutOfLine(dst, lhs, rhs);
1273   bind(&done);
1274 }
1275 
// f32.copysign: not implemented on MIPS32; records a bailout.
void LiftoffAssembler::emit_f32_copysign(DoubleRegister dst, DoubleRegister lhs,
                                         DoubleRegister rhs) {
  bailout(kComplexOperation, "f32_copysign");
}
1280 
emit_f64_min(DoubleRegister dst,DoubleRegister lhs,DoubleRegister rhs)1281 void LiftoffAssembler::emit_f64_min(DoubleRegister dst, DoubleRegister lhs,
1282                                     DoubleRegister rhs) {
1283   Label ool, done;
1284   TurboAssembler::Float64Min(dst, lhs, rhs, &ool);
1285   Branch(&done);
1286 
1287   bind(&ool);
1288   TurboAssembler::Float64MinOutOfLine(dst, lhs, rhs);
1289   bind(&done);
1290 }
1291 
emit_f64_max(DoubleRegister dst,DoubleRegister lhs,DoubleRegister rhs)1292 void LiftoffAssembler::emit_f64_max(DoubleRegister dst, DoubleRegister lhs,
1293                                     DoubleRegister rhs) {
1294   Label ool, done;
1295   TurboAssembler::Float64Max(dst, lhs, rhs, &ool);
1296   Branch(&done);
1297 
1298   bind(&ool);
1299   TurboAssembler::Float64MaxOutOfLine(dst, lhs, rhs);
1300   bind(&done);
1301 }
1302 
// f64.copysign: not implemented on MIPS32; records a bailout.
void LiftoffAssembler::emit_f64_copysign(DoubleRegister dst, DoubleRegister lhs,
                                         DoubleRegister rhs) {
  bailout(kComplexOperation, "f64_copysign");
}
1307 
// Binary f32/f64 operation lowered to a single FPU instruction.
#define FP_BINOP(name, instruction)                                          \
  void LiftoffAssembler::emit_##name(DoubleRegister dst, DoubleRegister lhs, \
                                     DoubleRegister rhs) {                   \
    instruction(dst, lhs, rhs);                                              \
  }
// Unary operation lowered to a single FPU instruction.
#define FP_UNOP(name, instruction)                                             \
  void LiftoffAssembler::emit_##name(DoubleRegister dst, DoubleRegister src) { \
    instruction(dst, src);                                                     \
  }
// Unary operation that also returns true (signals "emitted inline").
#define FP_UNOP_RETURN_TRUE(name, instruction)                                 \
  bool LiftoffAssembler::emit_##name(DoubleRegister dst, DoubleRegister src) { \
    instruction(dst, src);                                                     \
    return true;                                                               \
  }

FP_BINOP(f32_add, add_s)
FP_BINOP(f32_sub, sub_s)
FP_BINOP(f32_mul, mul_s)
FP_BINOP(f32_div, div_s)
FP_UNOP(f32_abs, abs_s)
FP_UNOP_RETURN_TRUE(f32_ceil, Ceil_s_s)
FP_UNOP_RETURN_TRUE(f32_floor, Floor_s_s)
FP_UNOP_RETURN_TRUE(f32_trunc, Trunc_s_s)
FP_UNOP_RETURN_TRUE(f32_nearest_int, Round_s_s)
FP_UNOP(f32_sqrt, sqrt_s)
FP_BINOP(f64_add, add_d)
FP_BINOP(f64_sub, sub_d)
FP_BINOP(f64_mul, mul_d)
FP_BINOP(f64_div, div_d)
FP_UNOP(f64_abs, abs_d)
FP_UNOP(f64_sqrt, sqrt_d)

#undef FP_BINOP
#undef FP_UNOP
1342 
1343 bool LiftoffAssembler::emit_f64_ceil(DoubleRegister dst, DoubleRegister src) {
1344   if ((IsMipsArchVariant(kMips32r2) || IsMipsArchVariant(kMips32r6)) &&
1345       IsFp64Mode()) {
1346     Ceil_d_d(dst, src);
1347     return true;
1348   }
1349   return false;
1350 }
1351 
emit_f64_floor(DoubleRegister dst,DoubleRegister src)1352 bool LiftoffAssembler::emit_f64_floor(DoubleRegister dst, DoubleRegister src) {
1353   if ((IsMipsArchVariant(kMips32r2) || IsMipsArchVariant(kMips32r6)) &&
1354       IsFp64Mode()) {
1355     Floor_d_d(dst, src);
1356     return true;
1357   }
1358   return false;
1359 }
1360 
emit_f64_trunc(DoubleRegister dst,DoubleRegister src)1361 bool LiftoffAssembler::emit_f64_trunc(DoubleRegister dst, DoubleRegister src) {
1362   if ((IsMipsArchVariant(kMips32r2) || IsMipsArchVariant(kMips32r6)) &&
1363       IsFp64Mode()) {
1364     Trunc_d_d(dst, src);
1365     return true;
1366   }
1367   return false;
1368 }
1369 
emit_f64_nearest_int(DoubleRegister dst,DoubleRegister src)1370 bool LiftoffAssembler::emit_f64_nearest_int(DoubleRegister dst,
1371                                             DoubleRegister src) {
1372   if ((IsMipsArchVariant(kMips32r2) || IsMipsArchVariant(kMips32r6)) &&
1373       IsFp64Mode()) {
1374     Round_d_d(dst, src);
1375     return true;
1376   }
1377   return false;
1378 }
1379 
emit_type_conversion(WasmOpcode opcode,LiftoffRegister dst,LiftoffRegister src,Label * trap)1380 bool LiftoffAssembler::emit_type_conversion(WasmOpcode opcode,
1381                                             LiftoffRegister dst,
1382                                             LiftoffRegister src, Label* trap) {
1383   switch (opcode) {
1384     case kExprI32ConvertI64:
1385       TurboAssembler::Move(dst.gp(), src.low_gp());
1386       return true;
1387     case kExprI32SConvertF32: {
1388       LiftoffRegister rounded = GetUnusedRegister(kFpReg, LiftoffRegList{src});
1389       LiftoffRegister converted_back =
1390           GetUnusedRegister(kFpReg, LiftoffRegList{src, rounded});
1391 
1392       // Real conversion.
1393       TurboAssembler::Trunc_s_s(rounded.fp(), src.fp());
1394       trunc_w_s(kScratchDoubleReg, rounded.fp());
1395       mfc1(dst.gp(), kScratchDoubleReg);
1396       // Avoid INT32_MAX as an overflow indicator and use INT32_MIN instead,
1397       // because INT32_MIN allows easier out-of-bounds detection.
1398       TurboAssembler::Addu(kScratchReg, dst.gp(), 1);
1399       TurboAssembler::Slt(kScratchReg2, kScratchReg, dst.gp());
1400       TurboAssembler::Movn(dst.gp(), kScratchReg, kScratchReg2);
1401 
1402       // Checking if trap.
1403       mtc1(dst.gp(), kScratchDoubleReg);
1404       cvt_s_w(converted_back.fp(), kScratchDoubleReg);
1405       TurboAssembler::CompareF32(EQ, rounded.fp(), converted_back.fp());
1406       TurboAssembler::BranchFalseF(trap);
1407       return true;
1408     }
1409     case kExprI32UConvertF32: {
1410       LiftoffRegister rounded = GetUnusedRegister(kFpReg, LiftoffRegList{src});
1411       LiftoffRegister converted_back =
1412           GetUnusedRegister(kFpReg, LiftoffRegList{src, rounded});
1413 
1414       // Real conversion.
1415       TurboAssembler::Trunc_s_s(rounded.fp(), src.fp());
1416       TurboAssembler::Trunc_uw_s(dst.gp(), rounded.fp(), kScratchDoubleReg);
1417       // Avoid UINT32_MAX as an overflow indicator and use 0 instead,
1418       // because 0 allows easier out-of-bounds detection.
1419       TurboAssembler::Addu(kScratchReg, dst.gp(), 1);
1420       TurboAssembler::Movz(dst.gp(), zero_reg, kScratchReg);
1421 
1422       // Checking if trap.
1423       TurboAssembler::Cvt_d_uw(converted_back.fp(), dst.gp(),
1424                                kScratchDoubleReg);
1425       cvt_s_d(converted_back.fp(), converted_back.fp());
1426       TurboAssembler::CompareF32(EQ, rounded.fp(), converted_back.fp());
1427       TurboAssembler::BranchFalseF(trap);
1428       return true;
1429     }
1430     case kExprI32SConvertF64: {
1431       LiftoffRegister scratch = GetUnusedRegister(kGpReg, LiftoffRegList{dst});
1432 
1433       // Try a conversion to a signed integer.
1434       trunc_w_d(kScratchDoubleReg, src.fp());
1435       mfc1(dst.gp(), kScratchDoubleReg);
1436       // Retrieve the FCSR.
1437       cfc1(scratch.gp(), FCSR);
1438       // Check for overflow and NaNs.
1439       And(scratch.gp(), scratch.gp(),
1440           kFCSROverflowCauseMask | kFCSRUnderflowCauseMask |
1441               kFCSRInvalidOpCauseMask);
1442       // If we had exceptions we are trap.
1443       Branch(trap, ne, scratch.gp(), Operand(zero_reg));
1444       return true;
1445     }
1446     case kExprI32UConvertF64: {
1447       if ((IsMipsArchVariant(kMips32r2) || IsMipsArchVariant(kMips32r6)) &&
1448           IsFp64Mode()) {
1449         LiftoffRegister rounded =
1450             GetUnusedRegister(kFpReg, LiftoffRegList{src});
1451         LiftoffRegister converted_back =
1452             GetUnusedRegister(kFpReg, LiftoffRegList{src, rounded});
1453 
1454         // Real conversion.
1455         TurboAssembler::Trunc_d_d(rounded.fp(), src.fp());
1456         TurboAssembler::Trunc_uw_d(dst.gp(), rounded.fp(), kScratchDoubleReg);
1457 
1458         // Checking if trap.
1459         TurboAssembler::Cvt_d_uw(converted_back.fp(), dst.gp(),
1460                                  kScratchDoubleReg);
1461         TurboAssembler::CompareF64(EQ, rounded.fp(), converted_back.fp());
1462         TurboAssembler::BranchFalseF(trap);
1463         return true;
1464       }
1465       bailout(kUnsupportedArchitecture, "kExprI32UConvertF64");
1466       return true;
1467     }
1468     case kExprI32SConvertSatF32:
1469       bailout(kNonTrappingFloatToInt, "kExprI32SConvertSatF32");
1470       return true;
1471     case kExprI32UConvertSatF32:
1472       bailout(kNonTrappingFloatToInt, "kExprI32UConvertSatF32");
1473       return true;
1474     case kExprI32SConvertSatF64:
1475       bailout(kNonTrappingFloatToInt, "kExprI32SConvertSatF64");
1476       return true;
1477     case kExprI32UConvertSatF64:
1478       bailout(kNonTrappingFloatToInt, "kExprI32UConvertSatF64");
1479       return true;
1480     case kExprI64SConvertSatF32:
1481       bailout(kNonTrappingFloatToInt, "kExprI64SConvertSatF32");
1482       return true;
1483     case kExprI64UConvertSatF32:
1484       bailout(kNonTrappingFloatToInt, "kExprI64UConvertSatF32");
1485       return true;
1486     case kExprI64SConvertSatF64:
1487       bailout(kNonTrappingFloatToInt, "kExprI64SConvertSatF64");
1488       return true;
1489     case kExprI64UConvertSatF64:
1490       bailout(kNonTrappingFloatToInt, "kExprI64UConvertSatF64");
1491       return true;
1492     case kExprI32ReinterpretF32:
1493       mfc1(dst.gp(), src.fp());
1494       return true;
1495     case kExprI64SConvertI32:
1496       TurboAssembler::Move(dst.low_gp(), src.gp());
1497       TurboAssembler::Move(dst.high_gp(), src.gp());
1498       sra(dst.high_gp(), dst.high_gp(), 31);
1499       return true;
1500     case kExprI64UConvertI32:
1501       TurboAssembler::Move(dst.low_gp(), src.gp());
1502       TurboAssembler::Move(dst.high_gp(), zero_reg);
1503       return true;
1504     case kExprI64ReinterpretF64:
1505       mfc1(dst.low_gp(), src.fp());
1506       TurboAssembler::Mfhc1(dst.high_gp(), src.fp());
1507       return true;
1508     case kExprF32SConvertI32: {
1509       LiftoffRegister scratch = GetUnusedRegister(kFpReg, LiftoffRegList{dst});
1510       mtc1(src.gp(), scratch.fp());
1511       cvt_s_w(dst.fp(), scratch.fp());
1512       return true;
1513     }
1514     case kExprF32UConvertI32: {
1515       LiftoffRegister scratch = GetUnusedRegister(kFpReg, LiftoffRegList{dst});
1516       TurboAssembler::Cvt_d_uw(dst.fp(), src.gp(), scratch.fp());
1517       cvt_s_d(dst.fp(), dst.fp());
1518       return true;
1519     }
1520     case kExprF32ConvertF64:
1521       cvt_s_d(dst.fp(), src.fp());
1522       return true;
1523     case kExprF32ReinterpretI32:
1524       TurboAssembler::FmoveLow(dst.fp(), src.gp());
1525       return true;
1526     case kExprF64SConvertI32: {
1527       LiftoffRegister scratch = GetUnusedRegister(kFpReg, LiftoffRegList{dst});
1528       mtc1(src.gp(), scratch.fp());
1529       cvt_d_w(dst.fp(), scratch.fp());
1530       return true;
1531     }
1532     case kExprF64UConvertI32: {
1533       LiftoffRegister scratch = GetUnusedRegister(kFpReg, LiftoffRegList{dst});
1534       TurboAssembler::Cvt_d_uw(dst.fp(), src.gp(), scratch.fp());
1535       return true;
1536     }
1537     case kExprF64ConvertF32:
1538       cvt_d_s(dst.fp(), src.fp());
1539       return true;
1540     case kExprF64ReinterpretI64:
1541       mtc1(src.low_gp(), dst.fp());
1542       TurboAssembler::Mthc1(src.high_gp(), dst.fp());
1543       return true;
1544     default:
1545       return false;
1546   }
1547 }
1548 
// i32.extend8_s is unimplemented on MIPS32 Liftoff; records a
// kComplexOperation bailout.
void LiftoffAssembler::emit_i32_signextend_i8(Register dst, Register src) {
  bailout(kComplexOperation, "i32_signextend_i8");
}
1552 
// i32.extend16_s is unimplemented on MIPS32 Liftoff; records a
// kComplexOperation bailout.
void LiftoffAssembler::emit_i32_signextend_i16(Register dst, Register src) {
  bailout(kComplexOperation, "i32_signextend_i16");
}
1556 
// i64.extend8_s is unimplemented on MIPS32 Liftoff; records a
// kComplexOperation bailout.
void LiftoffAssembler::emit_i64_signextend_i8(LiftoffRegister dst,
                                              LiftoffRegister src) {
  bailout(kComplexOperation, "i64_signextend_i8");
}
1561 
// i64.extend16_s is unimplemented on MIPS32 Liftoff; records a
// kComplexOperation bailout.
void LiftoffAssembler::emit_i64_signextend_i16(LiftoffRegister dst,
                                               LiftoffRegister src) {
  bailout(kComplexOperation, "i64_signextend_i16");
}
1566 
// i64.extend32_s is unimplemented on MIPS32 Liftoff; records a
// kComplexOperation bailout.
void LiftoffAssembler::emit_i64_signextend_i32(LiftoffRegister dst,
                                               LiftoffRegister src) {
  bailout(kComplexOperation, "i64_signextend_i32");
}
1571 
// Emits an unconditional branch to |label|.
void LiftoffAssembler::emit_jump(Label* label) {
  TurboAssembler::Branch(label);
}
1575 
// Emits an unconditional indirect jump to the address in |target|.
void LiftoffAssembler::emit_jump(Register target) {
  TurboAssembler::Jump(target);
}
1579 
// Emits a conditional branch to |label|. If |rhs| is no_reg, |lhs| is
// compared against zero (only valid for kI32 values). Otherwise the two
// registers are compared directly; reference kinds are restricted to
// equality/inequality comparisons.
void LiftoffAssembler::emit_cond_jump(LiftoffCondition liftoff_cond,
                                      Label* label, ValueKind kind,
                                      Register lhs, Register rhs) {
  Condition cond = liftoff::ToCondition(liftoff_cond);
  if (rhs == no_reg) {
    // Compare against zero.
    DCHECK_EQ(kind, kI32);
    TurboAssembler::Branch(label, cond, lhs, Operand(zero_reg));
  } else {
    // References may only be compared for (in)equality.
    DCHECK(kind == kI32 || (is_reference(kind) && (liftoff_cond == kEqual ||
                                                   liftoff_cond == kUnequal)));
    TurboAssembler::Branch(label, cond, lhs, Operand(rhs));
  }
}
1593 
// Emits a conditional branch comparing |lhs| against the immediate |imm|.
void LiftoffAssembler::emit_i32_cond_jumpi(LiftoffCondition liftoff_cond,
                                           Label* label, Register lhs,
                                           int32_t imm) {
  Condition cond = liftoff::ToCondition(liftoff_cond);
  TurboAssembler::Branch(label, cond, lhs, Operand(imm));
}
1600 
// Subtracts |subtrahend| from |value| in place and branches to
// |result_negative| if the (signed) result is below zero.
void LiftoffAssembler::emit_i32_subi_jump_negative(Register value,
                                                   int subtrahend,
                                                   Label* result_negative) {
  TurboAssembler::Subu(value, value, Operand(subtrahend));
  TurboAssembler::Branch(result_negative, less, value, Operand(zero_reg));
}
1607 
// dst = (src == 0) ? 1 : 0. sltiu sets dst iff src is (unsigned) less
// than 1, i.e. exactly when src is zero.
void LiftoffAssembler::emit_i32_eqz(Register dst, Register src) {
  sltiu(dst, src, 1);
}
1611 
// Materializes the boolean result (0 or 1) of an i32 comparison into |dst|.
void LiftoffAssembler::emit_i32_set_cond(LiftoffCondition liftoff_cond,
                                         Register dst, Register lhs,
                                         Register rhs) {
  Condition cond = liftoff::ToCondition(liftoff_cond);
  // Use a temporary if |dst| aliases an input, since both inputs are still
  // needed after the initial result is written.
  Register tmp = dst;
  if (dst == lhs || dst == rhs) {
    tmp = GetUnusedRegister(kGpReg, LiftoffRegList{lhs, rhs}).gp();
  }
  // Write 1 as result.
  TurboAssembler::li(tmp, 1);

  // If negative condition is true, write 0 as result.
  Condition neg_cond = NegateCondition(cond);
  TurboAssembler::LoadZeroOnCondition(tmp, lhs, Operand(rhs), neg_cond);

  // If tmp != dst, result will be moved.
  TurboAssembler::Move(dst, tmp);
}
1630 
// dst = (src == 0) ? 1 : 0 for an i64 register pair: both the low and the
// high word must be zero, so the two per-word tests are AND-ed together.
void LiftoffAssembler::emit_i64_eqz(Register dst, LiftoffRegister src) {
  Register tmp = GetUnusedRegister(kGpReg, LiftoffRegList{src, dst}).gp();
  sltiu(tmp, src.low_gp(), 1);   // tmp = (low == 0)
  sltiu(dst, src.high_gp(), 1);  // dst = (high == 0)
  and_(dst, dst, tmp);
}
1637 
1638 namespace liftoff {
cond_make_unsigned(LiftoffCondition cond)1639 inline LiftoffCondition cond_make_unsigned(LiftoffCondition cond) {
1640   switch (cond) {
1641     case kSignedLessThan:
1642       return kUnsignedLessThan;
1643     case kSignedLessEqual:
1644       return kUnsignedLessEqual;
1645     case kSignedGreaterThan:
1646       return kUnsignedGreaterThan;
1647     case kSignedGreaterEqual:
1648       return kUnsignedGreaterEqual;
1649     default:
1650       return cond;
1651   }
1652 }
1653 }  // namespace liftoff
1654 
// Materializes the boolean result (0 or 1) of an i64 comparison into |dst|.
// The comparison is decided by the high words unless they are equal, in
// which case the low words decide.
void LiftoffAssembler::emit_i64_set_cond(LiftoffCondition liftoff_cond,
                                         Register dst, LiftoffRegister lhs,
                                         LiftoffRegister rhs) {
  Condition cond = liftoff::ToCondition(liftoff_cond);
  Label low, cont;

  // For signed i64 comparisons, we still need to use unsigned comparison for
  // the low word (the only bit carrying signedness information is the MSB in
  // the high word).
  Condition unsigned_cond =
      liftoff::ToCondition(liftoff::cond_make_unsigned(liftoff_cond));

  // Use a temporary if |dst| aliases any input half, since the inputs are
  // still needed after the initial result is written.
  Register tmp = dst;
  if (liftoff::IsRegInRegPair(lhs, dst) || liftoff::IsRegInRegPair(rhs, dst)) {
    tmp = GetUnusedRegister(kGpReg, LiftoffRegList{dst, lhs, rhs}).gp();
  }

  // Write 1 initially in tmp register.
  TurboAssembler::li(tmp, 1);

  // If high words are equal, then compare low words, else compare high.
  Branch(&low, eq, lhs.high_gp(), Operand(rhs.high_gp()));

  // High words differ: they decide the result (signed condition applies).
  TurboAssembler::LoadZeroOnCondition(
      tmp, lhs.high_gp(), Operand(rhs.high_gp()), NegateCondition(cond));
  Branch(&cont);

  bind(&low);
  // High words equal: the low words decide, compared unsigned.
  TurboAssembler::LoadZeroOnCondition(tmp, lhs.low_gp(), Operand(rhs.low_gp()),
                                      NegateCondition(unsigned_cond));

  bind(&cont);
  // Move result to dst register if needed.
  TurboAssembler::Move(dst, tmp);
}
1690 
1691 namespace liftoff {
1692 
ConditionToConditionCmpFPU(LiftoffCondition condition,bool * predicate)1693 inline FPUCondition ConditionToConditionCmpFPU(LiftoffCondition condition,
1694                                                bool* predicate) {
1695   switch (condition) {
1696     case kEqual:
1697       *predicate = true;
1698       return EQ;
1699     case kUnequal:
1700       *predicate = false;
1701       return EQ;
1702     case kUnsignedLessThan:
1703       *predicate = true;
1704       return OLT;
1705     case kUnsignedGreaterEqual:
1706       *predicate = false;
1707       return OLT;
1708     case kUnsignedLessEqual:
1709       *predicate = true;
1710       return OLE;
1711     case kUnsignedGreaterThan:
1712       *predicate = false;
1713       return OLE;
1714     default:
1715       *predicate = true;
1716       break;
1717   }
1718   UNREACHABLE();
1719 }
1720 
1721 }  // namespace liftoff
1722 
// Materializes the boolean result (0 or 1) of an f32 comparison into |dst|.
// NaN operands are handled first: every comparison with a NaN is false,
// except f32.ne, which is true.
void LiftoffAssembler::emit_f32_set_cond(LiftoffCondition liftoff_cond,
                                         Register dst, DoubleRegister lhs,
                                         DoubleRegister rhs) {
  Condition cond = liftoff::ToCondition(liftoff_cond);
  Label not_nan, cont;
  TurboAssembler::CompareIsNanF32(lhs, rhs);
  TurboAssembler::BranchFalseF(&not_nan);
  // If one of the operands is NaN, return 1 for f32.ne, else 0.
  if (cond == ne) {
    TurboAssembler::li(dst, 1);
  } else {
    TurboAssembler::Move(dst, zero_reg);
  }
  TurboAssembler::Branch(&cont);

  bind(&not_nan);

  // No NaNs involved: write 1, then clear it if the FPU comparison (or its
  // negation, depending on |predicate|) does not hold.
  TurboAssembler::li(dst, 1);
  bool predicate;
  FPUCondition fcond =
      liftoff::ConditionToConditionCmpFPU(liftoff_cond, &predicate);
  TurboAssembler::CompareF32(fcond, lhs, rhs);
  if (predicate) {
    TurboAssembler::LoadZeroIfNotFPUCondition(dst);
  } else {
    TurboAssembler::LoadZeroIfFPUCondition(dst);
  }

  bind(&cont);
}
1753 
// Materializes the boolean result (0 or 1) of an f64 comparison into |dst|.
// NaN operands are handled first: every comparison with a NaN is false,
// except f64.ne, which is true.
void LiftoffAssembler::emit_f64_set_cond(LiftoffCondition liftoff_cond,
                                         Register dst, DoubleRegister lhs,
                                         DoubleRegister rhs) {
  Condition cond = liftoff::ToCondition(liftoff_cond);
  Label not_nan, cont;
  TurboAssembler::CompareIsNanF64(lhs, rhs);
  TurboAssembler::BranchFalseF(&not_nan);
  // If one of the operands is NaN, return 1 for f64.ne, else 0.
  if (cond == ne) {
    TurboAssembler::li(dst, 1);
  } else {
    TurboAssembler::Move(dst, zero_reg);
  }
  TurboAssembler::Branch(&cont);

  bind(&not_nan);

  // No NaNs involved: write 1, then clear it if the FPU comparison (or its
  // negation, depending on |predicate|) does not hold.
  TurboAssembler::li(dst, 1);
  bool predicate;
  FPUCondition fcond =
      liftoff::ConditionToConditionCmpFPU(liftoff_cond, &predicate);
  TurboAssembler::CompareF64(fcond, lhs, rhs);
  if (predicate) {
    TurboAssembler::LoadZeroIfNotFPUCondition(dst);
  } else {
    TurboAssembler::LoadZeroIfFPUCondition(dst);
  }

  bind(&cont);
}
1784 
// No platform-specific select implementation on MIPS32; returning false
// signals that the caller must handle the select itself (presumably via a
// generic branch-based sequence — confirm in liftoff-assembler.h).
bool LiftoffAssembler::emit_select(LiftoffRegister dst, Register condition,
                                   LiftoffRegister true_value,
                                   LiftoffRegister false_value,
                                   ValueKind kind) {
  return false;
}
1791 
// Tests the Smi tag bits of |obj| and branches to |target| when the check
// matches |mode| (eq against zero for kJumpOnSmi, ne otherwise).
void LiftoffAssembler::emit_smi_check(Register obj, Label* target,
                                      SmiCheckMode mode) {
  UseScratchRegisterScope temps(this);
  Register scratch = temps.Acquire();
  And(scratch, obj, Operand(kSmiTagMask));
  Condition condition = mode == kJumpOnSmi ? eq : ne;
  Branch(target, condition, scratch, Operand(zero_reg));
}
1800 
// SIMD load-transform (load extend / load splat) is unimplemented on MIPS32
// Liftoff; records a SIMD bailout.
void LiftoffAssembler::LoadTransform(LiftoffRegister dst, Register src_addr,
                                     Register offset_reg, uintptr_t offset_imm,
                                     LoadType type,
                                     LoadTransformationKind transform,
                                     uint32_t* protected_load_pc) {
  bailout(kSimd, "load extend and load splat unimplemented");
}
1808 
// SIMD store-lane is unimplemented on MIPS32 Liftoff; records a SIMD bailout.
void LiftoffAssembler::StoreLane(Register dst, Register offset,
                                 uintptr_t offset_imm, LiftoffRegister src,
                                 StoreType type, uint8_t lane,
                                 uint32_t* protected_store_pc) {
  bailout(kSimd, "storelane");
}
1815 
// SIMD load-lane is unimplemented on MIPS32 Liftoff; records a SIMD bailout.
void LiftoffAssembler::LoadLane(LiftoffRegister dst, LiftoffRegister src,
                                Register addr, Register offset_reg,
                                uintptr_t offset_imm, LoadType type,
                                uint8_t laneidx, uint32_t* protected_load_pc) {
  bailout(kSimd, "loadlane");
}
1822 
// Unimplemented on MIPS32 Liftoff; records a SIMD bailout.
void LiftoffAssembler::emit_i8x16_shuffle(LiftoffRegister dst,
                                          LiftoffRegister lhs,
                                          LiftoffRegister rhs,
                                          const uint8_t shuffle[16],
                                          bool is_swizzle) {
  bailout(kSimd, "emit_i8x16_shuffle");
}
1830 
// Unimplemented on MIPS32 Liftoff; records a SIMD bailout.
void LiftoffAssembler::emit_i8x16_swizzle(LiftoffRegister dst,
                                          LiftoffRegister lhs,
                                          LiftoffRegister rhs) {
  bailout(kSimd, "emit_i8x16_swizzle");
}
1836 
// Unimplemented on MIPS32 Liftoff; records a SIMD bailout.
void LiftoffAssembler::emit_i8x16_splat(LiftoffRegister dst,
                                        LiftoffRegister src) {
  bailout(kSimd, "emit_i8x16_splat");
}
// Unimplemented on MIPS32 Liftoff; records a SIMD bailout.
void LiftoffAssembler::emit_i16x8_splat(LiftoffRegister dst,
                                        LiftoffRegister src) {
  bailout(kSimd, "emit_i16x8_splat");
}
// Unimplemented on MIPS32 Liftoff; records a SIMD bailout.
void LiftoffAssembler::emit_i32x4_splat(LiftoffRegister dst,
                                        LiftoffRegister src) {
  bailout(kSimd, "emit_i32x4_splat");
}
// Unimplemented on MIPS32 Liftoff; records a SIMD bailout.
void LiftoffAssembler::emit_i64x2_splat(LiftoffRegister dst,
                                        LiftoffRegister src) {
  bailout(kSimd, "emit_i64x2_splat");
}
// Unimplemented on MIPS32 Liftoff; records a SIMD bailout.
void LiftoffAssembler::emit_f32x4_splat(LiftoffRegister dst,
                                        LiftoffRegister src) {
  bailout(kSimd, "emit_f32x4_splat");
}
// Unimplemented on MIPS32 Liftoff; records a SIMD bailout.
void LiftoffAssembler::emit_f64x2_splat(LiftoffRegister dst,
                                        LiftoffRegister src) {
  bailout(kSimd, "emit_f64x2_splat");
}
1866 
// Emits an extended-multiply ("extmul") SIMD binop using MSA instructions:
// each source is interleaved with a zero vector (ilvr_* selects the low
// half of the lanes, ilvl_* the high half), then the widening dot product
// (dotp_s_*/dotp_u_* for signed/unsigned) multiplies the corresponding
// element pairs, producing result lanes of twice the input lane width.
// NOTE(review): clobbers kSimd128RegZero and kSimd128ScratchReg.
#define SIMD_BINOP(name, ilv_instr, dotp_instr)                          \
  void LiftoffAssembler::emit_##name(                                    \
      LiftoffRegister dst, LiftoffRegister src1, LiftoffRegister src2) { \
    MSARegister dst_msa = MSARegister::from_code(dst.liftoff_code());    \
    MSARegister src1_msa = MSARegister::from_code(src1.liftoff_code());  \
    MSARegister src2_msa = MSARegister::from_code(src2.liftoff_code());  \
    xor_v(kSimd128RegZero, kSimd128RegZero, kSimd128RegZero);            \
    ilv_instr(kSimd128ScratchReg, kSimd128RegZero, src1_msa);            \
    ilv_instr(kSimd128RegZero, kSimd128RegZero, src2_msa);               \
    dotp_instr(dst_msa, kSimd128ScratchReg, kSimd128RegZero);            \
  }

// i16x8 <- i8x16 extended multiplies.
SIMD_BINOP(i16x8_extmul_low_i8x16_s, ilvr_b, dotp_s_h)
SIMD_BINOP(i16x8_extmul_high_i8x16_s, ilvl_b, dotp_s_h)
SIMD_BINOP(i16x8_extmul_low_i8x16_u, ilvr_b, dotp_u_h)
SIMD_BINOP(i16x8_extmul_high_i8x16_u, ilvl_b, dotp_u_h)

// i32x4 <- i16x8 extended multiplies.
SIMD_BINOP(i32x4_extmul_low_i16x8_s, ilvr_h, dotp_s_w)
SIMD_BINOP(i32x4_extmul_high_i16x8_s, ilvl_h, dotp_s_w)
SIMD_BINOP(i32x4_extmul_low_i16x8_u, ilvr_h, dotp_u_w)
SIMD_BINOP(i32x4_extmul_high_i16x8_u, ilvl_h, dotp_u_w)

// i64x2 <- i32x4 extended multiplies.
SIMD_BINOP(i64x2_extmul_low_i32x4_s, ilvr_w, dotp_s_d)
SIMD_BINOP(i64x2_extmul_high_i32x4_s, ilvl_w, dotp_s_d)
SIMD_BINOP(i64x2_extmul_low_i32x4_u, ilvr_w, dotp_u_d)
SIMD_BINOP(i64x2_extmul_high_i32x4_u, ilvl_w, dotp_u_d)

#undef SIMD_BINOP
1895 
// Unimplemented on MIPS32 Liftoff; records a SIMD bailout.
void LiftoffAssembler::emit_i16x8_q15mulr_sat_s(LiftoffRegister dst,
                                                LiftoffRegister src1,
                                                LiftoffRegister src2) {
  bailout(kSimd, "i16x8_q15mulr_sat_s");
}
1901 
// Unimplemented on MIPS32 Liftoff; records a SIMD bailout.
void LiftoffAssembler::emit_i8x16_eq(LiftoffRegister dst, LiftoffRegister lhs,
                                     LiftoffRegister rhs) {
  bailout(kSimd, "emit_i8x16_eq");
}
1906 
// Unimplemented on MIPS32 Liftoff; records a SIMD bailout.
void LiftoffAssembler::emit_i8x16_ne(LiftoffRegister dst, LiftoffRegister lhs,
                                     LiftoffRegister rhs) {
  bailout(kSimd, "emit_i8x16_ne");
}
1911 
// Unimplemented on MIPS32 Liftoff; records a SIMD bailout.
void LiftoffAssembler::emit_i8x16_gt_s(LiftoffRegister dst, LiftoffRegister lhs,
                                       LiftoffRegister rhs) {
  bailout(kSimd, "emit_i8x16_gt_s");
}
1916 
// Unimplemented on MIPS32 Liftoff; records a SIMD bailout.
void LiftoffAssembler::emit_i8x16_gt_u(LiftoffRegister dst, LiftoffRegister lhs,
                                       LiftoffRegister rhs) {
  bailout(kSimd, "emit_i8x16_gt_u");
}
1921 
// Unimplemented on MIPS32 Liftoff; records a SIMD bailout.
void LiftoffAssembler::emit_i8x16_ge_s(LiftoffRegister dst, LiftoffRegister lhs,
                                       LiftoffRegister rhs) {
  bailout(kSimd, "emit_i8x16_ge_s");
}
1926 
// Unimplemented on MIPS32 Liftoff; records a SIMD bailout.
void LiftoffAssembler::emit_i8x16_ge_u(LiftoffRegister dst, LiftoffRegister lhs,
                                       LiftoffRegister rhs) {
  bailout(kSimd, "emit_i8x16_ge_u");
}
1931 
// Unimplemented on MIPS32 Liftoff; records a SIMD bailout.
void LiftoffAssembler::emit_i16x8_eq(LiftoffRegister dst, LiftoffRegister lhs,
                                     LiftoffRegister rhs) {
  bailout(kSimd, "emit_i16x8_eq");
}
1936 
// Unimplemented on MIPS32 Liftoff; records a SIMD bailout.
void LiftoffAssembler::emit_i16x8_ne(LiftoffRegister dst, LiftoffRegister lhs,
                                     LiftoffRegister rhs) {
  bailout(kSimd, "emit_i16x8_ne");
}
1941 
// Unimplemented on MIPS32 Liftoff; records a SIMD bailout.
void LiftoffAssembler::emit_i16x8_gt_s(LiftoffRegister dst, LiftoffRegister lhs,
                                       LiftoffRegister rhs) {
  bailout(kSimd, "emit_i16x8_gt_s");
}
1946 
// Unimplemented on MIPS32 Liftoff; records a SIMD bailout.
void LiftoffAssembler::emit_i16x8_gt_u(LiftoffRegister dst, LiftoffRegister lhs,
                                       LiftoffRegister rhs) {
  bailout(kSimd, "emit_i16x8_gt_u");
}
1951 
// Unimplemented on MIPS32 Liftoff; records a SIMD bailout.
void LiftoffAssembler::emit_i16x8_ge_s(LiftoffRegister dst, LiftoffRegister lhs,
                                       LiftoffRegister rhs) {
  bailout(kSimd, "emit_i16x8_ge_s");
}
1956 
// Unimplemented on MIPS32 Liftoff; records a SIMD bailout.
void LiftoffAssembler::emit_i16x8_ge_u(LiftoffRegister dst, LiftoffRegister lhs,
                                       LiftoffRegister rhs) {
  bailout(kSimd, "emit_i16x8_ge_u");
}
1961 
// Unimplemented on MIPS32 Liftoff; records a SIMD bailout.
void LiftoffAssembler::emit_i32x4_eq(LiftoffRegister dst, LiftoffRegister lhs,
                                     LiftoffRegister rhs) {
  bailout(kSimd, "emit_i32x4_eq");
}
1966 
// Unimplemented on MIPS32 Liftoff; records a SIMD bailout.
void LiftoffAssembler::emit_i32x4_ne(LiftoffRegister dst, LiftoffRegister lhs,
                                     LiftoffRegister rhs) {
  bailout(kSimd, "emit_i32x4_ne");
}
1971 
// Unimplemented on MIPS32 Liftoff; records a SIMD bailout.
void LiftoffAssembler::emit_i32x4_gt_s(LiftoffRegister dst, LiftoffRegister lhs,
                                       LiftoffRegister rhs) {
  bailout(kSimd, "emit_i32x4_gt_s");
}
1976 
// Unimplemented on MIPS32 Liftoff; records a SIMD bailout.
void LiftoffAssembler::emit_i32x4_gt_u(LiftoffRegister dst, LiftoffRegister lhs,
                                       LiftoffRegister rhs) {
  bailout(kSimd, "emit_i32x4_gt_u");
}
1981 
// Unimplemented on MIPS32 Liftoff; records a SIMD bailout.
void LiftoffAssembler::emit_i32x4_ge_s(LiftoffRegister dst, LiftoffRegister lhs,
                                       LiftoffRegister rhs) {
  bailout(kSimd, "emit_i32x4_ge_s");
}
1986 
// Unimplemented on MIPS32 Liftoff; records a SIMD bailout.
void LiftoffAssembler::emit_i32x4_ge_u(LiftoffRegister dst, LiftoffRegister lhs,
                                       LiftoffRegister rhs) {
  bailout(kSimd, "emit_i32x4_ge_u");
}
1991 
// Unimplemented on MIPS32 Liftoff; records a SIMD bailout.
void LiftoffAssembler::emit_f32x4_eq(LiftoffRegister dst, LiftoffRegister lhs,
                                     LiftoffRegister rhs) {
  bailout(kSimd, "emit_f32x4_eq");
}
1996 
// Unimplemented on MIPS32 Liftoff; records a SIMD bailout.
void LiftoffAssembler::emit_f32x4_ne(LiftoffRegister dst, LiftoffRegister lhs,
                                     LiftoffRegister rhs) {
  bailout(kSimd, "emit_f32x4_ne");
}
2001 
// Unimplemented on MIPS32 Liftoff; records a SIMD bailout.
void LiftoffAssembler::emit_f32x4_lt(LiftoffRegister dst, LiftoffRegister lhs,
                                     LiftoffRegister rhs) {
  bailout(kSimd, "emit_f32x4_lt");
}
2006 
// Unimplemented on MIPS32 Liftoff; records a SIMD bailout.
void LiftoffAssembler::emit_f32x4_le(LiftoffRegister dst, LiftoffRegister lhs,
                                     LiftoffRegister rhs) {
  bailout(kSimd, "emit_f32x4_le");
}
2011 
// Unimplemented on MIPS32 Liftoff; records a SIMD bailout.
void LiftoffAssembler::emit_i64x2_eq(LiftoffRegister dst, LiftoffRegister lhs,
                                     LiftoffRegister rhs) {
  bailout(kSimd, "emit_i64x2_eq");
}
2016 
// Unimplemented on MIPS32 Liftoff; records a SIMD bailout.
void LiftoffAssembler::emit_i64x2_ne(LiftoffRegister dst, LiftoffRegister lhs,
                                     LiftoffRegister rhs) {
  bailout(kSimd, "emit_i64x2_ne");
}
2021 
// Unimplemented on MIPS32 Liftoff; records a SIMD bailout.
void LiftoffAssembler::emit_i64x2_abs(LiftoffRegister dst,
                                      LiftoffRegister src) {
  bailout(kSimd, "emit_i64x2_abs");
}
2026 
// Unimplemented on MIPS32 Liftoff; records a SIMD bailout.
void LiftoffAssembler::emit_f64x2_eq(LiftoffRegister dst, LiftoffRegister lhs,
                                     LiftoffRegister rhs) {
  bailout(kSimd, "emit_f64x2_eq");
}
2031 
// Unimplemented on MIPS32 Liftoff; records a SIMD bailout.
void LiftoffAssembler::emit_f64x2_ne(LiftoffRegister dst, LiftoffRegister lhs,
                                     LiftoffRegister rhs) {
  bailout(kSimd, "emit_f64x2_ne");
}
2036 
// Unimplemented on MIPS32 Liftoff; records a SIMD bailout.
void LiftoffAssembler::emit_f64x2_lt(LiftoffRegister dst, LiftoffRegister lhs,
                                     LiftoffRegister rhs) {
  bailout(kSimd, "emit_f64x2_lt");
}
2041 
// Unimplemented on MIPS32 Liftoff; records a SIMD bailout.
void LiftoffAssembler::emit_f64x2_le(LiftoffRegister dst, LiftoffRegister lhs,
                                     LiftoffRegister rhs) {
  bailout(kSimd, "emit_f64x2_le");
}
2046 
// Unimplemented on MIPS32 Liftoff; records a SIMD bailout.
void LiftoffAssembler::emit_s128_const(LiftoffRegister dst,
                                       const uint8_t imms[16]) {
  bailout(kSimd, "emit_s128_const");
}
2051 
// Unimplemented on MIPS32 Liftoff; records a SIMD bailout.
void LiftoffAssembler::emit_s128_not(LiftoffRegister dst, LiftoffRegister src) {
  bailout(kSimd, "emit_s128_not");
}
2055 
// Unimplemented on MIPS32 Liftoff; records a SIMD bailout.
void LiftoffAssembler::emit_s128_and(LiftoffRegister dst, LiftoffRegister lhs,
                                     LiftoffRegister rhs) {
  bailout(kSimd, "emit_s128_and");
}
2060 
// Unimplemented on MIPS32 Liftoff; records a SIMD bailout.
void LiftoffAssembler::emit_s128_or(LiftoffRegister dst, LiftoffRegister lhs,
                                    LiftoffRegister rhs) {
  bailout(kSimd, "emit_s128_or");
}
2065 
// Unimplemented on MIPS32 Liftoff; records a SIMD bailout.
void LiftoffAssembler::emit_s128_xor(LiftoffRegister dst, LiftoffRegister lhs,
                                     LiftoffRegister rhs) {
  bailout(kSimd, "emit_s128_xor");
}
2070 
// Unimplemented on MIPS32 Liftoff; records a SIMD bailout.
void LiftoffAssembler::emit_s128_and_not(LiftoffRegister dst,
                                         LiftoffRegister lhs,
                                         LiftoffRegister rhs) {
  bailout(kSimd, "emit_s128_and_not");
}
2076 
// Unimplemented on MIPS32 Liftoff; records a SIMD bailout.
void LiftoffAssembler::emit_s128_select(LiftoffRegister dst,
                                        LiftoffRegister src1,
                                        LiftoffRegister src2,
                                        LiftoffRegister mask) {
  bailout(kSimd, "emit_s128_select");
}
2083 
// Unimplemented on MIPS32 Liftoff; records a SIMD bailout.
void LiftoffAssembler::emit_i8x16_neg(LiftoffRegister dst,
                                      LiftoffRegister src) {
  bailout(kSimd, "emit_i8x16_neg");
}
2088 
// Unimplemented on MIPS32 Liftoff; records a SIMD bailout.
void LiftoffAssembler::emit_v128_anytrue(LiftoffRegister dst,
                                         LiftoffRegister src) {
  bailout(kSimd, "emit_v128_anytrue");
}
2093 
// Unimplemented on MIPS32 Liftoff; records a SIMD bailout.
void LiftoffAssembler::emit_i8x16_alltrue(LiftoffRegister dst,
                                          LiftoffRegister src) {
  bailout(kSimd, "emit_i8x16_alltrue");
}
2098 
// Unimplemented on MIPS32 Liftoff; records a SIMD bailout.
void LiftoffAssembler::emit_i8x16_bitmask(LiftoffRegister dst,
                                          LiftoffRegister src) {
  bailout(kSimd, "emit_i8x16_bitmask");
}
2103 
// Unimplemented on MIPS32 Liftoff; records a SIMD bailout.
void LiftoffAssembler::emit_i8x16_shl(LiftoffRegister dst, LiftoffRegister lhs,
                                      LiftoffRegister rhs) {
  bailout(kSimd, "emit_i8x16_shl");
}
2108 
// Unimplemented on MIPS32 Liftoff; records a SIMD bailout.
void LiftoffAssembler::emit_i8x16_shli(LiftoffRegister dst, LiftoffRegister lhs,
                                       int32_t rhs) {
  bailout(kSimd, "emit_i8x16_shli");
}
2113 
// Unimplemented on MIPS32 Liftoff; records a SIMD bailout.
void LiftoffAssembler::emit_i8x16_shr_s(LiftoffRegister dst,
                                        LiftoffRegister lhs,
                                        LiftoffRegister rhs) {
  bailout(kSimd, "emit_i8x16_shr_s");
}
2119 
emit_i8x16_shri_s(LiftoffRegister dst,LiftoffRegister lhs,int32_t rhs)2120 void LiftoffAssembler::emit_i8x16_shri_s(LiftoffRegister dst,
2121                                          LiftoffRegister lhs, int32_t rhs) {
2122   bailout(kSimd, "emit_i8x16_shri_s");
2123 }
2124 
emit_i8x16_shr_u(LiftoffRegister dst,LiftoffRegister lhs,LiftoffRegister rhs)2125 void LiftoffAssembler::emit_i8x16_shr_u(LiftoffRegister dst,
2126                                         LiftoffRegister lhs,
2127                                         LiftoffRegister rhs) {
2128   bailout(kSimd, "emit_i8x16_shr_u");
2129 }
2130 
emit_i8x16_shri_u(LiftoffRegister dst,LiftoffRegister lhs,int32_t rhs)2131 void LiftoffAssembler::emit_i8x16_shri_u(LiftoffRegister dst,
2132                                          LiftoffRegister lhs, int32_t rhs) {
2133   bailout(kSimd, "emit_i8x16_shri_u");
2134 }
2135 
emit_i8x16_add(LiftoffRegister dst,LiftoffRegister lhs,LiftoffRegister rhs)2136 void LiftoffAssembler::emit_i8x16_add(LiftoffRegister dst, LiftoffRegister lhs,
2137                                       LiftoffRegister rhs) {
2138   bailout(kSimd, "emit_i8x16_add");
2139 }
2140 
emit_i8x16_add_sat_s(LiftoffRegister dst,LiftoffRegister lhs,LiftoffRegister rhs)2141 void LiftoffAssembler::emit_i8x16_add_sat_s(LiftoffRegister dst,
2142                                             LiftoffRegister lhs,
2143                                             LiftoffRegister rhs) {
2144   bailout(kSimd, "emit_i8x16_add_sat_s");
2145 }
2146 
emit_i8x16_add_sat_u(LiftoffRegister dst,LiftoffRegister lhs,LiftoffRegister rhs)2147 void LiftoffAssembler::emit_i8x16_add_sat_u(LiftoffRegister dst,
2148                                             LiftoffRegister lhs,
2149                                             LiftoffRegister rhs) {
2150   bailout(kSimd, "emit_i8x16_add_sat_u");
2151 }
2152 
emit_i8x16_sub(LiftoffRegister dst,LiftoffRegister lhs,LiftoffRegister rhs)2153 void LiftoffAssembler::emit_i8x16_sub(LiftoffRegister dst, LiftoffRegister lhs,
2154                                       LiftoffRegister rhs) {
2155   bailout(kSimd, "emit_i8x16_sub");
2156 }
2157 
emit_i8x16_sub_sat_s(LiftoffRegister dst,LiftoffRegister lhs,LiftoffRegister rhs)2158 void LiftoffAssembler::emit_i8x16_sub_sat_s(LiftoffRegister dst,
2159                                             LiftoffRegister lhs,
2160                                             LiftoffRegister rhs) {
2161   bailout(kSimd, "emit_i8x16_sub_sat_s");
2162 }
2163 
emit_i8x16_sub_sat_u(LiftoffRegister dst,LiftoffRegister lhs,LiftoffRegister rhs)2164 void LiftoffAssembler::emit_i8x16_sub_sat_u(LiftoffRegister dst,
2165                                             LiftoffRegister lhs,
2166                                             LiftoffRegister rhs) {
2167   bailout(kSimd, "emit_i8x16_sub_sat_u");
2168 }
2169 
emit_i8x16_min_s(LiftoffRegister dst,LiftoffRegister lhs,LiftoffRegister rhs)2170 void LiftoffAssembler::emit_i8x16_min_s(LiftoffRegister dst,
2171                                         LiftoffRegister lhs,
2172                                         LiftoffRegister rhs) {
2173   bailout(kSimd, "emit_i8x16_min_s");
2174 }
2175 
emit_i8x16_min_u(LiftoffRegister dst,LiftoffRegister lhs,LiftoffRegister rhs)2176 void LiftoffAssembler::emit_i8x16_min_u(LiftoffRegister dst,
2177                                         LiftoffRegister lhs,
2178                                         LiftoffRegister rhs) {
2179   bailout(kSimd, "emit_i8x16_min_u");
2180 }
2181 
emit_i8x16_max_s(LiftoffRegister dst,LiftoffRegister lhs,LiftoffRegister rhs)2182 void LiftoffAssembler::emit_i8x16_max_s(LiftoffRegister dst,
2183                                         LiftoffRegister lhs,
2184                                         LiftoffRegister rhs) {
2185   bailout(kSimd, "emit_i8x16_max_s");
2186 }
2187 
emit_i8x16_max_u(LiftoffRegister dst,LiftoffRegister lhs,LiftoffRegister rhs)2188 void LiftoffAssembler::emit_i8x16_max_u(LiftoffRegister dst,
2189                                         LiftoffRegister lhs,
2190                                         LiftoffRegister rhs) {
2191   bailout(kSimd, "emit_i8x16_max_u");
2192 }
2193 
emit_i8x16_popcnt(LiftoffRegister dst,LiftoffRegister src)2194 void LiftoffAssembler::emit_i8x16_popcnt(LiftoffRegister dst,
2195                                          LiftoffRegister src) {
2196   bailout(kSimd, "emit_i8x16_popcnt");
2197 }
2198 
emit_i16x8_neg(LiftoffRegister dst,LiftoffRegister src)2199 void LiftoffAssembler::emit_i16x8_neg(LiftoffRegister dst,
2200                                       LiftoffRegister src) {
2201   bailout(kSimd, "emit_i16x8_neg");
2202 }
2203 
emit_i16x8_alltrue(LiftoffRegister dst,LiftoffRegister src)2204 void LiftoffAssembler::emit_i16x8_alltrue(LiftoffRegister dst,
2205                                           LiftoffRegister src) {
2206   bailout(kSimd, "emit_i16x8_alltrue");
2207 }
2208 
emit_i16x8_bitmask(LiftoffRegister dst,LiftoffRegister src)2209 void LiftoffAssembler::emit_i16x8_bitmask(LiftoffRegister dst,
2210                                           LiftoffRegister src) {
2211   bailout(kSimd, "emit_i16x8_bitmask");
2212 }
2213 
emit_i16x8_shl(LiftoffRegister dst,LiftoffRegister lhs,LiftoffRegister rhs)2214 void LiftoffAssembler::emit_i16x8_shl(LiftoffRegister dst, LiftoffRegister lhs,
2215                                       LiftoffRegister rhs) {
2216   bailout(kSimd, "emit_i16x8_shl");
2217 }
2218 
emit_i16x8_shli(LiftoffRegister dst,LiftoffRegister lhs,int32_t rhs)2219 void LiftoffAssembler::emit_i16x8_shli(LiftoffRegister dst, LiftoffRegister lhs,
2220                                        int32_t rhs) {
2221   bailout(kSimd, "emit_i16x8_shli");
2222 }
2223 
emit_i16x8_shr_s(LiftoffRegister dst,LiftoffRegister lhs,LiftoffRegister rhs)2224 void LiftoffAssembler::emit_i16x8_shr_s(LiftoffRegister dst,
2225                                         LiftoffRegister lhs,
2226                                         LiftoffRegister rhs) {
2227   bailout(kSimd, "emit_i16x8_shr_s");
2228 }
2229 
emit_i16x8_shri_s(LiftoffRegister dst,LiftoffRegister lhs,int32_t rhs)2230 void LiftoffAssembler::emit_i16x8_shri_s(LiftoffRegister dst,
2231                                          LiftoffRegister lhs, int32_t rhs) {
2232   bailout(kSimd, "emit_i16x8_shri_s");
2233 }
2234 
emit_i16x8_shr_u(LiftoffRegister dst,LiftoffRegister lhs,LiftoffRegister rhs)2235 void LiftoffAssembler::emit_i16x8_shr_u(LiftoffRegister dst,
2236                                         LiftoffRegister lhs,
2237                                         LiftoffRegister rhs) {
2238   bailout(kSimd, "emit_i16x8_shr_u");
2239 }
2240 
emit_i16x8_shri_u(LiftoffRegister dst,LiftoffRegister lhs,int32_t rhs)2241 void LiftoffAssembler::emit_i16x8_shri_u(LiftoffRegister dst,
2242                                          LiftoffRegister lhs, int32_t rhs) {
2243   bailout(kSimd, "emit_i16x8_shri_u");
2244 }
2245 
emit_i16x8_add(LiftoffRegister dst,LiftoffRegister lhs,LiftoffRegister rhs)2246 void LiftoffAssembler::emit_i16x8_add(LiftoffRegister dst, LiftoffRegister lhs,
2247                                       LiftoffRegister rhs) {
2248   bailout(kSimd, "emit_i16x8_add");
2249 }
2250 
emit_i16x8_add_sat_s(LiftoffRegister dst,LiftoffRegister lhs,LiftoffRegister rhs)2251 void LiftoffAssembler::emit_i16x8_add_sat_s(LiftoffRegister dst,
2252                                             LiftoffRegister lhs,
2253                                             LiftoffRegister rhs) {
2254   bailout(kSimd, "emit_i16x8_add_sat_s");
2255 }
2256 
emit_i16x8_add_sat_u(LiftoffRegister dst,LiftoffRegister lhs,LiftoffRegister rhs)2257 void LiftoffAssembler::emit_i16x8_add_sat_u(LiftoffRegister dst,
2258                                             LiftoffRegister lhs,
2259                                             LiftoffRegister rhs) {
2260   bailout(kSimd, "emit_i16x8_add_sat_u");
2261 }
2262 
emit_i16x8_sub(LiftoffRegister dst,LiftoffRegister lhs,LiftoffRegister rhs)2263 void LiftoffAssembler::emit_i16x8_sub(LiftoffRegister dst, LiftoffRegister lhs,
2264                                       LiftoffRegister rhs) {
2265   bailout(kSimd, "emit_i16x8_sub");
2266 }
2267 
emit_i16x8_sub_sat_s(LiftoffRegister dst,LiftoffRegister lhs,LiftoffRegister rhs)2268 void LiftoffAssembler::emit_i16x8_sub_sat_s(LiftoffRegister dst,
2269                                             LiftoffRegister lhs,
2270                                             LiftoffRegister rhs) {
2271   bailout(kSimd, "emit_i16x8_sub_sat_s");
2272 }
2273 
emit_i16x8_sub_sat_u(LiftoffRegister dst,LiftoffRegister lhs,LiftoffRegister rhs)2274 void LiftoffAssembler::emit_i16x8_sub_sat_u(LiftoffRegister dst,
2275                                             LiftoffRegister lhs,
2276                                             LiftoffRegister rhs) {
2277   bailout(kSimd, "emit_i16x8_sub_sat_u");
2278 }
2279 
emit_i16x8_mul(LiftoffRegister dst,LiftoffRegister lhs,LiftoffRegister rhs)2280 void LiftoffAssembler::emit_i16x8_mul(LiftoffRegister dst, LiftoffRegister lhs,
2281                                       LiftoffRegister rhs) {
2282   bailout(kSimd, "emit_i16x8_mul");
2283 }
2284 
emit_i16x8_min_s(LiftoffRegister dst,LiftoffRegister lhs,LiftoffRegister rhs)2285 void LiftoffAssembler::emit_i16x8_min_s(LiftoffRegister dst,
2286                                         LiftoffRegister lhs,
2287                                         LiftoffRegister rhs) {
2288   bailout(kSimd, "emit_i16x8_min_s");
2289 }
2290 
emit_i16x8_min_u(LiftoffRegister dst,LiftoffRegister lhs,LiftoffRegister rhs)2291 void LiftoffAssembler::emit_i16x8_min_u(LiftoffRegister dst,
2292                                         LiftoffRegister lhs,
2293                                         LiftoffRegister rhs) {
2294   bailout(kSimd, "emit_i16x8_min_u");
2295 }
2296 
emit_i16x8_max_s(LiftoffRegister dst,LiftoffRegister lhs,LiftoffRegister rhs)2297 void LiftoffAssembler::emit_i16x8_max_s(LiftoffRegister dst,
2298                                         LiftoffRegister lhs,
2299                                         LiftoffRegister rhs) {
2300   bailout(kSimd, "emit_i16x8_max_s");
2301 }
2302 
emit_i16x8_max_u(LiftoffRegister dst,LiftoffRegister lhs,LiftoffRegister rhs)2303 void LiftoffAssembler::emit_i16x8_max_u(LiftoffRegister dst,
2304                                         LiftoffRegister lhs,
2305                                         LiftoffRegister rhs) {
2306   bailout(kSimd, "emit_i16x8_max_u");
2307 }
2308 
emit_i16x8_extadd_pairwise_i8x16_s(LiftoffRegister dst,LiftoffRegister src)2309 void LiftoffAssembler::emit_i16x8_extadd_pairwise_i8x16_s(LiftoffRegister dst,
2310                                                           LiftoffRegister src) {
2311   bailout(kSimd, "emit_i16x8_extadd_pairwise_i8x16_s");
2312 }
2313 
emit_i16x8_extadd_pairwise_i8x16_u(LiftoffRegister dst,LiftoffRegister src)2314 void LiftoffAssembler::emit_i16x8_extadd_pairwise_i8x16_u(LiftoffRegister dst,
2315                                                           LiftoffRegister src) {
2316   bailout(kSimd, "emit_i16x8_extadd_pairwise_i8x16_u");
2317 }
2318 
emit_i32x4_neg(LiftoffRegister dst,LiftoffRegister src)2319 void LiftoffAssembler::emit_i32x4_neg(LiftoffRegister dst,
2320                                       LiftoffRegister src) {
2321   bailout(kSimd, "emit_i32x4_neg");
2322 }
2323 
emit_i32x4_alltrue(LiftoffRegister dst,LiftoffRegister src)2324 void LiftoffAssembler::emit_i32x4_alltrue(LiftoffRegister dst,
2325                                           LiftoffRegister src) {
2326   bailout(kSimd, "emit_i32x4_alltrue");
2327 }
2328 
emit_i32x4_bitmask(LiftoffRegister dst,LiftoffRegister src)2329 void LiftoffAssembler::emit_i32x4_bitmask(LiftoffRegister dst,
2330                                           LiftoffRegister src) {
2331   bailout(kSimd, "emit_i32x4_bitmask");
2332 }
2333 
emit_i32x4_shl(LiftoffRegister dst,LiftoffRegister lhs,LiftoffRegister rhs)2334 void LiftoffAssembler::emit_i32x4_shl(LiftoffRegister dst, LiftoffRegister lhs,
2335                                       LiftoffRegister rhs) {
2336   bailout(kSimd, "emit_i32x4_shl");
2337 }
2338 
emit_i32x4_shli(LiftoffRegister dst,LiftoffRegister lhs,int32_t rhs)2339 void LiftoffAssembler::emit_i32x4_shli(LiftoffRegister dst, LiftoffRegister lhs,
2340                                        int32_t rhs) {
2341   bailout(kSimd, "emit_i32x4_shli");
2342 }
2343 
emit_i32x4_shr_s(LiftoffRegister dst,LiftoffRegister lhs,LiftoffRegister rhs)2344 void LiftoffAssembler::emit_i32x4_shr_s(LiftoffRegister dst,
2345                                         LiftoffRegister lhs,
2346                                         LiftoffRegister rhs) {
2347   bailout(kSimd, "emit_i32x4_shr_s");
2348 }
2349 
emit_i32x4_shri_s(LiftoffRegister dst,LiftoffRegister lhs,int32_t rhs)2350 void LiftoffAssembler::emit_i32x4_shri_s(LiftoffRegister dst,
2351                                          LiftoffRegister lhs, int32_t rhs) {
2352   bailout(kSimd, "emit_i32x4_shri_s");
2353 }
2354 
emit_i32x4_shr_u(LiftoffRegister dst,LiftoffRegister lhs,LiftoffRegister rhs)2355 void LiftoffAssembler::emit_i32x4_shr_u(LiftoffRegister dst,
2356                                         LiftoffRegister lhs,
2357                                         LiftoffRegister rhs) {
2358   bailout(kSimd, "emit_i32x4_shr_u");
2359 }
2360 
emit_i32x4_shri_u(LiftoffRegister dst,LiftoffRegister lhs,int32_t rhs)2361 void LiftoffAssembler::emit_i32x4_shri_u(LiftoffRegister dst,
2362                                          LiftoffRegister lhs, int32_t rhs) {
2363   bailout(kSimd, "emit_i32x4_shri_u");
2364 }
2365 
emit_i32x4_add(LiftoffRegister dst,LiftoffRegister lhs,LiftoffRegister rhs)2366 void LiftoffAssembler::emit_i32x4_add(LiftoffRegister dst, LiftoffRegister lhs,
2367                                       LiftoffRegister rhs) {
2368   bailout(kSimd, "emit_i32x4_add");
2369 }
2370 
emit_i32x4_sub(LiftoffRegister dst,LiftoffRegister lhs,LiftoffRegister rhs)2371 void LiftoffAssembler::emit_i32x4_sub(LiftoffRegister dst, LiftoffRegister lhs,
2372                                       LiftoffRegister rhs) {
2373   bailout(kSimd, "emit_i32x4_sub");
2374 }
2375 
emit_i32x4_mul(LiftoffRegister dst,LiftoffRegister lhs,LiftoffRegister rhs)2376 void LiftoffAssembler::emit_i32x4_mul(LiftoffRegister dst, LiftoffRegister lhs,
2377                                       LiftoffRegister rhs) {
2378   bailout(kSimd, "emit_i32x4_mul");
2379 }
2380 
emit_i32x4_min_s(LiftoffRegister dst,LiftoffRegister lhs,LiftoffRegister rhs)2381 void LiftoffAssembler::emit_i32x4_min_s(LiftoffRegister dst,
2382                                         LiftoffRegister lhs,
2383                                         LiftoffRegister rhs) {
2384   bailout(kSimd, "emit_i32x4_min_s");
2385 }
2386 
emit_i32x4_min_u(LiftoffRegister dst,LiftoffRegister lhs,LiftoffRegister rhs)2387 void LiftoffAssembler::emit_i32x4_min_u(LiftoffRegister dst,
2388                                         LiftoffRegister lhs,
2389                                         LiftoffRegister rhs) {
2390   bailout(kSimd, "emit_i32x4_min_u");
2391 }
2392 
emit_i32x4_max_s(LiftoffRegister dst,LiftoffRegister lhs,LiftoffRegister rhs)2393 void LiftoffAssembler::emit_i32x4_max_s(LiftoffRegister dst,
2394                                         LiftoffRegister lhs,
2395                                         LiftoffRegister rhs) {
2396   bailout(kSimd, "emit_i32x4_max_s");
2397 }
2398 
emit_i32x4_max_u(LiftoffRegister dst,LiftoffRegister lhs,LiftoffRegister rhs)2399 void LiftoffAssembler::emit_i32x4_max_u(LiftoffRegister dst,
2400                                         LiftoffRegister lhs,
2401                                         LiftoffRegister rhs) {
2402   bailout(kSimd, "emit_i32x4_max_u");
2403 }
2404 
emit_i32x4_dot_i16x8_s(LiftoffRegister dst,LiftoffRegister lhs,LiftoffRegister rhs)2405 void LiftoffAssembler::emit_i32x4_dot_i16x8_s(LiftoffRegister dst,
2406                                               LiftoffRegister lhs,
2407                                               LiftoffRegister rhs) {
2408   bailout(kSimd, "emit_i32x4_dot_i16x8_s");
2409 }
2410 
emit_i32x4_extadd_pairwise_i16x8_s(LiftoffRegister dst,LiftoffRegister src)2411 void LiftoffAssembler::emit_i32x4_extadd_pairwise_i16x8_s(LiftoffRegister dst,
2412                                                           LiftoffRegister src) {
2413   bailout(kSimd, "emit_i32x4_extadd_pairwise_i16x8_s");
2414 }
2415 
emit_i32x4_extadd_pairwise_i16x8_u(LiftoffRegister dst,LiftoffRegister src)2416 void LiftoffAssembler::emit_i32x4_extadd_pairwise_i16x8_u(LiftoffRegister dst,
2417                                                           LiftoffRegister src) {
2418   bailout(kSimd, "emit_i32x4_extadd_pairwise_i16x8_u");
2419 }
2420 
emit_i64x2_neg(LiftoffRegister dst,LiftoffRegister src)2421 void LiftoffAssembler::emit_i64x2_neg(LiftoffRegister dst,
2422                                       LiftoffRegister src) {
2423   bailout(kSimd, "emit_i64x2_neg");
2424 }
2425 
emit_i64x2_alltrue(LiftoffRegister dst,LiftoffRegister src)2426 void LiftoffAssembler::emit_i64x2_alltrue(LiftoffRegister dst,
2427                                           LiftoffRegister src) {
2428   bailout(kSimd, "emit_i64x2_alltrue");
2429 }
2430 
emit_i64x2_bitmask(LiftoffRegister dst,LiftoffRegister src)2431 void LiftoffAssembler::emit_i64x2_bitmask(LiftoffRegister dst,
2432                                           LiftoffRegister src) {
2433   bailout(kSimd, "emit_i64x2_bitmask");
2434 }
2435 
emit_i64x2_shl(LiftoffRegister dst,LiftoffRegister lhs,LiftoffRegister rhs)2436 void LiftoffAssembler::emit_i64x2_shl(LiftoffRegister dst, LiftoffRegister lhs,
2437                                       LiftoffRegister rhs) {
2438   bailout(kSimd, "emit_i64x2_shl");
2439 }
2440 
emit_i64x2_shli(LiftoffRegister dst,LiftoffRegister lhs,int32_t rhs)2441 void LiftoffAssembler::emit_i64x2_shli(LiftoffRegister dst, LiftoffRegister lhs,
2442                                        int32_t rhs) {
2443   bailout(kSimd, "emit_i64x2_shli");
2444 }
2445 
emit_i64x2_shr_s(LiftoffRegister dst,LiftoffRegister lhs,LiftoffRegister rhs)2446 void LiftoffAssembler::emit_i64x2_shr_s(LiftoffRegister dst,
2447                                         LiftoffRegister lhs,
2448                                         LiftoffRegister rhs) {
2449   bailout(kSimd, "emit_i64x2_shr_s");
2450 }
2451 
emit_i64x2_shri_s(LiftoffRegister dst,LiftoffRegister lhs,int32_t rhs)2452 void LiftoffAssembler::emit_i64x2_shri_s(LiftoffRegister dst,
2453                                          LiftoffRegister lhs, int32_t rhs) {
2454   bailout(kSimd, "emit_i64x2_shri_s");
2455 }
2456 
emit_i64x2_shr_u(LiftoffRegister dst,LiftoffRegister lhs,LiftoffRegister rhs)2457 void LiftoffAssembler::emit_i64x2_shr_u(LiftoffRegister dst,
2458                                         LiftoffRegister lhs,
2459                                         LiftoffRegister rhs) {
2460   bailout(kSimd, "emit_i64x2_shr_u");
2461 }
2462 
emit_i64x2_shri_u(LiftoffRegister dst,LiftoffRegister lhs,int32_t rhs)2463 void LiftoffAssembler::emit_i64x2_shri_u(LiftoffRegister dst,
2464                                          LiftoffRegister lhs, int32_t rhs) {
2465   bailout(kSimd, "emit_i64x2_shri_u");
2466 }
2467 
emit_i64x2_add(LiftoffRegister dst,LiftoffRegister lhs,LiftoffRegister rhs)2468 void LiftoffAssembler::emit_i64x2_add(LiftoffRegister dst, LiftoffRegister lhs,
2469                                       LiftoffRegister rhs) {
2470   bailout(kSimd, "emit_i64x2_add");
2471 }
2472 
emit_i64x2_sub(LiftoffRegister dst,LiftoffRegister lhs,LiftoffRegister rhs)2473 void LiftoffAssembler::emit_i64x2_sub(LiftoffRegister dst, LiftoffRegister lhs,
2474                                       LiftoffRegister rhs) {
2475   bailout(kSimd, "emit_i64x2_sub");
2476 }
2477 
emit_i64x2_mul(LiftoffRegister dst,LiftoffRegister lhs,LiftoffRegister rhs)2478 void LiftoffAssembler::emit_i64x2_mul(LiftoffRegister dst, LiftoffRegister lhs,
2479                                       LiftoffRegister rhs) {
2480   bailout(kSimd, "emit_i64x2_mul");
2481 }
2482 
emit_i64x2_gt_s(LiftoffRegister dst,LiftoffRegister lhs,LiftoffRegister rhs)2483 void LiftoffAssembler::emit_i64x2_gt_s(LiftoffRegister dst, LiftoffRegister lhs,
2484                                        LiftoffRegister rhs) {
2485   bailout(kSimd, "emit_i64x2_gt_s");
2486 }
2487 
emit_i64x2_ge_s(LiftoffRegister dst,LiftoffRegister lhs,LiftoffRegister rhs)2488 void LiftoffAssembler::emit_i64x2_ge_s(LiftoffRegister dst, LiftoffRegister lhs,
2489                                        LiftoffRegister rhs) {
2490   bailout(kSimd, "emit_i64x2_ge_s");
2491 }
2492 
emit_f32x4_abs(LiftoffRegister dst,LiftoffRegister src)2493 void LiftoffAssembler::emit_f32x4_abs(LiftoffRegister dst,
2494                                       LiftoffRegister src) {
2495   bailout(kSimd, "emit_f32x4_abs");
2496 }
2497 
emit_f32x4_neg(LiftoffRegister dst,LiftoffRegister src)2498 void LiftoffAssembler::emit_f32x4_neg(LiftoffRegister dst,
2499                                       LiftoffRegister src) {
2500   bailout(kSimd, "emit_f32x4_neg");
2501 }
2502 
emit_f32x4_sqrt(LiftoffRegister dst,LiftoffRegister src)2503 void LiftoffAssembler::emit_f32x4_sqrt(LiftoffRegister dst,
2504                                        LiftoffRegister src) {
2505   bailout(kSimd, "emit_f32x4_sqrt");
2506 }
2507 
emit_f32x4_ceil(LiftoffRegister dst,LiftoffRegister src)2508 bool LiftoffAssembler::emit_f32x4_ceil(LiftoffRegister dst,
2509                                        LiftoffRegister src) {
2510   return false;
2511 }
2512 
emit_f32x4_floor(LiftoffRegister dst,LiftoffRegister src)2513 bool LiftoffAssembler::emit_f32x4_floor(LiftoffRegister dst,
2514                                         LiftoffRegister src) {
2515   return false;
2516 }
2517 
emit_f32x4_trunc(LiftoffRegister dst,LiftoffRegister src)2518 bool LiftoffAssembler::emit_f32x4_trunc(LiftoffRegister dst,
2519                                         LiftoffRegister src) {
2520   return false;
2521 }
2522 
emit_f32x4_nearest_int(LiftoffRegister dst,LiftoffRegister src)2523 bool LiftoffAssembler::emit_f32x4_nearest_int(LiftoffRegister dst,
2524                                               LiftoffRegister src) {
2525   return false;
2526 }
2527 
emit_f32x4_add(LiftoffRegister dst,LiftoffRegister lhs,LiftoffRegister rhs)2528 void LiftoffAssembler::emit_f32x4_add(LiftoffRegister dst, LiftoffRegister lhs,
2529                                       LiftoffRegister rhs) {
2530   bailout(kSimd, "emit_f32x4_add");
2531 }
2532 
emit_f32x4_sub(LiftoffRegister dst,LiftoffRegister lhs,LiftoffRegister rhs)2533 void LiftoffAssembler::emit_f32x4_sub(LiftoffRegister dst, LiftoffRegister lhs,
2534                                       LiftoffRegister rhs) {
2535   bailout(kSimd, "emit_f32x4_sub");
2536 }
2537 
emit_f32x4_mul(LiftoffRegister dst,LiftoffRegister lhs,LiftoffRegister rhs)2538 void LiftoffAssembler::emit_f32x4_mul(LiftoffRegister dst, LiftoffRegister lhs,
2539                                       LiftoffRegister rhs) {
2540   bailout(kSimd, "emit_f32x4_mul");
2541 }
2542 
emit_f32x4_div(LiftoffRegister dst,LiftoffRegister lhs,LiftoffRegister rhs)2543 void LiftoffAssembler::emit_f32x4_div(LiftoffRegister dst, LiftoffRegister lhs,
2544                                       LiftoffRegister rhs) {
2545   bailout(kSimd, "emit_f32x4_div");
2546 }
2547 
emit_f32x4_min(LiftoffRegister dst,LiftoffRegister lhs,LiftoffRegister rhs)2548 void LiftoffAssembler::emit_f32x4_min(LiftoffRegister dst, LiftoffRegister lhs,
2549                                       LiftoffRegister rhs) {
2550   bailout(kSimd, "emit_f32x4_min");
2551 }
2552 
emit_f32x4_max(LiftoffRegister dst,LiftoffRegister lhs,LiftoffRegister rhs)2553 void LiftoffAssembler::emit_f32x4_max(LiftoffRegister dst, LiftoffRegister lhs,
2554                                       LiftoffRegister rhs) {
2555   bailout(kSimd, "emit_f32x4_max");
2556 }
2557 
emit_f32x4_pmin(LiftoffRegister dst,LiftoffRegister lhs,LiftoffRegister rhs)2558 void LiftoffAssembler::emit_f32x4_pmin(LiftoffRegister dst, LiftoffRegister lhs,
2559                                        LiftoffRegister rhs) {
2560   bailout(kSimd, "emit_f32x4_pmin");
2561 }
2562 
emit_f32x4_pmax(LiftoffRegister dst,LiftoffRegister lhs,LiftoffRegister rhs)2563 void LiftoffAssembler::emit_f32x4_pmax(LiftoffRegister dst, LiftoffRegister lhs,
2564                                        LiftoffRegister rhs) {
2565   bailout(kSimd, "emit_f32x4_pmax");
2566 }
2567 
emit_f64x2_abs(LiftoffRegister dst,LiftoffRegister src)2568 void LiftoffAssembler::emit_f64x2_abs(LiftoffRegister dst,
2569                                       LiftoffRegister src) {
2570   bailout(kSimd, "emit_f64x2_abs");
2571 }
2572 
emit_f64x2_neg(LiftoffRegister dst,LiftoffRegister src)2573 void LiftoffAssembler::emit_f64x2_neg(LiftoffRegister dst,
2574                                       LiftoffRegister src) {
2575   bailout(kSimd, "emit_f64x2_neg");
2576 }
2577 
emit_f64x2_sqrt(LiftoffRegister dst,LiftoffRegister src)2578 void LiftoffAssembler::emit_f64x2_sqrt(LiftoffRegister dst,
2579                                        LiftoffRegister src) {
2580   bailout(kSimd, "emit_f64x2_sqrt");
2581 }
2582 
emit_f64x2_ceil(LiftoffRegister dst,LiftoffRegister src)2583 bool LiftoffAssembler::emit_f64x2_ceil(LiftoffRegister dst,
2584                                        LiftoffRegister src) {
2585   return false;
2586 }
2587 
emit_f64x2_floor(LiftoffRegister dst,LiftoffRegister src)2588 bool LiftoffAssembler::emit_f64x2_floor(LiftoffRegister dst,
2589                                         LiftoffRegister src) {
2590   return false;
2591 }
2592 
emit_f64x2_trunc(LiftoffRegister dst,LiftoffRegister src)2593 bool LiftoffAssembler::emit_f64x2_trunc(LiftoffRegister dst,
2594                                         LiftoffRegister src) {
2595   return false;
2596 }
2597 
emit_f64x2_nearest_int(LiftoffRegister dst,LiftoffRegister src)2598 bool LiftoffAssembler::emit_f64x2_nearest_int(LiftoffRegister dst,
2599                                               LiftoffRegister src) {
2600   return false;
2601 }
2602 
emit_f64x2_add(LiftoffRegister dst,LiftoffRegister lhs,LiftoffRegister rhs)2603 void LiftoffAssembler::emit_f64x2_add(LiftoffRegister dst, LiftoffRegister lhs,
2604                                       LiftoffRegister rhs) {
2605   bailout(kSimd, "emit_f64x2_add");
2606 }
2607 
emit_f64x2_sub(LiftoffRegister dst,LiftoffRegister lhs,LiftoffRegister rhs)2608 void LiftoffAssembler::emit_f64x2_sub(LiftoffRegister dst, LiftoffRegister lhs,
2609                                       LiftoffRegister rhs) {
2610   bailout(kSimd, "emit_f64x2_sub");
2611 }
2612 
emit_f64x2_mul(LiftoffRegister dst,LiftoffRegister lhs,LiftoffRegister rhs)2613 void LiftoffAssembler::emit_f64x2_mul(LiftoffRegister dst, LiftoffRegister lhs,
2614                                       LiftoffRegister rhs) {
2615   bailout(kSimd, "emit_f64x2_mul");
2616 }
2617 
emit_f64x2_div(LiftoffRegister dst,LiftoffRegister lhs,LiftoffRegister rhs)2618 void LiftoffAssembler::emit_f64x2_div(LiftoffRegister dst, LiftoffRegister lhs,
2619                                       LiftoffRegister rhs) {
2620   bailout(kSimd, "emit_f64x2_div");
2621 }
2622 
emit_f64x2_min(LiftoffRegister dst,LiftoffRegister lhs,LiftoffRegister rhs)2623 void LiftoffAssembler::emit_f64x2_min(LiftoffRegister dst, LiftoffRegister lhs,
2624                                       LiftoffRegister rhs) {
2625   bailout(kSimd, "emit_f64x2_min");
2626 }
2627 
emit_f64x2_max(LiftoffRegister dst,LiftoffRegister lhs,LiftoffRegister rhs)2628 void LiftoffAssembler::emit_f64x2_max(LiftoffRegister dst, LiftoffRegister lhs,
2629                                       LiftoffRegister rhs) {
2630   bailout(kSimd, "emit_f64x2_max");
2631 }
2632 
emit_f64x2_pmin(LiftoffRegister dst,LiftoffRegister lhs,LiftoffRegister rhs)2633 void LiftoffAssembler::emit_f64x2_pmin(LiftoffRegister dst, LiftoffRegister lhs,
2634                                        LiftoffRegister rhs) {
2635   bailout(kSimd, "emit_f64x2_pmin");
2636 }
2637 
emit_f64x2_pmax(LiftoffRegister dst,LiftoffRegister lhs,LiftoffRegister rhs)2638 void LiftoffAssembler::emit_f64x2_pmax(LiftoffRegister dst, LiftoffRegister lhs,
2639                                        LiftoffRegister rhs) {
2640   bailout(kSimd, "emit_f64x2_pmax");
2641 }
2642 
emit_f64x2_convert_low_i32x4_s(LiftoffRegister dst,LiftoffRegister src)2643 void LiftoffAssembler::emit_f64x2_convert_low_i32x4_s(LiftoffRegister dst,
2644                                                       LiftoffRegister src) {
2645   bailout(kSimd, "emit_f64x2_convert_low_i32x4_s");
2646 }
2647 
emit_f64x2_convert_low_i32x4_u(LiftoffRegister dst,LiftoffRegister src)2648 void LiftoffAssembler::emit_f64x2_convert_low_i32x4_u(LiftoffRegister dst,
2649                                                       LiftoffRegister src) {
2650   bailout(kSimd, "emit_f64x2_convert_low_i32x4_u");
2651 }
2652 
emit_f64x2_promote_low_f32x4(LiftoffRegister dst,LiftoffRegister src)2653 void LiftoffAssembler::emit_f64x2_promote_low_f32x4(LiftoffRegister dst,
2654                                                     LiftoffRegister src) {
2655   bailout(kSimd, "emit_f64x2_promote_low_f32x4");
2656 }
2657 
emit_i32x4_sconvert_f32x4(LiftoffRegister dst,LiftoffRegister src)2658 void LiftoffAssembler::emit_i32x4_sconvert_f32x4(LiftoffRegister dst,
2659                                                  LiftoffRegister src) {
2660   bailout(kSimd, "emit_i32x4_sconvert_f32x4");
2661 }
2662 
emit_i32x4_uconvert_f32x4(LiftoffRegister dst,LiftoffRegister src)2663 void LiftoffAssembler::emit_i32x4_uconvert_f32x4(LiftoffRegister dst,
2664                                                  LiftoffRegister src) {
2665   bailout(kSimd, "emit_i32x4_uconvert_f32x4");
2666 }
2667 
emit_i32x4_trunc_sat_f64x2_s_zero(LiftoffRegister dst,LiftoffRegister src)2668 void LiftoffAssembler::emit_i32x4_trunc_sat_f64x2_s_zero(LiftoffRegister dst,
2669                                                          LiftoffRegister src) {
2670   bailout(kSimd, "emit_i32x4_trunc_sat_f64x2_s_zero");
2671 }
2672 
emit_i32x4_trunc_sat_f64x2_u_zero(LiftoffRegister dst,LiftoffRegister src)2673 void LiftoffAssembler::emit_i32x4_trunc_sat_f64x2_u_zero(LiftoffRegister dst,
2674                                                          LiftoffRegister src) {
2675   bailout(kSimd, "emit_i32x4_trunc_sat_f64x2_u_zero");
2676 }
2677 
emit_f32x4_sconvert_i32x4(LiftoffRegister dst,LiftoffRegister src)2678 void LiftoffAssembler::emit_f32x4_sconvert_i32x4(LiftoffRegister dst,
2679                                                  LiftoffRegister src) {
2680   bailout(kSimd, "emit_f32x4_sconvert_i32x4");
2681 }
2682 
emit_f32x4_uconvert_i32x4(LiftoffRegister dst,LiftoffRegister src)2683 void LiftoffAssembler::emit_f32x4_uconvert_i32x4(LiftoffRegister dst,
2684                                                  LiftoffRegister src) {
2685   bailout(kSimd, "emit_f32x4_uconvert_i32x4");
2686 }
2687 
emit_f32x4_demote_f64x2_zero(LiftoffRegister dst,LiftoffRegister src)2688 void LiftoffAssembler::emit_f32x4_demote_f64x2_zero(LiftoffRegister dst,
2689                                                     LiftoffRegister src) {
2690   bailout(kSimd, "emit_f32x4_demote_f64x2_zero");
2691 }
2692 
// SIMD narrowing conversions (two-operand pack operations) are not
// implemented in Liftoff on mips32; record a {kSimd} bailout.
void LiftoffAssembler::emit_i8x16_sconvert_i16x8(LiftoffRegister dst,
                                                 LiftoffRegister lhs,
                                                 LiftoffRegister rhs) {
  bailout(kSimd, "emit_i8x16_sconvert_i16x8");
}

void LiftoffAssembler::emit_i8x16_uconvert_i16x8(LiftoffRegister dst,
                                                 LiftoffRegister lhs,
                                                 LiftoffRegister rhs) {
  bailout(kSimd, "emit_i8x16_uconvert_i16x8");
}

void LiftoffAssembler::emit_i16x8_sconvert_i32x4(LiftoffRegister dst,
                                                 LiftoffRegister lhs,
                                                 LiftoffRegister rhs) {
  bailout(kSimd, "emit_i16x8_sconvert_i32x4");
}

void LiftoffAssembler::emit_i16x8_uconvert_i32x4(LiftoffRegister dst,
                                                 LiftoffRegister lhs,
                                                 LiftoffRegister rhs) {
  bailout(kSimd, "emit_i16x8_uconvert_i32x4");
}
2716 
// SIMD widening (low/high half extend) conversions are not implemented in
// Liftoff on mips32; record a {kSimd} bailout.
void LiftoffAssembler::emit_i16x8_sconvert_i8x16_low(LiftoffRegister dst,
                                                     LiftoffRegister src) {
  bailout(kSimd, "emit_i16x8_sconvert_i8x16_low");
}

void LiftoffAssembler::emit_i16x8_sconvert_i8x16_high(LiftoffRegister dst,
                                                      LiftoffRegister src) {
  bailout(kSimd, "emit_i16x8_sconvert_i8x16_high");
}

void LiftoffAssembler::emit_i16x8_uconvert_i8x16_low(LiftoffRegister dst,
                                                     LiftoffRegister src) {
  bailout(kSimd, "emit_i16x8_uconvert_i8x16_low");
}

void LiftoffAssembler::emit_i16x8_uconvert_i8x16_high(LiftoffRegister dst,
                                                      LiftoffRegister src) {
  bailout(kSimd, "emit_i16x8_uconvert_i8x16_high");
}

void LiftoffAssembler::emit_i32x4_sconvert_i16x8_low(LiftoffRegister dst,
                                                     LiftoffRegister src) {
  bailout(kSimd, "emit_i32x4_sconvert_i16x8_low");
}

void LiftoffAssembler::emit_i32x4_sconvert_i16x8_high(LiftoffRegister dst,
                                                      LiftoffRegister src) {
  bailout(kSimd, "emit_i32x4_sconvert_i16x8_high");
}

void LiftoffAssembler::emit_i32x4_uconvert_i16x8_low(LiftoffRegister dst,
                                                     LiftoffRegister src) {
  bailout(kSimd, "emit_i32x4_uconvert_i16x8_low");
}

void LiftoffAssembler::emit_i32x4_uconvert_i16x8_high(LiftoffRegister dst,
                                                      LiftoffRegister src) {
  bailout(kSimd, "emit_i32x4_uconvert_i16x8_high");
}
2756 
// i64x2 widening conversions are not implemented in Liftoff on mips32;
// record a {kSimd} bailout.
void LiftoffAssembler::emit_i64x2_sconvert_i32x4_low(LiftoffRegister dst,
                                                     LiftoffRegister src) {
  bailout(kSimd, "emit_i64x2_sconvert_i32x4_low");
}

void LiftoffAssembler::emit_i64x2_sconvert_i32x4_high(LiftoffRegister dst,
                                                      LiftoffRegister src) {
  bailout(kSimd, "emit_i64x2_sconvert_i32x4_high");
}

void LiftoffAssembler::emit_i64x2_uconvert_i32x4_low(LiftoffRegister dst,
                                                     LiftoffRegister src) {
  bailout(kSimd, "emit_i64x2_uconvert_i32x4_low");
}

void LiftoffAssembler::emit_i64x2_uconvert_i32x4_high(LiftoffRegister dst,
                                                      LiftoffRegister src) {
  bailout(kSimd, "emit_i64x2_uconvert_i32x4_high");
}
2776 
// SIMD unsigned rounding-average operations are not implemented in Liftoff
// on mips32; record a {kSimd} bailout.
void LiftoffAssembler::emit_i8x16_rounding_average_u(LiftoffRegister dst,
                                                     LiftoffRegister lhs,
                                                     LiftoffRegister rhs) {
  bailout(kSimd, "emit_i8x16_rounding_average_u");
}

void LiftoffAssembler::emit_i16x8_rounding_average_u(LiftoffRegister dst,
                                                     LiftoffRegister lhs,
                                                     LiftoffRegister rhs) {
  bailout(kSimd, "emit_i16x8_rounding_average_u");
}
2788 
// SIMD lane-wise absolute value is not implemented in Liftoff on mips32;
// record a {kSimd} bailout.
void LiftoffAssembler::emit_i8x16_abs(LiftoffRegister dst,
                                      LiftoffRegister src) {
  bailout(kSimd, "emit_i8x16_abs");
}

void LiftoffAssembler::emit_i16x8_abs(LiftoffRegister dst,
                                      LiftoffRegister src) {
  bailout(kSimd, "emit_i16x8_abs");
}

void LiftoffAssembler::emit_i32x4_abs(LiftoffRegister dst,
                                      LiftoffRegister src) {
  bailout(kSimd, "emit_i32x4_abs");
}
2803 
// SIMD lane extraction is not implemented in Liftoff on mips32; record a
// {kSimd} bailout.
void LiftoffAssembler::emit_i8x16_extract_lane_s(LiftoffRegister dst,
                                                 LiftoffRegister lhs,
                                                 uint8_t imm_lane_idx) {
  bailout(kSimd, "emit_i8x16_extract_lane_s");
}

void LiftoffAssembler::emit_i8x16_extract_lane_u(LiftoffRegister dst,
                                                 LiftoffRegister lhs,
                                                 uint8_t imm_lane_idx) {
  bailout(kSimd, "emit_i8x16_extract_lane_u");
}

void LiftoffAssembler::emit_i16x8_extract_lane_s(LiftoffRegister dst,
                                                 LiftoffRegister lhs,
                                                 uint8_t imm_lane_idx) {
  bailout(kSimd, "emit_i16x8_extract_lane_s");
}

void LiftoffAssembler::emit_i16x8_extract_lane_u(LiftoffRegister dst,
                                                 LiftoffRegister lhs,
                                                 uint8_t imm_lane_idx) {
  bailout(kSimd, "emit_i16x8_extract_lane_u");
}

void LiftoffAssembler::emit_i32x4_extract_lane(LiftoffRegister dst,
                                               LiftoffRegister lhs,
                                               uint8_t imm_lane_idx) {
  bailout(kSimd, "emit_i32x4_extract_lane");
}

void LiftoffAssembler::emit_i64x2_extract_lane(LiftoffRegister dst,
                                               LiftoffRegister lhs,
                                               uint8_t imm_lane_idx) {
  bailout(kSimd, "emit_i64x2_extract_lane");
}

void LiftoffAssembler::emit_f32x4_extract_lane(LiftoffRegister dst,
                                               LiftoffRegister lhs,
                                               uint8_t imm_lane_idx) {
  bailout(kSimd, "emit_f32x4_extract_lane");
}

void LiftoffAssembler::emit_f64x2_extract_lane(LiftoffRegister dst,
                                               LiftoffRegister lhs,
                                               uint8_t imm_lane_idx) {
  bailout(kSimd, "emit_f64x2_extract_lane");
}
2851 
// SIMD lane replacement is not implemented in Liftoff on mips32; record a
// {kSimd} bailout.
void LiftoffAssembler::emit_i8x16_replace_lane(LiftoffRegister dst,
                                               LiftoffRegister src1,
                                               LiftoffRegister src2,
                                               uint8_t imm_lane_idx) {
  bailout(kSimd, "emit_i8x16_replace_lane");
}

void LiftoffAssembler::emit_i16x8_replace_lane(LiftoffRegister dst,
                                               LiftoffRegister src1,
                                               LiftoffRegister src2,
                                               uint8_t imm_lane_idx) {
  bailout(kSimd, "emit_i16x8_replace_lane");
}

void LiftoffAssembler::emit_i32x4_replace_lane(LiftoffRegister dst,
                                               LiftoffRegister src1,
                                               LiftoffRegister src2,
                                               uint8_t imm_lane_idx) {
  bailout(kSimd, "emit_i32x4_replace_lane");
}

void LiftoffAssembler::emit_i64x2_replace_lane(LiftoffRegister dst,
                                               LiftoffRegister src1,
                                               LiftoffRegister src2,
                                               uint8_t imm_lane_idx) {
  bailout(kSimd, "emit_i64x2_replace_lane");
}

void LiftoffAssembler::emit_f32x4_replace_lane(LiftoffRegister dst,
                                               LiftoffRegister src1,
                                               LiftoffRegister src2,
                                               uint8_t imm_lane_idx) {
  bailout(kSimd, "emit_f32x4_replace_lane");
}

void LiftoffAssembler::emit_f64x2_replace_lane(LiftoffRegister dst,
                                               LiftoffRegister src1,
                                               LiftoffRegister src2,
                                               uint8_t imm_lane_idx) {
  bailout(kSimd, "emit_f64x2_replace_lane");
}
2893 
// Emits a stack-overflow check: loads the current stack limit from
// {limit_address} (clobbering that register) and branches to {ool_code}
// when sp is at or below the limit (unsigned comparison).
void LiftoffAssembler::StackCheck(Label* ool_code, Register limit_address) {
  TurboAssembler::Ulw(limit_address, MemOperand(limit_address));
  TurboAssembler::Branch(ool_code, ule, sp, Operand(limit_address));
}
2898 
// Emits a zero-argument C call to the test-only trap callback. Any unused
// cache register serves as the scratch register for call setup.
void LiftoffAssembler::CallTrapCallbackForTesting() {
  PrepareCallCFunction(0, GetUnusedRegister(kGpReg, {}).gp());
  CallCFunction(ExternalReference::wasm_call_trap_callback_for_testing(), 0);
}
2903 
AssertUnreachable(AbortReason reason)2904 void LiftoffAssembler::AssertUnreachable(AbortReason reason) {
2905   if (FLAG_debug_code) Abort(reason);
2906 }
2907 
// Emits code to spill all registers in {regs} to the stack: first the GP
// registers (kSystemPointerSize bytes each), then the FP registers
// (kStackSlotSize bytes reserved each, stored as 64-bit doubles).
// PopRegisters restores in the matching order.
void LiftoffAssembler::PushRegisters(LiftoffRegList regs) {
  LiftoffRegList gp_regs = regs & kGpCacheRegList;
  unsigned num_gp_regs = gp_regs.GetNumRegsSet();
  if (num_gp_regs) {
    // Decrement sp once for all GP slots, then fill them from the highest
    // offset down so the first (lowest-numbered) register lands highest.
    unsigned offset = num_gp_regs * kSystemPointerSize;
    addiu(sp, sp, -offset);
    while (!gp_regs.is_empty()) {
      LiftoffRegister reg = gp_regs.GetFirstRegSet();
      offset -= kSystemPointerSize;
      sw(reg.gp(), MemOperand(sp, offset));
      gp_regs.clear(reg);
    }
    DCHECK_EQ(offset, 0);
  }
  LiftoffRegList fp_regs = regs & kFpCacheRegList;
  unsigned num_fp_regs = fp_regs.GetNumRegsSet();
  if (num_fp_regs) {
    addiu(sp, sp, -(num_fp_regs * kStackSlotSize));
    // FP registers are stored at increasing offsets, lowest-numbered first.
    unsigned offset = 0;
    while (!fp_regs.is_empty()) {
      LiftoffRegister reg = fp_regs.GetFirstRegSet();
      TurboAssembler::Sdc1(reg.fp(), MemOperand(sp, offset));
      fp_regs.clear(reg);
      offset += sizeof(double);
    }
    // Relies on kStackSlotSize == sizeof(double) for the offsets to line up.
    DCHECK_EQ(offset, num_fp_regs * sizeof(double));
  }
}
2936 
// Emits code to restore all registers in {regs} from the stack, undoing
// PushRegisters: FP registers are reloaded first (they sit below the GP
// slots), then GP registers in reverse push order (GetLastRegSet mirrors
// the GetFirstRegSet order used when pushing).
void LiftoffAssembler::PopRegisters(LiftoffRegList regs) {
  LiftoffRegList fp_regs = regs & kFpCacheRegList;
  unsigned fp_offset = 0;
  while (!fp_regs.is_empty()) {
    LiftoffRegister reg = fp_regs.GetFirstRegSet();
    TurboAssembler::Ldc1(reg.fp(), MemOperand(sp, fp_offset));
    fp_regs.clear(reg);
    fp_offset += sizeof(double);
  }
  if (fp_offset) addiu(sp, sp, fp_offset);
  LiftoffRegList gp_regs = regs & kGpCacheRegList;
  unsigned gp_offset = 0;
  while (!gp_regs.is_empty()) {
    LiftoffRegister reg = gp_regs.GetLastRegSet();
    lw(reg.gp(), MemOperand(sp, gp_offset));
    gp_regs.clear(reg);
    gp_offset += kSystemPointerSize;
  }
  addiu(sp, sp, gp_offset);
}
2957 
// Records spill slots in the safepoint table: every register in
// {all_spills} occupies one slot starting at {spill_offset}; slots holding
// registers from {ref_spills} are marked as tagged so the GC visits them.
void LiftoffAssembler::RecordSpillsInSafepoint(
    SafepointTableBuilder::Safepoint& safepoint, LiftoffRegList all_spills,
    LiftoffRegList ref_spills, int spill_offset) {
  int spill_space_size = 0;
  while (!all_spills.is_empty()) {
    LiftoffRegister reg = all_spills.GetFirstRegSet();
    if (ref_spills.has(reg)) {
      safepoint.DefineTaggedStackSlot(spill_offset);
    }
    all_spills.clear(reg);
    ++spill_offset;
    spill_space_size += kSystemPointerSize;
  }
  // Record the number of additional spill slots.
  RecordOolSpillSpaceSize(spill_space_size);
}
2974 
// Emits the function epilogue: drops {num_stack_slots} parameter slots from
// the stack and returns. The slot count must fit the 16-bit immediate of
// the underlying add instruction.
void LiftoffAssembler::DropStackSlotsAndRet(uint32_t num_stack_slots) {
  DCHECK_LT(num_stack_slots,
            (1 << 16) / kSystemPointerSize);  // 16 bit immediate
  TurboAssembler::DropAndRet(static_cast<int>(num_stack_slots));
}
2980 
// Emits a call to the C function {ext_ref}: reserves {stack_bytes} on the
// stack, stores all arguments into that buffer, passes the buffer address
// as the single C argument, and after the call moves the return value (if
// any) into {rets} and loads a potential out-argument from the buffer.
void LiftoffAssembler::CallC(const ValueKindSig* sig,
                             const LiftoffRegister* args,
                             const LiftoffRegister* rets,
                             ValueKind out_argument_kind, int stack_bytes,
                             ExternalReference ext_ref) {
  addiu(sp, sp, -stack_bytes);

  // Store each argument into the stack buffer, tightly packed by kind size.
  int arg_bytes = 0;
  for (ValueKind param_kind : sig->parameters()) {
    liftoff::Store(this, sp, arg_bytes, *args++, param_kind);
    arg_bytes += value_kind_size(param_kind);
  }
  DCHECK_LE(arg_bytes, stack_bytes);

  // Pass a pointer to the buffer with the arguments to the C function.
  // On mips, the first argument is passed in {a0}.
  constexpr Register kFirstArgReg = a0;
  mov(kFirstArgReg, sp);

  // Now call the C function.
  constexpr int kNumCCallArgs = 1;
  PrepareCallCFunction(kNumCCallArgs, kScratchReg);
  CallCFunction(ext_ref, kNumCCallArgs);

  // Move return value to the right register. At most one return value is
  // supported; the mips C ABI returns it in {v0}.
  const LiftoffRegister* next_result_reg = rets;
  if (sig->return_count() > 0) {
    DCHECK_EQ(1, sig->return_count());
    constexpr Register kReturnReg = v0;
    if (kReturnReg != next_result_reg->gp()) {
      Move(*next_result_reg, LiftoffRegister(kReturnReg), sig->GetReturn(0));
    }
    ++next_result_reg;
  }

  // Load potential output value from the buffer on the stack (written there
  // by the callee through the pointer passed in {a0}).
  if (out_argument_kind != kVoid) {
    liftoff::Load(this, *next_result_reg, sp, 0, out_argument_kind);
  }

  // Release the stack buffer.
  addiu(sp, sp, stack_bytes);
}
3023 
// Emits a direct call to wasm code at {addr}, with WASM_CALL relocation so
// the target can be patched.
void LiftoffAssembler::CallNativeWasmCode(Address addr) {
  Call(addr, RelocInfo::WASM_CALL);
}
3027 
// Emits a tail call (jump, no return) to wasm code at {addr}, with
// WASM_CALL relocation so the target can be patched.
void LiftoffAssembler::TailCallNativeWasmCode(Address addr) {
  Jump(addr, RelocInfo::WASM_CALL);
}
3031 
CallIndirect(const ValueKindSig * sig,compiler::CallDescriptor * call_descriptor,Register target)3032 void LiftoffAssembler::CallIndirect(const ValueKindSig* sig,
3033                                     compiler::CallDescriptor* call_descriptor,
3034                                     Register target) {
3035   if (target == no_reg) {
3036     pop(kScratchReg);
3037     Call(kScratchReg);
3038   } else {
3039     Call(target);
3040   }
3041 }
3042 
TailCallIndirect(Register target)3043 void LiftoffAssembler::TailCallIndirect(Register target) {
3044   if (target == no_reg) {
3045     Pop(kScratchReg);
3046     Jump(kScratchReg);
3047   } else {
3048     Jump(target);
3049   }
3050 }
3051 
// Emits a call to a wasm runtime stub of this module, encoding only the
// stub index; the actual address is patched in at relocation time.
void LiftoffAssembler::CallRuntimeStub(WasmCode::RuntimeStubId sid) {
  // A direct call to a wasm runtime stub defined in this module.
  // Just encode the stub index. This will be patched at relocation.
  Call(static_cast<Address>(sid), RelocInfo::WASM_STUB_CALL);
}
3057 
// Emits code reserving {size} bytes on the stack and placing the address of
// the new area (the updated sp) into {addr}.
void LiftoffAssembler::AllocateStackSlot(Register addr, uint32_t size) {
  addiu(sp, sp, -size);
  TurboAssembler::Move(addr, sp);
}
3062 
// Emits code releasing {size} bytes previously reserved by
// AllocateStackSlot.
void LiftoffAssembler::DeallocateStackSlot(uint32_t size) {
  addiu(sp, sp, size);
}
3066 
// Nothing to emit for on-stack replacement on mips32.
void LiftoffAssembler::MaybeOSR() {}
3068 
// Emits code storing 1 to the memory word at {dst} if {src} is NaN, and
// leaving it untouched otherwise. {kind} must be kF32 or kF64.
void LiftoffAssembler::emit_set_if_nan(Register dst, FPURegister src,
                                       ValueKind kind) {
  UseScratchRegisterScope temps(this);
  Register scratch = temps.Acquire();
  Label not_nan;
  if (kind == kF32) {
    CompareIsNanF32(src, src);
  } else {
    DCHECK_EQ(kind, kF64);
    CompareIsNanF64(src, src);
  }
  // The {li} below sits in the branch delay slot and thus executes on both
  // paths; that is harmless, since {scratch} is only consumed by the {sw},
  // which is skipped when {src} is not NaN.
  BranchFalseShortF(&not_nan, USE_DELAY_SLOT);
  li(scratch, 1);
  sw(scratch, MemOperand(dst));
  bind(&not_nan);
}
3085 
// Never reached on mips32: all SIMD operations bail out of Liftoff above,
// so no Liftoff code path can emit this NaN check for s128 values.
void LiftoffAssembler::emit_s128_set_if_nan(Register dst, LiftoffRegister src,
                                            Register tmp_gp,
                                            LiftoffRegister tmp_s128,
                                            ValueKind lane_kind) {
  UNIMPLEMENTED();
}
3092 
// Emits code pushing all recorded stack slots as call parameters, from the
// highest destination slot down to slot 0. {param_slots} is the total
// number of parameter slots; the gap between consecutive destination slots
// is filled with padding via AllocateStackSpace.
void LiftoffStackSlots::Construct(int param_slots) {
  DCHECK_LT(0, slots_.size());
  SortInPushOrder();
  int last_stack_slot = param_slots;
  for (auto& slot : slots_) {
    const int stack_slot = slot.dst_slot_;
    int stack_decrement = (last_stack_slot - stack_slot) * kSystemPointerSize;
    DCHECK_LT(0, stack_decrement);
    last_stack_slot = stack_slot;
    const LiftoffAssembler::VarState& src = slot.src_;
    switch (src.loc()) {
      case LiftoffAssembler::VarState::kStack: {
        if (src.kind() == kF64) {
          // An f64 occupies two words: push the high word here, the low
          // word below via the shared push at the end of this case.
          asm_->AllocateStackSpace(stack_decrement - kDoubleSize);
          DCHECK_EQ(kLowWord, slot.half_);
          asm_->lw(kScratchReg,
                   liftoff::GetHalfStackSlot(slot.src_offset_, kHighWord));
          asm_->push(kScratchReg);
        } else {
          asm_->AllocateStackSpace(stack_decrement - kSystemPointerSize);
        }
        asm_->lw(kScratchReg,
                 liftoff::GetHalfStackSlot(slot.src_offset_, slot.half_));
        asm_->push(kScratchReg);
        break;
      }
      case LiftoffAssembler::VarState::kRegister: {
        int pushed_bytes = SlotSizeInBytes(slot);
        asm_->AllocateStackSpace(stack_decrement - pushed_bytes);
        if (src.kind() == kI64) {
          // Only one 32-bit half of the i64 register pair is pushed here;
          // the other half is a separate slot.
          liftoff::push(
              asm_, slot.half_ == kLowWord ? src.reg().low() : src.reg().high(),
              kI32);
        } else {
          liftoff::push(asm_, src.reg(), src.kind());
        }
        break;
      }
      case LiftoffAssembler::VarState::kIntConst: {
        // The high word is the sign extension of the low word.
        asm_->AllocateStackSpace(stack_decrement - kSystemPointerSize);
        asm_->li(kScratchReg,
                 Operand(slot.half_ == kLowWord ? src.i32_const()
                                                : src.i32_const() >> 31));
        asm_->push(kScratchReg);
        break;
      }
    }
  }
}
3143 
3144 }  // namespace wasm
3145 }  // namespace internal
3146 }  // namespace v8
3147 
3148 #endif  // V8_WASM_BASELINE_MIPS_LIFTOFF_ASSEMBLER_MIPS_H_
3149