• Home
  • Line#
  • Scopes#
  • Navigate#
  • Raw
  • Download
1 // Copyright 2021 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4 
5 #ifndef V8_WASM_BASELINE_LOONG64_LIFTOFF_ASSEMBLER_LOONG64_H_
6 #define V8_WASM_BASELINE_LOONG64_LIFTOFF_ASSEMBLER_LOONG64_H_
7 
8 #include "src/base/platform/wrappers.h"
9 #include "src/codegen/machine-type.h"
10 #include "src/heap/memory-chunk.h"
11 #include "src/wasm/baseline/liftoff-assembler.h"
12 #include "src/wasm/wasm-objects.h"
13 
14 namespace v8 {
15 namespace internal {
16 namespace wasm {
17 
18 namespace liftoff {
19 
ToCondition(LiftoffCondition liftoff_cond)20 inline constexpr Condition ToCondition(LiftoffCondition liftoff_cond) {
21   switch (liftoff_cond) {
22     case kEqual:
23       return eq;
24     case kUnequal:
25       return ne;
26     case kSignedLessThan:
27       return lt;
28     case kSignedLessEqual:
29       return le;
30     case kSignedGreaterThan:
31       return gt;
32     case kSignedGreaterEqual:
33       return ge;
34     case kUnsignedLessThan:
35       return ult;
36     case kUnsignedLessEqual:
37       return ule;
38     case kUnsignedGreaterThan:
39       return ugt;
40     case kUnsignedGreaterEqual:
41       return uge;
42   }
43 }
44 
45 // Liftoff Frames.
46 //
47 //  slot      Frame
48 //       +--------------------+---------------------------
49 //  n+4  | optional padding slot to keep the stack 16 byte aligned.
50 //  n+3  |   parameter n      |
51 //  ...  |       ...          |
52 //   4   |   parameter 1      | or parameter 2
53 //   3   |   parameter 0      | or parameter 1
54 //   2   |  (result address)  | or parameter 0
55 //  -----+--------------------+---------------------------
56 //   1   | return addr (ra)   |
57 //   0   | previous frame (fp)|
58 //  -----+--------------------+  <-- frame ptr (fp)
59 //  -1   | StackFrame::WASM   |
60 //  -2   |     instance       |
61 //  -3   |     feedback vector|
62 //  -4   |     tiering budget |
63 //  -----+--------------------+---------------------------
64 //  -5   |     slot 0         |   ^
65 //  -6   |     slot 1         |   |
66 //       |                    | Frame slots
67 //       |                    |   |
68 //       |                    |   v
69 //       | optional padding slot to keep the stack 16 byte aligned.
70 //  -----+--------------------+  <-- stack ptr (sp)
71 //
72 
// Offsets (in bytes, below fp) of the fixed spill slots in the frame layout
// sketched above: the instance, the feedback vector, and the tiering budget.
constexpr int kInstanceOffset = 2 * kSystemPointerSize;
constexpr int kFeedbackVectorOffset = 3 * kSystemPointerSize;
constexpr int kTierupBudgetOffset = 4 * kSystemPointerSize;
76 
// Returns a MemOperand for the stack slot {offset} bytes below fp.
inline MemOperand GetStackSlot(int offset) { return MemOperand(fp, -offset); }
78 
// Returns a MemOperand addressing the spilled instance in the current frame.
inline MemOperand GetInstanceOperand() { return GetStackSlot(kInstanceOffset); }
80 
// Builds a MemOperand for {addr} + {offset} (optional register) +
// {offset_imm}. May clobber kScratchReg when the sum cannot be expressed as
// base + 32-bit immediate.
template <typename T>
inline MemOperand GetMemOp(LiftoffAssembler* assm, Register addr,
                           Register offset, T offset_imm) {
  if (is_int32(offset_imm)) {
    int32_t offset_imm32 = static_cast<int32_t>(offset_imm);
    if (offset == no_reg) return MemOperand(addr, offset_imm32);
    assm->add_d(kScratchReg, addr, offset);
    return MemOperand(kScratchReg, offset_imm32);
  }
  // Offset immediate does not fit in a 32-bit signed immediate; materialize
  // the full address in the scratch register instead.
  assm->li(kScratchReg, Operand(offset_imm));
  assm->add_d(kScratchReg, kScratchReg, addr);
  if (offset != no_reg) {
    assm->add_d(kScratchReg, kScratchReg, offset);
  }
  return MemOperand(kScratchReg, 0);
}
98 
Load(LiftoffAssembler * assm,LiftoffRegister dst,MemOperand src,ValueKind kind)99 inline void Load(LiftoffAssembler* assm, LiftoffRegister dst, MemOperand src,
100                  ValueKind kind) {
101   switch (kind) {
102     case kI32:
103       assm->Ld_w(dst.gp(), src);
104       break;
105     case kI64:
106     case kRef:
107     case kOptRef:
108     case kRtt:
109       assm->Ld_d(dst.gp(), src);
110       break;
111     case kF32:
112       assm->Fld_s(dst.fp(), src);
113       break;
114     case kF64:
115       assm->Fld_d(dst.fp(), src);
116       break;
117     case kS128:
118       UNREACHABLE();
119       break;
120     default:
121       UNREACHABLE();
122   }
123 }
124 
// Emits a store of {src} (kind {kind}) to memory at {base} + {offset}.
inline void Store(LiftoffAssembler* assm, Register base, int32_t offset,
                  LiftoffRegister src, ValueKind kind) {
  MemOperand dst(base, offset);
  switch (kind) {
    case kI32:
      assm->St_w(src.gp(), dst);
      break;
    case kI64:
    case kOptRef:
    case kRef:
    case kRtt:
      // 64-bit integers and tagged values are both pointer-sized stores.
      assm->St_d(src.gp(), dst);
      break;
    case kF32:
      assm->Fst_s(src.fp(), dst);
      break;
    case kF64:
      assm->Fst_d(src.fp(), dst);
      break;
    default:
      UNREACHABLE();
  }
}
148 
push(LiftoffAssembler * assm,LiftoffRegister reg,ValueKind kind)149 inline void push(LiftoffAssembler* assm, LiftoffRegister reg, ValueKind kind) {
150   switch (kind) {
151     case kI32:
152       assm->addi_d(sp, sp, -kSystemPointerSize);
153       assm->St_w(reg.gp(), MemOperand(sp, 0));
154       break;
155     case kI64:
156     case kOptRef:
157     case kRef:
158     case kRtt:
159       assm->Push(reg.gp());
160       break;
161     case kF32:
162       assm->addi_d(sp, sp, -kSystemPointerSize);
163       assm->Fst_s(reg.fp(), MemOperand(sp, 0));
164       break;
165     case kF64:
166       assm->addi_d(sp, sp, -kSystemPointerSize);
167       assm->Fst_d(reg.fp(), MemOperand(sp, 0));
168       break;
169     case kS128:
170       UNREACHABLE();
171       break;
172     default:
173       UNREACHABLE();
174   }
175 }
176 
177 }  // namespace liftoff
178 
// Reserves three instructions' worth of space for the stack-frame setup; the
// real frame size is patched in later by PatchPrepareStackFrame. Returns the
// pc offset of the reserved patch area.
int LiftoffAssembler::PrepareStackFrame() {
  int offset = pc_offset();
  // When constant that represents size of stack frame can't be represented
  // as 16bit we need three instructions to add it to sp, so we reserve space
  // for this case.
  addi_d(sp, sp, 0);  // Placeholder, patched later.
  nop();
  nop();
  return offset;
}
189 
// Rearranges the current frame for a tail call: re-pushes ra/fp, then moves
// the callee's stack parameters (plus the ra/fp pair) up over the caller's
// frame by {stack_param_delta} slots, and finally restores sp/fp/ra.
void LiftoffAssembler::PrepareTailCall(int num_callee_stack_params,
                                       int stack_param_delta) {
  UseScratchRegisterScope temps(this);
  Register scratch = temps.Acquire();

  // Push the return address and frame pointer to complete the stack frame.
  Ld_d(scratch, MemOperand(fp, 8));
  Push(scratch);
  Ld_d(scratch, MemOperand(fp, 0));
  Push(scratch);

  // Shift the whole frame upwards: slot_count covers the callee stack
  // parameters plus the ra/fp pair pushed above; copy from the highest slot
  // downwards so the ranges may overlap safely.
  int slot_count = num_callee_stack_params + 2;
  for (int i = slot_count - 1; i >= 0; --i) {
    Ld_d(scratch, MemOperand(sp, i * 8));
    St_d(scratch, MemOperand(fp, (i - stack_param_delta) * 8));
  }

  // Set the new stack and frame pointer.
  addi_d(sp, fp, -stack_param_delta * 8);
  Pop(ra, fp);
}
212 
// No extra frame-size alignment is needed on LOONG64.
void LiftoffAssembler::AlignFrameSize() {}
214 
// Patches the placeholder emitted by PrepareStackFrame at {offset} with the
// real frame allocation. Small frames get a single sp adjustment; large
// frames instead get a jump to out-of-line code emitted here that performs a
// stack check before allocating.
void LiftoffAssembler::PatchPrepareStackFrame(
    int offset, SafepointTableBuilder* safepoint_table_builder) {
  // The frame_size includes the frame marker and the instance slot. Both are
  // pushed as part of frame construction, so we don't need to allocate memory
  // for them anymore.
  int frame_size = GetTotalFrameSize() - 2 * kSystemPointerSize;

  // We can't run out of space, just pass anything big enough to not cause the
  // assembler to try to grow the buffer.
  constexpr int kAvailableSpace = 256;
  TurboAssembler patching_assembler(
      nullptr, AssemblerOptions{}, CodeObjectRequired::kNo,
      ExternalAssemblerBuffer(buffer_start_ + offset, kAvailableSpace));

  if (V8_LIKELY(frame_size < 4 * KB)) {
    // This is the standard case for small frames: just subtract from SP and be
    // done with it.
    patching_assembler.Add_d(sp, sp, Operand(-frame_size));
    return;
  }

  // The frame size is bigger than 4KB, so we might overflow the available stack
  // space if we first allocate the frame and then do the stack check (we will
  // need some remaining stack space for throwing the exception). That's why we
  // check the available stack space before we allocate the frame. To do this we
  // replace the {__ Add_d(sp, sp, -frame_size)} with a jump to OOL code that
  // does this "extended stack check".
  //
  // The OOL code can simply be generated here with the normal assembler,
  // because all other code generation, including OOL code, has already finished
  // when {PatchPrepareStackFrame} is called. The function prologue then jumps
  // to the current {pc_offset()} to execute the OOL code for allocating the
  // large frame.
  // Emit the unconditional branch in the function prologue (from {offset} to
  // {pc_offset()}).

  int imm32 = pc_offset() - offset;
  CHECK(is_int26(imm32));
  patching_assembler.b(imm32 >> 2);  // Branch offsets are in instruction units.

  // If the frame is bigger than the stack, we throw the stack overflow
  // exception unconditionally. Thereby we can avoid the integer overflow
  // check in the condition code.
  RecordComment("OOL: stack check for large frame");
  Label continuation;
  if (frame_size < FLAG_stack_size * 1024) {
    Register stack_limit = kScratchReg;
    Ld_d(stack_limit,
         FieldMemOperand(kWasmInstanceRegister,
                         WasmInstanceObject::kRealStackLimitAddressOffset));
    Ld_d(stack_limit, MemOperand(stack_limit, 0));
    Add_d(stack_limit, stack_limit, Operand(frame_size));
    Branch(&continuation, uge, sp, Operand(stack_limit));
  }

  Call(wasm::WasmCode::kWasmStackOverflow, RelocInfo::WASM_STUB_CALL);
  // The call will not return; just define an empty safepoint.
  safepoint_table_builder->DefineSafepoint(this);
  if (FLAG_debug_code) stop();

  bind(&continuation);

  // Now allocate the stack space. Note that this might do more than just
  // decrementing the SP;
  Add_d(sp, sp, Operand(-frame_size));

  // Jump back to the start of the function, from {pc_offset()} to
  // right after the reserved space for the {__ Add_d(sp, sp, -framesize)}
  // (which is a Branch now).
  int func_start_offset = offset + 3 * kInstrSize;
  imm32 = func_start_offset - pc_offset();
  CHECK(is_int26(imm32));
  b(imm32 >> 2);
}
289 
// Nothing to finalize on LOONG64.
void LiftoffAssembler::FinishCode() {}
291 
// No cleanup is required when compilation is aborted.
void LiftoffAssembler::AbortCompilation() {}
293 
// static
// Size of the fixed part of the frame: everything up to and including the
// tiering-budget slot (see the frame layout above).
constexpr int LiftoffAssembler::StaticStackFrameSize() {
  return liftoff::kTierupBudgetOffset;
}
298 
SlotSizeForType(ValueKind kind)299 int LiftoffAssembler::SlotSizeForType(ValueKind kind) {
300   switch (kind) {
301     case kS128:
302       return value_kind_size(kind);
303     default:
304       return kStackSlotSize;
305   }
306 }
307 
NeedsAlignment(ValueKind kind)308 bool LiftoffAssembler::NeedsAlignment(ValueKind kind) {
309   return kind == kS128 || is_reference(kind);
310 }
311 
// Materializes the compile-time constant {value} into {reg}, attaching
// {rmode} relocation info to integer immediates.
void LiftoffAssembler::LoadConstant(LiftoffRegister reg, WasmValue value,
                                    RelocInfo::Mode rmode) {
  switch (value.type().kind()) {
    case kI32:
      TurboAssembler::li(reg.gp(), Operand(value.to_i32(), rmode));
      break;
    case kI64:
      TurboAssembler::li(reg.gp(), Operand(value.to_i64(), rmode));
      break;
    case kF32:
      // Move the raw bit pattern to avoid any float conversion.
      TurboAssembler::Move(reg.fp(), value.to_f32_boxed().get_bits());
      break;
    case kF64:
      TurboAssembler::Move(reg.fp(), value.to_f64_boxed().get_bits());
      break;
    default:
      UNREACHABLE();
  }
}
331 
// Reloads the wasm instance from its spill slot in the current frame.
void LiftoffAssembler::LoadInstanceFromFrame(Register dst) {
  Ld_d(dst, liftoff::GetInstanceOperand());
}
335 
// Loads a {size}-byte field at {offset} from the instance object into {dst}.
// Only 1-, 4- and 8-byte fields are supported.
void LiftoffAssembler::LoadFromInstance(Register dst, Register instance,
                                        int offset, int size) {
  DCHECK_LE(0, offset);
  switch (size) {
    case 1:
      Ld_b(dst, MemOperand(instance, offset));
      break;
    case 4:
      Ld_w(dst, MemOperand(instance, offset));
      break;
    case 8:
      Ld_d(dst, MemOperand(instance, offset));
      break;
    default:
      UNIMPLEMENTED();
  }
}
353 
// Loads a tagged pointer field from the instance. Without pointer
// compression a tagged field is a full pointer-sized load.
void LiftoffAssembler::LoadTaggedPointerFromInstance(Register dst,
                                                     Register instance,
                                                     int32_t offset) {
  STATIC_ASSERT(kTaggedSize == kSystemPointerSize);
  Ld_d(dst, MemOperand(instance, offset));
}
360 
// Spills the instance register into its dedicated frame slot.
void LiftoffAssembler::SpillInstance(Register instance) {
  St_d(instance, liftoff::GetInstanceOperand());
}
364 
// No OSR-target state to reset on LOONG64.
void LiftoffAssembler::ResetOSRTarget() {}
366 
// Loads a tagged pointer from {src_addr} + {offset_reg} + {offset_imm}.
// Tagged values are full 64-bit words here (no pointer compression).
void LiftoffAssembler::LoadTaggedPointer(Register dst, Register src_addr,
                                         Register offset_reg,
                                         int32_t offset_imm,
                                         LiftoffRegList pinned) {
  STATIC_ASSERT(kTaggedSize == kInt64Size);
  MemOperand src_op = liftoff::GetMemOp(this, src_addr, offset_reg, offset_imm);
  Ld_d(dst, src_op);
}
375 
// Loads a full (untagged) pointer from {src_addr} + {offset_imm}.
void LiftoffAssembler::LoadFullPointer(Register dst, Register src_addr,
                                       int32_t offset_imm) {
  MemOperand src_op = liftoff::GetMemOp(this, src_addr, no_reg, offset_imm);
  Ld_d(dst, src_op);
}
381 
// Stores the tagged value {src} to {dst_addr} + {offset_reg} + {offset_imm}
// and, unless skipped, emits the generational write barrier for it.
void LiftoffAssembler::StoreTaggedPointer(Register dst_addr,
                                          Register offset_reg,
                                          int32_t offset_imm,
                                          LiftoffRegister src,
                                          LiftoffRegList pinned,
                                          SkipWriteBarrier skip_write_barrier) {
  UseScratchRegisterScope temps(this);
  Operand offset_op =
      offset_reg.is_valid() ? Operand(offset_reg) : Operand(offset_imm);
  // For the write barrier (below), we cannot have both an offset register and
  // an immediate offset. Add them to a 32-bit offset initially, but in a 64-bit
  // register, because that's needed in the MemOperand below.
  if (offset_reg.is_valid() && offset_imm) {
    Register effective_offset = temps.Acquire();
    Add_d(effective_offset, offset_reg, Operand(offset_imm));
    offset_op = Operand(effective_offset);
  }
  if (offset_op.is_reg()) {
    St_d(src.gp(), MemOperand(dst_addr, offset_op.rm()));
  } else {
    St_d(src.gp(), MemOperand(dst_addr, offset_imm));
  }

  if (skip_write_barrier || FLAG_disable_write_barriers) return;

  // Write barrier: only call the record-write stub if the destination page
  // tracks outgoing pointers, the value is a heap object (not a Smi), and the
  // value's page tracks incoming pointers.
  Label write_barrier;
  Label exit;
  CheckPageFlag(dst_addr, MemoryChunk::kPointersFromHereAreInterestingMask, ne,
                &write_barrier);
  b(&exit);
  bind(&write_barrier);
  JumpIfSmi(src.gp(), &exit);
  CheckPageFlag(src.gp(), MemoryChunk::kPointersToHereAreInterestingMask, eq,
                &exit);
  CallRecordWriteStubSaveRegisters(
      dst_addr, offset_op, RememberedSetAction::kEmit, SaveFPRegsMode::kSave,
      StubCallMode::kCallWasmRuntimeStub);
  bind(&exit);
}
421 
// Emits a wasm memory load of {type} from {src_addr} + {offset_reg} +
// {offset_imm} into {dst}. If {protected_load_pc} is non-null, records the pc
// of the (potentially trapping) load instruction for the trap handler.
void LiftoffAssembler::Load(LiftoffRegister dst, Register src_addr,
                            Register offset_reg, uintptr_t offset_imm,
                            LoadType type, LiftoffRegList pinned,
                            uint32_t* protected_load_pc, bool is_load_mem,
                            bool i64_offset) {
  MemOperand src_op = liftoff::GetMemOp(this, src_addr, offset_reg, offset_imm);

  if (protected_load_pc) *protected_load_pc = pc_offset();
  switch (type.value()) {
    case LoadType::kI32Load8U:
    case LoadType::kI64Load8U:
      Ld_bu(dst.gp(), src_op);
      break;
    case LoadType::kI32Load8S:
    case LoadType::kI64Load8S:
      Ld_b(dst.gp(), src_op);
      break;
    case LoadType::kI32Load16U:
    case LoadType::kI64Load16U:
      TurboAssembler::Ld_hu(dst.gp(), src_op);
      break;
    case LoadType::kI32Load16S:
    case LoadType::kI64Load16S:
      TurboAssembler::Ld_h(dst.gp(), src_op);
      break;
    case LoadType::kI64Load32U:
      TurboAssembler::Ld_wu(dst.gp(), src_op);
      break;
    case LoadType::kI32Load:
    case LoadType::kI64Load32S:
      TurboAssembler::Ld_w(dst.gp(), src_op);
      break;
    case LoadType::kI64Load:
      TurboAssembler::Ld_d(dst.gp(), src_op);
      break;
    case LoadType::kF32Load:
      TurboAssembler::Fld_s(dst.fp(), src_op);
      break;
    case LoadType::kF64Load:
      TurboAssembler::Fld_d(dst.fp(), src_op);
      break;
    case LoadType::kS128Load:
      // SIMD loads are not implemented on LOONG64 Liftoff.
      UNREACHABLE();
      break;
    default:
      UNREACHABLE();
  }
}
470 
// Emits a wasm memory store of {src} (type {type}) to {dst_addr} +
// {offset_reg} + {offset_imm}. If {protected_store_pc} is non-null, records
// the pc of the (potentially trapping) store instruction for the trap
// handler.
void LiftoffAssembler::Store(Register dst_addr, Register offset_reg,
                             uintptr_t offset_imm, LiftoffRegister src,
                             StoreType type, LiftoffRegList pinned,
                             uint32_t* protected_store_pc, bool is_store_mem) {
  MemOperand dst_op = liftoff::GetMemOp(this, dst_addr, offset_reg, offset_imm);

  if (protected_store_pc) *protected_store_pc = pc_offset();
  switch (type.value()) {
    case StoreType::kI32Store8:
    case StoreType::kI64Store8:
      St_b(src.gp(), dst_op);
      break;
    case StoreType::kI32Store16:
    case StoreType::kI64Store16:
      TurboAssembler::St_h(src.gp(), dst_op);
      break;
    case StoreType::kI32Store:
    case StoreType::kI64Store32:
      TurboAssembler::St_w(src.gp(), dst_op);
      break;
    case StoreType::kI64Store:
      TurboAssembler::St_d(src.gp(), dst_op);
      break;
    case StoreType::kF32Store:
      TurboAssembler::Fst_s(src.fp(), dst_op);
      break;
    case StoreType::kF64Store:
      TurboAssembler::Fst_d(src.fp(), dst_op);
      break;
    case StoreType::kS128Store:
      // SIMD stores are not implemented on LOONG64 Liftoff.
      UNREACHABLE();
      break;
    default:
      UNREACHABLE();
  }
}
507 
AtomicLoad(LiftoffRegister dst,Register src_addr,Register offset_reg,uintptr_t offset_imm,LoadType type,LiftoffRegList pinned)508 void LiftoffAssembler::AtomicLoad(LiftoffRegister dst, Register src_addr,
509                                   Register offset_reg, uintptr_t offset_imm,
510                                   LoadType type, LiftoffRegList pinned) {
511   UseScratchRegisterScope temps(this);
512   MemOperand src_op = liftoff::GetMemOp(this, src_addr, offset_reg, offset_imm);
513   switch (type.value()) {
514     case LoadType::kI32Load8U:
515     case LoadType::kI64Load8U: {
516       Ld_bu(dst.gp(), src_op);
517       dbar(0);
518       return;
519     }
520     case LoadType::kI32Load16U:
521     case LoadType::kI64Load16U: {
522       Ld_hu(dst.gp(), src_op);
523       dbar(0);
524       return;
525     }
526     case LoadType::kI32Load: {
527       Ld_w(dst.gp(), src_op);
528       dbar(0);
529       return;
530     }
531     case LoadType::kI64Load32U: {
532       Ld_wu(dst.gp(), src_op);
533       dbar(0);
534       return;
535     }
536     case LoadType::kI64Load: {
537       Ld_d(dst.gp(), src_op);
538       dbar(0);
539       return;
540     }
541     default:
542       UNREACHABLE();
543   }
544 }
545 
AtomicStore(Register dst_addr,Register offset_reg,uintptr_t offset_imm,LiftoffRegister src,StoreType type,LiftoffRegList pinned)546 void LiftoffAssembler::AtomicStore(Register dst_addr, Register offset_reg,
547                                    uintptr_t offset_imm, LiftoffRegister src,
548                                    StoreType type, LiftoffRegList pinned) {
549   UseScratchRegisterScope temps(this);
550   MemOperand dst_op = liftoff::GetMemOp(this, dst_addr, offset_reg, offset_imm);
551   switch (type.value()) {
552     case StoreType::kI64Store8:
553     case StoreType::kI32Store8: {
554       dbar(0);
555       St_b(src.gp(), dst_op);
556       return;
557     }
558     case StoreType::kI64Store16:
559     case StoreType::kI32Store16: {
560       dbar(0);
561       St_h(src.gp(), dst_op);
562       return;
563     }
564     case StoreType::kI64Store32:
565     case StoreType::kI32Store: {
566       dbar(0);
567       St_w(src.gp(), dst_op);
568       return;
569     }
570     case StoreType::kI64Store: {
571       dbar(0);
572       St_d(src.gp(), dst_op);
573       return;
574     }
575     default:
576       UNREACHABLE();
577   }
578 }
579 
// Emits an atomic read-modify-write on a sub-word field via an LL/SC loop on
// the containing aligned word/doubleword: align {temp0} down by masking with
// {aligned}, compute the field's bit position in {temp3}, then loop:
// load-linked into {temp1}, extract the old value into {result}, apply
// {bin_instr} with {value}, re-insert the new bits, store-conditional, and
// retry while the SC reports failure (temp1 == 0). dbar(0) fences on entry
// and exit. (No // comments inside: they would swallow the continuation
// backslashes.)
#define ASSEMBLE_ATOMIC_BINOP_EXT(load_linked, store_conditional, size, \
                                  bin_instr, aligned)                   \
  do {                                                                  \
    Label binop;                                                        \
    andi(temp3, temp0, aligned);                                        \
    Sub_d(temp0, temp0, Operand(temp3));                                \
    slli_w(temp3, temp3, 3);                                            \
    dbar(0);                                                            \
    bind(&binop);                                                       \
    load_linked(temp1, MemOperand(temp0, 0));                           \
    ExtractBits(result.gp(), temp1, temp3, size, false);                \
    bin_instr(temp2, result.gp(), Operand(value.gp()));                 \
    InsertBits(temp1, temp2, temp3, size);                              \
    store_conditional(temp1, MemOperand(temp0, 0));                     \
    BranchShort(&binop, eq, temp1, Operand(zero_reg));                  \
    dbar(0);                                                            \
  } while (0)
597 
// Defines LiftoffAssembler::Atomic{name}: sub-word sizes use the LL/SC
// extract/insert loop above, while naturally aligned 32/64-bit operations use
// the native am{opcode}_db_{w,d} atomic instructions. Four temporaries are
// pinned so GetMemOp/ExtractBits scratch usage cannot collide with the
// operands.
#define ATOMIC_BINOP_CASE(name, inst32, inst64, opcode)                  \
  void LiftoffAssembler::Atomic##name(                                   \
      Register dst_addr, Register offset_reg, uintptr_t offset_imm,      \
      LiftoffRegister value, LiftoffRegister result, StoreType type) {   \
    LiftoffRegList pinned = {dst_addr, offset_reg, value, result};       \
    Register temp0 = pinned.set(GetUnusedRegister(kGpReg, pinned)).gp(); \
    Register temp1 = pinned.set(GetUnusedRegister(kGpReg, pinned)).gp(); \
    Register temp2 = pinned.set(GetUnusedRegister(kGpReg, pinned)).gp(); \
    Register temp3 = pinned.set(GetUnusedRegister(kGpReg, pinned)).gp(); \
    MemOperand dst_op =                                                  \
        liftoff::GetMemOp(this, dst_addr, offset_reg, offset_imm);       \
    Add_d(temp0, dst_op.base(), dst_op.offset());                        \
    switch (type.value()) {                                              \
      case StoreType::kI64Store8:                                        \
        ASSEMBLE_ATOMIC_BINOP_EXT(Ll_d, Sc_d, 8, inst64, 7);             \
        break;                                                           \
      case StoreType::kI32Store8:                                        \
        ASSEMBLE_ATOMIC_BINOP_EXT(Ll_w, Sc_w, 8, inst32, 3);             \
        break;                                                           \
      case StoreType::kI64Store16:                                       \
        ASSEMBLE_ATOMIC_BINOP_EXT(Ll_d, Sc_d, 16, inst64, 7);            \
        break;                                                           \
      case StoreType::kI32Store16:                                       \
        ASSEMBLE_ATOMIC_BINOP_EXT(Ll_w, Sc_w, 16, inst32, 3);            \
        break;                                                           \
      case StoreType::kI64Store32:                                       \
        ASSEMBLE_ATOMIC_BINOP_EXT(Ll_d, Sc_d, 32, inst64, 7);            \
        break;                                                           \
      case StoreType::kI32Store:                                         \
        am##opcode##_db_w(result.gp(), value.gp(), temp0);               \
        break;                                                           \
      case StoreType::kI64Store:                                         \
        am##opcode##_db_d(result.gp(), value.gp(), temp0);               \
        break;                                                           \
      default:                                                           \
        UNREACHABLE();                                                   \
    }                                                                    \
  }
636 
// Instantiate AtomicAdd/And/Or/Xor. And/Or/Xor use the same TurboAssembler
// macro-instruction for 32- and 64-bit operands.
ATOMIC_BINOP_CASE(Add, Add_w, Add_d, add)
ATOMIC_BINOP_CASE(And, And, And, and)
ATOMIC_BINOP_CASE(Or, Or, Or, or)
ATOMIC_BINOP_CASE(Xor, Xor, Xor, xor)
641 
// Emits a full-width atomic read-modify-write via an LL/SC loop: load-linked
// the old value into {result}, apply {bin_instr} with {value} into {temp1},
// store-conditional, and retry while the SC reports failure (temp1 == 0).
// dbar(0) fences on entry and exit.
#define ASSEMBLE_ATOMIC_BINOP(load_linked, store_conditional, bin_instr) \
  do {                                                                   \
    Label binop;                                                         \
    dbar(0);                                                             \
    bind(&binop);                                                        \
    load_linked(result.gp(), MemOperand(temp0, 0));                      \
    bin_instr(temp1, result.gp(), Operand(value.gp()));                  \
    store_conditional(temp1, MemOperand(temp0, 0));                      \
    BranchShort(&binop, eq, temp1, Operand(zero_reg));                   \
    dbar(0);                                                             \
  } while (0)
653 
// Atomic subtract. Unlike Add/And/Or/Xor, LOONG64 has no native "amsub"
// instruction, so even aligned 32/64-bit subtraction uses an LL/SC loop;
// sub-word sizes additionally use the extract/insert variant.
void LiftoffAssembler::AtomicSub(Register dst_addr, Register offset_reg,
                                 uintptr_t offset_imm, LiftoffRegister value,
                                 LiftoffRegister result, StoreType type) {
  LiftoffRegList pinned = {dst_addr, offset_reg, value, result};
  Register temp0 = pinned.set(GetUnusedRegister(kGpReg, pinned)).gp();
  Register temp1 = pinned.set(GetUnusedRegister(kGpReg, pinned)).gp();
  Register temp2 = pinned.set(GetUnusedRegister(kGpReg, pinned)).gp();
  Register temp3 = pinned.set(GetUnusedRegister(kGpReg, pinned)).gp();
  MemOperand dst_op = liftoff::GetMemOp(this, dst_addr, offset_reg, offset_imm);
  // Materialize the effective address in temp0 for the LL/SC sequences.
  Add_d(temp0, dst_op.base(), dst_op.offset());
  switch (type.value()) {
    case StoreType::kI64Store8:
      ASSEMBLE_ATOMIC_BINOP_EXT(Ll_d, Sc_d, 8, Sub_d, 7);
      break;
    case StoreType::kI32Store8:
      ASSEMBLE_ATOMIC_BINOP_EXT(Ll_w, Sc_w, 8, Sub_w, 3);
      break;
    case StoreType::kI64Store16:
      ASSEMBLE_ATOMIC_BINOP_EXT(Ll_d, Sc_d, 16, Sub_d, 7);
      break;
    case StoreType::kI32Store16:
      ASSEMBLE_ATOMIC_BINOP_EXT(Ll_w, Sc_w, 16, Sub_w, 3);
      break;
    case StoreType::kI64Store32:
      ASSEMBLE_ATOMIC_BINOP_EXT(Ll_d, Sc_d, 32, Sub_d, 7);
      break;
    case StoreType::kI32Store:
      ASSEMBLE_ATOMIC_BINOP(Ll_w, Sc_w, Sub_w);
      break;
    case StoreType::kI64Store:
      ASSEMBLE_ATOMIC_BINOP(Ll_d, Sc_d, Sub_d);
      break;
    default:
      UNREACHABLE();
  }
}
#undef ASSEMBLE_ATOMIC_BINOP
#undef ASSEMBLE_ATOMIC_BINOP_EXT
#undef ATOMIC_BINOP_CASE
693 
// Emits an atomic exchange on a sub-word field via an LL/SC loop on the
// containing aligned word/doubleword: align {temp0} down, compute the field's
// bit position in {temp1}, then loop: load-linked into {temp2}, extract the
// old value into {result}, insert {value}, store-conditional, and retry while
// the SC reports failure (temp2 == 0). dbar(0) fences on entry and exit.
#define ASSEMBLE_ATOMIC_EXCHANGE_INTEGER_EXT(load_linked, store_conditional, \
                                             size, aligned)                  \
  do {                                                                       \
    Label exchange;                                                          \
    andi(temp1, temp0, aligned);                                             \
    Sub_d(temp0, temp0, Operand(temp1));                                     \
    slli_w(temp1, temp1, 3);                                                 \
    dbar(0);                                                                 \
    bind(&exchange);                                                         \
    load_linked(temp2, MemOperand(temp0, 0));                                \
    ExtractBits(result.gp(), temp2, temp1, size, false);                     \
    InsertBits(temp2, value.gp(), temp1, size);                              \
    store_conditional(temp2, MemOperand(temp0, 0));                          \
    BranchShort(&exchange, eq, temp2, Operand(zero_reg));                    \
    dbar(0);                                                                 \
  } while (0)
710 
// Atomic exchange: returns the old memory value in {result} and stores
// {value}. Sub-word sizes use the LL/SC extract/insert loop above; aligned
// 32/64-bit exchanges use the native amswap_db instructions.
void LiftoffAssembler::AtomicExchange(Register dst_addr, Register offset_reg,
                                      uintptr_t offset_imm,
                                      LiftoffRegister value,
                                      LiftoffRegister result, StoreType type) {
  LiftoffRegList pinned = {dst_addr, offset_reg, value, result};
  Register temp0 = pinned.set(GetUnusedRegister(kGpReg, pinned)).gp();
  Register temp1 = pinned.set(GetUnusedRegister(kGpReg, pinned)).gp();
  Register temp2 = pinned.set(GetUnusedRegister(kGpReg, pinned)).gp();
  MemOperand dst_op = liftoff::GetMemOp(this, dst_addr, offset_reg, offset_imm);
  // Materialize the effective address in temp0 for the sequences below.
  Add_d(temp0, dst_op.base(), dst_op.offset());
  switch (type.value()) {
    case StoreType::kI64Store8:
      ASSEMBLE_ATOMIC_EXCHANGE_INTEGER_EXT(Ll_d, Sc_d, 8, 7);
      break;
    case StoreType::kI32Store8:
      ASSEMBLE_ATOMIC_EXCHANGE_INTEGER_EXT(Ll_w, Sc_w, 8, 3);
      break;
    case StoreType::kI64Store16:
      ASSEMBLE_ATOMIC_EXCHANGE_INTEGER_EXT(Ll_d, Sc_d, 16, 7);
      break;
    case StoreType::kI32Store16:
      ASSEMBLE_ATOMIC_EXCHANGE_INTEGER_EXT(Ll_w, Sc_w, 16, 3);
      break;
    case StoreType::kI64Store32:
      ASSEMBLE_ATOMIC_EXCHANGE_INTEGER_EXT(Ll_d, Sc_d, 32, 7);
      break;
    case StoreType::kI32Store:
      amswap_db_w(result.gp(), value.gp(), temp0);
      break;
    case StoreType::kI64Store:
      amswap_db_d(result.gp(), value.gp(), temp0);
      break;
    default:
      UNREACHABLE();
  }
}
#undef ASSEMBLE_ATOMIC_EXCHANGE_INTEGER_EXT
748 
// LL/SC retry loop implementing a full-width (32/64-bit) compare-exchange.
// Expects the effective address in temp0. Loads the current value into
// `result`; if it differs from `expected` the loop exits without storing,
// otherwise `new_value` is stored conditionally and the loop retries on SC
// failure (SC writes 0 to its source register on failure). The dbar(0)
// instructions fence the operation on both sides.
// No inline // comments inside the macro body: they would swallow the
// line-continuation backslashes.
#define ASSEMBLE_ATOMIC_COMPARE_EXCHANGE_INTEGER(load_linked,       \
                                                 store_conditional) \
  do {                                                              \
    Label compareExchange;                                          \
    Label exit;                                                     \
    dbar(0);                                                        \
    bind(&compareExchange);                                         \
    load_linked(result.gp(), MemOperand(temp0, 0));                 \
    BranchShort(&exit, ne, expected.gp(), Operand(result.gp()));    \
    mov(temp2, new_value.gp());                                     \
    store_conditional(temp2, MemOperand(temp0, 0));                 \
    BranchShort(&compareExchange, eq, temp2, Operand(zero_reg));    \
    bind(&exit);                                                    \
    dbar(0);                                                        \
  } while (0)
764 
// Sub-word compare-exchange via LL/SC on the containing aligned word.
// `aligned` is the address alignment mask (3 for a 32-bit container, 7 for
// 64-bit). temp0 is rounded down to the aligned word address; temp1 becomes
// the bit offset of the `size`-bit field within that word (byte offset * 8).
// The old field is extracted (zero-extended) into `result`; `expected` is
// truncated to `size` bits before comparison; on match, `new_value` is
// inserted into the loaded word and stored conditionally, retrying on SC
// failure. dbar(0) fences both sides.
#define ASSEMBLE_ATOMIC_COMPARE_EXCHANGE_INTEGER_EXT(            \
    load_linked, store_conditional, size, aligned)               \
  do {                                                           \
    Label compareExchange;                                       \
    Label exit;                                                  \
    andi(temp1, temp0, aligned);                                 \
    Sub_d(temp0, temp0, Operand(temp1));                         \
    slli_w(temp1, temp1, 3);                                     \
    dbar(0);                                                     \
    bind(&compareExchange);                                      \
    load_linked(temp2, MemOperand(temp0, 0));                    \
    ExtractBits(result.gp(), temp2, temp1, size, false);         \
    ExtractBits(temp2, expected.gp(), zero_reg, size, false);    \
    BranchShort(&exit, ne, temp2, Operand(result.gp()));         \
    InsertBits(temp2, new_value.gp(), temp1, size);              \
    store_conditional(temp2, MemOperand(temp0, 0));              \
    BranchShort(&compareExchange, eq, temp2, Operand(zero_reg)); \
    bind(&exit);                                                 \
    dbar(0);                                                     \
  } while (0)
785 
// Atomic compare-exchange: if the value at the effective address equals
// `expected`, stores `new_value`; `result` always receives the old value.
// Full-width accesses use the plain LL/SC loop; narrower accesses use the
// _EXT variant that operates on the containing aligned word.
void LiftoffAssembler::AtomicCompareExchange(
    Register dst_addr, Register offset_reg, uintptr_t offset_imm,
    LiftoffRegister expected, LiftoffRegister new_value, LiftoffRegister result,
    StoreType type) {
  // Pin all inputs so the temps allocated below cannot alias them.
  LiftoffRegList pinned = {dst_addr, offset_reg, expected, new_value, result};
  Register temp0 = pinned.set(GetUnusedRegister(kGpReg, pinned)).gp();
  Register temp1 = pinned.set(GetUnusedRegister(kGpReg, pinned)).gp();
  Register temp2 = pinned.set(GetUnusedRegister(kGpReg, pinned)).gp();
  MemOperand dst_op = liftoff::GetMemOp(this, dst_addr, offset_reg, offset_imm);
  // temp0 = full effective address, as expected by the macros.
  Add_d(temp0, dst_op.base(), dst_op.offset());
  switch (type.value()) {
    // Sub-word cases: field size in bits, alignment mask of the container.
    case StoreType::kI64Store8:
      ASSEMBLE_ATOMIC_COMPARE_EXCHANGE_INTEGER_EXT(Ll_d, Sc_d, 8, 7);
      break;
    case StoreType::kI32Store8:
      ASSEMBLE_ATOMIC_COMPARE_EXCHANGE_INTEGER_EXT(Ll_w, Sc_w, 8, 3);
      break;
    case StoreType::kI64Store16:
      ASSEMBLE_ATOMIC_COMPARE_EXCHANGE_INTEGER_EXT(Ll_d, Sc_d, 16, 7);
      break;
    case StoreType::kI32Store16:
      ASSEMBLE_ATOMIC_COMPARE_EXCHANGE_INTEGER_EXT(Ll_w, Sc_w, 16, 3);
      break;
    case StoreType::kI64Store32:
      ASSEMBLE_ATOMIC_COMPARE_EXCHANGE_INTEGER_EXT(Ll_d, Sc_d, 32, 7);
      break;
    case StoreType::kI32Store:
      ASSEMBLE_ATOMIC_COMPARE_EXCHANGE_INTEGER(Ll_w, Sc_w);
      break;
    case StoreType::kI64Store:
      ASSEMBLE_ATOMIC_COMPARE_EXCHANGE_INTEGER(Ll_d, Sc_d);
      break;
    default:
      UNREACHABLE();
  }
}
822 #undef ASSEMBLE_ATOMIC_COMPARE_EXCHANGE_INTEGER
823 #undef ASSEMBLE_ATOMIC_COMPARE_EXCHANGE_INTEGER_EXT
824 
AtomicFence()825 void LiftoffAssembler::AtomicFence() { dbar(0); }
826 
LoadCallerFrameSlot(LiftoffRegister dst,uint32_t caller_slot_idx,ValueKind kind)827 void LiftoffAssembler::LoadCallerFrameSlot(LiftoffRegister dst,
828                                            uint32_t caller_slot_idx,
829                                            ValueKind kind) {
830   MemOperand src(fp, kSystemPointerSize * (caller_slot_idx + 1));
831   liftoff::Load(this, dst, src, kind);
832 }
833 
StoreCallerFrameSlot(LiftoffRegister src,uint32_t caller_slot_idx,ValueKind kind)834 void LiftoffAssembler::StoreCallerFrameSlot(LiftoffRegister src,
835                                             uint32_t caller_slot_idx,
836                                             ValueKind kind) {
837   int32_t offset = kSystemPointerSize * (caller_slot_idx + 1);
838   liftoff::Store(this, fp, offset, src, kind);
839 }
840 
LoadReturnStackSlot(LiftoffRegister dst,int offset,ValueKind kind)841 void LiftoffAssembler::LoadReturnStackSlot(LiftoffRegister dst, int offset,
842                                            ValueKind kind) {
843   liftoff::Load(this, dst, MemOperand(sp, offset), kind);
844 }
845 
MoveStackValue(uint32_t dst_offset,uint32_t src_offset,ValueKind kind)846 void LiftoffAssembler::MoveStackValue(uint32_t dst_offset, uint32_t src_offset,
847                                       ValueKind kind) {
848   DCHECK_NE(dst_offset, src_offset);
849   LiftoffRegister reg = GetUnusedRegister(reg_class_for(kind), {});
850   Fill(reg, src_offset, kind);
851   Spill(dst_offset, reg, kind);
852 }
853 
// GP register-to-register move; performs a full 64-bit move for all kinds.
void LiftoffAssembler::Move(Register dst, Register src, ValueKind kind) {
  DCHECK_NE(dst, src);
  // TODO(ksreten): Handle different sizes here.
  TurboAssembler::Move(dst, src);
}
859 
Move(DoubleRegister dst,DoubleRegister src,ValueKind kind)860 void LiftoffAssembler::Move(DoubleRegister dst, DoubleRegister src,
861                             ValueKind kind) {
862   DCHECK_NE(dst, src);
863   if (kind != kS128) {
864     TurboAssembler::Move(dst, src);
865   } else {
866     UNREACHABLE();
867   }
868 }
869 
Spill(int offset,LiftoffRegister reg,ValueKind kind)870 void LiftoffAssembler::Spill(int offset, LiftoffRegister reg, ValueKind kind) {
871   RecordUsedSpillOffset(offset);
872   MemOperand dst = liftoff::GetStackSlot(offset);
873   switch (kind) {
874     case kI32:
875       St_w(reg.gp(), dst);
876       break;
877     case kI64:
878     case kRef:
879     case kOptRef:
880     case kRtt:
881       St_d(reg.gp(), dst);
882       break;
883     case kF32:
884       Fst_s(reg.fp(), dst);
885       break;
886     case kF64:
887       TurboAssembler::Fst_d(reg.fp(), dst);
888       break;
889     case kS128:
890       UNREACHABLE();
891       break;
892     default:
893       UNREACHABLE();
894   }
895 }
896 
Spill(int offset,WasmValue value)897 void LiftoffAssembler::Spill(int offset, WasmValue value) {
898   RecordUsedSpillOffset(offset);
899   MemOperand dst = liftoff::GetStackSlot(offset);
900   switch (value.type().kind()) {
901     case kI32: {
902       LiftoffRegister tmp = GetUnusedRegister(kGpReg, {});
903       TurboAssembler::li(tmp.gp(), Operand(value.to_i32()));
904       St_w(tmp.gp(), dst);
905       break;
906     }
907     case kI64:
908     case kRef:
909     case kOptRef: {
910       LiftoffRegister tmp = GetUnusedRegister(kGpReg, {});
911       TurboAssembler::li(tmp.gp(), value.to_i64());
912       St_d(tmp.gp(), dst);
913       break;
914     }
915     default:
916       // kWasmF32 and kWasmF64 are unreachable, since those
917       // constants are not tracked.
918       UNREACHABLE();
919   }
920 }
921 
Fill(LiftoffRegister reg,int offset,ValueKind kind)922 void LiftoffAssembler::Fill(LiftoffRegister reg, int offset, ValueKind kind) {
923   MemOperand src = liftoff::GetStackSlot(offset);
924   switch (kind) {
925     case kI32:
926       Ld_w(reg.gp(), src);
927       break;
928     case kI64:
929     case kRef:
930     case kOptRef:
931     // TODO(LOONG_dev): LOONG64 Check, MIPS64 dosn't need, ARM64/LOONG64 need?
932     case kRtt:
933       Ld_d(reg.gp(), src);
934       break;
935     case kF32:
936       Fld_s(reg.fp(), src);
937       break;
938     case kF64:
939       TurboAssembler::Fld_d(reg.fp(), src);
940       break;
941     case kS128:
942       UNREACHABLE();
943       break;
944     default:
945       UNREACHABLE();
946   }
947 }
948 
// Filling one half of an i64 register pair is only needed on 32-bit
// platforms; LOONG64 holds a full i64 in a single register.
void LiftoffAssembler::FillI64Half(Register, int offset, RegPairHalf) {
  UNREACHABLE();
}
952 
// Zeroes `size` bytes of stack starting at frame offset `start`.
// Offsets are fp-relative and grow downwards (slot at offset N lives at
// fp - N).
void LiftoffAssembler::FillStackSlotsWithZero(int start, int size) {
  DCHECK_LT(0, size);
  RecordUsedSpillOffset(start + size);

  if (size <= 12 * kStackSlotSize) {
    // Special straight-line code for up to 12 slots. Generates one
    // instruction per slot (<= 12 instructions total).
    uint32_t remainder = size;
    for (; remainder >= kStackSlotSize; remainder -= kStackSlotSize) {
      St_d(zero_reg, liftoff::GetStackSlot(start + remainder));
    }
    DCHECK(remainder == 4 || remainder == 0);
    if (remainder) {
      // A single leftover 4-byte slot.
      St_w(zero_reg, liftoff::GetStackSlot(start + remainder));
    }
  } else {
    // General case for bigger counts (12 instructions).
    // Use a0 for start address (inclusive), a1 for end address (exclusive).
    // a0/a1 are saved and restored around the loop since Liftoff may have
    // live values in them.
    Push(a1, a0);
    Add_d(a0, fp, Operand(-start - size));
    Add_d(a1, fp, Operand(-start));

    // Zero one pointer-sized word per iteration until a0 reaches a1.
    Label loop;
    bind(&loop);
    St_d(zero_reg, MemOperand(a0, 0));
    addi_d(a0, a0, kSystemPointerSize);
    BranchShort(&loop, ne, a0, Operand(a1));

    Pop(a1, a0);
  }
}
984 
// i64.clz: count leading zero bits of the 64-bit input.
void LiftoffAssembler::emit_i64_clz(LiftoffRegister dst, LiftoffRegister src) {
  TurboAssembler::Clz_d(dst.gp(), src.gp());
}

// i64.ctz: count trailing zero bits of the 64-bit input.
void LiftoffAssembler::emit_i64_ctz(LiftoffRegister dst, LiftoffRegister src) {
  TurboAssembler::Ctz_d(dst.gp(), src.gp());
}

// i64.popcnt. Returns true to signal the operation is emitted inline and no
// fallback is needed.
bool LiftoffAssembler::emit_i64_popcnt(LiftoffRegister dst,
                                       LiftoffRegister src) {
  TurboAssembler::Popcnt_d(dst.gp(), src.gp());
  return true;
}
998 
// Increments the Smi field at [dst.gp() + offset] in place. Uses an
// assembler scratch register so no Liftoff cache register is clobbered.
void LiftoffAssembler::IncrementSmi(LiftoffRegister dst, int offset) {
  UseScratchRegisterScope temps(this);
  Register scratch = temps.Acquire();
  // Untag, add 1, retag, store back.
  SmiUntag(scratch, MemOperand(dst.gp(), offset));
  Add_d(scratch, scratch, Operand(1));
  SmiTag(scratch);
  St_d(scratch, MemOperand(dst.gp(), offset));
}
1007 
void LiftoffAssembler::emit_i32_mul(Register dst, Register lhs, Register rhs) {
  TurboAssembler::Mul_w(dst, lhs, rhs);
}

// i32.div_s, with both wasm trapping conditions checked up front:
// division by zero, and kMinInt / -1 (whose result is unrepresentable).
void LiftoffAssembler::emit_i32_divs(Register dst, Register lhs, Register rhs,
                                     Label* trap_div_by_zero,
                                     Label* trap_div_unrepresentable) {
  TurboAssembler::Branch(trap_div_by_zero, eq, rhs, Operand(zero_reg));

  // Check if lhs == kMinInt and rhs == -1, since this case is unrepresentable.
  // Each scratch register is zeroed iff its condition holds, so their sum is
  // zero only when both conditions hold at once.
  TurboAssembler::li(kScratchReg, 1);
  TurboAssembler::li(kScratchReg2, 1);
  TurboAssembler::LoadZeroOnCondition(kScratchReg, lhs, Operand(kMinInt), eq);
  TurboAssembler::LoadZeroOnCondition(kScratchReg2, rhs, Operand(-1), eq);
  add_d(kScratchReg, kScratchReg, kScratchReg2);
  TurboAssembler::Branch(trap_div_unrepresentable, eq, kScratchReg,
                         Operand(zero_reg));

  TurboAssembler::Div_w(dst, lhs, rhs);
}

// i32.div_u: only division by zero can trap.
void LiftoffAssembler::emit_i32_divu(Register dst, Register lhs, Register rhs,
                                     Label* trap_div_by_zero) {
  TurboAssembler::Branch(trap_div_by_zero, eq, rhs, Operand(zero_reg));
  TurboAssembler::Div_wu(dst, lhs, rhs);
}

// i32.rem_s: wasm defines kMinInt % -1 as 0, so only zero needs a check.
void LiftoffAssembler::emit_i32_rems(Register dst, Register lhs, Register rhs,
                                     Label* trap_div_by_zero) {
  TurboAssembler::Branch(trap_div_by_zero, eq, rhs, Operand(zero_reg));
  TurboAssembler::Mod_w(dst, lhs, rhs);
}

// i32.rem_u: only division by zero can trap.
void LiftoffAssembler::emit_i32_remu(Register dst, Register lhs, Register rhs,
                                     Label* trap_div_by_zero) {
  TurboAssembler::Branch(trap_div_by_zero, eq, rhs, Operand(zero_reg));
  TurboAssembler::Mod_wu(dst, lhs, rhs);
}
1046 
// Defines emit_i32_<op>(dst, lhs, rhs) as a single register-register ALU
// instruction.
#define I32_BINOP(name, instruction)                                 \
  void LiftoffAssembler::emit_i32_##name(Register dst, Register lhs, \
                                         Register rhs) {             \
    instruction(dst, lhs, rhs);                                      \
  }

// clang-format off
I32_BINOP(add, add_w)
I32_BINOP(sub, sub_w)
I32_BINOP(and, and_)
I32_BINOP(or, or_)
I32_BINOP(xor, xor_)
// clang-format on

#undef I32_BINOP

// Defines emit_i32_<op>i(dst, lhs, imm), the register-immediate variant.
// These use macro-assembler pseudo-instructions (Add_w, And, ...), which
// can handle immediates that do not fit the instruction encoding.
#define I32_BINOP_I(name, instruction)                                  \
  void LiftoffAssembler::emit_i32_##name##i(Register dst, Register lhs, \
                                            int32_t imm) {              \
    instruction(dst, lhs, Operand(imm));                                \
  }

// clang-format off
I32_BINOP_I(add, Add_w)
I32_BINOP_I(sub, Sub_w)
I32_BINOP_I(and, And)
I32_BINOP_I(or, Or)
I32_BINOP_I(xor, Xor)
// clang-format on

#undef I32_BINOP_I
1078 
// i32.clz: count leading zero bits of the 32-bit input.
void LiftoffAssembler::emit_i32_clz(Register dst, Register src) {
  TurboAssembler::Clz_w(dst, src);
}

// i32.ctz: count trailing zero bits of the 32-bit input.
void LiftoffAssembler::emit_i32_ctz(Register dst, Register src) {
  TurboAssembler::Ctz_w(dst, src);
}

// i32.popcnt. Returns true to signal the operation is emitted inline and no
// fallback is needed.
bool LiftoffAssembler::emit_i32_popcnt(Register dst, Register src) {
  TurboAssembler::Popcnt_w(dst, src);
  return true;
}
1091 
// Defines emit_i32_<shift>(dst, src, amount) with a register shift amount.
#define I32_SHIFTOP(name, instruction)                               \
  void LiftoffAssembler::emit_i32_##name(Register dst, Register src, \
                                         Register amount) {          \
    instruction(dst, src, amount);                                   \
  }
// Additionally defines the immediate variant emit_i32_<shift>i; the shift
// amount is masked to 5 bits, matching wasm's mod-32 shift semantics.
#define I32_SHIFTOP_I(name, instruction, instruction1)                  \
  I32_SHIFTOP(name, instruction)                                        \
  void LiftoffAssembler::emit_i32_##name##i(Register dst, Register src, \
                                            int amount) {               \
    instruction1(dst, src, amount & 0x1f);                              \
  }

I32_SHIFTOP_I(shl, sll_w, slli_w)
I32_SHIFTOP_I(sar, sra_w, srai_w)
I32_SHIFTOP_I(shr, srl_w, srli_w)

#undef I32_SHIFTOP
#undef I32_SHIFTOP_I
1110 
void LiftoffAssembler::emit_i64_addi(LiftoffRegister dst, LiftoffRegister lhs,
                                     int64_t imm) {
  TurboAssembler::Add_d(dst.gp(), lhs.gp(), Operand(imm));
}

void LiftoffAssembler::emit_i64_mul(LiftoffRegister dst, LiftoffRegister lhs,
                                    LiftoffRegister rhs) {
  TurboAssembler::Mul_d(dst.gp(), lhs.gp(), rhs.gp());
}

// i64.div_s. Traps on a zero divisor and on INT64_MIN / -1 (unrepresentable).
// Returns true: emitted inline, no fallback needed.
bool LiftoffAssembler::emit_i64_divs(LiftoffRegister dst, LiftoffRegister lhs,
                                     LiftoffRegister rhs,
                                     Label* trap_div_by_zero,
                                     Label* trap_div_unrepresentable) {
  TurboAssembler::Branch(trap_div_by_zero, eq, rhs.gp(), Operand(zero_reg));

  // Check if lhs == MinInt64 and rhs == -1, since this case is unrepresentable.
  // Each scratch register is zeroed iff its condition holds, so their sum is
  // zero only when both conditions hold at once.
  TurboAssembler::li(kScratchReg, 1);
  TurboAssembler::li(kScratchReg2, 1);
  TurboAssembler::LoadZeroOnCondition(
      kScratchReg, lhs.gp(), Operand(std::numeric_limits<int64_t>::min()), eq);
  TurboAssembler::LoadZeroOnCondition(kScratchReg2, rhs.gp(), Operand(-1), eq);
  add_d(kScratchReg, kScratchReg, kScratchReg2);
  TurboAssembler::Branch(trap_div_unrepresentable, eq, kScratchReg,
                         Operand(zero_reg));

  TurboAssembler::Div_d(dst.gp(), lhs.gp(), rhs.gp());
  return true;
}

// i64.div_u: only division by zero can trap.
bool LiftoffAssembler::emit_i64_divu(LiftoffRegister dst, LiftoffRegister lhs,
                                     LiftoffRegister rhs,
                                     Label* trap_div_by_zero) {
  TurboAssembler::Branch(trap_div_by_zero, eq, rhs.gp(), Operand(zero_reg));
  TurboAssembler::Div_du(dst.gp(), lhs.gp(), rhs.gp());
  return true;
}

// i64.rem_s: wasm defines INT64_MIN % -1 as 0, so only zero needs a check.
bool LiftoffAssembler::emit_i64_rems(LiftoffRegister dst, LiftoffRegister lhs,
                                     LiftoffRegister rhs,
                                     Label* trap_div_by_zero) {
  TurboAssembler::Branch(trap_div_by_zero, eq, rhs.gp(), Operand(zero_reg));
  TurboAssembler::Mod_d(dst.gp(), lhs.gp(), rhs.gp());
  return true;
}

// i64.rem_u: only division by zero can trap.
bool LiftoffAssembler::emit_i64_remu(LiftoffRegister dst, LiftoffRegister lhs,
                                     LiftoffRegister rhs,
                                     Label* trap_div_by_zero) {
  TurboAssembler::Branch(trap_div_by_zero, eq, rhs.gp(), Operand(zero_reg));
  TurboAssembler::Mod_du(dst.gp(), lhs.gp(), rhs.gp());
  return true;
}
1164 
// Defines emit_i64_<op>(dst, lhs, rhs) as one 64-bit ALU (pseudo-)instruction.
#define I64_BINOP(name, instruction)                                   \
  void LiftoffAssembler::emit_i64_##name(                              \
      LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs) { \
    instruction(dst.gp(), lhs.gp(), rhs.gp());                         \
  }

// clang-format off
I64_BINOP(add, Add_d)
I64_BINOP(sub, Sub_d)
I64_BINOP(and, and_)
I64_BINOP(or, or_)
I64_BINOP(xor, xor_)
// clang-format on

#undef I64_BINOP

// Immediate variants (emit_i64_<op>i). The add variant is defined
// separately as emit_i64_addi, which takes an int64_t immediate.
#define I64_BINOP_I(name, instruction)                         \
  void LiftoffAssembler::emit_i64_##name##i(                   \
      LiftoffRegister dst, LiftoffRegister lhs, int32_t imm) { \
    instruction(dst.gp(), lhs.gp(), Operand(imm));             \
  }

// clang-format off
I64_BINOP_I(and, And)
I64_BINOP_I(or, Or)
I64_BINOP_I(xor, Xor)
// clang-format on

#undef I64_BINOP_I

// 64-bit shifts with register or immediate amount; immediate amounts are
// masked to 6 bits, matching wasm's mod-64 shift semantics.
#define I64_SHIFTOP(name, instruction)                             \
  void LiftoffAssembler::emit_i64_##name(                          \
      LiftoffRegister dst, LiftoffRegister src, Register amount) { \
    instruction(dst.gp(), src.gp(), amount);                       \
  }
#define I64_SHIFTOP_I(name, instruction, instructioni)                         \
  I64_SHIFTOP(name, instruction)                                               \
  void LiftoffAssembler::emit_i64_##name##i(LiftoffRegister dst,               \
                                            LiftoffRegister src, int amount) { \
    instructioni(dst.gp(), src.gp(), amount & 63);                             \
  }

I64_SHIFTOP_I(shl, sll_d, slli_d)
I64_SHIFTOP_I(sar, sra_d, srai_d)
I64_SHIFTOP_I(shr, srl_d, srli_d)

#undef I64_SHIFTOP
#undef I64_SHIFTOP_I
1213 
// Zero-extends the low 32 bits of `src` to a full pointer-width (64-bit)
// value in `dst`.
void LiftoffAssembler::emit_u32_to_uintptr(Register dst, Register src) {
  bstrpick_d(dst, src, 31, 0);
}
1217 
void LiftoffAssembler::emit_f32_neg(DoubleRegister dst, DoubleRegister src) {
  TurboAssembler::Neg_s(dst, src);
}

void LiftoffAssembler::emit_f64_neg(DoubleRegister dst, DoubleRegister src) {
  TurboAssembler::Neg_d(dst, src);
}

// f32.min. The fast path handles ordinary operands; inputs it cannot decide
// (presumably NaN and equal/signed-zero cases -- see Float32MinOutOfLine)
// branch to the out-of-line path.
void LiftoffAssembler::emit_f32_min(DoubleRegister dst, DoubleRegister lhs,
                                    DoubleRegister rhs) {
  Label ool, done;
  TurboAssembler::Float32Min(dst, lhs, rhs, &ool);
  Branch(&done);

  bind(&ool);
  TurboAssembler::Float32MinOutOfLine(dst, lhs, rhs);
  bind(&done);
}

// f32.max; same fast-path / out-of-line structure as emit_f32_min.
void LiftoffAssembler::emit_f32_max(DoubleRegister dst, DoubleRegister lhs,
                                    DoubleRegister rhs) {
  Label ool, done;
  TurboAssembler::Float32Max(dst, lhs, rhs, &ool);
  Branch(&done);

  bind(&ool);
  TurboAssembler::Float32MaxOutOfLine(dst, lhs, rhs);
  bind(&done);
}

// f32.copysign: dst = |lhs| with the sign bit of rhs.
void LiftoffAssembler::emit_f32_copysign(DoubleRegister dst, DoubleRegister lhs,
                                         DoubleRegister rhs) {
  fcopysign_s(dst, lhs, rhs);
}

// f64.min; same fast-path / out-of-line structure as emit_f32_min.
void LiftoffAssembler::emit_f64_min(DoubleRegister dst, DoubleRegister lhs,
                                    DoubleRegister rhs) {
  Label ool, done;
  TurboAssembler::Float64Min(dst, lhs, rhs, &ool);
  Branch(&done);

  bind(&ool);
  TurboAssembler::Float64MinOutOfLine(dst, lhs, rhs);
  bind(&done);
}

// f64.max; same fast-path / out-of-line structure as emit_f32_min.
void LiftoffAssembler::emit_f64_max(DoubleRegister dst, DoubleRegister lhs,
                                    DoubleRegister rhs) {
  Label ool, done;
  TurboAssembler::Float64Max(dst, lhs, rhs, &ool);
  Branch(&done);

  bind(&ool);
  TurboAssembler::Float64MaxOutOfLine(dst, lhs, rhs);
  bind(&done);
}

// f64.copysign: dst = |lhs| with the sign bit of rhs.
void LiftoffAssembler::emit_f64_copysign(DoubleRegister dst, DoubleRegister lhs,
                                         DoubleRegister rhs) {
  fcopysign_d(dst, lhs, rhs);
}
1279 
// Defines emit_<name>(dst, lhs, rhs) as one FP instruction.
#define FP_BINOP(name, instruction)                                          \
  void LiftoffAssembler::emit_##name(DoubleRegister dst, DoubleRegister lhs, \
                                     DoubleRegister rhs) {                   \
    instruction(dst, lhs, rhs);                                              \
  }
// Defines emit_<name>(dst, src) as one FP instruction.
#define FP_UNOP(name, instruction)                                             \
  void LiftoffAssembler::emit_##name(DoubleRegister dst, DoubleRegister src) { \
    instruction(dst, src);                                                     \
  }
// Like FP_UNOP, but returns true to signal the operation is emitted inline
// (no fallback needed) -- used for the rounding operations.
#define FP_UNOP_RETURN_TRUE(name, instruction)                                 \
  bool LiftoffAssembler::emit_##name(DoubleRegister dst, DoubleRegister src) { \
    instruction(dst, src);                                                     \
    return true;                                                               \
  }

FP_BINOP(f32_add, fadd_s)
FP_BINOP(f32_sub, fsub_s)
FP_BINOP(f32_mul, fmul_s)
FP_BINOP(f32_div, fdiv_s)
FP_UNOP(f32_abs, fabs_s)
FP_UNOP_RETURN_TRUE(f32_ceil, Ceil_s)
FP_UNOP_RETURN_TRUE(f32_floor, Floor_s)
FP_UNOP_RETURN_TRUE(f32_trunc, Trunc_s)
FP_UNOP_RETURN_TRUE(f32_nearest_int, Round_s)
FP_UNOP(f32_sqrt, fsqrt_s)
FP_BINOP(f64_add, fadd_d)
FP_BINOP(f64_sub, fsub_d)
FP_BINOP(f64_mul, fmul_d)
FP_BINOP(f64_div, fdiv_d)
FP_UNOP(f64_abs, fabs_d)
FP_UNOP_RETURN_TRUE(f64_ceil, Ceil_d)
FP_UNOP_RETURN_TRUE(f64_floor, Floor_d)
FP_UNOP_RETURN_TRUE(f64_trunc, Trunc_d)
FP_UNOP_RETURN_TRUE(f64_nearest_int, Round_d)
FP_UNOP(f64_sqrt, fsqrt_d)

#undef FP_BINOP
#undef FP_UNOP
#undef FP_UNOP_RETURN_TRUE
1319 
1320 bool LiftoffAssembler::emit_type_conversion(WasmOpcode opcode,
1321                                             LiftoffRegister dst,
1322                                             LiftoffRegister src, Label* trap) {
1323   switch (opcode) {
1324     case kExprI32ConvertI64:
1325       TurboAssembler::bstrpick_w(dst.gp(), src.gp(), 31, 0);
1326       return true;
1327     case kExprI32SConvertF32: {
1328       LiftoffRegister rounded = GetUnusedRegister(kFpReg, LiftoffRegList{src});
1329       LiftoffRegister converted_back =
1330           GetUnusedRegister(kFpReg, LiftoffRegList{src, rounded});
1331 
1332       // Real conversion.
1333       TurboAssembler::Trunc_s(rounded.fp(), src.fp());
1334       ftintrz_w_s(kScratchDoubleReg, rounded.fp());
1335       movfr2gr_s(dst.gp(), kScratchDoubleReg);
1336       // Avoid INT32_MAX as an overflow indicator and use INT32_MIN instead,
1337       // because INT32_MIN allows easier out-of-bounds detection.
1338       TurboAssembler::Add_w(kScratchReg, dst.gp(), 1);
1339       TurboAssembler::Slt(kScratchReg2, kScratchReg, dst.gp());
1340       TurboAssembler::Movn(dst.gp(), kScratchReg, kScratchReg2);
1341 
1342       // Checking if trap.
1343       movgr2fr_w(kScratchDoubleReg, dst.gp());
1344       ffint_s_w(converted_back.fp(), kScratchDoubleReg);
1345       TurboAssembler::CompareF32(rounded.fp(), converted_back.fp(), CEQ);
1346       TurboAssembler::BranchFalseF(trap);
1347       return true;
1348     }
1349     case kExprI32UConvertF32: {
1350       LiftoffRegister rounded = GetUnusedRegister(kFpReg, LiftoffRegList{src});
1351       LiftoffRegister converted_back =
1352           GetUnusedRegister(kFpReg, LiftoffRegList{src, rounded});
1353 
1354       // Real conversion.
1355       TurboAssembler::Trunc_s(rounded.fp(), src.fp());
1356       TurboAssembler::Ftintrz_uw_s(dst.gp(), rounded.fp(), kScratchDoubleReg);
1357       // Avoid UINT32_MAX as an overflow indicator and use 0 instead,
1358       // because 0 allows easier out-of-bounds detection.
1359       TurboAssembler::Add_w(kScratchReg, dst.gp(), 1);
1360       TurboAssembler::Movz(dst.gp(), zero_reg, kScratchReg);
1361 
1362       // Checking if trap.
1363       TurboAssembler::Ffint_d_uw(converted_back.fp(), dst.gp());
1364       fcvt_s_d(converted_back.fp(), converted_back.fp());
1365       TurboAssembler::CompareF32(rounded.fp(), converted_back.fp(), CEQ);
1366       TurboAssembler::BranchFalseF(trap);
1367       return true;
1368     }
1369     case kExprI32SConvertF64: {
1370       LiftoffRegister rounded = GetUnusedRegister(kFpReg, LiftoffRegList{src});
1371       LiftoffRegister converted_back =
1372           GetUnusedRegister(kFpReg, LiftoffRegList{src, rounded});
1373 
1374       // Real conversion.
1375       TurboAssembler::Trunc_d(rounded.fp(), src.fp());
1376       ftintrz_w_d(kScratchDoubleReg, rounded.fp());
1377       movfr2gr_s(dst.gp(), kScratchDoubleReg);
1378 
1379       // Checking if trap.
1380       ffint_d_w(converted_back.fp(), kScratchDoubleReg);
1381       TurboAssembler::CompareF64(rounded.fp(), converted_back.fp(), CEQ);
1382       TurboAssembler::BranchFalseF(trap);
1383       return true;
1384     }
1385     case kExprI32UConvertF64: {
1386       LiftoffRegister rounded = GetUnusedRegister(kFpReg, LiftoffRegList{src});
1387       LiftoffRegister converted_back =
1388           GetUnusedRegister(kFpReg, LiftoffRegList{src, rounded});
1389 
1390       // Real conversion.
1391       TurboAssembler::Trunc_d(rounded.fp(), src.fp());
1392       TurboAssembler::Ftintrz_uw_d(dst.gp(), rounded.fp(), kScratchDoubleReg);
1393 
1394       // Checking if trap.
1395       TurboAssembler::Ffint_d_uw(converted_back.fp(), dst.gp());
1396       TurboAssembler::CompareF64(rounded.fp(), converted_back.fp(), CEQ);
1397       TurboAssembler::BranchFalseF(trap);
1398       return true;
1399     }
1400     case kExprI32ReinterpretF32:
1401       TurboAssembler::FmoveLow(dst.gp(), src.fp());
1402       return true;
1403     case kExprI64SConvertI32:
1404       slli_w(dst.gp(), src.gp(), 0);
1405       return true;
1406     case kExprI64UConvertI32:
1407       TurboAssembler::bstrpick_d(dst.gp(), src.gp(), 31, 0);
1408       return true;
1409     case kExprI64SConvertF32: {
1410       LiftoffRegister rounded = GetUnusedRegister(kFpReg, LiftoffRegList{src});
1411       LiftoffRegister converted_back =
1412           GetUnusedRegister(kFpReg, LiftoffRegList{src, rounded});
1413 
1414       // Real conversion.
1415       TurboAssembler::Trunc_s(rounded.fp(), src.fp());
1416       ftintrz_l_s(kScratchDoubleReg, rounded.fp());
1417       movfr2gr_d(dst.gp(), kScratchDoubleReg);
1418       // Avoid INT64_MAX as an overflow indicator and use INT64_MIN instead,
1419       // because INT64_MIN allows easier out-of-bounds detection.
1420       TurboAssembler::Add_d(kScratchReg, dst.gp(), 1);
1421       TurboAssembler::Slt(kScratchReg2, kScratchReg, dst.gp());
1422       TurboAssembler::Movn(dst.gp(), kScratchReg, kScratchReg2);
1423 
1424       // Checking if trap.
1425       movgr2fr_d(kScratchDoubleReg, dst.gp());
1426       ffint_s_l(converted_back.fp(), kScratchDoubleReg);
1427       TurboAssembler::CompareF32(rounded.fp(), converted_back.fp(), CEQ);
1428       TurboAssembler::BranchFalseF(trap);
1429       return true;
1430     }
1431     case kExprI64UConvertF32: {
1432       // Real conversion.
1433       TurboAssembler::Ftintrz_ul_s(dst.gp(), src.fp(), kScratchDoubleReg,
1434                                    kScratchReg);
1435 
1436       // Checking if trap.
1437       TurboAssembler::Branch(trap, eq, kScratchReg, Operand(zero_reg));
1438       return true;
1439     }
1440     case kExprI64SConvertF64: {
1441       LiftoffRegister rounded = GetUnusedRegister(kFpReg, LiftoffRegList{src});
1442       LiftoffRegister converted_back =
1443           GetUnusedRegister(kFpReg, LiftoffRegList{src, rounded});
1444 
1445       // Real conversion.
1446       TurboAssembler::Trunc_d(rounded.fp(), src.fp());
1447       ftintrz_l_d(kScratchDoubleReg, rounded.fp());
1448       movfr2gr_d(dst.gp(), kScratchDoubleReg);
1449       // Avoid INT64_MAX as an overflow indicator and use INT64_MIN instead,
1450       // because INT64_MIN allows easier out-of-bounds detection.
1451       TurboAssembler::Add_d(kScratchReg, dst.gp(), 1);
1452       TurboAssembler::Slt(kScratchReg2, kScratchReg, dst.gp());
1453       TurboAssembler::Movn(dst.gp(), kScratchReg, kScratchReg2);
1454 
1455       // Checking if trap.
1456       movgr2fr_d(kScratchDoubleReg, dst.gp());
1457       ffint_d_l(converted_back.fp(), kScratchDoubleReg);
1458       TurboAssembler::CompareF64(rounded.fp(), converted_back.fp(), CEQ);
1459       TurboAssembler::BranchFalseF(trap);
1460       return true;
1461     }
1462     case kExprI64UConvertF64: {
1463       // Real conversion.
1464       TurboAssembler::Ftintrz_ul_d(dst.gp(), src.fp(), kScratchDoubleReg,
1465                                    kScratchReg);
1466 
1467       // Checking if trap.
1468       TurboAssembler::Branch(trap, eq, kScratchReg, Operand(zero_reg));
1469       return true;
1470     }
1471     case kExprI64ReinterpretF64:
1472       movfr2gr_d(dst.gp(), src.fp());
1473       return true;
1474     case kExprF32SConvertI32: {
1475       LiftoffRegister scratch = GetUnusedRegister(kFpReg, LiftoffRegList{dst});
1476       movgr2fr_w(scratch.fp(), src.gp());
1477       ffint_s_w(dst.fp(), scratch.fp());
1478       return true;
1479     }
1480     case kExprF32UConvertI32:
1481       TurboAssembler::Ffint_s_uw(dst.fp(), src.gp());
1482       return true;
1483     case kExprF32ConvertF64:
1484       fcvt_s_d(dst.fp(), src.fp());
1485       return true;
1486     case kExprF32ReinterpretI32:
1487       TurboAssembler::FmoveLow(dst.fp(), src.gp());
1488       return true;
1489     case kExprF64SConvertI32: {
1490       LiftoffRegister scratch = GetUnusedRegister(kFpReg, LiftoffRegList{dst});
1491       movgr2fr_w(scratch.fp(), src.gp());
1492       ffint_d_w(dst.fp(), scratch.fp());
1493       return true;
1494     }
1495     case kExprF64UConvertI32:
1496       TurboAssembler::Ffint_d_uw(dst.fp(), src.gp());
1497       return true;
1498     case kExprF64ConvertF32:
1499       fcvt_d_s(dst.fp(), src.fp());
1500       return true;
1501     case kExprF64ReinterpretI64:
1502       movgr2fr_d(dst.fp(), src.gp());
1503       return true;
1504     case kExprI32SConvertSatF32:
1505       ftintrz_w_s(kScratchDoubleReg, src.fp());
1506       movfr2gr_s(dst.gp(), kScratchDoubleReg);
1507       return true;
1508     case kExprI32UConvertSatF32: {
1509       Label isnan_or_lessthan_or_equal_zero;
1510       mov(dst.gp(), zero_reg);
1511       TurboAssembler::Move(kScratchDoubleReg, static_cast<float>(0.0));
1512       CompareF32(src.fp(), kScratchDoubleReg, CULE);
1513       BranchTrueShortF(&isnan_or_lessthan_or_equal_zero);
1514       Ftintrz_uw_s(dst.gp(), src.fp(), kScratchDoubleReg);
1515       bind(&isnan_or_lessthan_or_equal_zero);
1516       return true;
1517     }
1518     case kExprI32SConvertSatF64:
1519       ftintrz_w_d(kScratchDoubleReg, src.fp());
1520       movfr2gr_s(dst.gp(), kScratchDoubleReg);
1521       return true;
1522     case kExprI32UConvertSatF64: {
1523       Label isnan_or_lessthan_or_equal_zero;
1524       mov(dst.gp(), zero_reg);
1525       TurboAssembler::Move(kScratchDoubleReg, static_cast<double>(0.0));
1526       CompareF64(src.fp(), kScratchDoubleReg, CULE);
1527       BranchTrueShortF(&isnan_or_lessthan_or_equal_zero);
1528       Ftintrz_uw_d(dst.gp(), src.fp(), kScratchDoubleReg);
1529       bind(&isnan_or_lessthan_or_equal_zero);
1530       return true;
1531     }
1532     case kExprI64SConvertSatF32:
1533       ftintrz_l_s(kScratchDoubleReg, src.fp());
1534       movfr2gr_d(dst.gp(), kScratchDoubleReg);
1535       return true;
1536     case kExprI64UConvertSatF32: {
1537       Label isnan_or_lessthan_or_equal_zero;
1538       mov(dst.gp(), zero_reg);
1539       TurboAssembler::Move(kScratchDoubleReg, static_cast<float>(0.0));
1540       CompareF32(src.fp(), kScratchDoubleReg, CULE);
1541       BranchTrueShortF(&isnan_or_lessthan_or_equal_zero);
1542       Ftintrz_ul_s(dst.gp(), src.fp(), kScratchDoubleReg);
1543       bind(&isnan_or_lessthan_or_equal_zero);
1544       return true;
1545     }
1546     case kExprI64SConvertSatF64:
1547       ftintrz_l_d(kScratchDoubleReg, src.fp());
1548       movfr2gr_d(dst.gp(), kScratchDoubleReg);
1549       return true;
1550     case kExprI64UConvertSatF64: {
1551       Label isnan_or_lessthan_or_equal_zero;
1552       mov(dst.gp(), zero_reg);
1553       TurboAssembler::Move(kScratchDoubleReg, static_cast<double>(0.0));
1554       CompareF64(src.fp(), kScratchDoubleReg, CULE);
1555       BranchTrueShortF(&isnan_or_lessthan_or_equal_zero);
1556       Ftintrz_ul_d(dst.gp(), src.fp(), kScratchDoubleReg);
1557       bind(&isnan_or_lessthan_or_equal_zero);
1558       return true;
1559     }
1560     default:
1561       return false;
1562   }
1563 }
1564 
// i32.extend8_s: sign-extend the low 8 bits of {src} into {dst}
// (LoongArch ext.w.b).
void LiftoffAssembler::emit_i32_signextend_i8(Register dst, Register src) {
  ext_w_b(dst, src);
}
1568 
// i32.extend16_s: sign-extend the low 16 bits of {src} into {dst}
// (LoongArch ext.w.h).
void LiftoffAssembler::emit_i32_signextend_i16(Register dst, Register src) {
  ext_w_h(dst, src);
}
1572 
// i64.extend8_s: sign-extend the low 8 bits of {src} into the full
// 64-bit {dst}.
void LiftoffAssembler::emit_i64_signextend_i8(LiftoffRegister dst,
                                              LiftoffRegister src) {
  ext_w_b(dst.gp(), src.gp());
}
1577 
// i64.extend16_s: sign-extend the low 16 bits of {src} into the full
// 64-bit {dst}.
void LiftoffAssembler::emit_i64_signextend_i16(LiftoffRegister dst,
                                               LiftoffRegister src) {
  ext_w_h(dst.gp(), src.gp());
}
1582 
// i64.extend32_s: a word shift by 0 writes the sign-extended 32-bit
// result, which is exactly the required sign extension (same trick as in
// kExprI64SConvertI32 above).
void LiftoffAssembler::emit_i64_signextend_i32(LiftoffRegister dst,
                                               LiftoffRegister src) {
  slli_w(dst.gp(), src.gp(), 0);
}
1587 
// Unconditional jump to a code-local label.
void LiftoffAssembler::emit_jump(Label* label) {
  TurboAssembler::Branch(label);
}
1591 
// Unconditional indirect jump to the address held in {target}.
void LiftoffAssembler::emit_jump(Register target) {
  TurboAssembler::Jump(target);
}
1595 
emit_cond_jump(LiftoffCondition liftoff_cond,Label * label,ValueKind kind,Register lhs,Register rhs)1596 void LiftoffAssembler::emit_cond_jump(LiftoffCondition liftoff_cond,
1597                                       Label* label, ValueKind kind,
1598                                       Register lhs, Register rhs) {
1599   Condition cond = liftoff::ToCondition(liftoff_cond);
1600   if (rhs == no_reg) {
1601     DCHECK(kind == kI32 || kind == kI64);
1602     TurboAssembler::Branch(label, cond, lhs, Operand(zero_reg));
1603   } else {
1604     DCHECK((kind == kI32 || kind == kI64) ||
1605            (is_reference(kind) &&
1606             (liftoff_cond == kEqual || liftoff_cond == kUnequal)));
1607     TurboAssembler::Branch(label, cond, lhs, Operand(rhs));
1608   }
1609 }
1610 
emit_i32_cond_jumpi(LiftoffCondition liftoff_cond,Label * label,Register lhs,int32_t imm)1611 void LiftoffAssembler::emit_i32_cond_jumpi(LiftoffCondition liftoff_cond,
1612                                            Label* label, Register lhs,
1613                                            int32_t imm) {
1614   Condition cond = liftoff::ToCondition(liftoff_cond);
1615   TurboAssembler::Branch(label, cond, lhs, Operand(imm));
1616 }
1617 
// Subtracts {subtrahend} from {value} in place, then branches to
// {result_negative} if the result is below zero.
// NOTE(review): uses a 64-bit subtract and compare — assumes i32 values are
// held sign-extended in 64-bit registers (consistent with the
// slli_w(..., 0) sign-extension idiom used elsewhere in this file).
void LiftoffAssembler::emit_i32_subi_jump_negative(Register value,
                                                   int subtrahend,
                                                   Label* result_negative) {
  TurboAssembler::Sub_d(value, value, Operand(subtrahend));
  TurboAssembler::Branch(result_negative, less, value, Operand(zero_reg));
}
1624 
// dst = (src == 0) ? 1 : 0, via unsigned set-less-than immediate 1.
void LiftoffAssembler::emit_i32_eqz(Register dst, Register src) {
  sltui(dst, src, 1);
}
1628 
emit_i32_set_cond(LiftoffCondition liftoff_cond,Register dst,Register lhs,Register rhs)1629 void LiftoffAssembler::emit_i32_set_cond(LiftoffCondition liftoff_cond,
1630                                          Register dst, Register lhs,
1631                                          Register rhs) {
1632   Condition cond = liftoff::ToCondition(liftoff_cond);
1633   Register tmp = dst;
1634   if (dst == lhs || dst == rhs) {
1635     tmp = GetUnusedRegister(kGpReg, LiftoffRegList{lhs, rhs}).gp();
1636   }
1637   // Write 1 as result.
1638   TurboAssembler::li(tmp, 1);
1639 
1640   // If negative condition is true, write 0 as result.
1641   Condition neg_cond = NegateCondition(cond);
1642   TurboAssembler::LoadZeroOnCondition(tmp, lhs, Operand(rhs), neg_cond);
1643 
1644   // If tmp != dst, result will be moved.
1645   TurboAssembler::Move(dst, tmp);
1646 }
1647 
// dst = (src == 0) ? 1 : 0 for a 64-bit operand, via unsigned
// set-less-than immediate 1.
void LiftoffAssembler::emit_i64_eqz(Register dst, LiftoffRegister src) {
  sltui(dst, src.gp(), 1);
}
1651 
emit_i64_set_cond(LiftoffCondition liftoff_cond,Register dst,LiftoffRegister lhs,LiftoffRegister rhs)1652 void LiftoffAssembler::emit_i64_set_cond(LiftoffCondition liftoff_cond,
1653                                          Register dst, LiftoffRegister lhs,
1654                                          LiftoffRegister rhs) {
1655   Condition cond = liftoff::ToCondition(liftoff_cond);
1656   Register tmp = dst;
1657   if (dst == lhs.gp() || dst == rhs.gp()) {
1658     tmp = GetUnusedRegister(kGpReg, LiftoffRegList{lhs, rhs}).gp();
1659   }
1660   // Write 1 as result.
1661   TurboAssembler::li(tmp, 1);
1662 
1663   // If negative condition is true, write 0 as result.
1664   Condition neg_cond = NegateCondition(cond);
1665   TurboAssembler::LoadZeroOnCondition(tmp, lhs.gp(), Operand(rhs.gp()),
1666                                       neg_cond);
1667 
1668   // If tmp != dst, result will be moved.
1669   TurboAssembler::Move(dst, tmp);
1670 }
1671 
1672 namespace liftoff {
1673 
ConditionToConditionCmpFPU(LiftoffCondition condition,bool * predicate)1674 inline FPUCondition ConditionToConditionCmpFPU(LiftoffCondition condition,
1675                                                bool* predicate) {
1676   switch (condition) {
1677     case kEqual:
1678       *predicate = true;
1679       return CEQ;
1680     case kUnequal:
1681       *predicate = false;
1682       return CEQ;
1683     case kUnsignedLessThan:
1684       *predicate = true;
1685       return CLT;
1686     case kUnsignedGreaterEqual:
1687       *predicate = false;
1688       return CLT;
1689     case kUnsignedLessEqual:
1690       *predicate = true;
1691       return CLE;
1692     case kUnsignedGreaterThan:
1693       *predicate = false;
1694       return CLE;
1695     default:
1696       *predicate = true;
1697       break;
1698   }
1699   UNREACHABLE();
1700 }
1701 
1702 }  // namespace liftoff
1703 
// Materializes the boolean result of an f32 comparison into {dst},
// handling NaN operands explicitly first.
void LiftoffAssembler::emit_f32_set_cond(LiftoffCondition liftoff_cond,
                                         Register dst, DoubleRegister lhs,
                                         DoubleRegister rhs) {
  Condition cond = liftoff::ToCondition(liftoff_cond);
  Label not_nan, cont;
  TurboAssembler::CompareIsNanF32(lhs, rhs);
  TurboAssembler::BranchFalseF(&not_nan);
  // If one of the operands is NaN, return 1 for f32.ne, else 0.
  if (cond == ne) {
    TurboAssembler::li(dst, 1);
  } else {
    TurboAssembler::Move(dst, zero_reg);
  }
  TurboAssembler::Branch(&cont);

  bind(&not_nan);

  // Neither operand is NaN: optimistically set 1, then clear it when the
  // FPU comparison (or its negation, per {predicate}) does not hold.
  TurboAssembler::li(dst, 1);
  bool predicate;
  FPUCondition fcond =
      liftoff::ConditionToConditionCmpFPU(liftoff_cond, &predicate);
  TurboAssembler::CompareF32(lhs, rhs, fcond);
  if (predicate) {
    TurboAssembler::LoadZeroIfNotFPUCondition(dst);
  } else {
    TurboAssembler::LoadZeroIfFPUCondition(dst);
  }

  bind(&cont);
}
1734 
// Materializes the boolean result of an f64 comparison into {dst},
// handling NaN operands explicitly first.
void LiftoffAssembler::emit_f64_set_cond(LiftoffCondition liftoff_cond,
                                         Register dst, DoubleRegister lhs,
                                         DoubleRegister rhs) {
  Condition cond = liftoff::ToCondition(liftoff_cond);
  Label not_nan, cont;
  TurboAssembler::CompareIsNanF64(lhs, rhs);
  TurboAssembler::BranchFalseF(&not_nan);
  // If one of the operands is NaN, return 1 for f64.ne, else 0.
  if (cond == ne) {
    TurboAssembler::li(dst, 1);
  } else {
    TurboAssembler::Move(dst, zero_reg);
  }
  TurboAssembler::Branch(&cont);

  bind(&not_nan);

  // Neither operand is NaN: optimistically set 1, then clear it when the
  // FPU comparison (or its negation, per {predicate}) does not hold.
  TurboAssembler::li(dst, 1);
  bool predicate;
  FPUCondition fcond =
      liftoff::ConditionToConditionCmpFPU(liftoff_cond, &predicate);
  TurboAssembler::CompareF64(lhs, rhs, fcond);
  if (predicate) {
    TurboAssembler::LoadZeroIfNotFPUCondition(dst);
  } else {
    TurboAssembler::LoadZeroIfFPUCondition(dst);
  }

  bind(&cont);
}
1765 
// No LOONG64-specific select sequence is provided; returning false makes
// the caller fall back to the generic (branch-based) implementation.
bool LiftoffAssembler::emit_select(LiftoffRegister dst, Register condition,
                                   LiftoffRegister true_value,
                                   LiftoffRegister false_value,
                                   ValueKind kind) {
  return false;
}
1772 
emit_smi_check(Register obj,Label * target,SmiCheckMode mode)1773 void LiftoffAssembler::emit_smi_check(Register obj, Label* target,
1774                                       SmiCheckMode mode) {
1775   UseScratchRegisterScope temps(this);
1776   Register scratch = temps.Acquire();
1777   And(scratch, obj, Operand(kSmiTagMask));
1778   Condition condition = mode == kJumpOnSmi ? eq : ne;
1779   Branch(target, condition, scratch, Operand(zero_reg));
1780 }
1781 
// --- SIMD (S128) support ---
// None of the SIMD operations below are implemented for LOONG64 yet; each
// stub calls bailout(kSimd, ...) so that Liftoff gives up on this function
// and a different compilation path is used instead.

void LiftoffAssembler::LoadTransform(LiftoffRegister dst, Register src_addr,
                                     Register offset_reg, uintptr_t offset_imm,
                                     LoadType type,
                                     LoadTransformationKind transform,
                                     uint32_t* protected_load_pc) {
  bailout(kSimd, "load extend and load splat unimplemented");
}

void LiftoffAssembler::LoadLane(LiftoffRegister dst, LiftoffRegister src,
                                Register addr, Register offset_reg,
                                uintptr_t offset_imm, LoadType type,
                                uint8_t laneidx, uint32_t* protected_load_pc) {
  bailout(kSimd, "loadlane");
}

void LiftoffAssembler::StoreLane(Register dst, Register offset,
                                 uintptr_t offset_imm, LiftoffRegister src,
                                 StoreType type, uint8_t lane,
                                 uint32_t* protected_store_pc) {
  bailout(kSimd, "storelane");
}

void LiftoffAssembler::emit_i8x16_shuffle(LiftoffRegister dst,
                                          LiftoffRegister lhs,
                                          LiftoffRegister rhs,
                                          const uint8_t shuffle[16],
                                          bool is_swizzle) {
  bailout(kSimd, "emit_i8x16_shuffle");
}

void LiftoffAssembler::emit_i8x16_swizzle(LiftoffRegister dst,
                                          LiftoffRegister lhs,
                                          LiftoffRegister rhs) {
  bailout(kSimd, "emit_i8x16_swizzle");
}

void LiftoffAssembler::emit_i8x16_splat(LiftoffRegister dst,
                                        LiftoffRegister src) {
  bailout(kSimd, "emit_i8x16_splat");
}

void LiftoffAssembler::emit_i16x8_splat(LiftoffRegister dst,
                                        LiftoffRegister src) {
  bailout(kSimd, "emit_i16x8_splat");
}

void LiftoffAssembler::emit_i32x4_splat(LiftoffRegister dst,
                                        LiftoffRegister src) {
  bailout(kSimd, "emit_i32x4_splat");
}

void LiftoffAssembler::emit_i64x2_splat(LiftoffRegister dst,
                                        LiftoffRegister src) {
  bailout(kSimd, "emit_i64x2_splat");
}

void LiftoffAssembler::emit_f32x4_splat(LiftoffRegister dst,
                                        LiftoffRegister src) {
  bailout(kSimd, "emit_f32x4_splat");
}

void LiftoffAssembler::emit_f64x2_splat(LiftoffRegister dst,
                                        LiftoffRegister src) {
  bailout(kSimd, "emit_f64x2_splat");
}
1847 
// Generates bailing-out stubs for the extended-multiply SIMD operations
// (extmul_low / extmul_high, signed and unsigned variants).
#define SIMD_BINOP(name1, name2)                                         \
  void LiftoffAssembler::emit_##name1##_extmul_low_##name2(              \
      LiftoffRegister dst, LiftoffRegister src1, LiftoffRegister src2) { \
    bailout(kSimd, "emit_" #name1 "_extmul_low_" #name2);                \
  }                                                                      \
  void LiftoffAssembler::emit_##name1##_extmul_high_##name2(             \
      LiftoffRegister dst, LiftoffRegister src1, LiftoffRegister src2) { \
    bailout(kSimd, "emit_" #name1 "_extmul_high_" #name2);               \
  }

SIMD_BINOP(i16x8, i8x16_s)
SIMD_BINOP(i16x8, i8x16_u)

SIMD_BINOP(i32x4, i16x8_s)
SIMD_BINOP(i32x4, i16x8_u)

SIMD_BINOP(i64x2, i32x4_s)
SIMD_BINOP(i64x2, i32x4_u)

#undef SIMD_BINOP

// Same idea for the pairwise extending-add SIMD operations.
#define SIMD_BINOP(name1, name2)                                 \
  void LiftoffAssembler::emit_##name1##_extadd_pairwise_##name2( \
      LiftoffRegister dst, LiftoffRegister src) {                \
    bailout(kSimd, "emit_" #name1 "_extadd_pairwise_" #name2);   \
  }

SIMD_BINOP(i16x8, i8x16_s)
SIMD_BINOP(i16x8, i8x16_u)
SIMD_BINOP(i32x4, i16x8_s)
SIMD_BINOP(i32x4, i16x8_u)
#undef SIMD_BINOP
1880 
// SIMD saturating multiply and lane-wise comparison operations — all
// unimplemented on LOONG64; every stub bails out of Liftoff compilation.
void LiftoffAssembler::emit_i16x8_q15mulr_sat_s(LiftoffRegister dst,
                                                LiftoffRegister src1,
                                                LiftoffRegister src2) {
  bailout(kSimd, "emit_i16x8_q15mulr_sat_s");
}

void LiftoffAssembler::emit_i8x16_eq(LiftoffRegister dst, LiftoffRegister lhs,
                                     LiftoffRegister rhs) {
  bailout(kSimd, "emit_i8x16_eq");
}

void LiftoffAssembler::emit_i8x16_ne(LiftoffRegister dst, LiftoffRegister lhs,
                                     LiftoffRegister rhs) {
  bailout(kSimd, "emit_i8x16_ne");
}

void LiftoffAssembler::emit_i8x16_gt_s(LiftoffRegister dst, LiftoffRegister lhs,
                                       LiftoffRegister rhs) {
  bailout(kSimd, "emit_i8x16_gt_s");
}

void LiftoffAssembler::emit_i8x16_gt_u(LiftoffRegister dst, LiftoffRegister lhs,
                                       LiftoffRegister rhs) {
  bailout(kSimd, "emit_i8x16_gt_u");
}

void LiftoffAssembler::emit_i8x16_ge_s(LiftoffRegister dst, LiftoffRegister lhs,
                                       LiftoffRegister rhs) {
  bailout(kSimd, "emit_i8x16_ge_s");
}

void LiftoffAssembler::emit_i8x16_ge_u(LiftoffRegister dst, LiftoffRegister lhs,
                                       LiftoffRegister rhs) {
  bailout(kSimd, "emit_i8x16_ge_u");
}

void LiftoffAssembler::emit_i16x8_eq(LiftoffRegister dst, LiftoffRegister lhs,
                                     LiftoffRegister rhs) {
  bailout(kSimd, "emit_i16x8_eq");
}

void LiftoffAssembler::emit_i16x8_ne(LiftoffRegister dst, LiftoffRegister lhs,
                                     LiftoffRegister rhs) {
  bailout(kSimd, "emit_i16x8_ne");
}

void LiftoffAssembler::emit_i16x8_gt_s(LiftoffRegister dst, LiftoffRegister lhs,
                                       LiftoffRegister rhs) {
  bailout(kSimd, "emit_i16x8_gt_s");
}

void LiftoffAssembler::emit_i16x8_gt_u(LiftoffRegister dst, LiftoffRegister lhs,
                                       LiftoffRegister rhs) {
  bailout(kSimd, "emit_i16x8_gt_u");
}

void LiftoffAssembler::emit_i16x8_ge_s(LiftoffRegister dst, LiftoffRegister lhs,
                                       LiftoffRegister rhs) {
  bailout(kSimd, "emit_i16x8_ge_s");
}

void LiftoffAssembler::emit_i16x8_ge_u(LiftoffRegister dst, LiftoffRegister lhs,
                                       LiftoffRegister rhs) {
  bailout(kSimd, "emit_i16x8_ge_u");
}

void LiftoffAssembler::emit_i32x4_eq(LiftoffRegister dst, LiftoffRegister lhs,
                                     LiftoffRegister rhs) {
  bailout(kSimd, "emit_i32x4_eq");
}

void LiftoffAssembler::emit_i32x4_ne(LiftoffRegister dst, LiftoffRegister lhs,
                                     LiftoffRegister rhs) {
  bailout(kSimd, "emit_i32x4_ne");
}

void LiftoffAssembler::emit_i32x4_gt_s(LiftoffRegister dst, LiftoffRegister lhs,
                                       LiftoffRegister rhs) {
  bailout(kSimd, "emit_i32x4_gt_s");
}

void LiftoffAssembler::emit_i32x4_gt_u(LiftoffRegister dst, LiftoffRegister lhs,
                                       LiftoffRegister rhs) {
  bailout(kSimd, "emit_i32x4_gt_u");
}

void LiftoffAssembler::emit_i32x4_ge_s(LiftoffRegister dst, LiftoffRegister lhs,
                                       LiftoffRegister rhs) {
  bailout(kSimd, "emit_i32x4_ge_s");
}

void LiftoffAssembler::emit_i32x4_ge_u(LiftoffRegister dst, LiftoffRegister lhs,
                                       LiftoffRegister rhs) {
  bailout(kSimd, "emit_i32x4_ge_u");
}

void LiftoffAssembler::emit_f32x4_eq(LiftoffRegister dst, LiftoffRegister lhs,
                                     LiftoffRegister rhs) {
  bailout(kSimd, "emit_f32x4_eq");
}

void LiftoffAssembler::emit_f32x4_ne(LiftoffRegister dst, LiftoffRegister lhs,
                                     LiftoffRegister rhs) {
  bailout(kSimd, "emit_f32x4_ne");
}

void LiftoffAssembler::emit_f32x4_lt(LiftoffRegister dst, LiftoffRegister lhs,
                                     LiftoffRegister rhs) {
  bailout(kSimd, "emit_f32x4_lt");
}

void LiftoffAssembler::emit_f32x4_le(LiftoffRegister dst, LiftoffRegister lhs,
                                     LiftoffRegister rhs) {
  bailout(kSimd, "emit_f32x4_le");
}

void LiftoffAssembler::emit_i64x2_eq(LiftoffRegister dst, LiftoffRegister lhs,
                                     LiftoffRegister rhs) {
  bailout(kSimd, "emit_i64x2_eq");
}

void LiftoffAssembler::emit_i64x2_ne(LiftoffRegister dst, LiftoffRegister lhs,
                                     LiftoffRegister rhs) {
  bailout(kSimd, "emit_i64x2_ne");
}

void LiftoffAssembler::emit_i64x2_abs(LiftoffRegister dst,
                                      LiftoffRegister src) {
  bailout(kSimd, "emit_i64x2_abs");
}

void LiftoffAssembler::emit_f64x2_eq(LiftoffRegister dst, LiftoffRegister lhs,
                                     LiftoffRegister rhs) {
  bailout(kSimd, "emit_f64x2_eq");
}

void LiftoffAssembler::emit_f64x2_ne(LiftoffRegister dst, LiftoffRegister lhs,
                                     LiftoffRegister rhs) {
  bailout(kSimd, "emit_f64x2_ne");
}

void LiftoffAssembler::emit_f64x2_lt(LiftoffRegister dst, LiftoffRegister lhs,
                                     LiftoffRegister rhs) {
  bailout(kSimd, "emit_f64x2_lt");
}

void LiftoffAssembler::emit_f64x2_le(LiftoffRegister dst, LiftoffRegister lhs,
                                     LiftoffRegister rhs) {
  bailout(kSimd, "emit_f64x2_le");
}
2031 
// S128 constant materialization, bitwise ops and select — all
// unimplemented on LOONG64; every stub bails out of Liftoff compilation.
void LiftoffAssembler::emit_s128_const(LiftoffRegister dst,
                                       const uint8_t imms[16]) {
  bailout(kSimd, "emit_s128_const");
}

void LiftoffAssembler::emit_s128_not(LiftoffRegister dst, LiftoffRegister src) {
  bailout(kSimd, "emit_s128_not");
}

void LiftoffAssembler::emit_s128_and(LiftoffRegister dst, LiftoffRegister lhs,
                                     LiftoffRegister rhs) {
  bailout(kSimd, "emit_s128_and");
}

void LiftoffAssembler::emit_s128_or(LiftoffRegister dst, LiftoffRegister lhs,
                                    LiftoffRegister rhs) {
  bailout(kSimd, "emit_s128_or");
}

void LiftoffAssembler::emit_s128_xor(LiftoffRegister dst, LiftoffRegister lhs,
                                     LiftoffRegister rhs) {
  bailout(kSimd, "emit_s128_xor");
}

void LiftoffAssembler::emit_s128_and_not(LiftoffRegister dst,
                                         LiftoffRegister lhs,
                                         LiftoffRegister rhs) {
  bailout(kSimd, "emit_s128_and_not");
}

void LiftoffAssembler::emit_s128_select(LiftoffRegister dst,
                                        LiftoffRegister src1,
                                        LiftoffRegister src2,
                                        LiftoffRegister mask) {
  bailout(kSimd, "emit_s128_select");
}
2068 
// i8x16 unary ops, any/all-true reductions, shifts and additions — all
// unimplemented on LOONG64; every stub bails out of Liftoff compilation.
void LiftoffAssembler::emit_i8x16_neg(LiftoffRegister dst,
                                      LiftoffRegister src) {
  bailout(kSimd, "emit_i8x16_neg");
}

void LiftoffAssembler::emit_v128_anytrue(LiftoffRegister dst,
                                         LiftoffRegister src) {
  bailout(kSimd, "emit_v128_anytrue");
}

void LiftoffAssembler::emit_i8x16_alltrue(LiftoffRegister dst,
                                          LiftoffRegister src) {
  bailout(kSimd, "emit_i8x16_alltrue");
}

void LiftoffAssembler::emit_i8x16_bitmask(LiftoffRegister dst,
                                          LiftoffRegister src) {
  bailout(kSimd, "emit_i8x16_bitmask");
}

void LiftoffAssembler::emit_i8x16_shl(LiftoffRegister dst, LiftoffRegister lhs,
                                      LiftoffRegister rhs) {
  bailout(kSimd, "emit_i8x16_shl");
}

void LiftoffAssembler::emit_i8x16_shli(LiftoffRegister dst, LiftoffRegister lhs,
                                       int32_t rhs) {
  bailout(kSimd, "emit_i8x16_shli");
}

void LiftoffAssembler::emit_i8x16_shr_s(LiftoffRegister dst,
                                        LiftoffRegister lhs,
                                        LiftoffRegister rhs) {
  bailout(kSimd, "emit_i8x16_shr_s");
}

void LiftoffAssembler::emit_i8x16_shri_s(LiftoffRegister dst,
                                         LiftoffRegister lhs, int32_t rhs) {
  bailout(kSimd, "emit_i8x16_shri_s");
}

void LiftoffAssembler::emit_i8x16_shr_u(LiftoffRegister dst,
                                        LiftoffRegister lhs,
                                        LiftoffRegister rhs) {
  bailout(kSimd, "emit_i8x16_shr_u");
}

void LiftoffAssembler::emit_i8x16_shri_u(LiftoffRegister dst,
                                         LiftoffRegister lhs, int32_t rhs) {
  bailout(kSimd, "emit_i8x16_shri_u");
}

void LiftoffAssembler::emit_i8x16_add(LiftoffRegister dst, LiftoffRegister lhs,
                                      LiftoffRegister rhs) {
  bailout(kSimd, "emit_i8x16_add");
}

void LiftoffAssembler::emit_i8x16_add_sat_s(LiftoffRegister dst,
                                            LiftoffRegister lhs,
                                            LiftoffRegister rhs) {
  bailout(kSimd, "emit_i8x16_add_sat_s");
}
2131 
emit_i8x16_add_sat_u(LiftoffRegister dst,LiftoffRegister lhs,LiftoffRegister rhs)2132 void LiftoffAssembler::emit_i8x16_add_sat_u(LiftoffRegister dst,
2133                                             LiftoffRegister lhs,
2134                                             LiftoffRegister rhs) {
2135   bailout(kSimd, "emit_i8x16_add_sat_u");
2136 }
2137 
emit_i8x16_sub(LiftoffRegister dst,LiftoffRegister lhs,LiftoffRegister rhs)2138 void LiftoffAssembler::emit_i8x16_sub(LiftoffRegister dst, LiftoffRegister lhs,
2139                                       LiftoffRegister rhs) {
2140   bailout(kSimd, "emit_i8x16_sub");
2141 }
2142 
emit_i8x16_sub_sat_s(LiftoffRegister dst,LiftoffRegister lhs,LiftoffRegister rhs)2143 void LiftoffAssembler::emit_i8x16_sub_sat_s(LiftoffRegister dst,
2144                                             LiftoffRegister lhs,
2145                                             LiftoffRegister rhs) {
2146   bailout(kSimd, "emit_i8x16_sub_sat_s");
2147 }
2148 
emit_i8x16_sub_sat_u(LiftoffRegister dst,LiftoffRegister lhs,LiftoffRegister rhs)2149 void LiftoffAssembler::emit_i8x16_sub_sat_u(LiftoffRegister dst,
2150                                             LiftoffRegister lhs,
2151                                             LiftoffRegister rhs) {
2152   bailout(kSimd, "emit_i8x16_sub_sat_u");
2153 }
2154 
emit_i8x16_min_s(LiftoffRegister dst,LiftoffRegister lhs,LiftoffRegister rhs)2155 void LiftoffAssembler::emit_i8x16_min_s(LiftoffRegister dst,
2156                                         LiftoffRegister lhs,
2157                                         LiftoffRegister rhs) {
2158   bailout(kSimd, "emit_i8x16_min_s");
2159 }
2160 
emit_i8x16_min_u(LiftoffRegister dst,LiftoffRegister lhs,LiftoffRegister rhs)2161 void LiftoffAssembler::emit_i8x16_min_u(LiftoffRegister dst,
2162                                         LiftoffRegister lhs,
2163                                         LiftoffRegister rhs) {
2164   bailout(kSimd, "emit_i8x16_min_u");
2165 }
2166 
emit_i8x16_max_s(LiftoffRegister dst,LiftoffRegister lhs,LiftoffRegister rhs)2167 void LiftoffAssembler::emit_i8x16_max_s(LiftoffRegister dst,
2168                                         LiftoffRegister lhs,
2169                                         LiftoffRegister rhs) {
2170   bailout(kSimd, "emit_i8x16_max_s");
2171 }
2172 
emit_i8x16_max_u(LiftoffRegister dst,LiftoffRegister lhs,LiftoffRegister rhs)2173 void LiftoffAssembler::emit_i8x16_max_u(LiftoffRegister dst,
2174                                         LiftoffRegister lhs,
2175                                         LiftoffRegister rhs) {
2176   bailout(kSimd, "emit_i8x16_max_u");
2177 }
2178 
emit_i8x16_popcnt(LiftoffRegister dst,LiftoffRegister src)2179 void LiftoffAssembler::emit_i8x16_popcnt(LiftoffRegister dst,
2180                                          LiftoffRegister src) {
2181   bailout(kSimd, "emit_i8x16_popcnt");
2182 }
2183 
emit_i16x8_neg(LiftoffRegister dst,LiftoffRegister src)2184 void LiftoffAssembler::emit_i16x8_neg(LiftoffRegister dst,
2185                                       LiftoffRegister src) {
2186   bailout(kSimd, "emit_i16x8_neg");
2187 }
2188 
emit_i16x8_alltrue(LiftoffRegister dst,LiftoffRegister src)2189 void LiftoffAssembler::emit_i16x8_alltrue(LiftoffRegister dst,
2190                                           LiftoffRegister src) {
2191   bailout(kSimd, "emit_i16x8_alltrue");
2192 }
2193 
emit_i16x8_bitmask(LiftoffRegister dst,LiftoffRegister src)2194 void LiftoffAssembler::emit_i16x8_bitmask(LiftoffRegister dst,
2195                                           LiftoffRegister src) {
2196   bailout(kSimd, "emit_i16x8_bitmask");
2197 }
2198 
emit_i16x8_shl(LiftoffRegister dst,LiftoffRegister lhs,LiftoffRegister rhs)2199 void LiftoffAssembler::emit_i16x8_shl(LiftoffRegister dst, LiftoffRegister lhs,
2200                                       LiftoffRegister rhs) {
2201   bailout(kSimd, "emit_i16x8_shl");
2202 }
2203 
emit_i16x8_shli(LiftoffRegister dst,LiftoffRegister lhs,int32_t rhs)2204 void LiftoffAssembler::emit_i16x8_shli(LiftoffRegister dst, LiftoffRegister lhs,
2205                                        int32_t rhs) {
2206   bailout(kSimd, "emit_i16x8_shli");
2207 }
2208 
emit_i16x8_shr_s(LiftoffRegister dst,LiftoffRegister lhs,LiftoffRegister rhs)2209 void LiftoffAssembler::emit_i16x8_shr_s(LiftoffRegister dst,
2210                                         LiftoffRegister lhs,
2211                                         LiftoffRegister rhs) {
2212   bailout(kSimd, "emit_i16x8_shr_s");
2213 }
2214 
emit_i16x8_shri_s(LiftoffRegister dst,LiftoffRegister lhs,int32_t rhs)2215 void LiftoffAssembler::emit_i16x8_shri_s(LiftoffRegister dst,
2216                                          LiftoffRegister lhs, int32_t rhs) {
2217   bailout(kSimd, "emit_i16x8_shri_s");
2218 }
2219 
emit_i16x8_shr_u(LiftoffRegister dst,LiftoffRegister lhs,LiftoffRegister rhs)2220 void LiftoffAssembler::emit_i16x8_shr_u(LiftoffRegister dst,
2221                                         LiftoffRegister lhs,
2222                                         LiftoffRegister rhs) {
2223   bailout(kSimd, "emit_i16x8_shr_u");
2224 }
2225 
emit_i16x8_shri_u(LiftoffRegister dst,LiftoffRegister lhs,int32_t rhs)2226 void LiftoffAssembler::emit_i16x8_shri_u(LiftoffRegister dst,
2227                                          LiftoffRegister lhs, int32_t rhs) {
2228   bailout(kSimd, "emit_i16x8_shri_u");
2229 }
2230 
emit_i16x8_add(LiftoffRegister dst,LiftoffRegister lhs,LiftoffRegister rhs)2231 void LiftoffAssembler::emit_i16x8_add(LiftoffRegister dst, LiftoffRegister lhs,
2232                                       LiftoffRegister rhs) {
2233   bailout(kSimd, "emit_i16x8_add");
2234 }
2235 
emit_i16x8_add_sat_s(LiftoffRegister dst,LiftoffRegister lhs,LiftoffRegister rhs)2236 void LiftoffAssembler::emit_i16x8_add_sat_s(LiftoffRegister dst,
2237                                             LiftoffRegister lhs,
2238                                             LiftoffRegister rhs) {
2239   bailout(kSimd, "emit_i16x8_add_sat_s");
2240 }
2241 
emit_i16x8_add_sat_u(LiftoffRegister dst,LiftoffRegister lhs,LiftoffRegister rhs)2242 void LiftoffAssembler::emit_i16x8_add_sat_u(LiftoffRegister dst,
2243                                             LiftoffRegister lhs,
2244                                             LiftoffRegister rhs) {
2245   bailout(kSimd, "emit_i16x8_add_sat_u");
2246 }
2247 
emit_i16x8_sub(LiftoffRegister dst,LiftoffRegister lhs,LiftoffRegister rhs)2248 void LiftoffAssembler::emit_i16x8_sub(LiftoffRegister dst, LiftoffRegister lhs,
2249                                       LiftoffRegister rhs) {
2250   bailout(kSimd, "emit_i16x8_sub");
2251 }
2252 
emit_i16x8_sub_sat_s(LiftoffRegister dst,LiftoffRegister lhs,LiftoffRegister rhs)2253 void LiftoffAssembler::emit_i16x8_sub_sat_s(LiftoffRegister dst,
2254                                             LiftoffRegister lhs,
2255                                             LiftoffRegister rhs) {
2256   bailout(kSimd, "emit_i16x8_sub_sat_s");
2257 }
2258 
emit_i16x8_sub_sat_u(LiftoffRegister dst,LiftoffRegister lhs,LiftoffRegister rhs)2259 void LiftoffAssembler::emit_i16x8_sub_sat_u(LiftoffRegister dst,
2260                                             LiftoffRegister lhs,
2261                                             LiftoffRegister rhs) {
2262   bailout(kSimd, "emit_i16x8_sub_sat_u");
2263 }
2264 
emit_i16x8_mul(LiftoffRegister dst,LiftoffRegister lhs,LiftoffRegister rhs)2265 void LiftoffAssembler::emit_i16x8_mul(LiftoffRegister dst, LiftoffRegister lhs,
2266                                       LiftoffRegister rhs) {
2267   bailout(kSimd, "emit_i16x8_mul");
2268 }
2269 
emit_i16x8_min_s(LiftoffRegister dst,LiftoffRegister lhs,LiftoffRegister rhs)2270 void LiftoffAssembler::emit_i16x8_min_s(LiftoffRegister dst,
2271                                         LiftoffRegister lhs,
2272                                         LiftoffRegister rhs) {
2273   bailout(kSimd, "emit_i16x8_min_s");
2274 }
2275 
emit_i16x8_min_u(LiftoffRegister dst,LiftoffRegister lhs,LiftoffRegister rhs)2276 void LiftoffAssembler::emit_i16x8_min_u(LiftoffRegister dst,
2277                                         LiftoffRegister lhs,
2278                                         LiftoffRegister rhs) {
2279   bailout(kSimd, "emit_i16x8_min_u");
2280 }
2281 
emit_i16x8_max_s(LiftoffRegister dst,LiftoffRegister lhs,LiftoffRegister rhs)2282 void LiftoffAssembler::emit_i16x8_max_s(LiftoffRegister dst,
2283                                         LiftoffRegister lhs,
2284                                         LiftoffRegister rhs) {
2285   bailout(kSimd, "emit_i16x8_max_s");
2286 }
2287 
emit_i16x8_max_u(LiftoffRegister dst,LiftoffRegister lhs,LiftoffRegister rhs)2288 void LiftoffAssembler::emit_i16x8_max_u(LiftoffRegister dst,
2289                                         LiftoffRegister lhs,
2290                                         LiftoffRegister rhs) {
2291   bailout(kSimd, "emit_i16x8_max_u");
2292 }
2293 
emit_i32x4_neg(LiftoffRegister dst,LiftoffRegister src)2294 void LiftoffAssembler::emit_i32x4_neg(LiftoffRegister dst,
2295                                       LiftoffRegister src) {
2296   bailout(kSimd, "emit_i32x4_neg");
2297 }
2298 
emit_i32x4_alltrue(LiftoffRegister dst,LiftoffRegister src)2299 void LiftoffAssembler::emit_i32x4_alltrue(LiftoffRegister dst,
2300                                           LiftoffRegister src) {
2301   bailout(kSimd, "emit_i32x4_alltrue");
2302 }
2303 
emit_i32x4_bitmask(LiftoffRegister dst,LiftoffRegister src)2304 void LiftoffAssembler::emit_i32x4_bitmask(LiftoffRegister dst,
2305                                           LiftoffRegister src) {
2306   bailout(kSimd, "emit_i32x4_bitmask");
2307 }
2308 
emit_i32x4_shl(LiftoffRegister dst,LiftoffRegister lhs,LiftoffRegister rhs)2309 void LiftoffAssembler::emit_i32x4_shl(LiftoffRegister dst, LiftoffRegister lhs,
2310                                       LiftoffRegister rhs) {
2311   bailout(kSimd, "emit_i32x4_shl");
2312 }
2313 
emit_i32x4_shli(LiftoffRegister dst,LiftoffRegister lhs,int32_t rhs)2314 void LiftoffAssembler::emit_i32x4_shli(LiftoffRegister dst, LiftoffRegister lhs,
2315                                        int32_t rhs) {
2316   bailout(kSimd, "emit_i32x4_shli");
2317 }
2318 
emit_i32x4_shr_s(LiftoffRegister dst,LiftoffRegister lhs,LiftoffRegister rhs)2319 void LiftoffAssembler::emit_i32x4_shr_s(LiftoffRegister dst,
2320                                         LiftoffRegister lhs,
2321                                         LiftoffRegister rhs) {
2322   bailout(kSimd, "emit_i32x4_shr_s");
2323 }
2324 
emit_i32x4_shri_s(LiftoffRegister dst,LiftoffRegister lhs,int32_t rhs)2325 void LiftoffAssembler::emit_i32x4_shri_s(LiftoffRegister dst,
2326                                          LiftoffRegister lhs, int32_t rhs) {
2327   bailout(kSimd, "emit_i32x4_shri_s");
2328 }
2329 
emit_i32x4_shr_u(LiftoffRegister dst,LiftoffRegister lhs,LiftoffRegister rhs)2330 void LiftoffAssembler::emit_i32x4_shr_u(LiftoffRegister dst,
2331                                         LiftoffRegister lhs,
2332                                         LiftoffRegister rhs) {
2333   bailout(kSimd, "emit_i32x4_shr_u");
2334 }
2335 
emit_i32x4_shri_u(LiftoffRegister dst,LiftoffRegister lhs,int32_t rhs)2336 void LiftoffAssembler::emit_i32x4_shri_u(LiftoffRegister dst,
2337                                          LiftoffRegister lhs, int32_t rhs) {
2338   bailout(kSimd, "emit_i32x4_shri_u");
2339 }
2340 
emit_i32x4_add(LiftoffRegister dst,LiftoffRegister lhs,LiftoffRegister rhs)2341 void LiftoffAssembler::emit_i32x4_add(LiftoffRegister dst, LiftoffRegister lhs,
2342                                       LiftoffRegister rhs) {
2343   bailout(kSimd, "emit_i32x4_add");
2344 }
2345 
emit_i32x4_sub(LiftoffRegister dst,LiftoffRegister lhs,LiftoffRegister rhs)2346 void LiftoffAssembler::emit_i32x4_sub(LiftoffRegister dst, LiftoffRegister lhs,
2347                                       LiftoffRegister rhs) {
2348   bailout(kSimd, "emit_i32x4_sub");
2349 }
2350 
emit_i32x4_mul(LiftoffRegister dst,LiftoffRegister lhs,LiftoffRegister rhs)2351 void LiftoffAssembler::emit_i32x4_mul(LiftoffRegister dst, LiftoffRegister lhs,
2352                                       LiftoffRegister rhs) {
2353   bailout(kSimd, "emit_i32x4_mul");
2354 }
2355 
emit_i32x4_min_s(LiftoffRegister dst,LiftoffRegister lhs,LiftoffRegister rhs)2356 void LiftoffAssembler::emit_i32x4_min_s(LiftoffRegister dst,
2357                                         LiftoffRegister lhs,
2358                                         LiftoffRegister rhs) {
2359   bailout(kSimd, "emit_i32x4_min_s");
2360 }
2361 
emit_i32x4_min_u(LiftoffRegister dst,LiftoffRegister lhs,LiftoffRegister rhs)2362 void LiftoffAssembler::emit_i32x4_min_u(LiftoffRegister dst,
2363                                         LiftoffRegister lhs,
2364                                         LiftoffRegister rhs) {
2365   bailout(kSimd, "emit_i32x4_min_u");
2366 }
2367 
emit_i32x4_max_s(LiftoffRegister dst,LiftoffRegister lhs,LiftoffRegister rhs)2368 void LiftoffAssembler::emit_i32x4_max_s(LiftoffRegister dst,
2369                                         LiftoffRegister lhs,
2370                                         LiftoffRegister rhs) {
2371   bailout(kSimd, "emit_i32x4_max_s");
2372 }
2373 
emit_i32x4_max_u(LiftoffRegister dst,LiftoffRegister lhs,LiftoffRegister rhs)2374 void LiftoffAssembler::emit_i32x4_max_u(LiftoffRegister dst,
2375                                         LiftoffRegister lhs,
2376                                         LiftoffRegister rhs) {
2377   bailout(kSimd, "emit_i32x4_max_u");
2378 }
2379 
emit_i32x4_dot_i16x8_s(LiftoffRegister dst,LiftoffRegister lhs,LiftoffRegister rhs)2380 void LiftoffAssembler::emit_i32x4_dot_i16x8_s(LiftoffRegister dst,
2381                                               LiftoffRegister lhs,
2382                                               LiftoffRegister rhs) {
2383   bailout(kSimd, "emit_i32x4_dot_i16x8_s");
2384 }
2385 
emit_i64x2_neg(LiftoffRegister dst,LiftoffRegister src)2386 void LiftoffAssembler::emit_i64x2_neg(LiftoffRegister dst,
2387                                       LiftoffRegister src) {
2388   bailout(kSimd, "emit_i64x2_neg");
2389 }
2390 
emit_i64x2_alltrue(LiftoffRegister dst,LiftoffRegister src)2391 void LiftoffAssembler::emit_i64x2_alltrue(LiftoffRegister dst,
2392                                           LiftoffRegister src) {
2393   bailout(kSimd, "emit_i64x2_alltrue");
2394 }
2395 
emit_i64x2_bitmask(LiftoffRegister dst,LiftoffRegister src)2396 void LiftoffAssembler::emit_i64x2_bitmask(LiftoffRegister dst,
2397                                           LiftoffRegister src) {
2398   bailout(kSimd, "emit_i64x2_bitmask");
2399 }
2400 
emit_i64x2_shl(LiftoffRegister dst,LiftoffRegister lhs,LiftoffRegister rhs)2401 void LiftoffAssembler::emit_i64x2_shl(LiftoffRegister dst, LiftoffRegister lhs,
2402                                       LiftoffRegister rhs) {
2403   bailout(kSimd, "emit_i64x2_shl");
2404 }
2405 
emit_i64x2_shli(LiftoffRegister dst,LiftoffRegister lhs,int32_t rhs)2406 void LiftoffAssembler::emit_i64x2_shli(LiftoffRegister dst, LiftoffRegister lhs,
2407                                        int32_t rhs) {
2408   bailout(kSimd, "emit_i64x2_shli");
2409 }
2410 
emit_i64x2_shr_s(LiftoffRegister dst,LiftoffRegister lhs,LiftoffRegister rhs)2411 void LiftoffAssembler::emit_i64x2_shr_s(LiftoffRegister dst,
2412                                         LiftoffRegister lhs,
2413                                         LiftoffRegister rhs) {
2414   bailout(kSimd, "emit_i64x2_shr_s");
2415 }
2416 
emit_i64x2_shri_s(LiftoffRegister dst,LiftoffRegister lhs,int32_t rhs)2417 void LiftoffAssembler::emit_i64x2_shri_s(LiftoffRegister dst,
2418                                          LiftoffRegister lhs, int32_t rhs) {
2419   bailout(kSimd, "emit_i64x2_shri_s");
2420 }
2421 
emit_i64x2_shr_u(LiftoffRegister dst,LiftoffRegister lhs,LiftoffRegister rhs)2422 void LiftoffAssembler::emit_i64x2_shr_u(LiftoffRegister dst,
2423                                         LiftoffRegister lhs,
2424                                         LiftoffRegister rhs) {
2425   bailout(kSimd, "emit_i64x2_shr_u");
2426 }
2427 
emit_i64x2_shri_u(LiftoffRegister dst,LiftoffRegister lhs,int32_t rhs)2428 void LiftoffAssembler::emit_i64x2_shri_u(LiftoffRegister dst,
2429                                          LiftoffRegister lhs, int32_t rhs) {
2430   bailout(kSimd, "emit_i64x2_shri_u");
2431 }
2432 
emit_i64x2_add(LiftoffRegister dst,LiftoffRegister lhs,LiftoffRegister rhs)2433 void LiftoffAssembler::emit_i64x2_add(LiftoffRegister dst, LiftoffRegister lhs,
2434                                       LiftoffRegister rhs) {
2435   bailout(kSimd, "emit_i64x2_add");
2436 }
2437 
emit_i64x2_sub(LiftoffRegister dst,LiftoffRegister lhs,LiftoffRegister rhs)2438 void LiftoffAssembler::emit_i64x2_sub(LiftoffRegister dst, LiftoffRegister lhs,
2439                                       LiftoffRegister rhs) {
2440   bailout(kSimd, "emit_i64x2_sub");
2441 }
2442 
emit_i64x2_mul(LiftoffRegister dst,LiftoffRegister lhs,LiftoffRegister rhs)2443 void LiftoffAssembler::emit_i64x2_mul(LiftoffRegister dst, LiftoffRegister lhs,
2444                                       LiftoffRegister rhs) {
2445   bailout(kSimd, "emit_i64x2_mul");
2446 }
2447 
emit_i64x2_gt_s(LiftoffRegister dst,LiftoffRegister lhs,LiftoffRegister rhs)2448 void LiftoffAssembler::emit_i64x2_gt_s(LiftoffRegister dst, LiftoffRegister lhs,
2449                                        LiftoffRegister rhs) {
2450   bailout(kSimd, "emit_i64x2_gt_s");
2451 }
2452 
emit_i64x2_ge_s(LiftoffRegister dst,LiftoffRegister lhs,LiftoffRegister rhs)2453 void LiftoffAssembler::emit_i64x2_ge_s(LiftoffRegister dst, LiftoffRegister lhs,
2454                                        LiftoffRegister rhs) {
2455   bailout(kSimd, "emit_i64x2_ge_s");
2456 }
2457 
emit_f32x4_abs(LiftoffRegister dst,LiftoffRegister src)2458 void LiftoffAssembler::emit_f32x4_abs(LiftoffRegister dst,
2459                                       LiftoffRegister src) {
2460   bailout(kSimd, "emit_f32x4_abs");
2461 }
2462 
emit_f32x4_neg(LiftoffRegister dst,LiftoffRegister src)2463 void LiftoffAssembler::emit_f32x4_neg(LiftoffRegister dst,
2464                                       LiftoffRegister src) {
2465   bailout(kSimd, "emit_f32x4_neg");
2466 }
2467 
emit_f32x4_sqrt(LiftoffRegister dst,LiftoffRegister src)2468 void LiftoffAssembler::emit_f32x4_sqrt(LiftoffRegister dst,
2469                                        LiftoffRegister src) {
2470   bailout(kSimd, "emit_f32x4_sqrt");
2471 }
2472 
emit_f32x4_ceil(LiftoffRegister dst,LiftoffRegister src)2473 bool LiftoffAssembler::emit_f32x4_ceil(LiftoffRegister dst,
2474                                        LiftoffRegister src) {
2475   bailout(kSimd, "emit_f32x4_ceil");
2476   return true;
2477 }
2478 
emit_f32x4_floor(LiftoffRegister dst,LiftoffRegister src)2479 bool LiftoffAssembler::emit_f32x4_floor(LiftoffRegister dst,
2480                                         LiftoffRegister src) {
2481   bailout(kSimd, "emit_f32x4_floor");
2482   return true;
2483 }
2484 
emit_f32x4_trunc(LiftoffRegister dst,LiftoffRegister src)2485 bool LiftoffAssembler::emit_f32x4_trunc(LiftoffRegister dst,
2486                                         LiftoffRegister src) {
2487   bailout(kSimd, "emit_f32x4_trunc");
2488   return true;
2489 }
2490 
emit_f32x4_nearest_int(LiftoffRegister dst,LiftoffRegister src)2491 bool LiftoffAssembler::emit_f32x4_nearest_int(LiftoffRegister dst,
2492                                               LiftoffRegister src) {
2493   bailout(kSimd, "emit_f32x4_nearest_int");
2494   return true;
2495 }
2496 
emit_f32x4_add(LiftoffRegister dst,LiftoffRegister lhs,LiftoffRegister rhs)2497 void LiftoffAssembler::emit_f32x4_add(LiftoffRegister dst, LiftoffRegister lhs,
2498                                       LiftoffRegister rhs) {
2499   bailout(kSimd, "emit_f32x4_add");
2500 }
2501 
emit_f32x4_sub(LiftoffRegister dst,LiftoffRegister lhs,LiftoffRegister rhs)2502 void LiftoffAssembler::emit_f32x4_sub(LiftoffRegister dst, LiftoffRegister lhs,
2503                                       LiftoffRegister rhs) {
2504   bailout(kSimd, "emit_f32x4_sub");
2505 }
2506 
emit_f32x4_mul(LiftoffRegister dst,LiftoffRegister lhs,LiftoffRegister rhs)2507 void LiftoffAssembler::emit_f32x4_mul(LiftoffRegister dst, LiftoffRegister lhs,
2508                                       LiftoffRegister rhs) {
2509   bailout(kSimd, "emit_f32x4_mul");
2510 }
2511 
emit_f32x4_div(LiftoffRegister dst,LiftoffRegister lhs,LiftoffRegister rhs)2512 void LiftoffAssembler::emit_f32x4_div(LiftoffRegister dst, LiftoffRegister lhs,
2513                                       LiftoffRegister rhs) {
2514   bailout(kSimd, "emit_f32x4_div");
2515 }
2516 
emit_f32x4_min(LiftoffRegister dst,LiftoffRegister lhs,LiftoffRegister rhs)2517 void LiftoffAssembler::emit_f32x4_min(LiftoffRegister dst, LiftoffRegister lhs,
2518                                       LiftoffRegister rhs) {
2519   bailout(kSimd, "emit_f32x4_min");
2520 }
2521 
emit_f32x4_max(LiftoffRegister dst,LiftoffRegister lhs,LiftoffRegister rhs)2522 void LiftoffAssembler::emit_f32x4_max(LiftoffRegister dst, LiftoffRegister lhs,
2523                                       LiftoffRegister rhs) {
2524   bailout(kSimd, "emit_f32x4_max");
2525 }
2526 
emit_f32x4_pmin(LiftoffRegister dst,LiftoffRegister lhs,LiftoffRegister rhs)2527 void LiftoffAssembler::emit_f32x4_pmin(LiftoffRegister dst, LiftoffRegister lhs,
2528                                        LiftoffRegister rhs) {
2529   bailout(kSimd, "emit_f32x4_pmin");
2530 }
2531 
emit_f32x4_pmax(LiftoffRegister dst,LiftoffRegister lhs,LiftoffRegister rhs)2532 void LiftoffAssembler::emit_f32x4_pmax(LiftoffRegister dst, LiftoffRegister lhs,
2533                                        LiftoffRegister rhs) {
2534   bailout(kSimd, "emit_f32x4_pmax");
2535 }
2536 
emit_f64x2_abs(LiftoffRegister dst,LiftoffRegister src)2537 void LiftoffAssembler::emit_f64x2_abs(LiftoffRegister dst,
2538                                       LiftoffRegister src) {
2539   bailout(kSimd, "emit_f64x2_abs");
2540 }
2541 
emit_f64x2_neg(LiftoffRegister dst,LiftoffRegister src)2542 void LiftoffAssembler::emit_f64x2_neg(LiftoffRegister dst,
2543                                       LiftoffRegister src) {
2544   bailout(kSimd, "emit_f64x2_neg");
2545 }
2546 
emit_f64x2_sqrt(LiftoffRegister dst,LiftoffRegister src)2547 void LiftoffAssembler::emit_f64x2_sqrt(LiftoffRegister dst,
2548                                        LiftoffRegister src) {
2549   bailout(kSimd, "emit_f64x2_sqrt");
2550 }
2551 
emit_f64x2_ceil(LiftoffRegister dst,LiftoffRegister src)2552 bool LiftoffAssembler::emit_f64x2_ceil(LiftoffRegister dst,
2553                                        LiftoffRegister src) {
2554   bailout(kSimd, "emit_f64x2_ceil");
2555   return true;
2556 }
2557 
emit_f64x2_floor(LiftoffRegister dst,LiftoffRegister src)2558 bool LiftoffAssembler::emit_f64x2_floor(LiftoffRegister dst,
2559                                         LiftoffRegister src) {
2560   bailout(kSimd, "emit_f64x2_floor");
2561   return true;
2562 }
2563 
emit_f64x2_trunc(LiftoffRegister dst,LiftoffRegister src)2564 bool LiftoffAssembler::emit_f64x2_trunc(LiftoffRegister dst,
2565                                         LiftoffRegister src) {
2566   bailout(kSimd, "emit_f64x2_trunc");
2567   return true;
2568 }
2569 
emit_f64x2_nearest_int(LiftoffRegister dst,LiftoffRegister src)2570 bool LiftoffAssembler::emit_f64x2_nearest_int(LiftoffRegister dst,
2571                                               LiftoffRegister src) {
2572   bailout(kSimd, "emit_f64x2_nearest_int");
2573   return true;
2574 }
2575 
emit_f64x2_add(LiftoffRegister dst,LiftoffRegister lhs,LiftoffRegister rhs)2576 void LiftoffAssembler::emit_f64x2_add(LiftoffRegister dst, LiftoffRegister lhs,
2577                                       LiftoffRegister rhs) {
2578   bailout(kSimd, "emit_f64x2_add");
2579 }
2580 
emit_f64x2_sub(LiftoffRegister dst,LiftoffRegister lhs,LiftoffRegister rhs)2581 void LiftoffAssembler::emit_f64x2_sub(LiftoffRegister dst, LiftoffRegister lhs,
2582                                       LiftoffRegister rhs) {
2583   bailout(kSimd, "emit_f64x2_sub");
2584 }
2585 
emit_f64x2_mul(LiftoffRegister dst,LiftoffRegister lhs,LiftoffRegister rhs)2586 void LiftoffAssembler::emit_f64x2_mul(LiftoffRegister dst, LiftoffRegister lhs,
2587                                       LiftoffRegister rhs) {
2588   bailout(kSimd, "emit_f64x2_mul");
2589 }
2590 
emit_f64x2_div(LiftoffRegister dst,LiftoffRegister lhs,LiftoffRegister rhs)2591 void LiftoffAssembler::emit_f64x2_div(LiftoffRegister dst, LiftoffRegister lhs,
2592                                       LiftoffRegister rhs) {
2593   bailout(kSimd, "emit_f64x2_div");
2594 }
2595 
emit_f64x2_min(LiftoffRegister dst,LiftoffRegister lhs,LiftoffRegister rhs)2596 void LiftoffAssembler::emit_f64x2_min(LiftoffRegister dst, LiftoffRegister lhs,
2597                                       LiftoffRegister rhs) {
2598   bailout(kSimd, "emit_f64x2_min");
2599 }
2600 
emit_f64x2_max(LiftoffRegister dst,LiftoffRegister lhs,LiftoffRegister rhs)2601 void LiftoffAssembler::emit_f64x2_max(LiftoffRegister dst, LiftoffRegister lhs,
2602                                       LiftoffRegister rhs) {
2603   bailout(kSimd, "emit_f64x2_max");
2604 }
2605 
emit_f64x2_pmin(LiftoffRegister dst,LiftoffRegister lhs,LiftoffRegister rhs)2606 void LiftoffAssembler::emit_f64x2_pmin(LiftoffRegister dst, LiftoffRegister lhs,
2607                                        LiftoffRegister rhs) {
2608   bailout(kSimd, "emit_f64x2_pmin");
2609 }
2610 
emit_f64x2_pmax(LiftoffRegister dst,LiftoffRegister lhs,LiftoffRegister rhs)2611 void LiftoffAssembler::emit_f64x2_pmax(LiftoffRegister dst, LiftoffRegister lhs,
2612                                        LiftoffRegister rhs) {
2613   bailout(kSimd, "emit_f64x2_pmax");
2614 }
2615 
emit_f64x2_convert_low_i32x4_s(LiftoffRegister dst,LiftoffRegister src)2616 void LiftoffAssembler::emit_f64x2_convert_low_i32x4_s(LiftoffRegister dst,
2617                                                       LiftoffRegister src) {
2618   bailout(kSimd, "emit_f64x2_convert_low_i32x4_s");
2619 }
2620 
emit_f64x2_convert_low_i32x4_u(LiftoffRegister dst,LiftoffRegister src)2621 void LiftoffAssembler::emit_f64x2_convert_low_i32x4_u(LiftoffRegister dst,
2622                                                       LiftoffRegister src) {
2623   bailout(kSimd, "emit_f64x2_convert_low_i32x4_u");
2624 }
2625 
emit_f64x2_promote_low_f32x4(LiftoffRegister dst,LiftoffRegister src)2626 void LiftoffAssembler::emit_f64x2_promote_low_f32x4(LiftoffRegister dst,
2627                                                     LiftoffRegister src) {
2628   bailout(kSimd, "emit_f64x2_promote_low_f32x4");
2629 }
2630 
emit_i32x4_sconvert_f32x4(LiftoffRegister dst,LiftoffRegister src)2631 void LiftoffAssembler::emit_i32x4_sconvert_f32x4(LiftoffRegister dst,
2632                                                  LiftoffRegister src) {
2633   bailout(kSimd, "emit_i32x4_sconvert_f32x4");
2634 }
2635 
emit_i32x4_uconvert_f32x4(LiftoffRegister dst,LiftoffRegister src)2636 void LiftoffAssembler::emit_i32x4_uconvert_f32x4(LiftoffRegister dst,
2637                                                  LiftoffRegister src) {
2638   bailout(kSimd, "emit_i32x4_uconvert_f32x4");
2639 }
2640 
emit_i32x4_trunc_sat_f64x2_s_zero(LiftoffRegister dst,LiftoffRegister src)2641 void LiftoffAssembler::emit_i32x4_trunc_sat_f64x2_s_zero(LiftoffRegister dst,
2642                                                          LiftoffRegister src) {
2643   bailout(kSimd, "emit_i32x4_trunc_sat_f64x2_s_zero");
2644 }
2645 
emit_i32x4_trunc_sat_f64x2_u_zero(LiftoffRegister dst,LiftoffRegister src)2646 void LiftoffAssembler::emit_i32x4_trunc_sat_f64x2_u_zero(LiftoffRegister dst,
2647                                                          LiftoffRegister src) {
2648   bailout(kSimd, "emit_i32x4_trunc_sat_f64x2_u_zero");
2649 }
2650 
emit_f32x4_sconvert_i32x4(LiftoffRegister dst,LiftoffRegister src)2651 void LiftoffAssembler::emit_f32x4_sconvert_i32x4(LiftoffRegister dst,
2652                                                  LiftoffRegister src) {
2653   bailout(kSimd, "emit_f32x4_sconvert_i32x4");
2654 }
2655 
emit_f32x4_uconvert_i32x4(LiftoffRegister dst,LiftoffRegister src)2656 void LiftoffAssembler::emit_f32x4_uconvert_i32x4(LiftoffRegister dst,
2657                                                  LiftoffRegister src) {
2658   bailout(kSimd, "emit_f32x4_uconvert_i32x4");
2659 }
2660 
emit_f32x4_demote_f64x2_zero(LiftoffRegister dst,LiftoffRegister src)2661 void LiftoffAssembler::emit_f32x4_demote_f64x2_zero(LiftoffRegister dst,
2662                                                     LiftoffRegister src) {
2663   bailout(kSimd, "emit_f32x4_demote_f64x2_zero");
2664 }
2665 
emit_i8x16_sconvert_i16x8(LiftoffRegister dst,LiftoffRegister lhs,LiftoffRegister rhs)2666 void LiftoffAssembler::emit_i8x16_sconvert_i16x8(LiftoffRegister dst,
2667                                                  LiftoffRegister lhs,
2668                                                  LiftoffRegister rhs) {
2669   bailout(kSimd, "emit_i8x16_sconvert_i16x8");
2670 }
2671 
emit_i8x16_uconvert_i16x8(LiftoffRegister dst,LiftoffRegister lhs,LiftoffRegister rhs)2672 void LiftoffAssembler::emit_i8x16_uconvert_i16x8(LiftoffRegister dst,
2673                                                  LiftoffRegister lhs,
2674                                                  LiftoffRegister rhs) {
2675   bailout(kSimd, "emit_i8x16_uconvert_i16x8");
2676 }
2677 
emit_i16x8_sconvert_i32x4(LiftoffRegister dst,LiftoffRegister lhs,LiftoffRegister rhs)2678 void LiftoffAssembler::emit_i16x8_sconvert_i32x4(LiftoffRegister dst,
2679                                                  LiftoffRegister lhs,
2680                                                  LiftoffRegister rhs) {
2681   bailout(kSimd, "emit_i16x8_sconvert_i32x4");
2682 }
2683 
// Not yet implemented on LOONG64: record an unsupported-SIMD bailout.
void LiftoffAssembler::emit_i16x8_uconvert_i32x4(LiftoffRegister dst,
                                                 LiftoffRegister lhs,
                                                 LiftoffRegister rhs) {
  bailout(kSimd, "emit_i16x8_uconvert_i32x4");
}
2689 
// Not yet implemented on LOONG64: record an unsupported-SIMD bailout.
void LiftoffAssembler::emit_i16x8_sconvert_i8x16_low(LiftoffRegister dst,
                                                     LiftoffRegister src) {
  bailout(kSimd, "emit_i16x8_sconvert_i8x16_low");
}
2694 
// Not yet implemented on LOONG64: record an unsupported-SIMD bailout.
void LiftoffAssembler::emit_i16x8_sconvert_i8x16_high(LiftoffRegister dst,
                                                      LiftoffRegister src) {
  bailout(kSimd, "emit_i16x8_sconvert_i8x16_high");
}
2699 
// Not yet implemented on LOONG64: record an unsupported-SIMD bailout.
void LiftoffAssembler::emit_i16x8_uconvert_i8x16_low(LiftoffRegister dst,
                                                     LiftoffRegister src) {
  bailout(kSimd, "emit_i16x8_uconvert_i8x16_low");
}
2704 
// Not yet implemented on LOONG64: record an unsupported-SIMD bailout.
void LiftoffAssembler::emit_i16x8_uconvert_i8x16_high(LiftoffRegister dst,
                                                      LiftoffRegister src) {
  bailout(kSimd, "emit_i16x8_uconvert_i8x16_high");
}
2709 
// Not yet implemented on LOONG64: record an unsupported-SIMD bailout.
void LiftoffAssembler::emit_i32x4_sconvert_i16x8_low(LiftoffRegister dst,
                                                     LiftoffRegister src) {
  bailout(kSimd, "emit_i32x4_sconvert_i16x8_low");
}
2714 
// Not yet implemented on LOONG64: record an unsupported-SIMD bailout.
void LiftoffAssembler::emit_i32x4_sconvert_i16x8_high(LiftoffRegister dst,
                                                      LiftoffRegister src) {
  bailout(kSimd, "emit_i32x4_sconvert_i16x8_high");
}
2719 
// Not yet implemented on LOONG64: record an unsupported-SIMD bailout.
void LiftoffAssembler::emit_i32x4_uconvert_i16x8_low(LiftoffRegister dst,
                                                     LiftoffRegister src) {
  bailout(kSimd, "emit_i32x4_uconvert_i16x8_low");
}
2724 
// Not yet implemented on LOONG64: record an unsupported-SIMD bailout.
void LiftoffAssembler::emit_i32x4_uconvert_i16x8_high(LiftoffRegister dst,
                                                      LiftoffRegister src) {
  bailout(kSimd, "emit_i32x4_uconvert_i16x8_high");
}
2729 
// Not yet implemented on LOONG64: record an unsupported-SIMD bailout.
void LiftoffAssembler::emit_i64x2_sconvert_i32x4_low(LiftoffRegister dst,
                                                     LiftoffRegister src) {
  bailout(kSimd, "emit_i64x2_sconvert_i32x4_low");
}
2734 
// Not yet implemented on LOONG64: record an unsupported-SIMD bailout.
void LiftoffAssembler::emit_i64x2_sconvert_i32x4_high(LiftoffRegister dst,
                                                      LiftoffRegister src) {
  bailout(kSimd, "emit_i64x2_sconvert_i32x4_high");
}
2739 
// Not yet implemented on LOONG64: record an unsupported-SIMD bailout.
void LiftoffAssembler::emit_i64x2_uconvert_i32x4_low(LiftoffRegister dst,
                                                     LiftoffRegister src) {
  bailout(kSimd, "emit_i64x2_uconvert_i32x4_low");
}
2744 
// Not yet implemented on LOONG64: record an unsupported-SIMD bailout.
void LiftoffAssembler::emit_i64x2_uconvert_i32x4_high(LiftoffRegister dst,
                                                      LiftoffRegister src) {
  bailout(kSimd, "emit_i64x2_uconvert_i32x4_high");
}
2749 
// Not yet implemented on LOONG64: record an unsupported-SIMD bailout.
void LiftoffAssembler::emit_i8x16_rounding_average_u(LiftoffRegister dst,
                                                     LiftoffRegister lhs,
                                                     LiftoffRegister rhs) {
  bailout(kSimd, "emit_i8x16_rounding_average_u");
}
2755 
// Not yet implemented on LOONG64: record an unsupported-SIMD bailout.
void LiftoffAssembler::emit_i16x8_rounding_average_u(LiftoffRegister dst,
                                                     LiftoffRegister lhs,
                                                     LiftoffRegister rhs) {
  bailout(kSimd, "emit_i16x8_rounding_average_u");
}
2761 
// Not yet implemented on LOONG64: record an unsupported-SIMD bailout.
void LiftoffAssembler::emit_i8x16_abs(LiftoffRegister dst,
                                      LiftoffRegister src) {
  bailout(kSimd, "emit_i8x16_abs");
}
2766 
// Not yet implemented on LOONG64: record an unsupported-SIMD bailout.
void LiftoffAssembler::emit_i16x8_abs(LiftoffRegister dst,
                                      LiftoffRegister src) {
  bailout(kSimd, "emit_i16x8_abs");
}
2771 
// Not yet implemented on LOONG64: record an unsupported-SIMD bailout.
void LiftoffAssembler::emit_i32x4_abs(LiftoffRegister dst,
                                      LiftoffRegister src) {
  bailout(kSimd, "emit_i32x4_abs");
}
2776 
// Not yet implemented on LOONG64: record an unsupported-SIMD bailout.
void LiftoffAssembler::emit_i8x16_extract_lane_s(LiftoffRegister dst,
                                                 LiftoffRegister lhs,
                                                 uint8_t imm_lane_idx) {
  bailout(kSimd, "emit_i8x16_extract_lane_s");
}
2782 
// Not yet implemented on LOONG64: record an unsupported-SIMD bailout.
void LiftoffAssembler::emit_i8x16_extract_lane_u(LiftoffRegister dst,
                                                 LiftoffRegister lhs,
                                                 uint8_t imm_lane_idx) {
  bailout(kSimd, "emit_i8x16_extract_lane_u");
}
2788 
// Not yet implemented on LOONG64: record an unsupported-SIMD bailout.
void LiftoffAssembler::emit_i16x8_extract_lane_s(LiftoffRegister dst,
                                                 LiftoffRegister lhs,
                                                 uint8_t imm_lane_idx) {
  bailout(kSimd, "emit_i16x8_extract_lane_s");
}
2794 
// Not yet implemented on LOONG64: record an unsupported-SIMD bailout.
void LiftoffAssembler::emit_i16x8_extract_lane_u(LiftoffRegister dst,
                                                 LiftoffRegister lhs,
                                                 uint8_t imm_lane_idx) {
  bailout(kSimd, "emit_i16x8_extract_lane_u");
}
2800 
// Not yet implemented on LOONG64: record an unsupported-SIMD bailout.
void LiftoffAssembler::emit_i32x4_extract_lane(LiftoffRegister dst,
                                               LiftoffRegister lhs,
                                               uint8_t imm_lane_idx) {
  bailout(kSimd, "emit_i32x4_extract_lane");
}
2806 
// Not yet implemented on LOONG64: record an unsupported-SIMD bailout.
void LiftoffAssembler::emit_i64x2_extract_lane(LiftoffRegister dst,
                                               LiftoffRegister lhs,
                                               uint8_t imm_lane_idx) {
  bailout(kSimd, "emit_i64x2_extract_lane");
}
2812 
// Not yet implemented on LOONG64: record an unsupported-SIMD bailout.
void LiftoffAssembler::emit_f32x4_extract_lane(LiftoffRegister dst,
                                               LiftoffRegister lhs,
                                               uint8_t imm_lane_idx) {
  bailout(kSimd, "emit_f32x4_extract_lane");
}
2818 
// Not yet implemented on LOONG64: record an unsupported-SIMD bailout.
void LiftoffAssembler::emit_f64x2_extract_lane(LiftoffRegister dst,
                                               LiftoffRegister lhs,
                                               uint8_t imm_lane_idx) {
  bailout(kSimd, "emit_f64x2_extract_lane");
}
2824 
// Not yet implemented on LOONG64: record an unsupported-SIMD bailout.
void LiftoffAssembler::emit_i8x16_replace_lane(LiftoffRegister dst,
                                               LiftoffRegister src1,
                                               LiftoffRegister src2,
                                               uint8_t imm_lane_idx) {
  bailout(kSimd, "emit_i8x16_replace_lane");
}
2831 
// Not yet implemented on LOONG64: record an unsupported-SIMD bailout.
void LiftoffAssembler::emit_i16x8_replace_lane(LiftoffRegister dst,
                                               LiftoffRegister src1,
                                               LiftoffRegister src2,
                                               uint8_t imm_lane_idx) {
  bailout(kSimd, "emit_i16x8_replace_lane");
}
2838 
// Not yet implemented on LOONG64: record an unsupported-SIMD bailout.
void LiftoffAssembler::emit_i32x4_replace_lane(LiftoffRegister dst,
                                               LiftoffRegister src1,
                                               LiftoffRegister src2,
                                               uint8_t imm_lane_idx) {
  bailout(kSimd, "emit_i32x4_replace_lane");
}
2845 
// Not yet implemented on LOONG64: record an unsupported-SIMD bailout.
void LiftoffAssembler::emit_i64x2_replace_lane(LiftoffRegister dst,
                                               LiftoffRegister src1,
                                               LiftoffRegister src2,
                                               uint8_t imm_lane_idx) {
  bailout(kSimd, "emit_i64x2_replace_lane");
}
2852 
// Not yet implemented on LOONG64: record an unsupported-SIMD bailout.
void LiftoffAssembler::emit_f32x4_replace_lane(LiftoffRegister dst,
                                               LiftoffRegister src1,
                                               LiftoffRegister src2,
                                               uint8_t imm_lane_idx) {
  bailout(kSimd, "emit_f32x4_replace_lane");
}
2859 
// Not yet implemented on LOONG64: record an unsupported-SIMD bailout.
void LiftoffAssembler::emit_f64x2_replace_lane(LiftoffRegister dst,
                                               LiftoffRegister src1,
                                               LiftoffRegister src2,
                                               uint8_t imm_lane_idx) {
  bailout(kSimd, "emit_f64x2_replace_lane");
}
2866 
// Emits a stack overflow check: branches to {ool_code} when the stack pointer
// is at or below the current stack limit.
void LiftoffAssembler::StackCheck(Label* ool_code, Register limit_address) {
  // Load the actual limit value; this clobbers {limit_address}, which is fine
  // as it is only needed for this comparison.
  TurboAssembler::Ld_d(limit_address, MemOperand(limit_address, 0));
  TurboAssembler::Branch(ool_code, ule, sp, Operand(limit_address));
}
2871 
// Calls the C function {wasm_call_trap_callback_for_testing} with zero
// arguments; used by tests to observe traps.
void LiftoffAssembler::CallTrapCallbackForTesting() {
  // Any unused cache register serves as the scratch register required by
  // PrepareCallCFunction.
  PrepareCallCFunction(0, GetUnusedRegister(kGpReg, {}).gp());
  CallCFunction(ExternalReference::wasm_call_trap_callback_for_testing(), 0);
}
2876 
// Aborts with {reason} when supposedly unreachable code is hit; a no-op
// unless debug code is enabled.
void LiftoffAssembler::AssertUnreachable(AbortReason reason) {
  if (FLAG_debug_code) Abort(reason);
}
2880 
PushRegisters(LiftoffRegList regs)2881 void LiftoffAssembler::PushRegisters(LiftoffRegList regs) {
2882   LiftoffRegList gp_regs = regs & kGpCacheRegList;
2883   unsigned num_gp_regs = gp_regs.GetNumRegsSet();
2884   if (num_gp_regs) {
2885     unsigned offset = num_gp_regs * kSystemPointerSize;
2886     addi_d(sp, sp, -offset);
2887     while (!gp_regs.is_empty()) {
2888       LiftoffRegister reg = gp_regs.GetFirstRegSet();
2889       offset -= kSystemPointerSize;
2890       St_d(reg.gp(), MemOperand(sp, offset));
2891       gp_regs.clear(reg);
2892     }
2893     DCHECK_EQ(offset, 0);
2894   }
2895   LiftoffRegList fp_regs = regs & kFpCacheRegList;
2896   unsigned num_fp_regs = fp_regs.GetNumRegsSet();
2897   if (num_fp_regs) {
2898     unsigned slot_size = 8;
2899     addi_d(sp, sp, -(num_fp_regs * slot_size));
2900     unsigned offset = 0;
2901     while (!fp_regs.is_empty()) {
2902       LiftoffRegister reg = fp_regs.GetFirstRegSet();
2903       TurboAssembler::Fst_d(reg.fp(), MemOperand(sp, offset));
2904       fp_regs.clear(reg);
2905       offset += slot_size;
2906     }
2907     DCHECK_EQ(offset, num_fp_regs * slot_size);
2908   }
2909 }
2910 
PopRegisters(LiftoffRegList regs)2911 void LiftoffAssembler::PopRegisters(LiftoffRegList regs) {
2912   LiftoffRegList fp_regs = regs & kFpCacheRegList;
2913   unsigned fp_offset = 0;
2914   while (!fp_regs.is_empty()) {
2915     LiftoffRegister reg = fp_regs.GetFirstRegSet();
2916     TurboAssembler::Fld_d(reg.fp(), MemOperand(sp, fp_offset));
2917     fp_regs.clear(reg);
2918     fp_offset += 8;
2919   }
2920   if (fp_offset) addi_d(sp, sp, fp_offset);
2921   LiftoffRegList gp_regs = regs & kGpCacheRegList;
2922   unsigned gp_offset = 0;
2923   while (!gp_regs.is_empty()) {
2924     LiftoffRegister reg = gp_regs.GetLastRegSet();
2925     Ld_d(reg.gp(), MemOperand(sp, gp_offset));
2926     gp_regs.clear(reg);
2927     gp_offset += kSystemPointerSize;
2928   }
2929   addi_d(sp, sp, gp_offset);
2930 }
2931 
RecordSpillsInSafepoint(SafepointTableBuilder::Safepoint & safepoint,LiftoffRegList all_spills,LiftoffRegList ref_spills,int spill_offset)2932 void LiftoffAssembler::RecordSpillsInSafepoint(
2933     SafepointTableBuilder::Safepoint& safepoint, LiftoffRegList all_spills,
2934     LiftoffRegList ref_spills, int spill_offset) {
2935   int spill_space_size = 0;
2936   while (!all_spills.is_empty()) {
2937     LiftoffRegister reg = all_spills.GetFirstRegSet();
2938     if (ref_spills.has(reg)) {
2939       safepoint.DefineTaggedStackSlot(spill_offset);
2940     }
2941     all_spills.clear(reg);
2942     ++spill_offset;
2943     spill_space_size += kSystemPointerSize;
2944   }
2945   // Record the number of additional spill slots.
2946   RecordOolSpillSpaceSize(spill_space_size);
2947 }
2948 
// Drops {num_stack_slots} slots from the stack, then returns to the caller.
void LiftoffAssembler::DropStackSlotsAndRet(uint32_t num_stack_slots) {
  DCHECK_LT(num_stack_slots,
            (1 << 16) / kSystemPointerSize);  // 16 bit immediate
  Drop(static_cast<int>(num_stack_slots));
  Ret();
}
2955 
// Calls the C function {ext_ref}: all wasm arguments are spilled into a
// freshly allocated stack buffer whose address is passed as the single C
// argument. The (at most one) register result is moved into {rets}; an
// optional additional result of kind {out_argument_kind} is read back from
// the same buffer.
void LiftoffAssembler::CallC(const ValueKindSig* sig,
                             const LiftoffRegister* args,
                             const LiftoffRegister* rets,
                             ValueKind out_argument_kind, int stack_bytes,
                             ExternalReference ext_ref) {
  // Reserve the argument buffer.
  addi_d(sp, sp, -stack_bytes);

  // Store all parameters consecutively into the buffer.
  int arg_bytes = 0;
  for (ValueKind param_kind : sig->parameters()) {
    liftoff::Store(this, sp, arg_bytes, *args++, param_kind);
    arg_bytes += value_kind_size(param_kind);
  }
  DCHECK_LE(arg_bytes, stack_bytes);

  // Pass a pointer to the buffer with the arguments to the C function.
  // On LoongArch, the first argument is passed in {a0}.
  constexpr Register kFirstArgReg = a0;
  mov(kFirstArgReg, sp);

  // Now call the C function.
  constexpr int kNumCCallArgs = 1;
  PrepareCallCFunction(kNumCCallArgs, kScratchReg);
  CallCFunction(ext_ref, kNumCCallArgs);

  // Move return value to the right register.
  const LiftoffRegister* next_result_reg = rets;
  if (sig->return_count() > 0) {
    DCHECK_EQ(1, sig->return_count());
    constexpr Register kReturnReg = a0;
    if (kReturnReg != next_result_reg->gp()) {
      Move(*next_result_reg, LiftoffRegister(kReturnReg), sig->GetReturn(0));
    }
    ++next_result_reg;
  }

  // Load potential output value from the buffer on the stack.
  if (out_argument_kind != kVoid) {
    liftoff::Load(this, *next_result_reg, MemOperand(sp, 0), out_argument_kind);
  }

  // Release the argument buffer again.
  addi_d(sp, sp, stack_bytes);
}
2998 
// Emits a direct call to wasm code at {addr}, relocatable as WASM_CALL.
void LiftoffAssembler::CallNativeWasmCode(Address addr) {
  Call(addr, RelocInfo::WASM_CALL);
}
3002 
// Emits a tail call (jump) to wasm code at {addr}, relocatable as WASM_CALL.
void LiftoffAssembler::TailCallNativeWasmCode(Address addr) {
  Jump(addr, RelocInfo::WASM_CALL);
}
3006 
// Emits an indirect call to {target}. If no target register is given, the
// call target was spilled and is popped off the stack first.
void LiftoffAssembler::CallIndirect(const ValueKindSig* sig,
                                    compiler::CallDescriptor* call_descriptor,
                                    Register target) {
  if (target == no_reg) {
    // Target is on the value stack; pop it into the scratch register.
    Pop(kScratchReg);
    Call(kScratchReg);
  } else {
    Call(target);
  }
}
3017 
// Emits an indirect tail call (jump) to {target}. If no target register is
// given, the call target was spilled and is popped off the stack first.
void LiftoffAssembler::TailCallIndirect(Register target) {
  if (target == no_reg) {
    // Target is on the value stack; pop it into the scratch register.
    Pop(kScratchReg);
    Jump(kScratchReg);
  } else {
    Jump(target);
  }
}
3026 
// Calls the given wasm runtime stub.
void LiftoffAssembler::CallRuntimeStub(WasmCode::RuntimeStubId sid) {
  // A direct call to a wasm runtime stub defined in this module.
  // Just encode the stub index. This will be patched at relocation.
  Call(static_cast<Address>(sid), RelocInfo::WASM_STUB_CALL);
}
3032 
// Reserves {size} bytes of stack space and returns its address in {addr}.
void LiftoffAssembler::AllocateStackSlot(Register addr, uint32_t size) {
  addi_d(sp, sp, -size);
  TurboAssembler::Move(addr, sp);
}
3037 
// Releases {size} bytes of stack space reserved by AllocateStackSlot.
void LiftoffAssembler::DeallocateStackSlot(uint32_t size) {
  addi_d(sp, sp, size);
}
3041 
// No on-stack-replacement specific work is needed here (empty on LOONG64).
void LiftoffAssembler::MaybeOSR() {}
3043 
// Stores 1 to the word at memory address {dst} if the float/double in {src}
// is NaN; leaves the memory untouched otherwise. Only kF32 and kF64 are
// supported.
void LiftoffAssembler::emit_set_if_nan(Register dst, FPURegister src,
                                       ValueKind kind) {
  UseScratchRegisterScope temps(this);
  Register scratch = temps.Acquire();
  Label not_nan;
  // A self-comparison sets the FP condition flag iff {src} is NaN.
  if (kind == kF32) {
    CompareIsNanF32(src, src);
  } else {
    DCHECK_EQ(kind, kF64);
    CompareIsNanF64(src, src);
  }
  // Skip the store when the comparison was false, i.e. {src} is not NaN.
  BranchFalseShortF(&not_nan);
  li(scratch, 1);
  St_w(scratch, MemOperand(dst, 0));
  bind(&not_nan);
}
3060 
// SIMD NaN detection is not implemented on LOONG64; this must not be
// reached (SIMD operations bail out of Liftoff, see the kSimd bailouts
// above).
void LiftoffAssembler::emit_s128_set_if_nan(Register dst, LiftoffRegister src,
                                            Register tmp_gp,
                                            LiftoffRegister tmp_s128,
                                            ValueKind lane_kind) {
  UNIMPLEMENTED();
}
3067 
// Pushes all recorded stack slots (call parameters) onto the stack, from the
// highest parameter slot downwards, padding gaps between slots with
// AllocateStackSpace so that each value ends up at its destination slot.
void LiftoffStackSlots::Construct(int param_slots) {
  DCHECK_LT(0, slots_.size());
  SortInPushOrder();
  int last_stack_slot = param_slots;
  for (auto& slot : slots_) {
    const int stack_slot = slot.dst_slot_;
    // Space between the previous pushed slot and this one; part of it is
    // consumed by the push itself, the rest is allocated as padding.
    int stack_decrement = (last_stack_slot - stack_slot) * kSystemPointerSize;
    DCHECK_LT(0, stack_decrement);
    last_stack_slot = stack_slot;
    const LiftoffAssembler::VarState& src = slot.src_;
    switch (src.loc()) {
      case LiftoffAssembler::VarState::kStack:
        // Copy the value from its spill slot in this frame via the scratch
        // register.
        if (src.kind() != kS128) {
          asm_->AllocateStackSpace(stack_decrement - kSystemPointerSize);
          asm_->Ld_d(kScratchReg, liftoff::GetStackSlot(slot.src_offset_));
          asm_->Push(kScratchReg);
        } else {
          // s128 values occupy two 8-byte pushes.
          asm_->AllocateStackSpace(stack_decrement - kSimd128Size);
          asm_->Ld_d(kScratchReg, liftoff::GetStackSlot(slot.src_offset_ - 8));
          asm_->Push(kScratchReg);
          asm_->Ld_d(kScratchReg, liftoff::GetStackSlot(slot.src_offset_));
          asm_->Push(kScratchReg);
        }
        break;
      case LiftoffAssembler::VarState::kRegister: {
        int pushed_bytes = SlotSizeInBytes(slot);
        asm_->AllocateStackSpace(stack_decrement - pushed_bytes);
        liftoff::push(asm_, src.reg(), src.kind());
        break;
      }
      case LiftoffAssembler::VarState::kIntConst: {
        // Materialize the constant in the scratch register, then push it.
        asm_->AllocateStackSpace(stack_decrement - kSystemPointerSize);
        asm_->li(kScratchReg, Operand(src.i32_const()));
        asm_->Push(kScratchReg);
        break;
      }
    }
  }
}
3107 
3108 }  // namespace wasm
3109 }  // namespace internal
3110 }  // namespace v8
3111 
3112 #endif  // V8_WASM_BASELINE_LOONG64_LIFTOFF_ASSEMBLER_LOONG64_H_
3113