// Copyright 2017 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef V8_WASM_BASELINE_PPC_LIFTOFF_ASSEMBLER_PPC_H_
#define V8_WASM_BASELINE_PPC_LIFTOFF_ASSEMBLER_PPC_H_

#include "src/base/platform/wrappers.h"
#include "src/codegen/assembler.h"
#include "src/heap/memory-chunk.h"
#include "src/wasm/baseline/liftoff-assembler.h"
#include "src/wasm/simd-shuffle.h"
#include "src/wasm/wasm-objects.h"

namespace v8 {
namespace internal {
namespace wasm {

namespace liftoff {

//  half
//  slot        Frame
//  -----+--------------------+---------------------------
//  n+3  |   parameter n      |
//  ...  |       ...          |
//   4   |   parameter 1      | or parameter 2
//   3   |   parameter 0      | or parameter 1
//   2   |  (result address)  | or parameter 0
//  -----+--------------------+---------------------------
//   2   | return addr (lr)   |
//   1   | previous frame (fp)|
//   0   | const pool (r28)   | if const pool is enabled
//  -----+--------------------+  <-- frame ptr (fp) or cp
//  -1   | StackFrame::WASM   |
//  -2   |    instance        |
//  -3   |    feedback vector |
//  -4   |    tiering budget  |
//  -----+--------------------+---------------------------
//  -5   |    slot 0 (high)   |   ^
//  -6   |    slot 0 (low)    |   |
//  -7   |    slot 1 (high)   | Frame slots
//  -8   |    slot 1 (low)    |   |
//       |                    |   v
//  -----+--------------------+  <-- stack ptr (sp)

constexpr int32_t kInstanceOffset =
    (FLAG_enable_embedded_constant_pool ? 3 : 2) * kSystemPointerSize;
constexpr int kFeedbackVectorOffset =
    (FLAG_enable_embedded_constant_pool ? 4 : 3) * kSystemPointerSize;
constexpr int kTierupBudgetOffset =
    (FLAG_enable_embedded_constant_pool ? 5 : 4) * kSystemPointerSize;

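// For example, on 64-bit PPC without an embedded constant pool these work out
// to kInstanceOffset = 16, kFeedbackVectorOffset = 24 and
// kTierupBudgetOffset = 32 bytes below fp, matching the instance, feedback
// vector and tiering budget slots in the diagram above.
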
inline MemOperand GetHalfStackSlot(int offset, RegPairHalf half) {
  int32_t half_offset =
      half == kLowWord ? 0 : LiftoffAssembler::kStackSlotSize / 2;
  return MemOperand(fp, -kInstanceOffset - offset + half_offset);
}

inline MemOperand GetStackSlot(uint32_t offset) {
  return MemOperand(fp, -static_cast<int32_t>(offset));
}

inline MemOperand GetInstanceOperand() { return GetStackSlot(kInstanceOffset); }

inline constexpr Condition ToCondition(LiftoffCondition liftoff_cond) {
  switch (liftoff_cond) {
    case kEqual:
      return eq;
    case kUnequal:
      return ne;
    case kSignedLessThan:
    case kUnsignedLessThan:
      return lt;
    case kSignedLessEqual:
    case kUnsignedLessEqual:
      return le;
    case kSignedGreaterEqual:
    case kUnsignedGreaterEqual:
      return ge;
    case kSignedGreaterThan:
    case kUnsignedGreaterThan:
      return gt;
  }
}

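// Note that the signed and unsigned variants of a comparison map to the same
// PPC condition above; the signedness is instead encoded in the compare
// instruction that gets selected via UseSignedOp below (e.g. CmpS32 vs.
// CmpU32).
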
inline constexpr bool UseSignedOp(LiftoffCondition liftoff_cond) {
  switch (liftoff_cond) {
    case kEqual:
    case kUnequal:
    case kSignedLessThan:
    case kSignedLessEqual:
    case kSignedGreaterThan:
    case kSignedGreaterEqual:
      return true;
    case kUnsignedLessThan:
    case kUnsignedLessEqual:
    case kUnsignedGreaterThan:
    case kUnsignedGreaterEqual:
      return false;
    default:
      UNREACHABLE();
  }
  return false;
}

}  // namespace liftoff

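// PrepareStackFrame only reserves space: it emits a placeholder
// "addi sp, sp, 0" that PatchPrepareStackFrame later overwrites with the real
// frame allocation (or with a branch to out-of-line code for large frames)
// once the final frame size is known.
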
int LiftoffAssembler::PrepareStackFrame() {
  int offset = pc_offset();
  addi(sp, sp, Operand::Zero());
  return offset;
}

void LiftoffAssembler::PrepareTailCall(int num_callee_stack_params,
                                       int stack_param_delta) {
  Register scratch = ip;
  // Push the return address and frame pointer to complete the stack frame.
  AddS64(sp, sp, Operand(-2 * kSystemPointerSize), r0);
  LoadU64(scratch, MemOperand(fp, kSystemPointerSize), r0);
  StoreU64(scratch, MemOperand(sp, kSystemPointerSize), r0);
  LoadU64(scratch, MemOperand(fp), r0);
  StoreU64(scratch, MemOperand(sp), r0);

  // Shift the whole frame upwards.
  int slot_count = num_callee_stack_params + 2;
  for (int i = slot_count - 1; i >= 0; --i) {
    LoadU64(scratch, MemOperand(sp, i * kSystemPointerSize), r0);
    StoreU64(scratch,
             MemOperand(fp, (i - stack_param_delta) * kSystemPointerSize), r0);
  }

  // Set the new stack and frame pointer.
  AddS64(sp, fp, Operand(-stack_param_delta * kSystemPointerSize), r0);
  Pop(r0, fp);
  mtlr(r0);
}

void LiftoffAssembler::AlignFrameSize() {}

void LiftoffAssembler::PatchPrepareStackFrame(
    int offset, SafepointTableBuilder* safepoint_table_builder) {
  int frame_size =
      GetTotalFrameSize() -
      (FLAG_enable_embedded_constant_pool ? 3 : 2) * kSystemPointerSize;

  Assembler patching_assembler(
      AssemblerOptions{},
      ExternalAssemblerBuffer(buffer_start_ + offset, kInstrSize + kGap));

  if (V8_LIKELY(frame_size < 4 * KB)) {
    patching_assembler.addi(sp, sp, Operand(-frame_size));
    return;
  }

  // The frame size is bigger than 4KB, so we might overflow the available stack
  // space if we first allocate the frame and then do the stack check (we will
  // need some remaining stack space for throwing the exception). That's why we
  // check the available stack space before we allocate the frame. To do this we
  // replace the {__ sub(sp, sp, framesize)} with a jump to OOL code that does
  // this "extended stack check".
  //
  // The OOL code can simply be generated here with the normal assembler,
  // because all other code generation, including OOL code, has already finished
  // when {PatchPrepareStackFrame} is called. The function prologue then jumps
  // to the current {pc_offset()} to execute the OOL code for allocating the
  // large frame.

  // Emit the unconditional branch in the function prologue (from {offset} to
  // {pc_offset()}).

  int jump_offset = pc_offset() - offset;
  if (!is_int26(jump_offset)) {
    bailout(kUnsupportedArchitecture, "branch offset overflow");
    return;
  }
  patching_assembler.b(jump_offset, LeaveLK);

  // If the frame is bigger than the stack, we throw the stack overflow
  // exception unconditionally. Thereby we can avoid the integer overflow
  // check in the condition code.
  RecordComment("OOL: stack check for large frame");
  Label continuation;
  if (frame_size < FLAG_stack_size * 1024) {
    Register stack_limit = ip;
    LoadU64(stack_limit,
            FieldMemOperand(kWasmInstanceRegister,
                            WasmInstanceObject::kRealStackLimitAddressOffset),
            r0);
    LoadU64(stack_limit, MemOperand(stack_limit), r0);
    AddS64(stack_limit, stack_limit, Operand(frame_size), r0);
    CmpU64(sp, stack_limit);
    bge(&continuation);
  }

  Call(wasm::WasmCode::kWasmStackOverflow, RelocInfo::WASM_STUB_CALL);
  // The call will not return; just define an empty safepoint.
  safepoint_table_builder->DefineSafepoint(this);
  if (FLAG_debug_code) stop();

  bind(&continuation);

  // Now allocate the stack space. Note that this might do more than just
  // decrementing the SP; consult {TurboAssembler::AllocateStackSpace}.
  SubS64(sp, sp, Operand(frame_size), r0);

  // Jump back to the start of the function, from {pc_offset()} to
  // right after the reserved space for the {__ sub(sp, sp, framesize)} (which
  // is a branch now).
  jump_offset = offset - pc_offset() + kInstrSize;
  if (!is_int26(jump_offset)) {
    bailout(kUnsupportedArchitecture, "branch offset overflow");
    return;
  }
  b(jump_offset, LeaveLK);
}

void LiftoffAssembler::FinishCode() { EmitConstantPool(); }

void LiftoffAssembler::AbortCompilation() { FinishCode(); }

// static
constexpr int LiftoffAssembler::StaticStackFrameSize() {
  return liftoff::kTierupBudgetOffset;
}

int LiftoffAssembler::SlotSizeForType(ValueKind kind) {
  switch (kind) {
    case kS128:
      return value_kind_size(kind);
    default:
      return kStackSlotSize;
  }
}

bool LiftoffAssembler::NeedsAlignment(ValueKind kind) {
  return (kind == kS128 || is_reference(kind));
}

void LiftoffAssembler::LoadConstant(LiftoffRegister reg, WasmValue value,
                                    RelocInfo::Mode rmode) {
  switch (value.type().kind()) {
    case kI32:
      mov(reg.gp(), Operand(value.to_i32(), rmode));
      break;
    case kI64:
      mov(reg.gp(), Operand(value.to_i64(), rmode));
      break;
    case kF32: {
      UseScratchRegisterScope temps(this);
      Register scratch = temps.Acquire();
      mov(scratch, Operand(value.to_f32_boxed().get_bits()));
      MovIntToFloat(reg.fp(), scratch, ip);
      break;
    }
    case kF64: {
      UseScratchRegisterScope temps(this);
      Register scratch = temps.Acquire();
      mov(scratch, Operand(value.to_f64_boxed().get_bits()));
      MovInt64ToDouble(reg.fp(), scratch);
      break;
    }
    default:
      UNREACHABLE();
  }
}

void LiftoffAssembler::LoadInstanceFromFrame(Register dst) {
  LoadU64(dst, liftoff::GetInstanceOperand(), r0);
}

void LiftoffAssembler::LoadFromInstance(Register dst, Register instance,
                                        int offset, int size) {
  DCHECK_LE(0, offset);
  switch (size) {
    case 1:
      LoadU8(dst, MemOperand(instance, offset), r0);
      break;
    case 4:
      LoadU32(dst, MemOperand(instance, offset), r0);
      break;
    case 8:
      LoadU64(dst, MemOperand(instance, offset), r0);
      break;
    default:
      UNIMPLEMENTED();
  }
}

void LiftoffAssembler::LoadTaggedPointerFromInstance(Register dst,
                                                     Register instance,
                                                     int offset) {
  LoadTaggedPointerField(dst, MemOperand(instance, offset), r0);
}

void LiftoffAssembler::SpillInstance(Register instance) {
  StoreU64(instance, liftoff::GetInstanceOperand(), r0);
}

void LiftoffAssembler::ResetOSRTarget() {}

void LiftoffAssembler::LoadTaggedPointer(Register dst, Register src_addr,
                                         Register offset_reg,
                                         int32_t offset_imm,
                                         LiftoffRegList pinned) {
  LoadTaggedPointerField(dst, MemOperand(src_addr, offset_reg, offset_imm), r0);
}

void LiftoffAssembler::LoadFullPointer(Register dst, Register src_addr,
                                       int32_t offset_imm) {
  LoadU64(dst, MemOperand(src_addr, offset_imm), r0);
}

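// The tagged store below is followed by a conditional write barrier: the
// barrier is skipped entirely when the caller requests it or write barriers
// are disabled, and otherwise only taken when the destination page is marked
// "pointers from here are interesting", the stored value is not a Smi, and
// the value's page is marked "pointers to here are interesting".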
void LiftoffAssembler::StoreTaggedPointer(Register dst_addr,
                                          Register offset_reg,
                                          int32_t offset_imm,
                                          LiftoffRegister src,
                                          LiftoffRegList pinned,
                                          SkipWriteBarrier skip_write_barrier) {
  MemOperand dst_op = MemOperand(dst_addr, offset_reg, offset_imm);
  StoreTaggedField(src.gp(), dst_op, r0);

  if (skip_write_barrier || FLAG_disable_write_barriers) return;

  Label write_barrier;
  Label exit;
  CheckPageFlag(dst_addr, ip, MemoryChunk::kPointersFromHereAreInterestingMask,
                ne, &write_barrier);
  b(&exit);
  bind(&write_barrier);
  JumpIfSmi(src.gp(), &exit);
  if (COMPRESS_POINTERS_BOOL) {
    DecompressTaggedPointer(src.gp(), src.gp());
  }
  CheckPageFlag(src.gp(), ip, MemoryChunk::kPointersToHereAreInterestingMask,
                eq, &exit);
  mov(ip, Operand(offset_imm));
  add(ip, ip, dst_addr);
  if (offset_reg != no_reg) {
    add(ip, ip, offset_reg);
  }
  CallRecordWriteStubSaveRegisters(dst_addr, ip, RememberedSetAction::kEmit,
                                   SaveFPRegsMode::kSave,
                                   StubCallMode::kCallWasmRuntimeStub);
  bind(&exit);
}

void LiftoffAssembler::Load(LiftoffRegister dst, Register src_addr,
                            Register offset_reg, uintptr_t offset_imm,
                            LoadType type, LiftoffRegList pinned,
                            uint32_t* protected_load_pc, bool is_load_mem,
                            bool i64_offset) {
  if (!i64_offset && offset_reg != no_reg) {
    ZeroExtWord32(ip, offset_reg);
    offset_reg = ip;
  }
  MemOperand src_op = MemOperand(src_addr, offset_reg, offset_imm);
  if (protected_load_pc) *protected_load_pc = pc_offset();
  switch (type.value()) {
    case LoadType::kI32Load8U:
    case LoadType::kI64Load8U:
      LoadU8(dst.gp(), src_op, r0);
      break;
    case LoadType::kI32Load8S:
    case LoadType::kI64Load8S:
      LoadS8(dst.gp(), src_op, r0);
      break;
    case LoadType::kI32Load16U:
    case LoadType::kI64Load16U:
      if (is_load_mem) {
        LoadU16LE(dst.gp(), src_op, r0);
      } else {
        LoadU16(dst.gp(), src_op, r0);
      }
      break;
    case LoadType::kI32Load16S:
    case LoadType::kI64Load16S:
      if (is_load_mem) {
        LoadS16LE(dst.gp(), src_op, r0);
      } else {
        LoadS16(dst.gp(), src_op, r0);
      }
      break;
    case LoadType::kI64Load32U:
      if (is_load_mem) {
        LoadU32LE(dst.gp(), src_op, r0);
      } else {
        LoadU32(dst.gp(), src_op, r0);
      }
      break;
    case LoadType::kI32Load:
    case LoadType::kI64Load32S:
      if (is_load_mem) {
        LoadS32LE(dst.gp(), src_op, r0);
      } else {
        LoadS32(dst.gp(), src_op, r0);
      }
      break;
    case LoadType::kI64Load:
      if (is_load_mem) {
        LoadU64LE(dst.gp(), src_op, r0);
      } else {
        LoadU64(dst.gp(), src_op, r0);
      }
      break;
    case LoadType::kF32Load:
      if (is_load_mem) {
        // `ip` could be used as offset_reg.
        Register scratch = ip;
        if (offset_reg == ip) {
          scratch = GetRegisterThatIsNotOneOf(src_addr);
          push(scratch);
        }
        LoadF32LE(dst.fp(), src_op, r0, scratch);
        if (offset_reg == ip) {
          pop(scratch);
        }
      } else {
        LoadF32(dst.fp(), src_op, r0);
      }
      break;
    case LoadType::kF64Load:
      if (is_load_mem) {
        // `ip` could be used as offset_reg.
        Register scratch = ip;
        if (offset_reg == ip) {
          scratch = GetRegisterThatIsNotOneOf(src_addr);
          push(scratch);
        }
        LoadF64LE(dst.fp(), src_op, r0, scratch);
        if (offset_reg == ip) {
          pop(scratch);
        }
      } else {
        LoadF64(dst.fp(), src_op, r0);
      }
      break;
    case LoadType::kS128Load:
      bailout(kUnsupportedArchitecture, "SIMD");
      break;
    default:
      UNREACHABLE();
  }
}

void LiftoffAssembler::Store(Register dst_addr, Register offset_reg,
                             uintptr_t offset_imm, LiftoffRegister src,
                             StoreType type, LiftoffRegList pinned,
                             uint32_t* protected_store_pc, bool is_store_mem) {
  MemOperand dst_op = MemOperand(dst_addr, offset_reg, offset_imm);
  if (protected_store_pc) *protected_store_pc = pc_offset();
  switch (type.value()) {
    case StoreType::kI32Store8:
    case StoreType::kI64Store8:
      StoreU8(src.gp(), dst_op, r0);
      break;
    case StoreType::kI32Store16:
    case StoreType::kI64Store16:
      if (is_store_mem) {
        StoreU16LE(src.gp(), dst_op, r0);
      } else {
        StoreU16(src.gp(), dst_op, r0);
      }
      break;
    case StoreType::kI32Store:
    case StoreType::kI64Store32:
      if (is_store_mem) {
        StoreU32LE(src.gp(), dst_op, r0);
      } else {
        StoreU32(src.gp(), dst_op, r0);
      }
      break;
    case StoreType::kI64Store:
      if (is_store_mem) {
        StoreU64LE(src.gp(), dst_op, r0);
      } else {
        StoreU64(src.gp(), dst_op, r0);
      }
      break;
    case StoreType::kF32Store:
      if (is_store_mem) {
        Register scratch2 = GetUnusedRegister(kGpReg, pinned).gp();
        StoreF32LE(src.fp(), dst_op, r0, scratch2);
      } else {
        StoreF32(src.fp(), dst_op, r0);
      }
      break;
    case StoreType::kF64Store:
      if (is_store_mem) {
        Register scratch2 = GetUnusedRegister(kGpReg, pinned).gp();
        StoreF64LE(src.fp(), dst_op, r0, scratch2);
      } else {
        StoreF64(src.fp(), dst_op, r0);
      }
      break;
    case StoreType::kS128Store: {
      bailout(kUnsupportedArchitecture, "SIMD");
      break;
    }
    default:
      UNREACHABLE();
  }
}

void LiftoffAssembler::AtomicLoad(LiftoffRegister dst, Register src_addr,
                                  Register offset_reg, uintptr_t offset_imm,
                                  LoadType type, LiftoffRegList pinned) {
  Load(dst, src_addr, offset_reg, offset_imm, type, pinned, nullptr, true);
  lwsync();
}

void LiftoffAssembler::AtomicStore(Register dst_addr, Register offset_reg,
                                   uintptr_t offset_imm, LiftoffRegister src,
                                   StoreType type, LiftoffRegList pinned) {
  lwsync();
  Store(dst_addr, offset_reg, offset_imm, src, type, pinned, nullptr, true);
  sync();
}

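// A note on the fences above: PPC's lwsync orders everything except
// store-load, so load;lwsync acts as an acquire load and lwsync;store as a
// release store; the trailing sync after the store adds the full barrier
// needed for sequentially consistent wasm atomics.
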
#ifdef V8_TARGET_BIG_ENDIAN
constexpr bool is_be = true;
#else
constexpr bool is_be = false;
#endif

#define ATOMIC_OP(instr)                                                 \
  {                                                                      \
    Register offset = r0;                                                \
    if (offset_imm != 0) {                                               \
      mov(ip, Operand(offset_imm));                                      \
      if (offset_reg != no_reg) {                                        \
        add(ip, ip, offset_reg);                                         \
      }                                                                  \
      offset = ip;                                                       \
    } else {                                                             \
      if (offset_reg != no_reg) {                                        \
        offset = offset_reg;                                             \
      }                                                                  \
    }                                                                    \
                                                                         \
    MemOperand dst = MemOperand(offset, dst_addr);                       \
                                                                         \
    switch (type.value()) {                                              \
      case StoreType::kI32Store8:                                        \
      case StoreType::kI64Store8: {                                      \
        auto op_func = [&](Register dst, Register lhs, Register rhs) {   \
          instr(dst, lhs, rhs);                                          \
        };                                                               \
        AtomicOps<uint8_t>(dst, value.gp(), result.gp(), r0, op_func);   \
        break;                                                           \
      }                                                                  \
      case StoreType::kI32Store16:                                       \
      case StoreType::kI64Store16: {                                     \
        auto op_func = [&](Register dst, Register lhs, Register rhs) {   \
          if (is_be) {                                                   \
            Register scratch = GetRegisterThatIsNotOneOf(lhs, rhs, dst); \
            push(scratch);                                               \
            ByteReverseU16(dst, lhs, scratch);                           \
            instr(dst, dst, rhs);                                        \
            ByteReverseU16(dst, dst, scratch);                           \
            pop(scratch);                                                \
          } else {                                                       \
            instr(dst, lhs, rhs);                                        \
          }                                                              \
        };                                                               \
        AtomicOps<uint16_t>(dst, value.gp(), result.gp(), r0, op_func);  \
        if (is_be) {                                                     \
          ByteReverseU16(result.gp(), result.gp(), ip);                  \
        }                                                                \
        break;                                                           \
      }                                                                  \
      case StoreType::kI32Store:                                         \
      case StoreType::kI64Store32: {                                     \
        auto op_func = [&](Register dst, Register lhs, Register rhs) {   \
          if (is_be) {                                                   \
            Register scratch = GetRegisterThatIsNotOneOf(lhs, rhs, dst); \
            push(scratch);                                               \
            ByteReverseU32(dst, lhs, scratch);                           \
            instr(dst, dst, rhs);                                        \
            ByteReverseU32(dst, dst, scratch);                           \
            pop(scratch);                                                \
          } else {                                                       \
            instr(dst, lhs, rhs);                                        \
          }                                                              \
        };                                                               \
        AtomicOps<uint32_t>(dst, value.gp(), result.gp(), r0, op_func);  \
        if (is_be) {                                                     \
          ByteReverseU32(result.gp(), result.gp(), ip);                  \
        }                                                                \
        break;                                                           \
      }                                                                  \
      case StoreType::kI64Store: {                                       \
        auto op_func = [&](Register dst, Register lhs, Register rhs) {   \
          if (is_be) {                                                   \
            ByteReverseU64(dst, lhs);                                    \
            instr(dst, dst, rhs);                                        \
            ByteReverseU64(dst, dst);                                    \
          } else {                                                       \
            instr(dst, lhs, rhs);                                        \
          }                                                              \
        };                                                               \
        AtomicOps<uint64_t>(dst, value.gp(), result.gp(), r0, op_func);  \
        if (is_be) {                                                     \
          ByteReverseU64(result.gp(), result.gp());                      \
        }                                                                \
        break;                                                           \
      }                                                                  \
      default:                                                           \
        UNREACHABLE();                                                   \
    }                                                                    \
  }

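// ATOMIC_OP expands inside each read-modify-write operation below; the
// load-reserve/store-conditional retry loop itself lives in the macro
// assembler's AtomicOps<T>. On big-endian targets the value is byte-reversed
// around the operation so that wasm's little-endian memory layout is
// preserved.
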
void LiftoffAssembler::AtomicAdd(Register dst_addr, Register offset_reg,
                                 uintptr_t offset_imm, LiftoffRegister value,
                                 LiftoffRegister result, StoreType type) {
  ATOMIC_OP(add);
}

void LiftoffAssembler::AtomicSub(Register dst_addr, Register offset_reg,
                                 uintptr_t offset_imm, LiftoffRegister value,
                                 LiftoffRegister result, StoreType type) {
  ATOMIC_OP(sub);
}

void LiftoffAssembler::AtomicAnd(Register dst_addr, Register offset_reg,
                                 uintptr_t offset_imm, LiftoffRegister value,
                                 LiftoffRegister result, StoreType type) {
  ATOMIC_OP(and_);
}

void LiftoffAssembler::AtomicOr(Register dst_addr, Register offset_reg,
                                uintptr_t offset_imm, LiftoffRegister value,
                                LiftoffRegister result, StoreType type) {
  ATOMIC_OP(orx);
}

void LiftoffAssembler::AtomicXor(Register dst_addr, Register offset_reg,
                                 uintptr_t offset_imm, LiftoffRegister value,
                                 LiftoffRegister result, StoreType type) {
  ATOMIC_OP(xor_);
}

void LiftoffAssembler::AtomicExchange(Register dst_addr, Register offset_reg,
                                      uintptr_t offset_imm,
                                      LiftoffRegister value,
                                      LiftoffRegister result, StoreType type) {
  Register offset = r0;
  if (offset_imm != 0) {
    mov(ip, Operand(offset_imm));
    if (offset_reg != no_reg) {
      add(ip, ip, offset_reg);
    }
    offset = ip;
  } else {
    if (offset_reg != no_reg) {
      offset = offset_reg;
    }
  }
  MemOperand dst = MemOperand(offset, dst_addr);
  switch (type.value()) {
    case StoreType::kI32Store8:
    case StoreType::kI64Store8: {
      TurboAssembler::AtomicExchange<uint8_t>(dst, value.gp(), result.gp());
      break;
    }
    case StoreType::kI32Store16:
    case StoreType::kI64Store16: {
      if (is_be) {
        Register scratch = GetRegisterThatIsNotOneOf(value.gp(), result.gp());
        push(scratch);
        ByteReverseU16(r0, value.gp(), scratch);
        pop(scratch);
        TurboAssembler::AtomicExchange<uint16_t>(dst, r0, result.gp());
        ByteReverseU16(result.gp(), result.gp(), ip);
      } else {
        TurboAssembler::AtomicExchange<uint16_t>(dst, value.gp(), result.gp());
      }
      break;
    }
    case StoreType::kI32Store:
    case StoreType::kI64Store32: {
      if (is_be) {
        Register scratch = GetRegisterThatIsNotOneOf(value.gp(), result.gp());
        push(scratch);
        ByteReverseU32(r0, value.gp(), scratch);
        pop(scratch);
        TurboAssembler::AtomicExchange<uint32_t>(dst, r0, result.gp());
        ByteReverseU32(result.gp(), result.gp(), ip);
      } else {
        TurboAssembler::AtomicExchange<uint32_t>(dst, value.gp(), result.gp());
      }
      break;
    }
    case StoreType::kI64Store: {
      if (is_be) {
        ByteReverseU64(r0, value.gp());
        TurboAssembler::AtomicExchange<uint64_t>(dst, r0, result.gp());
        ByteReverseU64(result.gp(), result.gp());
      } else {
        TurboAssembler::AtomicExchange<uint64_t>(dst, value.gp(), result.gp());
      }
      break;
    }
    default:
      UNREACHABLE();
  }
}

void LiftoffAssembler::AtomicCompareExchange(
    Register dst_addr, Register offset_reg, uintptr_t offset_imm,
    LiftoffRegister expected, LiftoffRegister new_value, LiftoffRegister result,
    StoreType type) {
  Register offset = r0;
  if (offset_imm != 0) {
    mov(ip, Operand(offset_imm));
    if (offset_reg != no_reg) {
      add(ip, ip, offset_reg);
    }
    offset = ip;
  } else {
    if (offset_reg != no_reg) {
      offset = offset_reg;
    }
  }
  MemOperand dst = MemOperand(offset, dst_addr);
  switch (type.value()) {
    case StoreType::kI32Store8:
    case StoreType::kI64Store8: {
      TurboAssembler::AtomicCompareExchange<uint8_t>(
          dst, expected.gp(), new_value.gp(), result.gp(), r0);
      break;
    }
    case StoreType::kI32Store16:
    case StoreType::kI64Store16: {
      if (is_be) {
        Push(new_value.gp(), expected.gp());
        Register scratch = GetRegisterThatIsNotOneOf(
            new_value.gp(), expected.gp(), result.gp());
        push(scratch);
        ByteReverseU16(new_value.gp(), new_value.gp(), scratch);
        ByteReverseU16(expected.gp(), expected.gp(), scratch);
        pop(scratch);
        TurboAssembler::AtomicCompareExchange<uint16_t>(
            dst, expected.gp(), new_value.gp(), result.gp(), r0);
        ByteReverseU16(result.gp(), result.gp(), r0);
        Pop(new_value.gp(), expected.gp());
      } else {
        TurboAssembler::AtomicCompareExchange<uint16_t>(
            dst, expected.gp(), new_value.gp(), result.gp(), r0);
      }
      break;
    }
    case StoreType::kI32Store:
    case StoreType::kI64Store32: {
      if (is_be) {
        Push(new_value.gp(), expected.gp());
        Register scratch = GetRegisterThatIsNotOneOf(
            new_value.gp(), expected.gp(), result.gp());
        push(scratch);
        ByteReverseU32(new_value.gp(), new_value.gp(), scratch);
        ByteReverseU32(expected.gp(), expected.gp(), scratch);
        pop(scratch);
        TurboAssembler::AtomicCompareExchange<uint32_t>(
            dst, expected.gp(), new_value.gp(), result.gp(), r0);
        ByteReverseU32(result.gp(), result.gp(), r0);
        Pop(new_value.gp(), expected.gp());
      } else {
        TurboAssembler::AtomicCompareExchange<uint32_t>(
            dst, expected.gp(), new_value.gp(), result.gp(), r0);
      }
      break;
    }
    case StoreType::kI64Store: {
      if (is_be) {
        Push(new_value.gp(), expected.gp());
        ByteReverseU64(new_value.gp(), new_value.gp());
        ByteReverseU64(expected.gp(), expected.gp());
        TurboAssembler::AtomicCompareExchange<uint64_t>(
            dst, expected.gp(), new_value.gp(), result.gp(), r0);
        ByteReverseU64(result.gp(), result.gp());
        Pop(new_value.gp(), expected.gp());
      } else {
        TurboAssembler::AtomicCompareExchange<uint64_t>(
            dst, expected.gp(), new_value.gp(), result.gp(), r0);
      }
      break;
    }
    default:
      UNREACHABLE();
  }
}

void LiftoffAssembler::AtomicFence() { sync(); }

void LiftoffAssembler::LoadCallerFrameSlot(LiftoffRegister dst,
                                           uint32_t caller_slot_idx,
                                           ValueKind kind) {
  int32_t offset = (caller_slot_idx + 1) * kSystemPointerSize;
  switch (kind) {
    case kI32: {
#if defined(V8_TARGET_BIG_ENDIAN)
      LoadS32(dst.gp(), MemOperand(fp, offset + 4), r0);
      break;
#else
      LoadS32(dst.gp(), MemOperand(fp, offset), r0);
      break;
#endif
    }
    case kRef:
    case kRtt:
    case kOptRef:
    case kI64: {
      LoadU64(dst.gp(), MemOperand(fp, offset), r0);
      break;
    }
    case kF32: {
      LoadF32(dst.fp(), MemOperand(fp, offset), r0);
      break;
    }
    case kF64: {
      LoadF64(dst.fp(), MemOperand(fp, offset), r0);
      break;
    }
    case kS128: {
      bailout(kSimd, "simd load");
      break;
    }
    default:
      UNREACHABLE();
  }
}

void LiftoffAssembler::StoreCallerFrameSlot(LiftoffRegister src,
                                            uint32_t caller_slot_idx,
                                            ValueKind kind) {
  int32_t offset = (caller_slot_idx + 1) * kSystemPointerSize;
  switch (kind) {
    case kI32: {
#if defined(V8_TARGET_BIG_ENDIAN)
      StoreU32(src.gp(), MemOperand(fp, offset + 4), r0);
      break;
#else
      StoreU32(src.gp(), MemOperand(fp, offset), r0);
      break;
#endif
    }
    case kRef:
    case kRtt:
    case kOptRef:
    case kI64: {
      StoreU64(src.gp(), MemOperand(fp, offset), r0);
      break;
    }
    case kF32: {
      StoreF32(src.fp(), MemOperand(fp, offset), r0);
      break;
    }
    case kF64: {
      StoreF64(src.fp(), MemOperand(fp, offset), r0);
      break;
    }
    case kS128: {
      bailout(kSimd, "simd store");
      break;
    }
    default:
      UNREACHABLE();
  }
}

void LiftoffAssembler::LoadReturnStackSlot(LiftoffRegister dst, int offset,
                                           ValueKind kind) {
  switch (kind) {
    case kI32: {
#if defined(V8_TARGET_BIG_ENDIAN)
      LoadS32(dst.gp(), MemOperand(sp, offset + 4), r0);
      break;
#else
      LoadS32(dst.gp(), MemOperand(sp, offset), r0);
      break;
#endif
    }
    case kRef:
    case kRtt:
    case kOptRef:
    case kI64: {
      LoadU64(dst.gp(), MemOperand(sp, offset), r0);
      break;
    }
    case kF32: {
      LoadF32(dst.fp(), MemOperand(sp, offset), r0);
      break;
    }
    case kF64: {
      LoadF64(dst.fp(), MemOperand(sp, offset), r0);
      break;
    }
    case kS128: {
      bailout(kSimd, "simd load");
      break;
    }
    default:
      UNREACHABLE();
  }
}

#ifdef V8_TARGET_BIG_ENDIAN
constexpr int stack_bias = -4;
#else
constexpr int stack_bias = 0;
#endif
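
// On big-endian targets a 32-bit value lives in the high-address half of its
// 8-byte stack slot, so i32/f32 accesses below add stack_bias (-4) to the
// slot offset, turning the effective address fp - offset into fp - offset + 4.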

void LiftoffAssembler::MoveStackValue(uint32_t dst_offset, uint32_t src_offset,
                                      ValueKind kind) {
  DCHECK_NE(dst_offset, src_offset);

  switch (kind) {
    case kI32:
    case kF32:
      LoadU32(ip, liftoff::GetStackSlot(src_offset + stack_bias), r0);
      StoreU32(ip, liftoff::GetStackSlot(dst_offset + stack_bias), r0);
      break;
    case kI64:
    case kOptRef:
    case kRef:
    case kRtt:
    case kF64:
      LoadU64(ip, liftoff::GetStackSlot(src_offset), r0);
      StoreU64(ip, liftoff::GetStackSlot(dst_offset), r0);
      break;
    case kS128:
      bailout(kSimd, "simd op");
      break;
    default:
      UNREACHABLE();
  }
}

void LiftoffAssembler::Move(Register dst, Register src, ValueKind kind) {
  mr(dst, src);
}

void LiftoffAssembler::Move(DoubleRegister dst, DoubleRegister src,
                            ValueKind kind) {
  if (kind == kF32 || kind == kF64) {
    fmr(dst, src);
  } else {
    bailout(kSimd, "simd op");
  }
}

void LiftoffAssembler::Spill(int offset, LiftoffRegister reg, ValueKind kind) {
  DCHECK_LT(0, offset);
  RecordUsedSpillOffset(offset);

  switch (kind) {
    case kI32:
      StoreU32(reg.gp(), liftoff::GetStackSlot(offset + stack_bias), r0);
      break;
    case kI64:
    case kOptRef:
    case kRef:
    case kRtt:
      StoreU64(reg.gp(), liftoff::GetStackSlot(offset), r0);
      break;
    case kF32:
      StoreF32(reg.fp(), liftoff::GetStackSlot(offset + stack_bias), r0);
      break;
    case kF64:
      StoreF64(reg.fp(), liftoff::GetStackSlot(offset), r0);
      break;
    case kS128: {
      bailout(kSimd, "simd op");
      break;
    }
    default:
      UNREACHABLE();
  }
}

void LiftoffAssembler::Spill(int offset, WasmValue value) {
  RecordUsedSpillOffset(offset);
  UseScratchRegisterScope temps(this);
  Register src = ip;
  switch (value.type().kind()) {
    case kI32: {
      mov(src, Operand(value.to_i32()));
      StoreU32(src, liftoff::GetStackSlot(offset + stack_bias), r0);
      break;
    }
    case kI64: {
      mov(src, Operand(value.to_i64()));
      StoreU64(src, liftoff::GetStackSlot(offset), r0);
      break;
    }
    default:
      // We do not track f32 and f64 constants, hence they are unreachable.
      UNREACHABLE();
  }
}

void LiftoffAssembler::Fill(LiftoffRegister reg, int offset, ValueKind kind) {
  switch (kind) {
    case kI32:
      LoadS32(reg.gp(), liftoff::GetStackSlot(offset + stack_bias), r0);
      break;
    case kI64:
    case kRef:
    case kOptRef:
    case kRtt:
      LoadU64(reg.gp(), liftoff::GetStackSlot(offset), r0);
      break;
    case kF32:
      LoadF32(reg.fp(), liftoff::GetStackSlot(offset + stack_bias), r0);
      break;
    case kF64:
      LoadF64(reg.fp(), liftoff::GetStackSlot(offset), r0);
      break;
    case kS128: {
      bailout(kSimd, "simd op");
      break;
    }
    default:
      UNREACHABLE();
  }
}

void LiftoffAssembler::FillI64Half(Register, int offset, RegPairHalf) {
  bailout(kUnsupportedArchitecture, "FillI64Half");
}

void LiftoffAssembler::FillStackSlotsWithZero(int start, int size) {
  DCHECK_LT(0, size);
  DCHECK_EQ(0, size % 8);
  RecordUsedSpillOffset(start + size);

  // We need a zero register; the straight-line case below uses ip, the loop
  // case uses r0 (and pushes r4 to use it as the address register).

  if (size <= 36) {
    // Special straight-line code for up to nine words. Generates one
    // instruction per word.
    mov(ip, Operand::Zero());
    uint32_t remainder = size;
    for (; remainder >= kStackSlotSize; remainder -= kStackSlotSize) {
      StoreU64(ip, liftoff::GetStackSlot(start + remainder), r0);
    }
    DCHECK(remainder == 4 || remainder == 0);
    if (remainder) {
      StoreU32(ip, liftoff::GetStackSlot(start + remainder), r0);
    }
  } else {
    Label loop;
    push(r4);

    mov(r4, Operand(size / kSystemPointerSize));
    mtctr(r4);

    SubS64(r4, fp, Operand(start + size + kSystemPointerSize), r0);
    mov(r0, Operand::Zero());

    bind(&loop);
    StoreU64WithUpdate(r0, MemOperand(r4, kSystemPointerSize));
    bdnz(&loop);

    pop(r4);
  }
}

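// For example, FillStackSlotsWithZero(8, 16) takes the straight-line path and
// emits two StoreU64 instructions zeroing the slots at fp - 24 and fp - 16.
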
#define SIGN_EXT(r) extsw(r, r)
#define ROUND_F64_TO_F32(fpr) frsp(fpr, fpr)
#define INT32_AND_WITH_1F(x) Operand(x & 0x1f)
#define INT32_AND_WITH_3F(x) Operand(x & 0x3f)
#define REGISTER_AND_WITH_1F    \
  ([&](Register rhs) {          \
    andi(r0, rhs, Operand(31)); \
    return r0;                  \
  })

#define REGISTER_AND_WITH_3F    \
  ([&](Register rhs) {          \
    andi(r0, rhs, Operand(63)); \
    return r0;                  \
  })

#define LFR_TO_REG(reg) reg.gp()

// V(name, instr, dtype, stype, dcast, scast, rcast, return_val, return_type)
#define UNOP_LIST(V)                                                         \
  V(f32_abs, fabs, DoubleRegister, DoubleRegister, , , USE, , void)          \
  V(f32_neg, fneg, DoubleRegister, DoubleRegister, , , USE, , void)          \
  V(f32_sqrt, fsqrt, DoubleRegister, DoubleRegister, , , ROUND_F64_TO_F32, , \
    void)                                                                    \
  V(f32_floor, frim, DoubleRegister, DoubleRegister, , , ROUND_F64_TO_F32,   \
    true, bool)                                                              \
  V(f32_ceil, frip, DoubleRegister, DoubleRegister, , , ROUND_F64_TO_F32,    \
    true, bool)                                                              \
  V(f32_trunc, friz, DoubleRegister, DoubleRegister, , , ROUND_F64_TO_F32,   \
    true, bool)                                                              \
  V(f64_abs, fabs, DoubleRegister, DoubleRegister, , , USE, , void)          \
  V(f64_neg, fneg, DoubleRegister, DoubleRegister, , , USE, , void)          \
  V(f64_sqrt, fsqrt, DoubleRegister, DoubleRegister, , , USE, , void)        \
  V(f64_floor, frim, DoubleRegister, DoubleRegister, , , USE, true, bool)    \
  V(f64_ceil, frip, DoubleRegister, DoubleRegister, , , USE, true, bool)     \
  V(f64_trunc, friz, DoubleRegister, DoubleRegister, , , USE, true, bool)    \
  V(i32_clz, CountLeadingZerosU32, Register, Register, , , USE, , void)      \
  V(i32_ctz, CountTrailingZerosU32, Register, Register, , , USE, , void)     \
  V(i64_clz, CountLeadingZerosU64, LiftoffRegister, LiftoffRegister,         \
    LFR_TO_REG, LFR_TO_REG, USE, , void)                                     \
  V(i64_ctz, CountTrailingZerosU64, LiftoffRegister, LiftoffRegister,        \
    LFR_TO_REG, LFR_TO_REG, USE, , void)                                     \
  V(u32_to_uintptr, ZeroExtWord32, Register, Register, , , USE, , void)      \
  V(i32_signextend_i8, extsb, Register, Register, , , USE, , void)           \
  V(i32_signextend_i16, extsh, Register, Register, , , USE, , void)          \
  V(i64_signextend_i8, extsb, LiftoffRegister, LiftoffRegister, LFR_TO_REG,  \
    LFR_TO_REG, USE, , void)                                                 \
  V(i64_signextend_i16, extsh, LiftoffRegister, LiftoffRegister, LFR_TO_REG, \
    LFR_TO_REG, USE, , void)                                                 \
  V(i64_signextend_i32, extsw, LiftoffRegister, LiftoffRegister, LFR_TO_REG, \
    LFR_TO_REG, USE, , void)                                                 \
  V(i32_popcnt, Popcnt32, Register, Register, , , USE, true, bool)           \
  V(i64_popcnt, Popcnt64, LiftoffRegister, LiftoffRegister, LFR_TO_REG,      \
    LFR_TO_REG, USE, true, bool)

#define EMIT_UNOP_FUNCTION(name, instr, dtype, stype, dcast, scast, rcast, \
                           ret, return_type)                               \
  return_type LiftoffAssembler::emit_##name(dtype dst, stype src) {        \
    auto _dst = dcast(dst);                                                \
    auto _src = scast(src);                                                \
    instr(_dst, _src);                                                     \
    rcast(_dst);                                                           \
    return ret;                                                            \
  }
UNOP_LIST(EMIT_UNOP_FUNCTION)
#undef EMIT_UNOP_FUNCTION
#undef UNOP_LIST

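// For illustration, UNOP_LIST(EMIT_UNOP_FUNCTION) above expands the
// i32_popcnt entry to (roughly):
//
//   bool LiftoffAssembler::emit_i32_popcnt(Register dst, Register src) {
//     Popcnt32(dst, src);
//     return true;
//   }
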
// V(name, instr, dtype, stype1, stype2, dcast, scast1, scast2, rcast,
// return_val, return_type)
#define BINOP_LIST(V)                                                          \
  V(f32_copysign, CopySignF64, DoubleRegister, DoubleRegister, DoubleRegister, \
    , , , USE, , void)                                                         \
  V(f64_copysign, CopySignF64, DoubleRegister, DoubleRegister, DoubleRegister, \
    , , , USE, , void)                                                         \
  V(f32_min, MinF64, DoubleRegister, DoubleRegister, DoubleRegister, , , ,     \
    USE, , void)                                                               \
  V(f32_max, MaxF64, DoubleRegister, DoubleRegister, DoubleRegister, , , ,     \
    USE, , void)                                                               \
  V(f64_min, MinF64, DoubleRegister, DoubleRegister, DoubleRegister, , , ,     \
    USE, , void)                                                               \
  V(f64_max, MaxF64, DoubleRegister, DoubleRegister, DoubleRegister, , , ,     \
    USE, , void)                                                               \
  V(i64_sub, SubS64, LiftoffRegister, LiftoffRegister, LiftoffRegister,        \
    LFR_TO_REG, LFR_TO_REG, LFR_TO_REG, USE, , void)                           \
  V(i64_add, AddS64, LiftoffRegister, LiftoffRegister, LiftoffRegister,        \
    LFR_TO_REG, LFR_TO_REG, LFR_TO_REG, USE, , void)                           \
  V(i64_addi, AddS64, LiftoffRegister, LiftoffRegister, int64_t, LFR_TO_REG,   \
    LFR_TO_REG, Operand, USE, , void)                                          \
  V(i32_sub, SubS32, Register, Register, Register, , , , USE, , void)          \
  V(i32_add, AddS32, Register, Register, Register, , , , USE, , void)          \
  V(i32_addi, AddS32, Register, Register, int32_t, , , Operand, USE, , void)   \
  V(i32_subi, SubS32, Register, Register, int32_t, , , Operand, USE, , void)   \
  V(i32_mul, MulS32, Register, Register, Register, , , , USE, , void)          \
  V(i64_mul, MulS64, LiftoffRegister, LiftoffRegister, LiftoffRegister,        \
    LFR_TO_REG, LFR_TO_REG, LFR_TO_REG, USE, , void)                           \
  V(i32_andi, AndU32, Register, Register, int32_t, , , Operand, USE, , void)   \
  V(i32_ori, OrU32, Register, Register, int32_t, , , Operand, USE, , void)     \
  V(i32_xori, XorU32, Register, Register, int32_t, , , Operand, USE, , void)   \
  V(i32_and, AndU32, Register, Register, Register, , , , USE, , void)          \
  V(i32_or, OrU32, Register, Register, Register, , , , USE, , void)            \
  V(i32_xor, XorU32, Register, Register, Register, , , , USE, , void)          \
  V(i64_and, AndU64, LiftoffRegister, LiftoffRegister, LiftoffRegister,        \
    LFR_TO_REG, LFR_TO_REG, LFR_TO_REG, USE, , void)                           \
  V(i64_or, OrU64, LiftoffRegister, LiftoffRegister, LiftoffRegister,          \
    LFR_TO_REG, LFR_TO_REG, LFR_TO_REG, USE, , void)                           \
  V(i64_xor, XorU64, LiftoffRegister, LiftoffRegister, LiftoffRegister,        \
    LFR_TO_REG, LFR_TO_REG, LFR_TO_REG, USE, , void)                           \
  V(i64_andi, AndU64, LiftoffRegister, LiftoffRegister, int32_t, LFR_TO_REG,   \
    LFR_TO_REG, Operand, USE, , void)                                          \
  V(i64_ori, OrU64, LiftoffRegister, LiftoffRegister, int32_t, LFR_TO_REG,     \
    LFR_TO_REG, Operand, USE, , void)                                          \
  V(i64_xori, XorU64, LiftoffRegister, LiftoffRegister, int32_t, LFR_TO_REG,   \
    LFR_TO_REG, Operand, USE, , void)                                          \
  V(i32_shli, ShiftLeftU32, Register, Register, int32_t, , ,                   \
    INT32_AND_WITH_1F, USE, , void)                                            \
  V(i32_sari, ShiftRightS32, Register, Register, int32_t, , ,                  \
    INT32_AND_WITH_1F, USE, , void)                                            \
  V(i32_shri, ShiftRightU32, Register, Register, int32_t, , ,                  \
    INT32_AND_WITH_1F, USE, , void)                                            \
  V(i32_shl, ShiftLeftU32, Register, Register, Register, , ,                   \
    REGISTER_AND_WITH_1F, USE, , void)                                         \
  V(i32_sar, ShiftRightS32, Register, Register, Register, , ,                  \
    REGISTER_AND_WITH_1F, USE, , void)                                         \
  V(i32_shr, ShiftRightU32, Register, Register, Register, , ,                  \
    REGISTER_AND_WITH_1F, USE, , void)                                         \
  V(i64_shl, ShiftLeftU64, LiftoffRegister, LiftoffRegister, Register,         \
    LFR_TO_REG, LFR_TO_REG, REGISTER_AND_WITH_3F, USE, , void)                 \
  V(i64_sar, ShiftRightS64, LiftoffRegister, LiftoffRegister, Register,        \
    LFR_TO_REG, LFR_TO_REG, REGISTER_AND_WITH_3F, USE, , void)                 \
  V(i64_shr, ShiftRightU64, LiftoffRegister, LiftoffRegister, Register,        \
    LFR_TO_REG, LFR_TO_REG, REGISTER_AND_WITH_3F, USE, , void)                 \
  V(i64_shli, ShiftLeftU64, LiftoffRegister, LiftoffRegister, int32_t,         \
    LFR_TO_REG, LFR_TO_REG, INT32_AND_WITH_3F, USE, , void)                    \
  V(i64_sari, ShiftRightS64, LiftoffRegister, LiftoffRegister, int32_t,        \
    LFR_TO_REG, LFR_TO_REG, INT32_AND_WITH_3F, USE, , void)                    \
  V(i64_shri, ShiftRightU64, LiftoffRegister, LiftoffRegister, int32_t,        \
    LFR_TO_REG, LFR_TO_REG, INT32_AND_WITH_3F, USE, , void)                    \
  V(f64_add, AddF64, DoubleRegister, DoubleRegister, DoubleRegister, , , ,     \
    USE, , void)                                                               \
  V(f64_sub, SubF64, DoubleRegister, DoubleRegister, DoubleRegister, , , ,     \
    USE, , void)                                                               \
  V(f64_mul, MulF64, DoubleRegister, DoubleRegister, DoubleRegister, , , ,     \
    USE, , void)                                                               \
  V(f64_div, DivF64, DoubleRegister, DoubleRegister, DoubleRegister, , , ,     \
    USE, , void)                                                               \
  V(f32_add, AddF32, DoubleRegister, DoubleRegister, DoubleRegister, , , ,     \
    USE, , void)                                                               \
  V(f32_sub, SubF32, DoubleRegister, DoubleRegister, DoubleRegister, , , ,     \
    USE, , void)                                                               \
  V(f32_mul, MulF32, DoubleRegister, DoubleRegister, DoubleRegister, , , ,     \
    USE, , void)                                                               \
  V(f32_div, DivF32, DoubleRegister, DoubleRegister, DoubleRegister, , , ,     \
    USE, , void)

#define EMIT_BINOP_FUNCTION(name, instr, dtype, stype1, stype2, dcast, scast1, \
                            scast2, rcast, ret, return_type)                   \
  return_type LiftoffAssembler::emit_##name(dtype dst, stype1 lhs,             \
                                            stype2 rhs) {                      \
    auto _dst = dcast(dst);                                                    \
    auto _lhs = scast1(lhs);                                                   \
    auto _rhs = scast2(rhs);                                                   \
    instr(_dst, _lhs, _rhs);                                                   \
    rcast(_dst);                                                               \
    return ret;                                                                \
  }

BINOP_LIST(EMIT_BINOP_FUNCTION)
#undef BINOP_LIST
#undef EMIT_BINOP_FUNCTION
#undef SIGN_EXT
#undef INT32_AND_WITH_1F
#undef REGISTER_AND_WITH_1F
#undef LFR_TO_REG

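// Similarly, BINOP_LIST(EMIT_BINOP_FUNCTION) above expands the i32_add entry
// to (roughly):
//
//   void LiftoffAssembler::emit_i32_add(Register dst, Register lhs,
//                                       Register rhs) {
//     AddS32(dst, lhs, rhs);
//   }
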
bool LiftoffAssembler::emit_f32_nearest_int(DoubleRegister dst,
                                            DoubleRegister src) {
  return false;
}

bool LiftoffAssembler::emit_f64_nearest_int(DoubleRegister dst,
                                            DoubleRegister src) {
  return false;
}
1257 
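// Increment the Smi stored at MemOperand(dst.gp(), offset) by one. With
// pointer compression, Smis are 31 bits and the field is accessed as a
// 32-bit value; adding the tagged representation of 1 increments the
// payload without untagging. Otherwise the full 64-bit Smi is untagged,
// incremented, and retagged.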
void LiftoffAssembler::IncrementSmi(LiftoffRegister dst, int offset) {
  UseScratchRegisterScope temps(this);
  if (COMPRESS_POINTERS_BOOL) {
    DCHECK(SmiValuesAre31Bits());
    Register scratch = temps.Acquire();
    LoadS32(scratch, MemOperand(dst.gp(), offset), r0);
    AddS64(scratch, scratch, Operand(Smi::FromInt(1)));
    StoreU32(scratch, MemOperand(dst.gp(), offset), r0);
  } else {
    Register scratch = temps.Acquire();
    SmiUntag(scratch, MemOperand(dst.gp(), offset), LeaveRC, r0);
    AddS64(scratch, scratch, Operand(1));
    SmiTag(scratch);
    StoreU64(scratch, MemOperand(dst.gp(), offset), r0);
  }
}

void LiftoffAssembler::emit_i32_divs(Register dst, Register lhs, Register rhs,
                                     Label* trap_div_by_zero,
                                     Label* trap_div_unrepresentable) {
  Label cont;

  // Check for division by zero.
  CmpS32(rhs, Operand::Zero(), r0);
  b(eq, trap_div_by_zero);

  // Check for kMinInt / -1. This is unrepresentable.
  CmpS32(rhs, Operand(-1), r0);
  bne(&cont);
  CmpS32(lhs, Operand(kMinInt), r0);
  b(eq, trap_div_unrepresentable);

  bind(&cont);
  DivS32(dst, lhs, rhs);
}

void LiftoffAssembler::emit_i32_divu(Register dst, Register lhs, Register rhs,
                                     Label* trap_div_by_zero) {
  CmpS32(rhs, Operand::Zero(), r0);
  beq(trap_div_by_zero);
  DivU32(dst, lhs, rhs);
}

void LiftoffAssembler::emit_i32_rems(Register dst, Register lhs, Register rhs,
                                     Label* trap_div_by_zero) {
  Label cont, done, trap_div_unrepresentable;
  // Check for division by zero.
  CmpS32(rhs, Operand::Zero(), r0);
  beq(trap_div_by_zero);

  // Check for the kMinInt / -1 case.
  CmpS32(rhs, Operand(-1), r0);
  bne(&cont);
  CmpS32(lhs, Operand(kMinInt), r0);
  beq(&trap_div_unrepresentable);

  // Continue normal calculation.
  bind(&cont);
  ModS32(dst, lhs, rhs);
  // The condition register still holds "not equal" from one of the compares
  // above on every path that reaches this point, so this branch always skips
  // the special-case handler below.
  bne(&done);

  // kMinInt % -1 cannot be computed by the divide, but the remainder is
  // defined to be 0.
  bind(&trap_div_unrepresentable);
  mov(dst, Operand(0));
  bind(&done);
}

void LiftoffAssembler::emit_i32_remu(Register dst, Register lhs, Register rhs,
                                     Label* trap_div_by_zero) {
  CmpS32(rhs, Operand::Zero(), r0);
  beq(trap_div_by_zero);
  ModU32(dst, lhs, rhs);
}

bool LiftoffAssembler::emit_i64_divs(LiftoffRegister dst, LiftoffRegister lhs,
                                     LiftoffRegister rhs,
                                     Label* trap_div_by_zero,
                                     Label* trap_div_unrepresentable) {
  constexpr int64_t kMinInt64 = static_cast<int64_t>(1) << 63;
  Label cont;
  // Check for division by zero.
  CmpS64(rhs.gp(), Operand::Zero(), r0);
  beq(trap_div_by_zero);

  // Check for kMinInt64 / -1. This is unrepresentable.
  CmpS64(rhs.gp(), Operand(-1), r0);
  bne(&cont);
  CmpS64(lhs.gp(), Operand(kMinInt64), r0);
  beq(trap_div_unrepresentable);

  bind(&cont);
  DivS64(dst.gp(), lhs.gp(), rhs.gp());
  return true;
}

bool LiftoffAssembler::emit_i64_divu(LiftoffRegister dst, LiftoffRegister lhs,
                                     LiftoffRegister rhs,
                                     Label* trap_div_by_zero) {
  CmpS64(rhs.gp(), Operand::Zero(), r0);
  beq(trap_div_by_zero);
  // Do div.
  DivU64(dst.gp(), lhs.gp(), rhs.gp());
  return true;
}

bool LiftoffAssembler::emit_i64_rems(LiftoffRegister dst, LiftoffRegister lhs,
                                     LiftoffRegister rhs,
                                     Label* trap_div_by_zero) {
  constexpr int64_t kMinInt64 = static_cast<int64_t>(1) << 63;

  Label trap_div_unrepresentable;
  Label done;
  Label cont;

  // Check for division by zero.
  CmpS64(rhs.gp(), Operand::Zero(), r0);
  beq(trap_div_by_zero);

  // Check for the kMinInt64 / -1 case.
  CmpS64(rhs.gp(), Operand(-1), r0);
  bne(&cont);
  CmpS64(lhs.gp(), Operand(kMinInt64), r0);
  beq(&trap_div_unrepresentable);

  bind(&cont);
  ModS64(dst.gp(), lhs.gp(), rhs.gp());
  // As in emit_i32_rems, the condition register still holds "not equal" on
  // every path reaching this point, so this always skips the handler below.
  bne(&done);

  // The remainder of kMinInt64 / -1 is defined to be 0.
  bind(&trap_div_unrepresentable);
  mov(dst.gp(), Operand(0));
  bind(&done);
  return true;
}

bool LiftoffAssembler::emit_i64_remu(LiftoffRegister dst, LiftoffRegister lhs,
                                     LiftoffRegister rhs,
                                     Label* trap_div_by_zero) {
  CmpS64(rhs.gp(), Operand::Zero(), r0);
  beq(trap_div_by_zero);
  ModU64(dst.gp(), lhs.gp(), rhs.gp());
  return true;
}

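// Convert between wasm value types. The trapping float-to-int conversions
// share a pattern: compare the input against 0.0 to detect NaN (unordered),
// clear the FPSCR invalid-conversion flag (VXCVI), convert with truncation,
// then copy the FPSCR field containing VXCVI into cr7 and branch to the trap
// label if the conversion overflowed.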
bool LiftoffAssembler::emit_type_conversion(WasmOpcode opcode,
                                            LiftoffRegister dst,
                                            LiftoffRegister src, Label* trap) {
  switch (opcode) {
    case kExprI32ConvertI64:
      extsw(dst.gp(), src.gp());
      return true;
    case kExprI64SConvertI32:
      extsw(dst.gp(), src.gp());
      return true;
    case kExprI64UConvertI32:
      ZeroExtWord32(dst.gp(), src.gp());
      return true;
    case kExprF32ConvertF64:
      frsp(dst.fp(), src.fp());
      return true;
    case kExprF64ConvertF32:
      // f32 values are held in double format, so a register move suffices.
      fmr(dst.fp(), src.fp());
      return true;
    case kExprF32SConvertI32: {
      ConvertIntToFloat(src.gp(), dst.fp());
      return true;
    }
    case kExprF32UConvertI32: {
      ConvertUnsignedIntToFloat(src.gp(), dst.fp());
      return true;
    }
    case kExprF64SConvertI32: {
      ConvertIntToDouble(src.gp(), dst.fp());
      return true;
    }
    case kExprF64UConvertI32: {
      ConvertUnsignedIntToDouble(src.gp(), dst.fp());
      return true;
    }
    case kExprF64SConvertI64: {
      ConvertInt64ToDouble(src.gp(), dst.fp());
      return true;
    }
    case kExprF64UConvertI64: {
      ConvertUnsignedInt64ToDouble(src.gp(), dst.fp());
      return true;
    }
    case kExprF32SConvertI64: {
      ConvertInt64ToFloat(src.gp(), dst.fp());
      return true;
    }
    case kExprF32UConvertI64: {
      ConvertUnsignedInt64ToFloat(src.gp(), dst.fp());
      return true;
    }
    case kExprI32SConvertF64:
    case kExprI32SConvertF32: {
      LoadDoubleLiteral(kScratchDoubleReg, base::Double(0.0), r0);
      fcmpu(src.fp(), kScratchDoubleReg);
      bunordered(trap);

      mtfsb0(VXCVI);  // clear FPSCR:VXCVI bit
      fctiwz(kScratchDoubleReg, src.fp());
      MovDoubleLowToInt(dst.gp(), kScratchDoubleReg);
      mcrfs(cr7, VXCVI);
      boverflow(trap, cr7);
      return true;
    }
    case kExprI32UConvertF64:
    case kExprI32UConvertF32: {
      mtfsb0(VXCVI);  // clear FPSCR:VXCVI bit
      ConvertDoubleToUnsignedInt64(src.fp(), r0, kScratchDoubleReg,
                                   kRoundToZero);
      mcrfs(cr7, VXCVI);  // extract FPSCR field containing VXCVI into cr7
      boverflow(trap, cr7);
      // Trap unless the result fits in 32 bits, i.e. zero-extending the low
      // word reproduces the full 64-bit result.
      ZeroExtWord32(dst.gp(), r0);
      CmpU64(dst.gp(), r0);
      bne(trap);
      return true;
    }
    case kExprI64SConvertF64:
    case kExprI64SConvertF32: {
      LoadDoubleLiteral(kScratchDoubleReg, base::Double(0.0), r0);
      fcmpu(src.fp(), kScratchDoubleReg);
      bunordered(trap);

      mtfsb0(VXCVI);  // clear FPSCR:VXCVI bit
      fctidz(kScratchDoubleReg, src.fp());
      MovDoubleToInt64(dst.gp(), kScratchDoubleReg);
      mcrfs(cr7, VXCVI);
      boverflow(trap, cr7);
      return true;
    }
    case kExprI64UConvertF64:
    case kExprI64UConvertF32: {
      LoadDoubleLiteral(kScratchDoubleReg, base::Double(0.0), r0);
      fcmpu(src.fp(), kScratchDoubleReg);
      bunordered(trap);

      mtfsb0(VXCVI);  // clear FPSCR:VXCVI bit
      fctiduz(kScratchDoubleReg, src.fp());
      MovDoubleToInt64(dst.gp(), kScratchDoubleReg);
      mcrfs(cr7, VXCVI);
      boverflow(trap, cr7);
      return true;
    }
    case kExprI32SConvertSatF64:
    case kExprI32SConvertSatF32: {
      Label done, src_is_nan;
      LoadDoubleLiteral(kScratchDoubleReg, base::Double(0.0), r0);
      fcmpu(src.fp(), kScratchDoubleReg);
      bunordered(&src_is_nan);

      // The truncating conversion saturates out-of-range inputs, so only NaN
      // needs special handling (NaN converts to 0).
      mtfsb0(VXCVI);  // clear FPSCR:VXCVI bit
      fctiwz(kScratchDoubleReg, src.fp());
      MovDoubleLowToInt(dst.gp(), kScratchDoubleReg);
      b(&done);

      bind(&src_is_nan);
      mov(dst.gp(), Operand::Zero());

      bind(&done);
      return true;
    }
    case kExprI32UConvertSatF64:
    case kExprI32UConvertSatF32: {
      Label done, src_is_nan;
      LoadDoubleLiteral(kScratchDoubleReg, base::Double(0.0), r0);
      fcmpu(src.fp(), kScratchDoubleReg);
      bunordered(&src_is_nan);

      mtfsb0(VXCVI);  // clear FPSCR:VXCVI bit
      fctiwuz(kScratchDoubleReg, src.fp());
      MovDoubleLowToInt(dst.gp(), kScratchDoubleReg);
      b(&done);

      bind(&src_is_nan);
      mov(dst.gp(), Operand::Zero());

      bind(&done);
      return true;
    }
    case kExprI64SConvertSatF64:
    case kExprI64SConvertSatF32: {
      Label done, src_is_nan;
      LoadDoubleLiteral(kScratchDoubleReg, base::Double(0.0), r0);
      fcmpu(src.fp(), kScratchDoubleReg);
      bunordered(&src_is_nan);

      mtfsb0(VXCVI);  // clear FPSCR:VXCVI bit
      fctidz(kScratchDoubleReg, src.fp());
      MovDoubleToInt64(dst.gp(), kScratchDoubleReg);
      b(&done);

      bind(&src_is_nan);
      mov(dst.gp(), Operand::Zero());

      bind(&done);
      return true;
    }
    case kExprI64UConvertSatF64:
    case kExprI64UConvertSatF32: {
      Label done, src_is_nan;
      LoadDoubleLiteral(kScratchDoubleReg, base::Double(0.0), r0);
      fcmpu(src.fp(), kScratchDoubleReg);
      bunordered(&src_is_nan);

      mtfsb0(VXCVI);  // clear FPSCR:VXCVI bit
      fctiduz(kScratchDoubleReg, src.fp());
      MovDoubleToInt64(dst.gp(), kScratchDoubleReg);
      b(&done);

      bind(&src_is_nan);
      mov(dst.gp(), Operand::Zero());

      bind(&done);
      return true;
    }
    case kExprI32ReinterpretF32: {
      MovFloatToInt(dst.gp(), src.fp(), kScratchDoubleReg);
      return true;
    }
    case kExprI64ReinterpretF64: {
      MovDoubleToInt64(dst.gp(), src.fp());
      return true;
    }
    case kExprF32ReinterpretI32: {
      MovIntToFloat(dst.fp(), src.gp(), r0);
      return true;
    }
    case kExprF64ReinterpretI64: {
      MovInt64ToDouble(dst.fp(), src.gp());
      return true;
    }
    default:
      UNREACHABLE();
  }
}

void LiftoffAssembler::emit_jump(Label* label) { b(al, label); }

void LiftoffAssembler::emit_jump(Register target) { Jump(target); }

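// Compare lhs against rhs (or against zero when rhs is no_reg) and branch to
// label if the condition holds. Reference types (kRef/kOptRef/kRtt) only
// support equality comparisons and are compared as 64-bit values.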
void LiftoffAssembler::emit_cond_jump(LiftoffCondition liftoff_cond,
                                      Label* label, ValueKind kind,
                                      Register lhs, Register rhs) {
  Condition cond = liftoff::ToCondition(liftoff_cond);
  bool use_signed = liftoff::UseSignedOp(liftoff_cond);

  if (rhs != no_reg) {
    switch (kind) {
      case kI32:
        if (use_signed) {
          CmpS32(lhs, rhs);
        } else {
          CmpU32(lhs, rhs);
        }
        break;
      case kRef:
      case kOptRef:
      case kRtt:
        DCHECK(liftoff_cond == kEqual || liftoff_cond == kUnequal);
        V8_FALLTHROUGH;
      case kI64:
        if (use_signed) {
          CmpS64(lhs, rhs);
        } else {
          CmpU64(lhs, rhs);
        }
        break;
      default:
        UNREACHABLE();
    }
  } else {
    DCHECK_EQ(kind, kI32);
    CHECK(use_signed);
    CmpS32(lhs, Operand::Zero(), r0);
  }

  b(cond, label);
}

void LiftoffAssembler::emit_i32_cond_jumpi(LiftoffCondition liftoff_cond,
                                           Label* label, Register lhs,
                                           int32_t imm) {
  bool use_signed = liftoff::UseSignedOp(liftoff_cond);
  Condition cond = liftoff::ToCondition(liftoff_cond);
  if (use_signed) {
    CmpS32(lhs, Operand(imm), r0);
  } else {
    CmpU32(lhs, Operand(imm), r0);
  }
  b(cond, label);
}

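// Subtract subtrahend from value with SetRC so the result is recorded in
// CR0, then branch to result_negative if the result went negative.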
void LiftoffAssembler::emit_i32_subi_jump_negative(Register value,
                                                   int subtrahend,
                                                   Label* result_negative) {
  SubS64(value, value, Operand(subtrahend), r0, LeaveOE, SetRC);
  blt(result_negative, cr0);
}

void LiftoffAssembler::emit_i32_eqz(Register dst, Register src) {
  Label done;
  CmpS32(src, Operand(0), r0);
  mov(dst, Operand(1));
  beq(&done);
  mov(dst, Operand::Zero());
  bind(&done);
}

void LiftoffAssembler::emit_i32_set_cond(LiftoffCondition liftoff_cond,
                                         Register dst, Register lhs,
                                         Register rhs) {
  bool use_signed = liftoff::UseSignedOp(liftoff_cond);
  if (use_signed) {
    CmpS32(lhs, rhs);
  } else {
    CmpU32(lhs, rhs);
  }
  Label done;
  mov(dst, Operand(1));
  b(liftoff::ToCondition(liftoff_cond), &done);
  mov(dst, Operand::Zero());
  bind(&done);
}

void LiftoffAssembler::emit_i64_eqz(Register dst, LiftoffRegister src) {
  Label done;
  cmpi(src.gp(), Operand(0));
  mov(dst, Operand(1));
  beq(&done);
  mov(dst, Operand::Zero());
  bind(&done);
}

void LiftoffAssembler::emit_i64_set_cond(LiftoffCondition liftoff_cond,
                                         Register dst, LiftoffRegister lhs,
                                         LiftoffRegister rhs) {
  bool use_signed = liftoff::UseSignedOp(liftoff_cond);
  if (use_signed) {
    CmpS64(lhs.gp(), rhs.gp());
  } else {
    CmpU64(lhs.gp(), rhs.gp());
  }
  Label done;
  mov(dst, Operand(1));
  b(liftoff::ToCondition(liftoff_cond), &done);
  mov(dst, Operand::Zero());
  bind(&done);
}

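// Materialize a float comparison result as 0/1. NaN operands compare
// "unordered": every condition is then false except kUnequal (x != y is
// true when either operand is NaN).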
void LiftoffAssembler::emit_f32_set_cond(LiftoffCondition liftoff_cond,
                                         Register dst, DoubleRegister lhs,
                                         DoubleRegister rhs) {
  fcmpu(lhs, rhs, cr0);
  Label nan, done;
  bunordered(&nan, cr0);
  mov(dst, Operand::Zero());
  b(NegateCondition(liftoff::ToCondition(liftoff_cond)), &done, cr0);
  mov(dst, Operand(1));
  b(&done);
  bind(&nan);
  if (liftoff_cond == kUnequal) {
    mov(dst, Operand(1));
  } else {
    mov(dst, Operand::Zero());
  }
  bind(&done);
}

void LiftoffAssembler::emit_f64_set_cond(LiftoffCondition liftoff_cond,
                                         Register dst, DoubleRegister lhs,
                                         DoubleRegister rhs) {
  emit_f32_set_cond(liftoff_cond, dst, lhs, rhs);
}

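// Returning false here means there is no architecture-specific select on
// this port; the caller falls back to a generic branch-based sequence.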
bool LiftoffAssembler::emit_select(LiftoffRegister dst, Register condition,
                                   LiftoffRegister true_value,
                                   LiftoffRegister false_value,
                                   ValueKind kind) {
  return false;
}

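// SIMD is not yet implemented in this port. The SIMD emitters here and below
// call bailout(), which aborts Liftoff compilation of the function so that
// it is compiled with TurboFan instead.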
void LiftoffAssembler::LoadTransform(LiftoffRegister dst, Register src_addr,
                                     Register offset_reg, uintptr_t offset_imm,
                                     LoadType type,
                                     LoadTransformationKind transform,
                                     uint32_t* protected_load_pc) {
  bailout(kSimd, "Load transform unimplemented");
}

void LiftoffAssembler::emit_smi_check(Register obj, Label* target,
                                      SmiCheckMode mode) {
  TestIfSmi(obj, r0);
  Condition condition = mode == kJumpOnSmi ? eq : ne;
  b(condition, target, cr0);  // branch if SMI
}

void LiftoffAssembler::LoadLane(LiftoffRegister dst, LiftoffRegister src,
                                Register addr, Register offset_reg,
                                uintptr_t offset_imm, LoadType type,
                                uint8_t laneidx, uint32_t* protected_load_pc) {
  bailout(kSimd, "loadlane");
}

void LiftoffAssembler::StoreLane(Register dst, Register offset,
                                 uintptr_t offset_imm, LiftoffRegister src,
                                 StoreType type, uint8_t lane,
                                 uint32_t* protected_store_pc) {
  bailout(kSimd, "store lane");
}

void LiftoffAssembler::emit_i8x16_swizzle(LiftoffRegister dst,
                                          LiftoffRegister lhs,
                                          LiftoffRegister rhs) {
  bailout(kUnsupportedArchitecture, "emit_i8x16_swizzle");
}

void LiftoffAssembler::emit_f64x2_splat(LiftoffRegister dst,
                                        LiftoffRegister src) {
  bailout(kUnsupportedArchitecture, "emit_f64x2splat");
}

void LiftoffAssembler::emit_f64x2_extract_lane(LiftoffRegister dst,
                                               LiftoffRegister lhs,
                                               uint8_t imm_lane_idx) {
  bailout(kUnsupportedArchitecture, "emit_f64x2extractlane");
}

void LiftoffAssembler::emit_f64x2_replace_lane(LiftoffRegister dst,
                                               LiftoffRegister src1,
                                               LiftoffRegister src2,
                                               uint8_t imm_lane_idx) {
  bailout(kUnsupportedArchitecture, "emit_f64x2replacelane");
}

void LiftoffAssembler::emit_f64x2_abs(LiftoffRegister dst,
                                      LiftoffRegister src) {
  bailout(kUnsupportedArchitecture, "emit_f64x2_abs");
}

void LiftoffAssembler::emit_f64x2_neg(LiftoffRegister dst,
                                      LiftoffRegister src) {
  bailout(kUnsupportedArchitecture, "emit_f64x2neg");
}

void LiftoffAssembler::emit_f64x2_sqrt(LiftoffRegister dst,
                                       LiftoffRegister src) {
  bailout(kUnsupportedArchitecture, "emit_f64x2sqrt");
}

bool LiftoffAssembler::emit_f64x2_ceil(LiftoffRegister dst,
                                       LiftoffRegister src) {
  bailout(kSimd, "f64x2.ceil");
  return true;
}

bool LiftoffAssembler::emit_f64x2_floor(LiftoffRegister dst,
                                        LiftoffRegister src) {
  bailout(kSimd, "f64x2.floor");
  return true;
}

bool LiftoffAssembler::emit_f64x2_trunc(LiftoffRegister dst,
                                        LiftoffRegister src) {
  bailout(kSimd, "f64x2.trunc");
  return true;
}

bool LiftoffAssembler::emit_f64x2_nearest_int(LiftoffRegister dst,
                                              LiftoffRegister src) {
  bailout(kSimd, "f64x2.nearest_int");
  return true;
}

void LiftoffAssembler::emit_f64x2_add(LiftoffRegister dst, LiftoffRegister lhs,
                                      LiftoffRegister rhs) {
  bailout(kUnsupportedArchitecture, "emit_f64x2add");
}

void LiftoffAssembler::emit_f64x2_sub(LiftoffRegister dst, LiftoffRegister lhs,
                                      LiftoffRegister rhs) {
  bailout(kUnsupportedArchitecture, "emit_f64x2sub");
}

void LiftoffAssembler::emit_f64x2_mul(LiftoffRegister dst, LiftoffRegister lhs,
                                      LiftoffRegister rhs) {
  bailout(kUnsupportedArchitecture, "emit_f64x2mul");
}

void LiftoffAssembler::emit_f64x2_div(LiftoffRegister dst, LiftoffRegister lhs,
                                      LiftoffRegister rhs) {
  bailout(kUnsupportedArchitecture, "emit_f64x2div");
}

void LiftoffAssembler::emit_f64x2_min(LiftoffRegister dst, LiftoffRegister lhs,
                                      LiftoffRegister rhs) {
  bailout(kUnsupportedArchitecture, "emit_f64x2min");
}

void LiftoffAssembler::emit_f64x2_max(LiftoffRegister dst, LiftoffRegister lhs,
                                      LiftoffRegister rhs) {
  bailout(kUnsupportedArchitecture, "emit_f64x2max");
}

void LiftoffAssembler::emit_f64x2_pmin(LiftoffRegister dst, LiftoffRegister lhs,
                                       LiftoffRegister rhs) {
  bailout(kSimd, "pmin unimplemented");
}

void LiftoffAssembler::emit_f64x2_pmax(LiftoffRegister dst, LiftoffRegister lhs,
                                       LiftoffRegister rhs) {
  bailout(kSimd, "pmax unimplemented");
}

void LiftoffAssembler::emit_f64x2_convert_low_i32x4_s(LiftoffRegister dst,
                                                      LiftoffRegister src) {
  bailout(kSimd, "f64x2.convert_low_i32x4_s");
}

void LiftoffAssembler::emit_f64x2_convert_low_i32x4_u(LiftoffRegister dst,
                                                      LiftoffRegister src) {
  bailout(kSimd, "f64x2.convert_low_i32x4_u");
}

void LiftoffAssembler::emit_f64x2_promote_low_f32x4(LiftoffRegister dst,
                                                    LiftoffRegister src) {
  bailout(kSimd, "f64x2.promote_low_f32x4");
}

void LiftoffAssembler::emit_f32x4_splat(LiftoffRegister dst,
                                        LiftoffRegister src) {
  bailout(kUnsupportedArchitecture, "emit_f32x4_splat");
}

void LiftoffAssembler::emit_f32x4_extract_lane(LiftoffRegister dst,
                                               LiftoffRegister lhs,
                                               uint8_t imm_lane_idx) {
  bailout(kUnsupportedArchitecture, "emit_f32x4extractlane");
}

void LiftoffAssembler::emit_f32x4_replace_lane(LiftoffRegister dst,
                                               LiftoffRegister src1,
                                               LiftoffRegister src2,
                                               uint8_t imm_lane_idx) {
  bailout(kUnsupportedArchitecture, "emit_f32x4replacelane");
}

void LiftoffAssembler::emit_f32x4_abs(LiftoffRegister dst,
                                      LiftoffRegister src) {
  bailout(kUnsupportedArchitecture, "emit_f32x4_abs");
}

void LiftoffAssembler::emit_f32x4_neg(LiftoffRegister dst,
                                      LiftoffRegister src) {
  bailout(kUnsupportedArchitecture, "emit_f32x4neg");
}

void LiftoffAssembler::emit_f32x4_sqrt(LiftoffRegister dst,
                                       LiftoffRegister src) {
  bailout(kUnsupportedArchitecture, "emit_f32x4sqrt");
}

bool LiftoffAssembler::emit_f32x4_ceil(LiftoffRegister dst,
                                       LiftoffRegister src) {
  bailout(kSimd, "f32x4.ceil");
  return true;
}

bool LiftoffAssembler::emit_f32x4_floor(LiftoffRegister dst,
                                        LiftoffRegister src) {
  bailout(kSimd, "f32x4.floor");
  return true;
}

bool LiftoffAssembler::emit_f32x4_trunc(LiftoffRegister dst,
                                        LiftoffRegister src) {
  bailout(kSimd, "f32x4.trunc");
  return true;
}

bool LiftoffAssembler::emit_f32x4_nearest_int(LiftoffRegister dst,
                                              LiftoffRegister src) {
  bailout(kSimd, "f32x4.nearest_int");
  return true;
}

void LiftoffAssembler::emit_f32x4_add(LiftoffRegister dst, LiftoffRegister lhs,
                                      LiftoffRegister rhs) {
  bailout(kUnsupportedArchitecture, "emit_f32x4add");
}

void LiftoffAssembler::emit_f32x4_sub(LiftoffRegister dst, LiftoffRegister lhs,
                                      LiftoffRegister rhs) {
  bailout(kUnsupportedArchitecture, "emit_f32x4sub");
}

void LiftoffAssembler::emit_f32x4_mul(LiftoffRegister dst, LiftoffRegister lhs,
                                      LiftoffRegister rhs) {
  bailout(kUnsupportedArchitecture, "emit_f32x4mul");
}

void LiftoffAssembler::emit_f32x4_div(LiftoffRegister dst, LiftoffRegister lhs,
                                      LiftoffRegister rhs) {
  bailout(kUnsupportedArchitecture, "emit_f32x4div");
}

void LiftoffAssembler::emit_f32x4_min(LiftoffRegister dst, LiftoffRegister lhs,
                                      LiftoffRegister rhs) {
  bailout(kUnsupportedArchitecture, "emit_f32x4min");
}

void LiftoffAssembler::emit_f32x4_max(LiftoffRegister dst, LiftoffRegister lhs,
                                      LiftoffRegister rhs) {
  bailout(kUnsupportedArchitecture, "emit_f32x4max");
}

void LiftoffAssembler::emit_f32x4_pmin(LiftoffRegister dst, LiftoffRegister lhs,
                                       LiftoffRegister rhs) {
  bailout(kSimd, "pmin unimplemented");
}

void LiftoffAssembler::emit_f32x4_pmax(LiftoffRegister dst, LiftoffRegister lhs,
                                       LiftoffRegister rhs) {
  bailout(kSimd, "pmax unimplemented");
}

void LiftoffAssembler::emit_i64x2_splat(LiftoffRegister dst,
                                        LiftoffRegister src) {
  bailout(kUnsupportedArchitecture, "emit_i64x2splat");
}

void LiftoffAssembler::emit_i64x2_extract_lane(LiftoffRegister dst,
                                               LiftoffRegister lhs,
                                               uint8_t imm_lane_idx) {
  bailout(kUnsupportedArchitecture, "emit_i64x2extractlane");
}

void LiftoffAssembler::emit_i64x2_replace_lane(LiftoffRegister dst,
                                               LiftoffRegister src1,
                                               LiftoffRegister src2,
                                               uint8_t imm_lane_idx) {
  bailout(kUnsupportedArchitecture, "emit_i64x2replacelane");
}

void LiftoffAssembler::emit_i64x2_neg(LiftoffRegister dst,
                                      LiftoffRegister src) {
  bailout(kUnsupportedArchitecture, "emit_i64x2neg");
}

void LiftoffAssembler::emit_i64x2_alltrue(LiftoffRegister dst,
                                          LiftoffRegister src) {
  bailout(kSimd, "i64x2_alltrue");
}

void LiftoffAssembler::emit_i64x2_shl(LiftoffRegister dst, LiftoffRegister lhs,
                                      LiftoffRegister rhs) {
  bailout(kSimd, "i64x2_shl");
}

void LiftoffAssembler::emit_i64x2_shli(LiftoffRegister dst, LiftoffRegister lhs,
                                       int32_t rhs) {
  bailout(kSimd, "i64x2_shli");
}

void LiftoffAssembler::emit_i64x2_shr_s(LiftoffRegister dst,
                                        LiftoffRegister lhs,
                                        LiftoffRegister rhs) {
  bailout(kSimd, "i64x2_shr_s");
}

void LiftoffAssembler::emit_i64x2_shri_s(LiftoffRegister dst,
                                         LiftoffRegister lhs, int32_t rhs) {
  bailout(kSimd, "i64x2_shri_s");
}

void LiftoffAssembler::emit_i64x2_shr_u(LiftoffRegister dst,
                                        LiftoffRegister lhs,
                                        LiftoffRegister rhs) {
  bailout(kSimd, "i64x2_shr_u");
}

void LiftoffAssembler::emit_i64x2_shri_u(LiftoffRegister dst,
                                         LiftoffRegister lhs, int32_t rhs) {
  bailout(kSimd, "i64x2_shri_u");
}

void LiftoffAssembler::emit_i64x2_add(LiftoffRegister dst, LiftoffRegister lhs,
                                      LiftoffRegister rhs) {
  bailout(kUnsupportedArchitecture, "emit_i64x2add");
}

void LiftoffAssembler::emit_i64x2_sub(LiftoffRegister dst, LiftoffRegister lhs,
                                      LiftoffRegister rhs) {
  bailout(kUnsupportedArchitecture, "emit_i64x2sub");
}

void LiftoffAssembler::emit_i64x2_mul(LiftoffRegister dst, LiftoffRegister lhs,
                                      LiftoffRegister rhs) {
  bailout(kUnsupportedArchitecture, "emit_i64x2mul");
}

void LiftoffAssembler::emit_i64x2_extmul_low_i32x4_s(LiftoffRegister dst,
                                                     LiftoffRegister src1,
                                                     LiftoffRegister src2) {
  bailout(kSimd, "i64x2_extmul_low_i32x4_s unsupported");
}

void LiftoffAssembler::emit_i64x2_extmul_low_i32x4_u(LiftoffRegister dst,
                                                     LiftoffRegister src1,
                                                     LiftoffRegister src2) {
  bailout(kSimd, "i64x2_extmul_low_i32x4_u unsupported");
}

void LiftoffAssembler::emit_i64x2_extmul_high_i32x4_s(LiftoffRegister dst,
                                                      LiftoffRegister src1,
                                                      LiftoffRegister src2) {
  bailout(kSimd, "i64x2_extmul_high_i32x4_s unsupported");
}

void LiftoffAssembler::emit_i64x2_bitmask(LiftoffRegister dst,
                                          LiftoffRegister src) {
  bailout(kSimd, "i64x2_bitmask");
}

void LiftoffAssembler::emit_i64x2_sconvert_i32x4_low(LiftoffRegister dst,
                                                     LiftoffRegister src) {
  bailout(kSimd, "i64x2_sconvert_i32x4_low");
}

void LiftoffAssembler::emit_i64x2_sconvert_i32x4_high(LiftoffRegister dst,
                                                      LiftoffRegister src) {
  bailout(kSimd, "i64x2_sconvert_i32x4_high");
}

void LiftoffAssembler::emit_i64x2_uconvert_i32x4_low(LiftoffRegister dst,
                                                     LiftoffRegister src) {
  bailout(kSimd, "i64x2_uconvert_i32x4_low");
}

void LiftoffAssembler::emit_i64x2_uconvert_i32x4_high(LiftoffRegister dst,
                                                      LiftoffRegister src) {
  bailout(kSimd, "i64x2_uconvert_i32x4_high");
}

void LiftoffAssembler::emit_i64x2_extmul_high_i32x4_u(LiftoffRegister dst,
                                                      LiftoffRegister src1,
                                                      LiftoffRegister src2) {
  bailout(kSimd, "i64x2_extmul_high_i32x4_u unsupported");
}

void LiftoffAssembler::emit_i32x4_splat(LiftoffRegister dst,
                                        LiftoffRegister src) {
  bailout(kUnsupportedArchitecture, "emit_i32x4_splat");
}

void LiftoffAssembler::emit_i32x4_extract_lane(LiftoffRegister dst,
                                               LiftoffRegister lhs,
                                               uint8_t imm_lane_idx) {
  bailout(kUnsupportedArchitecture, "emit_i32x4extractlane");
}

void LiftoffAssembler::emit_i32x4_replace_lane(LiftoffRegister dst,
                                               LiftoffRegister src1,
                                               LiftoffRegister src2,
                                               uint8_t imm_lane_idx) {
  bailout(kUnsupportedArchitecture, "emit_i32x4replacelane");
}

void LiftoffAssembler::emit_i32x4_neg(LiftoffRegister dst,
                                      LiftoffRegister src) {
  bailout(kUnsupportedArchitecture, "emit_i32x4neg");
}

void LiftoffAssembler::emit_i32x4_alltrue(LiftoffRegister dst,
                                          LiftoffRegister src) {
  bailout(kSimd, "i32x4_alltrue");
}

void LiftoffAssembler::emit_i32x4_bitmask(LiftoffRegister dst,
                                          LiftoffRegister src) {
  bailout(kSimd, "i32x4_bitmask");
}

void LiftoffAssembler::emit_i32x4_shl(LiftoffRegister dst, LiftoffRegister lhs,
                                      LiftoffRegister rhs) {
  bailout(kSimd, "i32x4_shl");
}

void LiftoffAssembler::emit_i32x4_shli(LiftoffRegister dst, LiftoffRegister lhs,
                                       int32_t rhs) {
  bailout(kSimd, "i32x4_shli");
}

void LiftoffAssembler::emit_i32x4_shr_s(LiftoffRegister dst,
                                        LiftoffRegister lhs,
                                        LiftoffRegister rhs) {
  bailout(kSimd, "i32x4_shr_s");
}

void LiftoffAssembler::emit_i32x4_shri_s(LiftoffRegister dst,
                                         LiftoffRegister lhs, int32_t rhs) {
  bailout(kSimd, "i32x4_shri_s");
}

void LiftoffAssembler::emit_i32x4_shr_u(LiftoffRegister dst,
                                        LiftoffRegister lhs,
                                        LiftoffRegister rhs) {
  bailout(kSimd, "i32x4_shr_u");
}

void LiftoffAssembler::emit_i32x4_shri_u(LiftoffRegister dst,
                                         LiftoffRegister lhs, int32_t rhs) {
  bailout(kSimd, "i32x4_shri_u");
}

void LiftoffAssembler::emit_i32x4_add(LiftoffRegister dst, LiftoffRegister lhs,
                                      LiftoffRegister rhs) {
  bailout(kUnsupportedArchitecture, "emit_i32x4add");
}

void LiftoffAssembler::emit_i32x4_sub(LiftoffRegister dst, LiftoffRegister lhs,
                                      LiftoffRegister rhs) {
  bailout(kUnsupportedArchitecture, "emit_i32x4sub");
}

void LiftoffAssembler::emit_i32x4_mul(LiftoffRegister dst, LiftoffRegister lhs,
                                      LiftoffRegister rhs) {
  bailout(kUnsupportedArchitecture, "emit_i32x4mul");
}

void LiftoffAssembler::emit_i32x4_min_s(LiftoffRegister dst,
                                        LiftoffRegister lhs,
                                        LiftoffRegister rhs) {
  bailout(kUnsupportedArchitecture, "emit_i32x4_min_s");
}

void LiftoffAssembler::emit_i32x4_min_u(LiftoffRegister dst,
                                        LiftoffRegister lhs,
                                        LiftoffRegister rhs) {
  bailout(kUnsupportedArchitecture, "emit_i32x4_min_u");
}

void LiftoffAssembler::emit_i32x4_max_s(LiftoffRegister dst,
                                        LiftoffRegister lhs,
                                        LiftoffRegister rhs) {
  bailout(kUnsupportedArchitecture, "emit_i32x4_max_s");
}

void LiftoffAssembler::emit_i32x4_max_u(LiftoffRegister dst,
                                        LiftoffRegister lhs,
                                        LiftoffRegister rhs) {
  bailout(kUnsupportedArchitecture, "emit_i32x4_max_u");
}

void LiftoffAssembler::emit_i32x4_dot_i16x8_s(LiftoffRegister dst,
                                              LiftoffRegister lhs,
                                              LiftoffRegister rhs) {
  bailout(kSimd, "i32x4_dot_i16x8_s");
}

void LiftoffAssembler::emit_i32x4_extadd_pairwise_i16x8_s(LiftoffRegister dst,
                                                          LiftoffRegister src) {
  bailout(kSimd, "i32x4.extadd_pairwise_i16x8_s");
}

void LiftoffAssembler::emit_i32x4_extadd_pairwise_i16x8_u(LiftoffRegister dst,
                                                          LiftoffRegister src) {
  bailout(kSimd, "i32x4.extadd_pairwise_i16x8_u");
}

void LiftoffAssembler::emit_i32x4_extmul_low_i16x8_s(LiftoffRegister dst,
                                                     LiftoffRegister src1,
                                                     LiftoffRegister src2) {
  bailout(kSimd, "i32x4_extmul_low_i16x8_s unsupported");
}

void LiftoffAssembler::emit_i32x4_extmul_low_i16x8_u(LiftoffRegister dst,
                                                     LiftoffRegister src1,
                                                     LiftoffRegister src2) {
  bailout(kSimd, "i32x4_extmul_low_i16x8_u unsupported");
}

void LiftoffAssembler::emit_i32x4_extmul_high_i16x8_s(LiftoffRegister dst,
                                                      LiftoffRegister src1,
                                                      LiftoffRegister src2) {
  bailout(kSimd, "i32x4_extmul_high_i16x8_s unsupported");
}

void LiftoffAssembler::emit_i32x4_extmul_high_i16x8_u(LiftoffRegister dst,
                                                      LiftoffRegister src1,
                                                      LiftoffRegister src2) {
  bailout(kSimd, "i32x4_extmul_high_i16x8_u unsupported");
}

emit_i16x8_splat(LiftoffRegister dst,LiftoffRegister src)2253 void LiftoffAssembler::emit_i16x8_splat(LiftoffRegister dst,
2254                                         LiftoffRegister src) {
2255   bailout(kUnsupportedArchitecture, "emit_i16x8splat");
2256 }
2257 
emit_i16x8_neg(LiftoffRegister dst,LiftoffRegister src)2258 void LiftoffAssembler::emit_i16x8_neg(LiftoffRegister dst,
2259                                       LiftoffRegister src) {
2260   bailout(kUnsupportedArchitecture, "emit_i16x8neg");
2261 }
2262 
emit_i16x8_alltrue(LiftoffRegister dst,LiftoffRegister src)2263 void LiftoffAssembler::emit_i16x8_alltrue(LiftoffRegister dst,
2264                                           LiftoffRegister src) {
2265   bailout(kSimd, "i16x8_alltrue");
2266 }
2267 
emit_i16x8_bitmask(LiftoffRegister dst,LiftoffRegister src)2268 void LiftoffAssembler::emit_i16x8_bitmask(LiftoffRegister dst,
2269                                           LiftoffRegister src) {
2270   bailout(kSimd, "i16x8_bitmask");
2271 }
2272 
emit_i16x8_shl(LiftoffRegister dst,LiftoffRegister lhs,LiftoffRegister rhs)2273 void LiftoffAssembler::emit_i16x8_shl(LiftoffRegister dst, LiftoffRegister lhs,
2274                                       LiftoffRegister rhs) {
2275   bailout(kSimd, "i16x8_shl");
2276 }
2277 
emit_i16x8_shli(LiftoffRegister dst,LiftoffRegister lhs,int32_t rhs)2278 void LiftoffAssembler::emit_i16x8_shli(LiftoffRegister dst, LiftoffRegister lhs,
2279                                        int32_t rhs) {
2280   bailout(kSimd, "i16x8_shli");
2281 }
2282 
emit_i16x8_shr_s(LiftoffRegister dst,LiftoffRegister lhs,LiftoffRegister rhs)2283 void LiftoffAssembler::emit_i16x8_shr_s(LiftoffRegister dst,
2284                                         LiftoffRegister lhs,
2285                                         LiftoffRegister rhs) {
2286   bailout(kSimd, "i16x8_shr_s");
2287 }
2288 
emit_i16x8_shri_s(LiftoffRegister dst,LiftoffRegister lhs,int32_t rhs)2289 void LiftoffAssembler::emit_i16x8_shri_s(LiftoffRegister dst,
2290                                          LiftoffRegister lhs, int32_t rhs) {
2291   bailout(kSimd, "i16x8_shri_s");
2292 }
2293 
emit_i16x8_shr_u(LiftoffRegister dst,LiftoffRegister lhs,LiftoffRegister rhs)2294 void LiftoffAssembler::emit_i16x8_shr_u(LiftoffRegister dst,
2295                                         LiftoffRegister lhs,
2296                                         LiftoffRegister rhs) {
2297   bailout(kSimd, "i16x8_shr_u");
2298 }
2299 
emit_i16x8_shri_u(LiftoffRegister dst,LiftoffRegister lhs,int32_t rhs)2300 void LiftoffAssembler::emit_i16x8_shri_u(LiftoffRegister dst,
2301                                          LiftoffRegister lhs, int32_t rhs) {
2302   bailout(kSimd, "i16x8_shri_u");
2303 }
2304 
emit_i16x8_add(LiftoffRegister dst,LiftoffRegister lhs,LiftoffRegister rhs)2305 void LiftoffAssembler::emit_i16x8_add(LiftoffRegister dst, LiftoffRegister lhs,
2306                                       LiftoffRegister rhs) {
2307   bailout(kUnsupportedArchitecture, "emit_i16x8add");
2308 }
2309 
emit_i16x8_add_sat_s(LiftoffRegister dst,LiftoffRegister lhs,LiftoffRegister rhs)2310 void LiftoffAssembler::emit_i16x8_add_sat_s(LiftoffRegister dst,
2311                                             LiftoffRegister lhs,
2312                                             LiftoffRegister rhs) {
2313   bailout(kUnsupportedArchitecture, "emit_i16x8addsaturate_s");
2314 }
2315 
emit_i16x8_sub(LiftoffRegister dst,LiftoffRegister lhs,LiftoffRegister rhs)2316 void LiftoffAssembler::emit_i16x8_sub(LiftoffRegister dst, LiftoffRegister lhs,
2317                                       LiftoffRegister rhs) {
2318   bailout(kUnsupportedArchitecture, "emit_i16x8sub");
2319 }
2320 
emit_i16x8_sub_sat_s(LiftoffRegister dst,LiftoffRegister lhs,LiftoffRegister rhs)2321 void LiftoffAssembler::emit_i16x8_sub_sat_s(LiftoffRegister dst,
2322                                             LiftoffRegister lhs,
2323                                             LiftoffRegister rhs) {
2324   bailout(kUnsupportedArchitecture, "emit_i16x8subsaturate_s");
2325 }
2326 
emit_i16x8_sub_sat_u(LiftoffRegister dst,LiftoffRegister lhs,LiftoffRegister rhs)2327 void LiftoffAssembler::emit_i16x8_sub_sat_u(LiftoffRegister dst,
2328                                             LiftoffRegister lhs,
2329                                             LiftoffRegister rhs) {
2330   bailout(kUnsupportedArchitecture, "emit_i16x8subsaturate_u");
2331 }
2332 
emit_i16x8_mul(LiftoffRegister dst,LiftoffRegister lhs,LiftoffRegister rhs)2333 void LiftoffAssembler::emit_i16x8_mul(LiftoffRegister dst, LiftoffRegister lhs,
2334                                       LiftoffRegister rhs) {
2335   bailout(kUnsupportedArchitecture, "emit_i16x8mul");
2336 }
2337 
void LiftoffAssembler::emit_i16x8_add_sat_u(LiftoffRegister dst,
                                            LiftoffRegister lhs,
                                            LiftoffRegister rhs) {
  bailout(kUnsupportedArchitecture, "emit_i16x8addsaturate_u");
}

void LiftoffAssembler::emit_i16x8_min_s(LiftoffRegister dst,
                                        LiftoffRegister lhs,
                                        LiftoffRegister rhs) {
  bailout(kUnsupportedArchitecture, "emit_i16x8_min_s");
}

void LiftoffAssembler::emit_i16x8_min_u(LiftoffRegister dst,
                                        LiftoffRegister lhs,
                                        LiftoffRegister rhs) {
  bailout(kUnsupportedArchitecture, "emit_i16x8_min_u");
}

void LiftoffAssembler::emit_i16x8_max_s(LiftoffRegister dst,
                                        LiftoffRegister lhs,
                                        LiftoffRegister rhs) {
  bailout(kUnsupportedArchitecture, "emit_i16x8_max_s");
}

void LiftoffAssembler::emit_i16x8_max_u(LiftoffRegister dst,
                                        LiftoffRegister lhs,
                                        LiftoffRegister rhs) {
  bailout(kUnsupportedArchitecture, "emit_i16x8_max_u");
}

void LiftoffAssembler::emit_i16x8_extract_lane_u(LiftoffRegister dst,
                                                 LiftoffRegister lhs,
                                                 uint8_t imm_lane_idx) {
  bailout(kUnsupportedArchitecture, "emit_i16x8extractlane_u");
}

void LiftoffAssembler::emit_i16x8_replace_lane(LiftoffRegister dst,
                                               LiftoffRegister src1,
                                               LiftoffRegister src2,
                                               uint8_t imm_lane_idx) {
  bailout(kUnsupportedArchitecture, "emit_i16x8replacelane");
}

void LiftoffAssembler::emit_i16x8_extadd_pairwise_i8x16_s(LiftoffRegister dst,
                                                          LiftoffRegister src) {
  bailout(kSimd, "i16x8.extadd_pairwise_i8x16_s");
}

void LiftoffAssembler::emit_i16x8_extadd_pairwise_i8x16_u(LiftoffRegister dst,
                                                          LiftoffRegister src) {
  bailout(kSimd, "i16x8.extadd_pairwise_i8x16_u");
}

void LiftoffAssembler::emit_i16x8_extract_lane_s(LiftoffRegister dst,
                                                 LiftoffRegister lhs,
                                                 uint8_t imm_lane_idx) {
  bailout(kUnsupportedArchitecture, "emit_i16x8extractlane_s");
}

void LiftoffAssembler::emit_i16x8_extmul_low_i8x16_s(LiftoffRegister dst,
                                                     LiftoffRegister src1,
                                                     LiftoffRegister src2) {
  bailout(kSimd, "i16x8.extmul_low_i8x16_s unsupported");
}

void LiftoffAssembler::emit_i16x8_extmul_low_i8x16_u(LiftoffRegister dst,
                                                     LiftoffRegister src1,
                                                     LiftoffRegister src2) {
  bailout(kSimd, "i16x8.extmul_low_i8x16_u unsupported");
}

void LiftoffAssembler::emit_i16x8_extmul_high_i8x16_s(LiftoffRegister dst,
                                                      LiftoffRegister src1,
                                                      LiftoffRegister src2) {
  bailout(kSimd, "i16x8.extmul_high_i8x16_s unsupported");
}

void LiftoffAssembler::emit_i16x8_q15mulr_sat_s(LiftoffRegister dst,
                                                LiftoffRegister src1,
                                                LiftoffRegister src2) {
  bailout(kSimd, "i16x8_q15mulr_sat_s");
}

void LiftoffAssembler::emit_i16x8_extmul_high_i8x16_u(LiftoffRegister dst,
                                                      LiftoffRegister src1,
                                                      LiftoffRegister src2) {
  bailout(kSimd, "i16x8_extmul_high_i8x16_u unsupported");
}

void LiftoffAssembler::emit_i8x16_shuffle(LiftoffRegister dst,
                                          LiftoffRegister lhs,
                                          LiftoffRegister rhs,
                                          const uint8_t shuffle[16],
                                          bool is_swizzle) {
  bailout(kSimd, "i8x16_shuffle");
}

void LiftoffAssembler::emit_i8x16_popcnt(LiftoffRegister dst,
                                         LiftoffRegister src) {
  bailout(kSimd, "i8x16.popcnt");
}

void LiftoffAssembler::emit_i8x16_splat(LiftoffRegister dst,
                                        LiftoffRegister src) {
  bailout(kUnsupportedArchitecture, "emit_i8x16splat");
}

void LiftoffAssembler::emit_i8x16_extract_lane_u(LiftoffRegister dst,
                                                 LiftoffRegister lhs,
                                                 uint8_t imm_lane_idx) {
  bailout(kUnsupportedArchitecture, "emit_i8x16extractlane_u");
}

void LiftoffAssembler::emit_i8x16_replace_lane(LiftoffRegister dst,
                                               LiftoffRegister src1,
                                               LiftoffRegister src2,
                                               uint8_t imm_lane_idx) {
  bailout(kUnsupportedArchitecture, "emit_i8x16replacelane");
}

void LiftoffAssembler::emit_i8x16_neg(LiftoffRegister dst,
                                      LiftoffRegister src) {
  bailout(kUnsupportedArchitecture, "emit_i8x16neg");
}

void LiftoffAssembler::emit_v128_anytrue(LiftoffRegister dst,
                                         LiftoffRegister src) {
  bailout(kSimd, "v8x16_anytrue");
}

void LiftoffAssembler::emit_i8x16_alltrue(LiftoffRegister dst,
                                          LiftoffRegister src) {
  bailout(kSimd, "i8x16_alltrue");
}

void LiftoffAssembler::emit_i8x16_bitmask(LiftoffRegister dst,
                                          LiftoffRegister src) {
  bailout(kSimd, "i8x16_bitmask");
}

void LiftoffAssembler::emit_i8x16_shl(LiftoffRegister dst, LiftoffRegister lhs,
                                      LiftoffRegister rhs) {
  bailout(kSimd, "i8x16_shl");
}

void LiftoffAssembler::emit_i8x16_shli(LiftoffRegister dst, LiftoffRegister lhs,
                                       int32_t rhs) {
  bailout(kSimd, "i8x16_shli");
}

void LiftoffAssembler::emit_i8x16_shr_s(LiftoffRegister dst,
                                        LiftoffRegister lhs,
                                        LiftoffRegister rhs) {
  bailout(kSimd, "i8x16_shr_s");
}

void LiftoffAssembler::emit_i8x16_shri_s(LiftoffRegister dst,
                                         LiftoffRegister lhs, int32_t rhs) {
  bailout(kSimd, "i8x16_shri_s");
}

void LiftoffAssembler::emit_i8x16_shr_u(LiftoffRegister dst,
                                        LiftoffRegister lhs,
                                        LiftoffRegister rhs) {
  bailout(kSimd, "i8x16_shr_u");
}

void LiftoffAssembler::emit_i8x16_shri_u(LiftoffRegister dst,
                                         LiftoffRegister lhs, int32_t rhs) {
  bailout(kSimd, "i8x16_shri_u");
}

void LiftoffAssembler::emit_i8x16_extract_lane_s(LiftoffRegister dst,
                                                 LiftoffRegister lhs,
                                                 uint8_t imm_lane_idx) {
  bailout(kUnsupportedArchitecture, "emit_i8x16extractlane_s");
}

void LiftoffAssembler::emit_i8x16_add(LiftoffRegister dst, LiftoffRegister lhs,
                                      LiftoffRegister rhs) {
  bailout(kUnsupportedArchitecture, "emit_i8x16add");
}

void LiftoffAssembler::emit_i8x16_add_sat_s(LiftoffRegister dst,
                                            LiftoffRegister lhs,
                                            LiftoffRegister rhs) {
  bailout(kUnsupportedArchitecture, "emit_i8x16addsaturate_s");
}

void LiftoffAssembler::emit_i8x16_min_s(LiftoffRegister dst,
                                        LiftoffRegister lhs,
                                        LiftoffRegister rhs) {
  bailout(kUnsupportedArchitecture, "emit_i8x16_min_s");
}

void LiftoffAssembler::emit_i8x16_min_u(LiftoffRegister dst,
                                        LiftoffRegister lhs,
                                        LiftoffRegister rhs) {
  bailout(kUnsupportedArchitecture, "emit_i8x16_min_u");
}

void LiftoffAssembler::emit_i8x16_max_s(LiftoffRegister dst,
                                        LiftoffRegister lhs,
                                        LiftoffRegister rhs) {
  bailout(kUnsupportedArchitecture, "emit_i8x16_max_s");
}

void LiftoffAssembler::emit_i8x16_max_u(LiftoffRegister dst,
                                        LiftoffRegister lhs,
                                        LiftoffRegister rhs) {
  bailout(kUnsupportedArchitecture, "emit_i8x16_max_u");
}

void LiftoffAssembler::emit_i8x16_eq(LiftoffRegister dst, LiftoffRegister lhs,
                                     LiftoffRegister rhs) {
  bailout(kUnsupportedArchitecture, "emit_i8x16_eq");
}

void LiftoffAssembler::emit_i8x16_ne(LiftoffRegister dst, LiftoffRegister lhs,
                                     LiftoffRegister rhs) {
  bailout(kUnsupportedArchitecture, "emit_i8x16_ne");
}

void LiftoffAssembler::emit_i8x16_gt_s(LiftoffRegister dst, LiftoffRegister lhs,
                                       LiftoffRegister rhs) {
  bailout(kUnsupportedArchitecture, "emit_i8x16gt_s");
}

void LiftoffAssembler::emit_i8x16_gt_u(LiftoffRegister dst, LiftoffRegister lhs,
                                       LiftoffRegister rhs) {
  bailout(kUnsupportedArchitecture, "emit_i8x16gt_u");
}

void LiftoffAssembler::emit_i8x16_ge_s(LiftoffRegister dst, LiftoffRegister lhs,
                                       LiftoffRegister rhs) {
  bailout(kUnsupportedArchitecture, "emit_i8x16ge_s");
}

void LiftoffAssembler::emit_i8x16_ge_u(LiftoffRegister dst, LiftoffRegister lhs,
                                       LiftoffRegister rhs) {
  bailout(kUnsupportedArchitecture, "emit_i8x16ge_u");
}

void LiftoffAssembler::emit_i16x8_eq(LiftoffRegister dst, LiftoffRegister lhs,
                                     LiftoffRegister rhs) {
  bailout(kUnsupportedArchitecture, "emit_i16x8_eq");
}

void LiftoffAssembler::emit_i16x8_ne(LiftoffRegister dst, LiftoffRegister lhs,
                                     LiftoffRegister rhs) {
  bailout(kUnsupportedArchitecture, "emit_i16x8_ne");
}

void LiftoffAssembler::emit_i16x8_gt_s(LiftoffRegister dst, LiftoffRegister lhs,
                                       LiftoffRegister rhs) {
  bailout(kUnsupportedArchitecture, "emit_i16x8gt_s");
}

void LiftoffAssembler::emit_i16x8_gt_u(LiftoffRegister dst, LiftoffRegister lhs,
                                       LiftoffRegister rhs) {
  bailout(kUnsupportedArchitecture, "emit_i16x8gt_u");
}

void LiftoffAssembler::emit_i16x8_ge_s(LiftoffRegister dst, LiftoffRegister lhs,
                                       LiftoffRegister rhs) {
  bailout(kUnsupportedArchitecture, "emit_i16x8ge_s");
}

void LiftoffAssembler::emit_i16x8_ge_u(LiftoffRegister dst, LiftoffRegister lhs,
                                       LiftoffRegister rhs) {
  bailout(kUnsupportedArchitecture, "emit_i16x8ge_u");
}

void LiftoffAssembler::emit_i32x4_eq(LiftoffRegister dst, LiftoffRegister lhs,
                                     LiftoffRegister rhs) {
  bailout(kUnsupportedArchitecture, "emit_i32x4_eq");
}

void LiftoffAssembler::emit_i32x4_ne(LiftoffRegister dst, LiftoffRegister lhs,
                                     LiftoffRegister rhs) {
  bailout(kUnsupportedArchitecture, "emit_i32x4_ne");
}

void LiftoffAssembler::emit_i32x4_gt_s(LiftoffRegister dst, LiftoffRegister lhs,
                                       LiftoffRegister rhs) {
  bailout(kUnsupportedArchitecture, "emit_i32x4gt_s");
}

void LiftoffAssembler::emit_i32x4_gt_u(LiftoffRegister dst, LiftoffRegister lhs,
                                       LiftoffRegister rhs) {
  bailout(kUnsupportedArchitecture, "emit_i32x4gt_u");
}

void LiftoffAssembler::emit_i32x4_ge_s(LiftoffRegister dst, LiftoffRegister lhs,
                                       LiftoffRegister rhs) {
  bailout(kUnsupportedArchitecture, "emit_i32x4ge_s");
}

void LiftoffAssembler::emit_i32x4_ge_u(LiftoffRegister dst, LiftoffRegister lhs,
                                       LiftoffRegister rhs) {
  bailout(kUnsupportedArchitecture, "emit_i32x4ge_u");
}

void LiftoffAssembler::emit_i64x2_eq(LiftoffRegister dst, LiftoffRegister lhs,
                                     LiftoffRegister rhs) {
  bailout(kSimd, "i64x2.eq");
}

void LiftoffAssembler::emit_i64x2_ne(LiftoffRegister dst, LiftoffRegister lhs,
                                     LiftoffRegister rhs) {
  bailout(kSimd, "i64x2_ne");
}

void LiftoffAssembler::emit_i64x2_gt_s(LiftoffRegister dst, LiftoffRegister lhs,
                                       LiftoffRegister rhs) {
  bailout(kSimd, "i64x2.gt_s");
}

void LiftoffAssembler::emit_i64x2_ge_s(LiftoffRegister dst, LiftoffRegister lhs,
                                       LiftoffRegister rhs) {
  bailout(kSimd, "i64x2.ge_s");
}

void LiftoffAssembler::emit_f32x4_eq(LiftoffRegister dst, LiftoffRegister lhs,
                                     LiftoffRegister rhs) {
  bailout(kUnsupportedArchitecture, "emit_f32x4_eq");
}

void LiftoffAssembler::emit_f32x4_ne(LiftoffRegister dst, LiftoffRegister lhs,
                                     LiftoffRegister rhs) {
  bailout(kUnsupportedArchitecture, "emit_f32x4_ne");
}

void LiftoffAssembler::emit_f32x4_lt(LiftoffRegister dst, LiftoffRegister lhs,
                                     LiftoffRegister rhs) {
  bailout(kUnsupportedArchitecture, "emit_f32x4_lt");
}

void LiftoffAssembler::emit_f32x4_le(LiftoffRegister dst, LiftoffRegister lhs,
                                     LiftoffRegister rhs) {
  bailout(kUnsupportedArchitecture, "emit_f32x4_le");
}

void LiftoffAssembler::emit_f64x2_eq(LiftoffRegister dst, LiftoffRegister lhs,
                                     LiftoffRegister rhs) {
  bailout(kUnsupportedArchitecture, "emit_f64x2_eq");
}

void LiftoffAssembler::emit_f64x2_ne(LiftoffRegister dst, LiftoffRegister lhs,
                                     LiftoffRegister rhs) {
  bailout(kUnsupportedArchitecture, "emit_f64x2_ne");
}

void LiftoffAssembler::emit_f64x2_lt(LiftoffRegister dst, LiftoffRegister lhs,
                                     LiftoffRegister rhs) {
  bailout(kUnsupportedArchitecture, "emit_f64x2_lt");
}

void LiftoffAssembler::emit_f64x2_le(LiftoffRegister dst, LiftoffRegister lhs,
                                     LiftoffRegister rhs) {
  bailout(kUnsupportedArchitecture, "emit_f64x2_le");
}

void LiftoffAssembler::emit_s128_const(LiftoffRegister dst,
                                       const uint8_t imms[16]) {
  bailout(kUnsupportedArchitecture, "emit_s128_const");
}

void LiftoffAssembler::emit_s128_not(LiftoffRegister dst, LiftoffRegister src) {
  bailout(kUnsupportedArchitecture, "emit_s128_not");
}

void LiftoffAssembler::emit_s128_and(LiftoffRegister dst, LiftoffRegister lhs,
                                     LiftoffRegister rhs) {
  bailout(kUnsupportedArchitecture, "emit_s128_and");
}

void LiftoffAssembler::emit_s128_or(LiftoffRegister dst, LiftoffRegister lhs,
                                    LiftoffRegister rhs) {
  bailout(kUnsupportedArchitecture, "emit_s128_or");
}

void LiftoffAssembler::emit_s128_xor(LiftoffRegister dst, LiftoffRegister lhs,
                                     LiftoffRegister rhs) {
  bailout(kUnsupportedArchitecture, "emit_s128_xor");
}

void LiftoffAssembler::emit_s128_select(LiftoffRegister dst,
                                        LiftoffRegister src1,
                                        LiftoffRegister src2,
                                        LiftoffRegister mask) {
  bailout(kUnsupportedArchitecture, "emit_s128select");
}

void LiftoffAssembler::emit_i32x4_sconvert_f32x4(LiftoffRegister dst,
                                                 LiftoffRegister src) {
  bailout(kSimd, "i32x4_sconvert_f32x4");
}

void LiftoffAssembler::emit_i32x4_uconvert_f32x4(LiftoffRegister dst,
                                                 LiftoffRegister src) {
  bailout(kSimd, "i32x4_uconvert_f32x4");
}

void LiftoffAssembler::emit_f32x4_sconvert_i32x4(LiftoffRegister dst,
                                                 LiftoffRegister src) {
  bailout(kSimd, "f32x4_sconvert_i32x4");
}

void LiftoffAssembler::emit_f32x4_uconvert_i32x4(LiftoffRegister dst,
                                                 LiftoffRegister src) {
  bailout(kSimd, "f32x4_uconvert_i32x4");
}

void LiftoffAssembler::emit_f32x4_demote_f64x2_zero(LiftoffRegister dst,
                                                    LiftoffRegister src) {
  bailout(kSimd, "f32x4.demote_f64x2_zero");
}

void LiftoffAssembler::emit_i8x16_sconvert_i16x8(LiftoffRegister dst,
                                                 LiftoffRegister lhs,
                                                 LiftoffRegister rhs) {
  bailout(kUnsupportedArchitecture, "emit_i8x16_sconvert_i16x8");
}

void LiftoffAssembler::emit_i8x16_uconvert_i16x8(LiftoffRegister dst,
                                                 LiftoffRegister lhs,
                                                 LiftoffRegister rhs) {
  bailout(kUnsupportedArchitecture, "emit_i8x16_uconvert_i16x8");
}

void LiftoffAssembler::emit_i16x8_sconvert_i32x4(LiftoffRegister dst,
                                                 LiftoffRegister lhs,
                                                 LiftoffRegister rhs) {
  bailout(kUnsupportedArchitecture, "emit_i16x8_sconvert_i32x4");
}

void LiftoffAssembler::emit_i16x8_uconvert_i32x4(LiftoffRegister dst,
                                                 LiftoffRegister lhs,
                                                 LiftoffRegister rhs) {
  bailout(kUnsupportedArchitecture, "emit_i16x8_uconvert_i32x4");
}

void LiftoffAssembler::emit_i16x8_sconvert_i8x16_low(LiftoffRegister dst,
                                                     LiftoffRegister src) {
  bailout(kUnsupportedArchitecture, "emit_i16x8_sconvert_i8x16_low");
}

void LiftoffAssembler::emit_i16x8_sconvert_i8x16_high(LiftoffRegister dst,
                                                      LiftoffRegister src) {
  bailout(kUnsupportedArchitecture, "emit_i16x8_sconvert_i8x16_high");
}

void LiftoffAssembler::emit_i16x8_uconvert_i8x16_low(LiftoffRegister dst,
                                                     LiftoffRegister src) {
  bailout(kUnsupportedArchitecture, "emit_i16x8_uconvert_i8x16_low");
}

void LiftoffAssembler::emit_i16x8_uconvert_i8x16_high(LiftoffRegister dst,
                                                      LiftoffRegister src) {
  bailout(kUnsupportedArchitecture, "emit_i16x8_uconvert_i8x16_high");
}

void LiftoffAssembler::emit_i32x4_sconvert_i16x8_low(LiftoffRegister dst,
                                                     LiftoffRegister src) {
  bailout(kUnsupportedArchitecture, "emit_i32x4_sconvert_i16x8_low");
}

void LiftoffAssembler::emit_i32x4_sconvert_i16x8_high(LiftoffRegister dst,
                                                      LiftoffRegister src) {
  bailout(kUnsupportedArchitecture, "emit_i32x4_sconvert_i16x8_high");
}

void LiftoffAssembler::emit_i32x4_uconvert_i16x8_low(LiftoffRegister dst,
                                                     LiftoffRegister src) {
  bailout(kUnsupportedArchitecture, "emit_i32x4_uconvert_i16x8_low");
}

void LiftoffAssembler::emit_i32x4_uconvert_i16x8_high(LiftoffRegister dst,
                                                      LiftoffRegister src) {
  bailout(kUnsupportedArchitecture, "emit_i32x4_uconvert_i16x8_high");
}

void LiftoffAssembler::emit_i32x4_trunc_sat_f64x2_s_zero(LiftoffRegister dst,
                                                         LiftoffRegister src) {
  bailout(kSimd, "i32x4.trunc_sat_f64x2_s_zero");
}

void LiftoffAssembler::emit_i32x4_trunc_sat_f64x2_u_zero(LiftoffRegister dst,
                                                         LiftoffRegister src) {
  bailout(kSimd, "i32x4.trunc_sat_f64x2_u_zero");
}

void LiftoffAssembler::emit_s128_and_not(LiftoffRegister dst,
                                         LiftoffRegister lhs,
                                         LiftoffRegister rhs) {
  bailout(kUnsupportedArchitecture, "emit_s128_and_not");
}

void LiftoffAssembler::emit_i8x16_rounding_average_u(LiftoffRegister dst,
                                                     LiftoffRegister lhs,
                                                     LiftoffRegister rhs) {
  bailout(kUnsupportedArchitecture, "emit_i8x16_rounding_average_u");
}

void LiftoffAssembler::emit_i16x8_rounding_average_u(LiftoffRegister dst,
                                                     LiftoffRegister lhs,
                                                     LiftoffRegister rhs) {
  bailout(kUnsupportedArchitecture, "emit_i16x8_rounding_average_u");
}

void LiftoffAssembler::emit_i8x16_abs(LiftoffRegister dst,
                                      LiftoffRegister src) {
  bailout(kUnsupportedArchitecture, "emit_i8x16_abs");
}

void LiftoffAssembler::emit_i16x8_abs(LiftoffRegister dst,
                                      LiftoffRegister src) {
  bailout(kUnsupportedArchitecture, "emit_i16x8_abs");
}

void LiftoffAssembler::emit_i32x4_abs(LiftoffRegister dst,
                                      LiftoffRegister src) {
  bailout(kUnsupportedArchitecture, "emit_i32x4_abs");
}

void LiftoffAssembler::emit_i64x2_abs(LiftoffRegister dst,
                                      LiftoffRegister src) {
  bailout(kSimd, "i64x2.abs");
}

void LiftoffAssembler::emit_i8x16_sub(LiftoffRegister dst, LiftoffRegister lhs,
                                      LiftoffRegister rhs) {
  bailout(kUnsupportedArchitecture, "emit_i8x16sub");
}

void LiftoffAssembler::emit_i8x16_sub_sat_s(LiftoffRegister dst,
                                            LiftoffRegister lhs,
                                            LiftoffRegister rhs) {
  bailout(kUnsupportedArchitecture, "emit_i8x16subsaturate_s");
}

void LiftoffAssembler::emit_i8x16_sub_sat_u(LiftoffRegister dst,
                                            LiftoffRegister lhs,
                                            LiftoffRegister rhs) {
  bailout(kUnsupportedArchitecture, "emit_i8x16subsaturate_u");
}

void LiftoffAssembler::emit_i8x16_add_sat_u(LiftoffRegister dst,
                                            LiftoffRegister lhs,
                                            LiftoffRegister rhs) {
  bailout(kUnsupportedArchitecture, "emit_i8x16addsaturate_u");
}

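// Compare sp against the stack limit that the runtime maintains in memory.
// The limit is re-loaded on every check because the runtime may lower it
// concurrently (e.g. to request an interrupt); if sp is at or below the
// limit, execution continues in the out-of-line code.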
void LiftoffAssembler::StackCheck(Label* ool_code, Register limit_address) {
  LoadU64(limit_address, MemOperand(limit_address), r0);
  CmpU64(sp, limit_address);
  ble(ool_code);
}

void LiftoffAssembler::CallTrapCallbackForTesting() {
  PrepareCallCFunction(0, 0, ip);
  CallCFunction(ExternalReference::wasm_call_trap_callback_for_testing(), 0);
}

void LiftoffAssembler::AssertUnreachable(AbortReason reason) {
  if (FLAG_debug_code) Abort(reason);
}

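// Save/restore a register set around a call. The two helpers must mirror
// each other exactly: gp registers are pushed before fp registers, so they
// are popped in the opposite order.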
void LiftoffAssembler::PushRegisters(LiftoffRegList regs) {
  MultiPush(regs.GetGpList());
  MultiPushDoubles(regs.GetFpList());
}

void LiftoffAssembler::PopRegisters(LiftoffRegList regs) {
  MultiPopDoubles(regs.GetFpList());
  MultiPop(regs.GetGpList());
}

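// Walk the spilled registers from the highest register code downwards and
// mark the stack slot of every reference value as tagged in the safepoint,
// so that the GC can visit and update it.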
void LiftoffAssembler::RecordSpillsInSafepoint(
    SafepointTableBuilder::Safepoint& safepoint, LiftoffRegList all_spills,
    LiftoffRegList ref_spills, int spill_offset) {
  int spill_space_size = 0;
  while (!all_spills.is_empty()) {
    LiftoffRegister reg = all_spills.GetLastRegSet();
    if (ref_spills.has(reg)) {
      safepoint.DefineTaggedStackSlot(spill_offset);
    }
    all_spills.clear(reg);
    ++spill_offset;
    spill_space_size += kSystemPointerSize;
  }
  // Record the number of additional spill slots.
  RecordOolSpillSpaceSize(spill_space_size);
}

void LiftoffAssembler::DropStackSlotsAndRet(uint32_t num_stack_slots) {
  Drop(num_stack_slots);
  Ret();
}

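// C calls use a small scratch buffer carved out of the stack: all wasm
// arguments are spilled into the buffer, its address is passed as the single
// C argument (in r3), and a potential out-argument is read back from the
// same buffer after the call returns.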
void LiftoffAssembler::CallC(const ValueKindSig* sig,
                             const LiftoffRegister* args,
                             const LiftoffRegister* rets,
                             ValueKind out_argument_kind, int stack_bytes,
                             ExternalReference ext_ref) {
  int total_size = RoundUp(stack_bytes, kSystemPointerSize);

  int size = total_size;
  constexpr int kStackPageSize = 4 * KB;

  // Reserve space in the stack. Large reservations are done one page at a
  // time, with a store touching each page, so that the allocation cannot
  // jump over the stack guard page.
  while (size > kStackPageSize) {
    SubS64(sp, sp, Operand(kStackPageSize), r0);
    StoreU64(r0, MemOperand(sp));
    size -= kStackPageSize;
  }

  SubS64(sp, sp, Operand(size), r0);

  int arg_bytes = 0;
  for (ValueKind param_kind : sig->parameters()) {
    switch (param_kind) {
      case kI32:
        StoreU32(args->gp(), MemOperand(sp, arg_bytes), r0);
        break;
      case kI64:
        StoreU64(args->gp(), MemOperand(sp, arg_bytes), r0);
        break;
      case kF32:
        StoreF32(args->fp(), MemOperand(sp, arg_bytes), r0);
        break;
      case kF64:
        StoreF64(args->fp(), MemOperand(sp, arg_bytes), r0);
        break;
      default:
        UNREACHABLE();
    }
    args++;
    arg_bytes += value_kind_size(param_kind);
  }

  DCHECK_LE(arg_bytes, stack_bytes);

  // Pass a pointer to the buffer with the arguments to the C function.
  mr(r3, sp);

  // Now call the C function.
  constexpr int kNumCCallArgs = 1;
  PrepareCallCFunction(kNumCCallArgs, r0);
  CallCFunction(ext_ref, kNumCCallArgs);

  // Move return value to the right register.
  const LiftoffRegister* result_reg = rets;
  if (sig->return_count() > 0) {
    DCHECK_EQ(1, sig->return_count());
    constexpr Register kReturnReg = r3;
    if (kReturnReg != rets->gp()) {
      Move(*rets, LiftoffRegister(kReturnReg), sig->GetReturn(0));
    }
    result_reg++;
  }

  // Load potential output value from the buffer on the stack.
  if (out_argument_kind != kVoid) {
    switch (out_argument_kind) {
      case kI32:
        LoadS32(result_reg->gp(), MemOperand(sp));
        break;
      case kI64:
      case kOptRef:
      case kRef:
      case kRtt:
        LoadU64(result_reg->gp(), MemOperand(sp));
        break;
      case kF32:
        LoadF32(result_reg->fp(), MemOperand(sp));
        break;
      case kF64:
        LoadF64(result_reg->fp(), MemOperand(sp));
        break;
      default:
        UNREACHABLE();
    }
  }
  AddS64(sp, sp, Operand(total_size), r0);
}

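// Calls and tail calls into other wasm code are emitted with WASM_CALL
// relocation, which lets the wasm engine patch the call target when code
// objects move.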
void LiftoffAssembler::CallNativeWasmCode(Address addr) {
  Call(addr, RelocInfo::WASM_CALL);
}

void LiftoffAssembler::TailCallNativeWasmCode(Address addr) {
  Jump(addr, RelocInfo::WASM_CALL);
}

void LiftoffAssembler::CallIndirect(const ValueKindSig* sig,
                                    compiler::CallDescriptor* call_descriptor,
                                    Register target) {
  DCHECK(target != no_reg);
  Call(target);
}

void LiftoffAssembler::TailCallIndirect(Register target) {
  DCHECK(target != no_reg);
  Jump(target);
}

void LiftoffAssembler::CallRuntimeStub(WasmCode::RuntimeStubId sid) {
  Call(static_cast<Address>(sid), RelocInfo::WASM_STUB_CALL);
}

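// Carves {size} bytes out of the stack and returns the start address in
// {addr}; the caller releases the memory again via DeallocateStackSlot with
// the same size.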
void LiftoffAssembler::AllocateStackSlot(Register addr, uint32_t size) {
  SubS64(sp, sp, Operand(size), r0);
  mr(addr, sp);
}

void LiftoffAssembler::DeallocateStackSlot(uint32_t size) {
  AddS64(sp, sp, Operand(size));
}

void LiftoffAssembler::MaybeOSR() {}

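// NaN detection (used by the engine e.g. to flag nondeterministic results)
// has not been ported to PPC yet; reaching these emitters aborts.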
void LiftoffAssembler::emit_set_if_nan(Register dst, DoubleRegister src,
                                       ValueKind kind) {
  UNIMPLEMENTED();
}

void LiftoffAssembler::emit_s128_set_if_nan(Register dst, LiftoffRegister src,
                                            Register tmp_gp,
                                            LiftoffRegister tmp_s128,
                                            ValueKind lane_kind) {
  UNIMPLEMENTED();
}

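// Builds the stack-parameter area for a call: slots are visited in push
// order, padding is allocated to skip to each slot's position, and the value
// is then pushed from its current location (stack, register, or constant).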
void LiftoffStackSlots::Construct(int param_slots) {
  DCHECK_LT(0, slots_.size());
  SortInPushOrder();
  int last_stack_slot = param_slots;
  for (auto& slot : slots_) {
    const int stack_slot = slot.dst_slot_;
    int stack_decrement = (last_stack_slot - stack_slot) * kSystemPointerSize;
    DCHECK_LT(0, stack_decrement);
    last_stack_slot = stack_slot;
    const LiftoffAssembler::VarState& src = slot.src_;
    switch (src.loc()) {
      case LiftoffAssembler::VarState::kStack: {
        switch (src.kind()) {
          case kI32:
          case kRef:
          case kOptRef:
          case kRtt:
          case kI64: {
            asm_->AllocateStackSpace(stack_decrement - kSystemPointerSize);
            UseScratchRegisterScope temps(asm_);
            Register scratch = temps.Acquire();
            asm_->LoadU64(scratch, liftoff::GetStackSlot(slot.src_offset_), r0);
            asm_->Push(scratch);
            break;
          }
          case kF32: {
            asm_->AllocateStackSpace(stack_decrement - kSystemPointerSize);
            asm_->LoadF32(kScratchDoubleReg,
                          liftoff::GetStackSlot(slot.src_offset_), r0);
            asm_->AddS64(sp, sp, Operand(-kSystemPointerSize));
            asm_->StoreF32(kScratchDoubleReg, MemOperand(sp), r0);
            break;
          }
          case kF64: {
            asm_->AllocateStackSpace(stack_decrement - kDoubleSize);
            asm_->LoadF64(kScratchDoubleReg,
                          liftoff::GetStackSlot(slot.src_offset_), r0);
            asm_->AddS64(sp, sp, Operand(-kSystemPointerSize), r0);
            asm_->StoreF64(kScratchDoubleReg, MemOperand(sp), r0);
            break;
          }
          case kS128: {
            asm_->bailout(kSimd, "LiftoffStackSlots::Construct");
            break;
          }
          default:
            UNREACHABLE();
        }
        break;
      }
      case LiftoffAssembler::VarState::kRegister: {
        int pushed_bytes = SlotSizeInBytes(slot);
        asm_->AllocateStackSpace(stack_decrement - pushed_bytes);
        switch (src.kind()) {
          case kI64:
          case kI32:
          case kRef:
          case kOptRef:
          case kRtt:
            asm_->push(src.reg().gp());
            break;
          case kF32:
            asm_->AddS64(sp, sp, Operand(-kSystemPointerSize), r0);
            asm_->StoreF32(src.reg().fp(), MemOperand(sp), r0);
            break;
          case kF64:
            asm_->AddS64(sp, sp, Operand(-kSystemPointerSize), r0);
            asm_->StoreF64(src.reg().fp(), MemOperand(sp), r0);
            break;
          case kS128: {
            asm_->bailout(kSimd, "LiftoffStackSlots::Construct");
            break;
          }
          default:
            UNREACHABLE();
        }
        break;
      }
      case LiftoffAssembler::VarState::kIntConst: {
        asm_->AllocateStackSpace(stack_decrement - kSystemPointerSize);
        DCHECK(src.kind() == kI32 || src.kind() == kI64);
        UseScratchRegisterScope temps(asm_);
        Register scratch = temps.Acquire();

        switch (src.kind()) {
          case kI32:
            asm_->mov(scratch, Operand(src.i32_const()));
            break;
          case kI64:
            asm_->mov(scratch, Operand(int64_t{slot.src_.i32_const()}));
            break;
          default:
            UNREACHABLE();
        }
        asm_->push(scratch);
        break;
      }
    }
  }
}

}  // namespace wasm
}  // namespace internal
}  // namespace v8

#endif  // V8_WASM_BASELINE_PPC_LIFTOFF_ASSEMBLER_PPC_H_