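// WebAssembly opcode and signature tables (X-macro lists in the style of
// V8's src/wasm/wasm-opcodes.h). Each entry has the form
// V(Name, opcode, signature): Name is the mnemonic, opcode is the binary
// encoding, and signature names an entry in FOREACH_SIGNATURE below
// ("_" means the opcode has no fixed signature, e.g. it depends on an
// immediate or on control flow).
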
#define FOREACH_CONTROL_OPCODE(V) \
  V(Unreachable, 0x00, _) \
  V(Nop, 0x01, _) \
  V(Block, 0x02, _) \
  V(Loop, 0x03, _) \
  V(If, 0x04, _) \
  V(Else, 0x05, _) \
  V(Try, 0x06, _ /* eh_prototype */) \
  V(Catch, 0x07, _ /* eh_prototype */) \
  V(Throw, 0x08, _ /* eh_prototype */) \
  V(Rethrow, 0x09, _ /* eh_prototype */) \
  V(BrOnExn, 0x0a, _ /* eh_prototype */) \
  V(End, 0x0b, _) \
  V(Br, 0x0c, _) \
  V(BrIf, 0x0d, _) \
  V(BrTable, 0x0e, _) \
  V(Return, 0x0f, _) \
  V(CallRef, 0x14, _ /* typed_funcref prototype */) \
  V(ReturnCallRef, 0x15, _ /* typed_funcref prototype */) \
  V(Let, 0x17, _ /* typed_funcref prototype */) \
  V(BrOnNull, 0xd4, _ /* gc prototype */)

#define FOREACH_MISC_OPCODE(V) \
  V(CallFunction, 0x10, _) \
  V(CallIndirect, 0x11, _) \
  V(ReturnCall, 0x12, _) \
  V(ReturnCallIndirect, 0x13, _) \
  V(Drop, 0x1a, _) \
  V(Select, 0x1b, _) \
  V(SelectWithType, 0x1c, _) \
  V(LocalGet, 0x20, _) \
  V(LocalSet, 0x21, _) \
  V(LocalTee, 0x22, _) \
  V(GlobalGet, 0x23, _) \
  V(GlobalSet, 0x24, _) \
  V(TableGet, 0x25, _) \
  V(TableSet, 0x26, _) \
  V(I32Const, 0x41, _) \
  V(I64Const, 0x42, _) \
  V(F32Const, 0x43, _) \
  V(F64Const, 0x44, _) \
  V(RefNull, 0xd0, _) \
  V(RefIsNull, 0xd1, _) \
  V(RefFunc, 0xd2, _) \
  V(RefAsNonNull, 0xd3, _ /* typed_funcref prototype */)

#define FOREACH_LOAD_MEM_OPCODE(V) \
  V(I32LoadMem, 0x28, i_i) \
  V(I64LoadMem, 0x29, l_i) \
  V(F32LoadMem, 0x2a, f_i) \
  V(F64LoadMem, 0x2b, d_i) \
  V(I32LoadMem8S, 0x2c, i_i) \
  V(I32LoadMem8U, 0x2d, i_i) \
  V(I32LoadMem16S, 0x2e, i_i) \
  V(I32LoadMem16U, 0x2f, i_i) \
  V(I64LoadMem8S, 0x30, l_i) \
  V(I64LoadMem8U, 0x31, l_i) \
  V(I64LoadMem16S, 0x32, l_i) \
  V(I64LoadMem16U, 0x33, l_i) \
  V(I64LoadMem32S, 0x34, l_i) \
  V(I64LoadMem32U, 0x35, l_i)

#define FOREACH_STORE_MEM_OPCODE(V) \
  V(I32StoreMem, 0x36, v_ii) \
  V(I64StoreMem, 0x37, v_il) \
  V(F32StoreMem, 0x38, v_if) \
  V(F64StoreMem, 0x39, v_id) \
  V(I32StoreMem8, 0x3a, v_ii) \
  V(I32StoreMem16, 0x3b, v_ii) \
  V(I64StoreMem8, 0x3c, v_il) \
  V(I64StoreMem16, 0x3d, v_il) \
  V(I64StoreMem32, 0x3e, v_il)

#define FOREACH_MISC_MEM_OPCODE(V) \
  V(MemorySize, 0x3f, i_v) \
  V(MemoryGrow, 0x40, i_i)

#define FOREACH_SIMPLE_OPCODE(V) \
  V(I32Eqz, 0x45, i_i) \
  V(I32Eq, 0x46, i_ii) \
  V(I32Ne, 0x47, i_ii) \
  V(I32LtS, 0x48, i_ii) \
  V(I32LtU, 0x49, i_ii) \
  V(I32GtS, 0x4a, i_ii) \
  V(I32GtU, 0x4b, i_ii) \
  V(I32LeS, 0x4c, i_ii) \
  V(I32LeU, 0x4d, i_ii) \
  V(I32GeS, 0x4e, i_ii) \
  V(I32GeU, 0x4f, i_ii) \
  V(I64Eqz, 0x50, i_l) \
  V(I64Eq, 0x51, i_ll) \
  V(I64Ne, 0x52, i_ll) \
  V(I64LtS, 0x53, i_ll) \
  V(I64LtU, 0x54, i_ll) \
  V(I64GtS, 0x55, i_ll) \
  V(I64GtU, 0x56, i_ll) \
  V(I64LeS, 0x57, i_ll) \
  V(I64LeU, 0x58, i_ll) \
  V(I64GeS, 0x59, i_ll) \
  V(I64GeU, 0x5a, i_ll) \
  V(F32Eq, 0x5b, i_ff) \
  V(F32Ne, 0x5c, i_ff) \
  V(F32Lt, 0x5d, i_ff) \
  V(F32Gt, 0x5e, i_ff) \
  V(F32Le, 0x5f, i_ff) \
  V(F32Ge, 0x60, i_ff) \
  V(F64Eq, 0x61, i_dd) \
  V(F64Ne, 0x62, i_dd) \
  V(F64Lt, 0x63, i_dd) \
  V(F64Gt, 0x64, i_dd) \
  V(F64Le, 0x65, i_dd) \
  V(F64Ge, 0x66, i_dd) \
  V(I32Clz, 0x67, i_i) \
  V(I32Ctz, 0x68, i_i) \
  V(I32Popcnt, 0x69, i_i) \
  V(I32Add, 0x6a, i_ii) \
  V(I32Sub, 0x6b, i_ii) \
  V(I32Mul, 0x6c, i_ii) \
  V(I32DivS, 0x6d, i_ii) \
  V(I32DivU, 0x6e, i_ii) \
  V(I32RemS, 0x6f, i_ii) \
  V(I32RemU, 0x70, i_ii) \
  V(I32And, 0x71, i_ii) \
  V(I32Ior, 0x72, i_ii) \
  V(I32Xor, 0x73, i_ii) \
  V(I32Shl, 0x74, i_ii) \
  V(I32ShrS, 0x75, i_ii) \
  V(I32ShrU, 0x76, i_ii) \
  V(I32Rol, 0x77, i_ii) \
  V(I32Ror, 0x78, i_ii) \
  V(I64Clz, 0x79, l_l) \
  V(I64Ctz, 0x7a, l_l) \
  V(I64Popcnt, 0x7b, l_l) \
  V(I64Add, 0x7c, l_ll) \
  V(I64Sub, 0x7d, l_ll) \
  V(I64Mul, 0x7e, l_ll) \
  V(I64DivS, 0x7f, l_ll) \
  V(I64DivU, 0x80, l_ll) \
  V(I64RemS, 0x81, l_ll) \
  V(I64RemU, 0x82, l_ll) \
  V(I64And, 0x83, l_ll) \
  V(I64Ior, 0x84, l_ll) \
  V(I64Xor, 0x85, l_ll) \
  V(I64Shl, 0x86, l_ll) \
  V(I64ShrS, 0x87, l_ll) \
  V(I64ShrU, 0x88, l_ll) \
  V(I64Rol, 0x89, l_ll) \
  V(I64Ror, 0x8a, l_ll) \
  V(F32Abs, 0x8b, f_f) \
  V(F32Neg, 0x8c, f_f) \
  V(F32Ceil, 0x8d, f_f) \
  V(F32Floor, 0x8e, f_f) \
  V(F32Trunc, 0x8f, f_f) \
  V(F32NearestInt, 0x90, f_f) \
  V(F32Sqrt, 0x91, f_f) \
  V(F32Add, 0x92, f_ff) \
  V(F32Sub, 0x93, f_ff) \
  V(F32Mul, 0x94, f_ff) \
  V(F32Div, 0x95, f_ff) \
  V(F32Min, 0x96, f_ff) \
  V(F32Max, 0x97, f_ff) \
  V(F32CopySign, 0x98, f_ff) \
  V(F64Abs, 0x99, d_d) \
  V(F64Neg, 0x9a, d_d) \
  V(F64Ceil, 0x9b, d_d) \
  V(F64Floor, 0x9c, d_d) \
  V(F64Trunc, 0x9d, d_d) \
  V(F64NearestInt, 0x9e, d_d) \
  V(F64Sqrt, 0x9f, d_d) \
  V(F64Add, 0xa0, d_dd) \
  V(F64Sub, 0xa1, d_dd) \
  V(F64Mul, 0xa2, d_dd) \
  V(F64Div, 0xa3, d_dd) \
  V(F64Min, 0xa4, d_dd) \
  V(F64Max, 0xa5, d_dd) \
  V(F64CopySign, 0xa6, d_dd) \
  V(I32ConvertI64, 0xa7, i_l) \
  V(I32SConvertF32, 0xa8, i_f) \
  V(I32UConvertF32, 0xa9, i_f) \
  V(I32SConvertF64, 0xaa, i_d) \
  V(I32UConvertF64, 0xab, i_d) \
  V(I64SConvertI32, 0xac, l_i) \
  V(I64UConvertI32, 0xad, l_i) \
  V(I64SConvertF32, 0xae, l_f) \
  V(I64UConvertF32, 0xaf, l_f) \
  V(I64SConvertF64, 0xb0, l_d) \
  V(I64UConvertF64, 0xb1, l_d) \
  V(F32SConvertI32, 0xb2, f_i) \
  V(F32UConvertI32, 0xb3, f_i) \
  V(F32SConvertI64, 0xb4, f_l) \
  V(F32UConvertI64, 0xb5, f_l) \
  V(F32ConvertF64, 0xb6, f_d) \
  V(F64SConvertI32, 0xb7, d_i) \
  V(F64UConvertI32, 0xb8, d_i) \
  V(F64SConvertI64, 0xb9, d_l) \
  V(F64UConvertI64, 0xba, d_l) \
  V(F64ConvertF32, 0xbb, d_f) \
  V(I32ReinterpretF32, 0xbc, i_f) \
  V(I64ReinterpretF64, 0xbd, l_d) \
  V(F32ReinterpretI32, 0xbe, f_i) \
  V(F64ReinterpretI64, 0xbf, d_l) \
  V(I32SExtendI8, 0xc0, i_i) \
  V(I32SExtendI16, 0xc1, i_i) \
  V(I64SExtendI8, 0xc2, l_l) \
  V(I64SExtendI16, 0xc3, l_l) \
  V(I64SExtendI32, 0xc4, l_l)

#define FOREACH_SIMPLE_PROTOTYPE_OPCODE(V) V(RefEq, 0xd5, i_qq)

#define FOREACH_ASMJS_COMPAT_OPCODE(V) \
  V(F64Acos, 0xc5, d_d) \
  V(F64Asin, 0xc6, d_d) \
  V(F64Atan, 0xc7, d_d) \
  V(F64Cos, 0xc8, d_d) \
  V(F64Sin, 0xc9, d_d) \
  V(F64Tan, 0xca, d_d) \
  V(F64Exp, 0xcb, d_d) \
  V(F64Log, 0xcc, d_d) \
  V(F64Atan2, 0xcd, d_dd) \
  V(F64Pow, 0xce, d_dd) \
  V(F64Mod, 0xcf, d_dd) \
  V(I32AsmjsDivS, 0xe7, i_ii) \
  V(I32AsmjsDivU, 0xe8, i_ii) \
  V(I32AsmjsRemS, 0xe9, i_ii) \
  V(I32AsmjsRemU, 0xd6, i_ii) \
  V(I32AsmjsLoadMem8S, 0xd7, i_i) \
  V(I32AsmjsLoadMem8U, 0xd8, i_i) \
  V(I32AsmjsLoadMem16S, 0xd9, i_i) \
  V(I32AsmjsLoadMem16U, 0xda, i_i) \
  V(I32AsmjsLoadMem, 0xdb, i_i) \
  V(F32AsmjsLoadMem, 0xdc, f_i) \
  V(F64AsmjsLoadMem, 0xdd, d_i) \
  V(I32AsmjsStoreMem8, 0xde, i_ii) \
  V(I32AsmjsStoreMem16, 0xdf, i_ii) \
  V(I32AsmjsStoreMem, 0xe0, i_ii) \
  V(F32AsmjsStoreMem, 0xe1, f_if) \
  V(F64AsmjsStoreMem, 0xe2, d_id) \
  V(I32AsmjsSConvertF32, 0xe3, i_f) \
  V(I32AsmjsUConvertF32, 0xe4, i_f) \
  V(I32AsmjsSConvertF64, 0xe5, i_d) \
  V(I32AsmjsUConvertF64, 0xe6, i_d)

#define FOREACH_SIMD_MEM_OPCODE(V) \
  V(S128LoadMem, 0xfd00, s_i) \
  V(S128Load8x8S, 0xfd01, s_i) \
  V(S128Load8x8U, 0xfd02, s_i) \
  V(S128Load16x4S, 0xfd03, s_i) \
  V(S128Load16x4U, 0xfd04, s_i) \
  V(S128Load32x2S, 0xfd05, s_i) \
  V(S128Load32x2U, 0xfd06, s_i) \
  V(S128Load8Splat, 0xfd07, s_i) \
  V(S128Load16Splat, 0xfd08, s_i) \
  V(S128Load32Splat, 0xfd09, s_i) \
  V(S128Load64Splat, 0xfd0a, s_i) \
  V(S128StoreMem, 0xfd0b, v_is) \
  V(S128Load32Zero, 0xfdfc, s_i) \
  V(S128Load64Zero, 0xfdfd, s_i)

#define FOREACH_SIMD_CONST_OPCODE(V) V(S128Const, 0xfd0c, _)

#define FOREACH_SIMD_MASK_OPERAND_OPCODE(V) V(I8x16Shuffle, 0xfd0d, s_ss)

#define FOREACH_SIMD_MVP_0_OPERAND_OPCODE(V) \
  V(I8x16Swizzle, 0xfd0e, s_ss) \
  V(I8x16Splat, 0xfd0f, s_i) \
  V(I16x8Splat, 0xfd10, s_i) \
  V(I32x4Splat, 0xfd11, s_i) \
  V(I64x2Splat, 0xfd12, s_l) \
  V(F32x4Splat, 0xfd13, s_f) \
  V(F64x2Splat, 0xfd14, s_d) \
  V(I8x16Eq, 0xfd23, s_ss) \
  V(I8x16Ne, 0xfd24, s_ss) \
  V(I8x16LtS, 0xfd25, s_ss) \
  V(I8x16LtU, 0xfd26, s_ss) \
  V(I8x16GtS, 0xfd27, s_ss) \
  V(I8x16GtU, 0xfd28, s_ss) \
  V(I8x16LeS, 0xfd29, s_ss) \
  V(I8x16LeU, 0xfd2a, s_ss) \
  V(I8x16GeS, 0xfd2b, s_ss) \
  V(I8x16GeU, 0xfd2c, s_ss) \
  V(I16x8Eq, 0xfd2d, s_ss) \
  V(I16x8Ne, 0xfd2e, s_ss) \
  V(I16x8LtS, 0xfd2f, s_ss) \
  V(I16x8LtU, 0xfd30, s_ss) \
  V(I16x8GtS, 0xfd31, s_ss) \
  V(I16x8GtU, 0xfd32, s_ss) \
  V(I16x8LeS, 0xfd33, s_ss) \
  V(I16x8LeU, 0xfd34, s_ss) \
  V(I16x8GeS, 0xfd35, s_ss) \
  V(I16x8GeU, 0xfd36, s_ss) \
  V(I32x4Eq, 0xfd37, s_ss) \
  V(I32x4Ne, 0xfd38, s_ss) \
  V(I32x4LtS, 0xfd39, s_ss) \
  V(I32x4LtU, 0xfd3a, s_ss) \
  V(I32x4GtS, 0xfd3b, s_ss) \
  V(I32x4GtU, 0xfd3c, s_ss) \
  V(I32x4LeS, 0xfd3d, s_ss) \
  V(I32x4LeU, 0xfd3e, s_ss) \
  V(I32x4GeS, 0xfd3f, s_ss) \
  V(I32x4GeU, 0xfd40, s_ss) \
  V(F32x4Eq, 0xfd41, s_ss) \
  V(F32x4Ne, 0xfd42, s_ss) \
  V(F32x4Lt, 0xfd43, s_ss) \
  V(F32x4Gt, 0xfd44, s_ss) \
  V(F32x4Le, 0xfd45, s_ss) \
  V(F32x4Ge, 0xfd46, s_ss) \
  V(F64x2Eq, 0xfd47, s_ss) \
  V(F64x2Ne, 0xfd48, s_ss) \
  V(F64x2Lt, 0xfd49, s_ss) \
  V(F64x2Gt, 0xfd4a, s_ss) \
  V(F64x2Le, 0xfd4b, s_ss) \
  V(F64x2Ge, 0xfd4c, s_ss) \
  V(S128Not, 0xfd4d, s_s) \
  V(S128And, 0xfd4e, s_ss) \
  V(S128AndNot, 0xfd4f, s_ss) \
  V(S128Or, 0xfd50, s_ss) \
  V(S128Xor, 0xfd51, s_ss) \
  V(S128Select, 0xfd52, s_sss) \
  V(I8x16Abs, 0xfd60, s_s) \
  V(I8x16Neg, 0xfd61, s_s) \
  V(V8x16AnyTrue, 0xfd62, i_s) \
  V(V8x16AllTrue, 0xfd63, i_s) \
  V(I8x16BitMask, 0xfd64, i_s) \
  V(I8x16SConvertI16x8, 0xfd65, s_ss) \
  V(I8x16UConvertI16x8, 0xfd66, s_ss) \
  V(I8x16Shl, 0xfd6b, s_si) \
  V(I8x16ShrS, 0xfd6c, s_si) \
  V(I8x16ShrU, 0xfd6d, s_si) \
  V(I8x16Add, 0xfd6e, s_ss) \
  V(I8x16AddSatS, 0xfd6f, s_ss) \
  V(I8x16AddSatU, 0xfd70, s_ss) \
  V(I8x16Sub, 0xfd71, s_ss) \
  V(I8x16SubSatS, 0xfd72, s_ss) \
  V(I8x16SubSatU, 0xfd73, s_ss) \
  V(I8x16MinS, 0xfd76, s_ss) \
  V(I8x16MinU, 0xfd77, s_ss) \
  V(I8x16MaxS, 0xfd78, s_ss) \
  V(I8x16MaxU, 0xfd79, s_ss) \
  V(I8x16RoundingAverageU, 0xfd7b, s_ss) \
  V(I16x8Abs, 0xfd80, s_s) \
  V(I16x8Neg, 0xfd81, s_s) \
  V(V16x8AnyTrue, 0xfd82, i_s) \
  V(V16x8AllTrue, 0xfd83, i_s) \
  V(I16x8BitMask, 0xfd84, i_s) \
  V(I16x8SConvertI32x4, 0xfd85, s_ss) \
  V(I16x8UConvertI32x4, 0xfd86, s_ss) \
  V(I16x8SConvertI8x16Low, 0xfd87, s_s) \
  V(I16x8SConvertI8x16High, 0xfd88, s_s) \
  V(I16x8UConvertI8x16Low, 0xfd89, s_s) \
  V(I16x8UConvertI8x16High, 0xfd8a, s_s) \
  V(I16x8Shl, 0xfd8b, s_si) \
  V(I16x8ShrS, 0xfd8c, s_si) \
  V(I16x8ShrU, 0xfd8d, s_si) \
  V(I16x8Add, 0xfd8e, s_ss) \
  V(I16x8AddSatS, 0xfd8f, s_ss) \
  V(I16x8AddSatU, 0xfd90, s_ss) \
  V(I16x8Sub, 0xfd91, s_ss) \
  V(I16x8SubSatS, 0xfd92, s_ss) \
  V(I16x8SubSatU, 0xfd93, s_ss) \
  V(I16x8Mul, 0xfd95, s_ss) \
  V(I16x8MinS, 0xfd96, s_ss) \
  V(I16x8MinU, 0xfd97, s_ss) \
  V(I16x8MaxS, 0xfd98, s_ss) \
  V(I16x8MaxU, 0xfd99, s_ss) \
  V(I16x8RoundingAverageU, 0xfd9b, s_ss) \
  V(I32x4Abs, 0xfda0, s_s) \
  V(I32x4Neg, 0xfda1, s_s) \
  V(V32x4AnyTrue, 0xfda2, i_s) \
  V(V32x4AllTrue, 0xfda3, i_s) \
  V(I32x4BitMask, 0xfda4, i_s) \
  V(I32x4SConvertI16x8Low, 0xfda7, s_s) \
  V(I32x4SConvertI16x8High, 0xfda8, s_s) \
  V(I32x4UConvertI16x8Low, 0xfda9, s_s) \
  V(I32x4UConvertI16x8High, 0xfdaa, s_s) \
  V(I32x4Shl, 0xfdab, s_si) \
  V(I32x4ShrS, 0xfdac, s_si) \
  V(I32x4ShrU, 0xfdad, s_si) \
  V(I32x4Add, 0xfdae, s_ss) \
  V(I32x4Sub, 0xfdb1, s_ss) \
  V(I32x4Mul, 0xfdb5, s_ss) \
  V(I32x4MinS, 0xfdb6, s_ss) \
  V(I32x4MinU, 0xfdb7, s_ss) \
  V(I32x4MaxS, 0xfdb8, s_ss) \
  V(I32x4MaxU, 0xfdb9, s_ss) \
  V(I32x4DotI16x8S, 0xfdba, s_ss) \
  V(I64x2Neg, 0xfdc1, s_s) \
  V(I64x2Shl, 0xfdcb, s_si) \
  V(I64x2ShrS, 0xfdcc, s_si) \
  V(I64x2ShrU, 0xfdcd, s_si) \
  V(I64x2Add, 0xfdce, s_ss) \
  V(I64x2Sub, 0xfdd1, s_ss) \
  V(I64x2Mul, 0xfdd5, s_ss) \
  V(F32x4Abs, 0xfde0, s_s) \
  V(F32x4Neg, 0xfde1, s_s) \
  V(F32x4Sqrt, 0xfde3, s_s) \
  V(F32x4Add, 0xfde4, s_ss) \
  V(F32x4Sub, 0xfde5, s_ss) \
  V(F32x4Mul, 0xfde6, s_ss) \
  V(F32x4Div, 0xfde7, s_ss) \
  V(F32x4Min, 0xfde8, s_ss) \
  V(F32x4Max, 0xfde9, s_ss) \
  V(F32x4Pmin, 0xfdea, s_ss) \
  V(F32x4Pmax, 0xfdeb, s_ss) \
  V(F64x2Abs, 0xfdec, s_s) \
  V(F64x2Neg, 0xfded, s_s) \
  V(F64x2Sqrt, 0xfdef, s_s) \
  V(F64x2Add, 0xfdf0, s_ss) \
  V(F64x2Sub, 0xfdf1, s_ss) \
  V(F64x2Mul, 0xfdf2, s_ss) \
  V(F64x2Div, 0xfdf3, s_ss) \
  V(F64x2Min, 0xfdf4, s_ss) \
  V(F64x2Max, 0xfdf5, s_ss) \
  V(F64x2Pmin, 0xfdf6, s_ss) \
  V(F64x2Pmax, 0xfdf7, s_ss) \
  V(I32x4SConvertF32x4, 0xfdf8, s_s) \
  V(I32x4UConvertF32x4, 0xfdf9, s_s) \
  V(F32x4SConvertI32x4, 0xfdfa, s_s) \
  V(F32x4UConvertI32x4, 0xfdfb, s_s) \
  V(F32x4Ceil, 0xfdd8, s_s) \
  V(F32x4Floor, 0xfdd9, s_s) \
  V(F32x4Trunc, 0xfdda, s_s) \
  V(F32x4NearestInt, 0xfddb, s_s) \
  V(F64x2Ceil, 0xfddc, s_s) \
  V(F64x2Floor, 0xfddd, s_s) \
  V(F64x2Trunc, 0xfdde, s_s) \
  V(F64x2NearestInt, 0xfddf, s_s)

#define FOREACH_SIMD_POST_MVP_MEM_OPCODE(V) \
  V(S128Load8Lane, 0xfd58, s_is) \
  V(S128Load16Lane, 0xfd59, s_is) \
  V(S128Load32Lane, 0xfd5a, s_is) \
  V(S128Load64Lane, 0xfd5b, s_is) \
  V(S128Store8Lane, 0xfd5c, v_is) \
  V(S128Store16Lane, 0xfd5d, v_is) \
  V(S128Store32Lane, 0xfd5e, v_is) \
  V(S128Store64Lane, 0xfd5f, v_is)

#define FOREACH_SIMD_POST_MVP_OPCODE(V) \
  V(I8x16Mul, 0xfd75, s_ss) \
  V(I8x16Popcnt, 0xfd7c, s_s) \
  V(I8x16SignSelect, 0xfd7d, s_sss) \
  V(I16x8SignSelect, 0xfd7e, s_sss) \
  V(I32x4SignSelect, 0xfd7f, s_sss) \
  V(I64x2SignSelect, 0xfd94, s_sss) \
  V(I16x8Q15MulRSatS, 0xfd9c, s_ss) \
  V(I16x8ExtMulLowI8x16S, 0xfd9a, s_ss) \
  V(I16x8ExtMulHighI8x16S, 0xfd9d, s_ss) \
  V(I16x8ExtMulLowI8x16U, 0xfd9e, s_ss) \
  V(I16x8ExtMulHighI8x16U, 0xfd9f, s_ss) \
  V(I32x4ExtMulLowI16x8S, 0xfdbb, s_ss) \
  V(I32x4ExtMulHighI16x8S, 0xfdbd, s_ss) \
  V(I32x4ExtMulLowI16x8U, 0xfdbe, s_ss) \
  V(I32x4ExtMulHighI16x8U, 0xfdbf, s_ss) \
  V(I64x2ExtMulLowI32x4S, 0xfdd2, s_ss) \
  V(I64x2ExtMulHighI32x4S, 0xfdd3, s_ss) \
  V(I64x2ExtMulLowI32x4U, 0xfdd6, s_ss) \
  V(I64x2ExtMulHighI32x4U, 0xfdd7, s_ss) \
  V(I32x4ExtAddPairwiseI16x8S, 0xfda5, s_s) \
  V(I32x4ExtAddPairwiseI16x8U, 0xfda6, s_s) \
  V(I16x8ExtAddPairwiseI8x16S, 0xfdc2, s_s) \
  V(I16x8ExtAddPairwiseI8x16U, 0xfdc3, s_s) \
  V(I64x2Eq, 0xfdc0, s_ss) \
  V(F32x4Qfma, 0xfdb4, s_sss) \
  V(I64x2BitMask, 0xfdc4, i_s) \
  V(I64x2SConvertI32x4Low, 0xfdc7, s_s) \
  V(I64x2SConvertI32x4High, 0xfdc8, s_s) \
  V(I64x2UConvertI32x4Low, 0xfdc9, s_s) \
  V(I64x2UConvertI32x4High, 0xfdca, s_s) \
  V(F32x4Qfms, 0xfdd4, s_sss) \
  V(F64x2Qfma, 0xfdfe, s_sss) \
  V(F64x2Qfms, 0xfdff, s_sss) \
  V(I16x8AddHoriz, 0xfdaf, s_ss) \
  V(I32x4AddHoriz, 0xfdb0, s_ss) \
  V(F32x4AddHoriz, 0xfdb2, s_ss) \
  V(F32x4RecipApprox, 0xfdb3, s_s) \
  V(F32x4RecipSqrtApprox, 0xfdbc, s_s)

#define FOREACH_SIMD_1_OPERAND_1_PARAM_OPCODE(V) \
  V(I8x16ExtractLaneS, 0xfd15, _) \
  V(I8x16ExtractLaneU, 0xfd16, _) \
  V(I16x8ExtractLaneS, 0xfd18, _) \
  V(I16x8ExtractLaneU, 0xfd19, _) \
  V(I32x4ExtractLane, 0xfd1b, _) \
  V(I64x2ExtractLane, 0xfd1d, _) \
  V(F32x4ExtractLane, 0xfd1f, _) \
  V(F64x2ExtractLane, 0xfd21, _)

#define FOREACH_SIMD_1_OPERAND_2_PARAM_OPCODE(V) \
  V(I8x16ReplaceLane, 0xfd17, _) \
  V(I16x8ReplaceLane, 0xfd1a, _) \
  V(I32x4ReplaceLane, 0xfd1c, _) \
  V(I64x2ReplaceLane, 0xfd1e, _) \
  V(F32x4ReplaceLane, 0xfd20, _) \
  V(F64x2ReplaceLane, 0xfd22, _)

#define FOREACH_SIMD_0_OPERAND_OPCODE(V) \
  FOREACH_SIMD_MVP_0_OPERAND_OPCODE(V) \
  FOREACH_SIMD_POST_MVP_OPCODE(V)

#define FOREACH_SIMD_1_OPERAND_OPCODE(V) \
  FOREACH_SIMD_1_OPERAND_1_PARAM_OPCODE(V) \
  FOREACH_SIMD_1_OPERAND_2_PARAM_OPCODE(V)

#define FOREACH_NUMERIC_OPCODE(V) \
  V(I32SConvertSatF32, 0xfc00, i_f) \
  V(I32UConvertSatF32, 0xfc01, i_f) \
  V(I32SConvertSatF64, 0xfc02, i_d) \
  V(I32UConvertSatF64, 0xfc03, i_d) \
  V(I64SConvertSatF32, 0xfc04, l_f) \
  V(I64UConvertSatF32, 0xfc05, l_f) \
  V(I64SConvertSatF64, 0xfc06, l_d) \
  V(I64UConvertSatF64, 0xfc07, l_d) \
  V(MemoryInit, 0xfc08, v_iii) \
  V(DataDrop, 0xfc09, v_v) \
  V(MemoryCopy, 0xfc0a, v_iii) \
  V(MemoryFill, 0xfc0b, v_iii) \
  V(TableInit, 0xfc0c, v_iii) \
  V(ElemDrop, 0xfc0d, v_v) \
  V(TableCopy, 0xfc0e, v_iii) \
  V(TableGrow, 0xfc0f, i_ci) \
  V(TableSize, 0xfc10, i_v) \
  V(TableFill, 0xfc11, v_iii)

#define FOREACH_ATOMIC_OPCODE(V) \
  V(AtomicNotify, 0xfe00, i_ii) \
  V(I32AtomicWait, 0xfe01, i_iil) \
  V(I64AtomicWait, 0xfe02, i_ill) \
  V(I32AtomicLoad, 0xfe10, i_i) \
  V(I64AtomicLoad, 0xfe11, l_i) \
  V(I32AtomicLoad8U, 0xfe12, i_i) \
  V(I32AtomicLoad16U, 0xfe13, i_i) \
  V(I64AtomicLoad8U, 0xfe14, l_i) \
  V(I64AtomicLoad16U, 0xfe15, l_i) \
  V(I64AtomicLoad32U, 0xfe16, l_i) \
  V(I32AtomicStore, 0xfe17, v_ii) \
  V(I64AtomicStore, 0xfe18, v_il) \
  V(I32AtomicStore8U, 0xfe19, v_ii) \
  V(I32AtomicStore16U, 0xfe1a, v_ii) \
  V(I64AtomicStore8U, 0xfe1b, v_il) \
  V(I64AtomicStore16U, 0xfe1c, v_il) \
  V(I64AtomicStore32U, 0xfe1d, v_il) \
  V(I32AtomicAdd, 0xfe1e, i_ii) \
  V(I64AtomicAdd, 0xfe1f, l_il) \
  V(I32AtomicAdd8U, 0xfe20, i_ii) \
  V(I32AtomicAdd16U, 0xfe21, i_ii) \
  V(I64AtomicAdd8U, 0xfe22, l_il) \
  V(I64AtomicAdd16U, 0xfe23, l_il) \
  V(I64AtomicAdd32U, 0xfe24, l_il) \
  V(I32AtomicSub, 0xfe25, i_ii) \
  V(I64AtomicSub, 0xfe26, l_il) \
  V(I32AtomicSub8U, 0xfe27, i_ii) \
  V(I32AtomicSub16U, 0xfe28, i_ii) \
  V(I64AtomicSub8U, 0xfe29, l_il) \
  V(I64AtomicSub16U, 0xfe2a, l_il) \
  V(I64AtomicSub32U, 0xfe2b, l_il) \
  V(I32AtomicAnd, 0xfe2c, i_ii) \
  V(I64AtomicAnd, 0xfe2d, l_il) \
  V(I32AtomicAnd8U, 0xfe2e, i_ii) \
  V(I32AtomicAnd16U, 0xfe2f, i_ii) \
  V(I64AtomicAnd8U, 0xfe30, l_il) \
  V(I64AtomicAnd16U, 0xfe31, l_il) \
  V(I64AtomicAnd32U, 0xfe32, l_il) \
  V(I32AtomicOr, 0xfe33, i_ii) \
  V(I64AtomicOr, 0xfe34, l_il) \
  V(I32AtomicOr8U, 0xfe35, i_ii) \
  V(I32AtomicOr16U, 0xfe36, i_ii) \
  V(I64AtomicOr8U, 0xfe37, l_il) \
  V(I64AtomicOr16U, 0xfe38, l_il) \
  V(I64AtomicOr32U, 0xfe39, l_il) \
  V(I32AtomicXor, 0xfe3a, i_ii) \
  V(I64AtomicXor, 0xfe3b, l_il) \
  V(I32AtomicXor8U, 0xfe3c, i_ii) \
  V(I32AtomicXor16U, 0xfe3d, i_ii) \
  V(I64AtomicXor8U, 0xfe3e, l_il) \
  V(I64AtomicXor16U, 0xfe3f, l_il) \
  V(I64AtomicXor32U, 0xfe40, l_il) \
  V(I32AtomicExchange, 0xfe41, i_ii) \
  V(I64AtomicExchange, 0xfe42, l_il) \
  V(I32AtomicExchange8U, 0xfe43, i_ii) \
  V(I32AtomicExchange16U, 0xfe44, i_ii) \
  V(I64AtomicExchange8U, 0xfe45, l_il) \
  V(I64AtomicExchange16U, 0xfe46, l_il) \
  V(I64AtomicExchange32U, 0xfe47, l_il) \
  V(I32AtomicCompareExchange, 0xfe48, i_iii) \
  V(I64AtomicCompareExchange, 0xfe49, l_ill) \
  V(I32AtomicCompareExchange8U, 0xfe4a, i_iii) \
  V(I32AtomicCompareExchange16U, 0xfe4b, i_iii) \
  V(I64AtomicCompareExchange8U, 0xfe4c, l_ill) \
  V(I64AtomicCompareExchange16U, 0xfe4d, l_ill) \
  V(I64AtomicCompareExchange32U, 0xfe4e, l_ill)

#define FOREACH_GC_OPCODE(V) \
  V(StructNewWithRtt, 0xfb01, _) \
  V(StructNewDefault, 0xfb02, _) \
  V(StructGet, 0xfb03, _) \
  V(StructGetS, 0xfb04, _) \
  V(StructGetU, 0xfb05, _) \
  V(StructSet, 0xfb06, _) \
  V(ArrayNewWithRtt, 0xfb11, _) \
  V(ArrayNewDefault, 0xfb12, _) \
  V(ArrayGet, 0xfb13, _) \
  V(ArrayGetS, 0xfb14, _) \
  V(ArrayGetU, 0xfb15, _) \
  V(ArraySet, 0xfb16, _) \
  V(ArrayLen, 0xfb17, _) \
  V(I31New, 0xfb20, _) \
  V(I31GetS, 0xfb21, _) \
  V(I31GetU, 0xfb22, _) \
  V(RttCanon, 0xfb30, _) \
  V(RttSub, 0xfb31, _) \
  V(RefTest, 0xfb40, _) \
  V(RefCast, 0xfb41, _) \
  V(BrOnCast, 0xfb42, _)

#define FOREACH_ATOMIC_0_OPERAND_OPCODE(V) \
  V(AtomicFence, 0xfe03, v_v)

#define FOREACH_OPCODE(V) \
  FOREACH_CONTROL_OPCODE(V) \
  FOREACH_MISC_OPCODE(V) \
  FOREACH_SIMPLE_OPCODE(V) \
  FOREACH_SIMPLE_PROTOTYPE_OPCODE(V) \
  FOREACH_STORE_MEM_OPCODE(V) \
  FOREACH_LOAD_MEM_OPCODE(V) \
  FOREACH_MISC_MEM_OPCODE(V) \
  FOREACH_ASMJS_COMPAT_OPCODE(V) \
  FOREACH_SIMD_0_OPERAND_OPCODE(V) \
  FOREACH_SIMD_1_OPERAND_OPCODE(V) \
  FOREACH_SIMD_MASK_OPERAND_OPCODE(V) \
  FOREACH_SIMD_MEM_OPCODE(V) \
  FOREACH_SIMD_POST_MVP_MEM_OPCODE(V) \
  FOREACH_SIMD_CONST_OPCODE(V) \
  FOREACH_ATOMIC_OPCODE(V) \
  FOREACH_ATOMIC_0_OPERAND_OPCODE(V) \
  FOREACH_NUMERIC_OPCODE(V) \
  FOREACH_GC_OPCODE(V)

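// Illustrative sketch (not part of the original header): an X-macro list is
// consumed by defining V to emit one declaration per entry. For example, a
// hypothetical enum of every opcode value could be generated like this:
enum WasmOpcodeSketch : uint32_t {
#define DECLARE_NAMED_OPCODE(name, opcode, sig) kExpr##name = opcode,
  FOREACH_OPCODE(DECLARE_NAMED_OPCODE)
#undef DECLARE_NAMED_OPCODE
};
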
#define FOREACH_SIGNATURE(V) \
  FOREACH_SIMD_SIGNATURE(V) \
  V(v_v, kWasmStmt) \
  V(i_ii, kWasmI32, kWasmI32, kWasmI32) \
  V(i_i, kWasmI32, kWasmI32) \
  V(i_v, kWasmI32) \
  V(i_ff, kWasmI32, kWasmF32, kWasmF32) \
  V(i_f, kWasmI32, kWasmF32) \
  V(i_dd, kWasmI32, kWasmF64, kWasmF64) \
  V(i_d, kWasmI32, kWasmF64) \
  V(i_l, kWasmI32, kWasmI64) \
  V(l_ll, kWasmI64, kWasmI64, kWasmI64) \
  V(i_ll, kWasmI32, kWasmI64, kWasmI64) \
  V(l_l, kWasmI64, kWasmI64) \
  V(l_i, kWasmI64, kWasmI32) \
  V(l_f, kWasmI64, kWasmF32) \
  V(l_d, kWasmI64, kWasmF64) \
  V(f_ff, kWasmF32, kWasmF32, kWasmF32) \
  V(f_f, kWasmF32, kWasmF32) \
  V(f_d, kWasmF32, kWasmF64) \
  V(f_i, kWasmF32, kWasmI32) \
  V(f_l, kWasmF32, kWasmI64) \
  V(d_dd, kWasmF64, kWasmF64, kWasmF64) \
  V(d_d, kWasmF64, kWasmF64) \
  V(d_f, kWasmF64, kWasmF32) \
  V(d_i, kWasmF64, kWasmI32) \
  V(d_l, kWasmF64, kWasmI64) \
  V(v_ii, kWasmStmt, kWasmI32, kWasmI32) \
  V(v_id, kWasmStmt, kWasmI32, kWasmF64) \
  V(d_id, kWasmF64, kWasmI32, kWasmF64) \
  V(v_if, kWasmStmt, kWasmI32, kWasmF32) \
  V(f_if, kWasmF32, kWasmI32, kWasmF32) \
  V(v_il, kWasmStmt, kWasmI32, kWasmI64) \
  V(l_il, kWasmI64, kWasmI32, kWasmI64) \
  V(v_iii, kWasmStmt, kWasmI32, kWasmI32, kWasmI32) \
  V(i_iii, kWasmI32, kWasmI32, kWasmI32, kWasmI32) \
  V(l_ill, kWasmI64, kWasmI32, kWasmI64, kWasmI64) \
  V(i_iil, kWasmI32, kWasmI32, kWasmI32, kWasmI64) \
  V(i_ill, kWasmI32, kWasmI32, kWasmI64, kWasmI64) \
  V(i_e, kWasmI32, kWasmExternRef) \
  V(i_ci, kWasmI32, kWasmFuncRef, kWasmI32) \
  V(i_qq, kWasmI32, kWasmEqRef, kWasmEqRef)

#define FOREACH_SIMD_SIGNATURE(V) \
  V(s_s, kWasmS128, kWasmS128) \
  V(s_f, kWasmS128, kWasmF32) \
  V(s_d, kWasmS128, kWasmF64) \
  V(s_ss, kWasmS128, kWasmS128, kWasmS128) \
  V(s_i, kWasmS128, kWasmI32) \
  V(s_l, kWasmS128, kWasmI64) \
  V(s_si, kWasmS128, kWasmS128, kWasmI32) \
  V(i_s, kWasmI32, kWasmS128) \
  V(v_is, kWasmStmt, kWasmI32, kWasmS128) \
  V(s_sss, kWasmS128, kWasmS128, kWasmS128, kWasmS128) \
  V(s_is, kWasmS128, kWasmI32, kWasmS128)

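// Sketch (assumption, not from the original header): each signature entry
// lists the return type first, then the parameter types; kWasmStmt in the
// return slot marks a void result, as in the v_* entries. This convention
// lets per-signature type arrays be generated mechanically, e.g.:
#define DECLARE_SIG_TYPES(name, ...) \
  static constexpr ValueType kTypes_##name[] = {__VA_ARGS__};
FOREACH_SIGNATURE(DECLARE_SIG_TYPES)
#undef DECLARE_SIG_TYPES
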
#define FOREACH_PREFIX(V) \
  V(Numeric, 0xfc) \
  V(Simd, 0xfd) \
  V(Atomic, 0xfe) \
  V(GC, 0xfb)

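// Note (sketch): table values above 0xff are prefixed opcodes. In the binary
// format the first byte is a prefix from FOREACH_PREFIX (e.g. 0xfd for SIMD)
// followed by an LEB128-encoded sub-opcode; the table values above fold both
// into one integer. A hypothetical helper to recombine them:
constexpr uint32_t PrefixedOpcode(uint8_t prefix, uint32_t sub_opcode) {
  return (static_cast<uint32_t>(prefix) << 8) | sub_opcode;
}
static_assert(PrefixedOpcode(0xfd, 0x0e) == 0xfd0e, "I8x16Swizzle encoding");
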
// Each constructor tags the immediate_ union with the matching kind_.
WasmInitExpr() : kind_(kNone) { immediate_.i32_const = 0; }
explicit WasmInitExpr(int32_t v) : kind_(kI32Const) {
  immediate_.i32_const = v;
}
explicit WasmInitExpr(int64_t v) : kind_(kI64Const) {
  immediate_.i64_const = v;
}
explicit WasmInitExpr(float v) : kind_(kF32Const) {
  immediate_.f32_const = v;
}
explicit WasmInitExpr(double v) : kind_(kF64Const) {
  immediate_.f64_const = v;
}
explicit WasmInitExpr(uint8_t v[kSimd128Size]) : kind_(kS128Const) {
  memcpy(immediate_.s128_const.data(), v, kSimd128Size);
}

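// Usage sketch (assumption): the overloads above pick the constant kind from
// the C++ argument type, so callers can build typed init expressions from
// plain literals:
//
//   WasmInitExpr none;                    // kind_ == kNone
//   WasmInitExpr i32_const(int32_t{42});  // kind_ == kI32Const
//   WasmInitExpr f64_const(1.5);          // double overload -> kF64Const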