1 // Copyright 2017 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4
5 #ifndef V8_WASM_FUNCTION_BODY_DECODER_IMPL_H_
6 #define V8_WASM_FUNCTION_BODY_DECODER_IMPL_H_
7
8 // Do only include this header for implementing new Interface of the
9 // WasmFullDecoder.
10
11 #include <inttypes.h>
12
13 #include "src/base/platform/elapsed-timer.h"
14 #include "src/base/small-vector.h"
15 #include "src/utils/bit-vector.h"
16 #include "src/wasm/decoder.h"
17 #include "src/wasm/function-body-decoder.h"
18 #include "src/wasm/value-type.h"
19 #include "src/wasm/wasm-features.h"
20 #include "src/wasm/wasm-limits.h"
21 #include "src/wasm/wasm-module.h"
22 #include "src/wasm/wasm-opcodes.h"
23 #include "src/wasm/wasm-subtyping.h"
24
25 namespace v8 {
26 namespace internal {
27 namespace wasm {
28
29 struct WasmGlobal;
30 struct WasmException;
31
// Prints the given printf-style message iff --trace-wasm-decoder is set.
#define TRACE(...)                                    \
  do {                                                \
    if (FLAG_trace_wasm_decoder) PrintF(__VA_ARGS__); \
  } while (false)

// printf format used when tracing a decoded instruction: pc offset + opcode.
#define TRACE_INST_FORMAT " @%-8d #%-20s|"
38
// Returns the evaluation of {condition} when the {validate} template flag is
// set; otherwise DCHECKs that {condition} holds and always yields true (the
// lambda keeps the expression usable in boolean context without evaluating
// {condition} twice).
#define VALIDATE(condition)                \
  (validate ? V8_LIKELY(condition) : [&] { \
    DCHECK(condition);                     \
    return true;                           \
  }())
46
// Checks that the feature flag {feat} guarding a prototype opcode is enabled,
// records the feature as detected on success, and bails out of the enclosing
// decode function (returning 0 consumed bytes) on failure. Relies on
// {this->module_}, {this->enabled_}, {this->detected_} and {opcode} being in
// scope at the expansion site.
#define CHECK_PROTOTYPE_OPCODE(feat)                                     \
  DCHECK(this->module_->origin == kWasmOrigin);                          \
  if (!VALIDATE(this->enabled_.has_##feat())) {                          \
    this->DecodeError(                                                   \
        "Invalid opcode 0x%x (enable with --experimental-wasm-" #feat ")", \
        opcode);                                                         \
    return 0;                                                            \
  }                                                                      \
  this->detected_->Add(kFeature_##feat);
56
// List of all atomic opcodes except stores, paired with the machine
// representation of their memory access, expanded as V(Name, MemRep).
#define ATOMIC_OP_LIST(V)                \
  V(AtomicNotify, Uint32)                \
  V(I32AtomicWait, Uint32)               \
  V(I64AtomicWait, Uint64)               \
  V(I32AtomicLoad, Uint32)               \
  V(I64AtomicLoad, Uint64)               \
  V(I32AtomicLoad8U, Uint8)              \
  V(I32AtomicLoad16U, Uint16)            \
  V(I64AtomicLoad8U, Uint8)              \
  V(I64AtomicLoad16U, Uint16)            \
  V(I64AtomicLoad32U, Uint32)            \
  V(I32AtomicAdd, Uint32)                \
  V(I32AtomicAdd8U, Uint8)               \
  V(I32AtomicAdd16U, Uint16)             \
  V(I64AtomicAdd, Uint64)                \
  V(I64AtomicAdd8U, Uint8)               \
  V(I64AtomicAdd16U, Uint16)             \
  V(I64AtomicAdd32U, Uint32)             \
  V(I32AtomicSub, Uint32)                \
  V(I64AtomicSub, Uint64)                \
  V(I32AtomicSub8U, Uint8)               \
  V(I32AtomicSub16U, Uint16)             \
  V(I64AtomicSub8U, Uint8)               \
  V(I64AtomicSub16U, Uint16)             \
  V(I64AtomicSub32U, Uint32)             \
  V(I32AtomicAnd, Uint32)                \
  V(I64AtomicAnd, Uint64)                \
  V(I32AtomicAnd8U, Uint8)               \
  V(I32AtomicAnd16U, Uint16)             \
  V(I64AtomicAnd8U, Uint8)               \
  V(I64AtomicAnd16U, Uint16)             \
  V(I64AtomicAnd32U, Uint32)             \
  V(I32AtomicOr, Uint32)                 \
  V(I64AtomicOr, Uint64)                 \
  V(I32AtomicOr8U, Uint8)                \
  V(I32AtomicOr16U, Uint16)              \
  V(I64AtomicOr8U, Uint8)                \
  V(I64AtomicOr16U, Uint16)              \
  V(I64AtomicOr32U, Uint32)              \
  V(I32AtomicXor, Uint32)                \
  V(I64AtomicXor, Uint64)                \
  V(I32AtomicXor8U, Uint8)               \
  V(I32AtomicXor16U, Uint16)             \
  V(I64AtomicXor8U, Uint8)               \
  V(I64AtomicXor16U, Uint16)             \
  V(I64AtomicXor32U, Uint32)             \
  V(I32AtomicExchange, Uint32)           \
  V(I64AtomicExchange, Uint64)           \
  V(I32AtomicExchange8U, Uint8)          \
  V(I32AtomicExchange16U, Uint16)        \
  V(I64AtomicExchange8U, Uint8)          \
  V(I64AtomicExchange16U, Uint16)        \
  V(I64AtomicExchange32U, Uint32)        \
  V(I32AtomicCompareExchange, Uint32)    \
  V(I64AtomicCompareExchange, Uint64)    \
  V(I32AtomicCompareExchange8U, Uint8)   \
  V(I32AtomicCompareExchange16U, Uint16) \
  V(I64AtomicCompareExchange8U, Uint8)   \
  V(I64AtomicCompareExchange16U, Uint16) \
  V(I64AtomicCompareExchange32U, Uint32)
117
// List of atomic store opcodes, paired with the machine representation of
// their memory access, expanded as V(Name, MemRep).
#define ATOMIC_STORE_OP_LIST(V) \
  V(I32AtomicStore, Uint32)     \
  V(I64AtomicStore, Uint64)     \
  V(I32AtomicStore8U, Uint8)    \
  V(I32AtomicStore16U, Uint16)  \
  V(I64AtomicStore8U, Uint8)    \
  V(I64AtomicStore16U, Uint16)  \
  V(I64AtomicStore32U, Uint32)
126
127 // Decoder error with explicit PC and format arguments.
128 template <Decoder::ValidateFlag validate, typename... Args>
DecodeError(Decoder * decoder,const byte * pc,const char * str,Args &&...args)129 void DecodeError(Decoder* decoder, const byte* pc, const char* str,
130 Args&&... args) {
131 CHECK(validate == Decoder::kFullValidation ||
132 validate == Decoder::kBooleanValidation);
133 STATIC_ASSERT(sizeof...(Args) > 0);
134 if (validate == Decoder::kBooleanValidation) {
135 decoder->MarkError();
136 } else {
137 decoder->errorf(pc, str, std::forward<Args>(args)...);
138 }
139 }
140
141 // Decoder error with explicit PC and no format arguments.
142 template <Decoder::ValidateFlag validate>
DecodeError(Decoder * decoder,const byte * pc,const char * str)143 void DecodeError(Decoder* decoder, const byte* pc, const char* str) {
144 CHECK(validate == Decoder::kFullValidation ||
145 validate == Decoder::kBooleanValidation);
146 if (validate == Decoder::kBooleanValidation) {
147 decoder->MarkError();
148 } else {
149 decoder->error(pc, str);
150 }
151 }
152
153 // Decoder error without explicit PC, but with format arguments.
154 template <Decoder::ValidateFlag validate, typename... Args>
DecodeError(Decoder * decoder,const char * str,Args &&...args)155 void DecodeError(Decoder* decoder, const char* str, Args&&... args) {
156 CHECK(validate == Decoder::kFullValidation ||
157 validate == Decoder::kBooleanValidation);
158 STATIC_ASSERT(sizeof...(Args) > 0);
159 if (validate == Decoder::kBooleanValidation) {
160 decoder->MarkError();
161 } else {
162 decoder->errorf(str, std::forward<Args>(args)...);
163 }
164 }
165
166 // Decoder error without explicit PC and without format arguments.
167 template <Decoder::ValidateFlag validate>
DecodeError(Decoder * decoder,const char * str)168 void DecodeError(Decoder* decoder, const char* str) {
169 CHECK(validate == Decoder::kFullValidation ||
170 validate == Decoder::kBooleanValidation);
171 if (validate == Decoder::kBooleanValidation) {
172 decoder->MarkError();
173 } else {
174 decoder->error(str);
175 }
176 }
177
178 namespace value_type_reader {
179
feature_for_heap_type(HeapType heap_type)180 V8_INLINE WasmFeature feature_for_heap_type(HeapType heap_type) {
181 switch (heap_type.representation()) {
182 case HeapType::kFunc:
183 case HeapType::kExtern:
184 return WasmFeature::kFeature_reftypes;
185 case HeapType::kExn:
186 return WasmFeature::kFeature_eh;
187 case HeapType::kEq:
188 case HeapType::kI31:
189 return WasmFeature::kFeature_gc;
190 default:
191 UNREACHABLE();
192 }
193 }
194
195 template <Decoder::ValidateFlag validate>
read_heap_type(Decoder * decoder,const byte * pc,uint32_t * const length,const WasmFeatures & enabled)196 HeapType read_heap_type(Decoder* decoder, const byte* pc,
197 uint32_t* const length, const WasmFeatures& enabled) {
198 int64_t heap_index = decoder->read_i33v<validate>(pc, length, "heap type");
199 if (heap_index < 0) {
200 int64_t min_1_byte_leb128 = -64;
201 if (heap_index < min_1_byte_leb128) {
202 DecodeError<validate>(decoder, pc, "Unknown heap type %" PRId64,
203 heap_index);
204 return HeapType(HeapType::kBottom);
205 }
206 uint8_t uint_7_mask = 0x7F;
207 uint8_t code = static_cast<ValueTypeCode>(heap_index) & uint_7_mask;
208 switch (code) {
209 case kFuncRefCode:
210 case kExnRefCode:
211 case kEqRefCode:
212 case kExternRefCode:
213 case kI31RefCode: {
214 HeapType result = HeapType::from_code(code);
215 if (!VALIDATE(enabled.contains(feature_for_heap_type(result)))) {
216 DecodeError<validate>(
217 decoder, pc,
218 "invalid heap type '%s', enable with --experimental-wasm-%s",
219 result.name().c_str(),
220 WasmFeatures::name_for_feature(feature_for_heap_type(result)));
221 return HeapType(HeapType::kBottom);
222 }
223 return result;
224 }
225 default:
226 DecodeError<validate>(decoder, pc, "Unknown heap type %" PRId64,
227 heap_index);
228 return HeapType(HeapType::kBottom);
229 }
230 UNREACHABLE();
231 } else {
232 if (!VALIDATE(enabled.has_typed_funcref())) {
233 DecodeError<validate>(decoder, pc,
234 "Invalid indexed heap type, enable with "
235 "--experimental-wasm-typed-funcref");
236 return HeapType(HeapType::kBottom);
237 }
238 uint32_t type_index = static_cast<uint32_t>(heap_index);
239 if (!VALIDATE(type_index < kV8MaxWasmTypes)) {
240 DecodeError<validate>(
241 decoder, pc,
242 "Type index %u is greater than the maximum number %zu "
243 "of type definitions supported by V8",
244 type_index, kV8MaxWasmTypes);
245 return HeapType(HeapType::kBottom);
246 }
247 return HeapType(type_index);
248 }
249 }
250
251 // Read a value type starting at address 'pc' in 'decoder'.
252 // No bytes are consumed. The result is written into the 'result' parameter.
253 // Returns the amount of bytes read, or 0 if decoding failed.
254 // Registers an error if the type opcode is invalid iff validate is set.
255 template <Decoder::ValidateFlag validate>
read_value_type(Decoder * decoder,const byte * pc,uint32_t * const length,const WasmFeatures & enabled)256 ValueType read_value_type(Decoder* decoder, const byte* pc,
257 uint32_t* const length, const WasmFeatures& enabled) {
258 *length = 1;
259 byte val = decoder->read_u8<validate>(pc, "value type opcode");
260 if (decoder->failed()) {
261 return kWasmBottom;
262 }
263 ValueTypeCode code = static_cast<ValueTypeCode>(val);
264 switch (code) {
265 case kFuncRefCode:
266 case kExnRefCode:
267 case kEqRefCode:
268 case kExternRefCode:
269 case kI31RefCode: {
270 HeapType heap_type = HeapType::from_code(code);
271 ValueType result = ValueType::Ref(
272 heap_type, code == kI31RefCode ? kNonNullable : kNullable);
273 if (!VALIDATE(enabled.contains(feature_for_heap_type(heap_type)))) {
274 DecodeError<validate>(
275 decoder, pc,
276 "invalid value type '%s', enable with --experimental-wasm-%s",
277 result.name().c_str(),
278 WasmFeatures::name_for_feature(feature_for_heap_type(heap_type)));
279 return kWasmBottom;
280 }
281 return result;
282 }
283 case kI32Code:
284 return kWasmI32;
285 case kI64Code:
286 return kWasmI64;
287 case kF32Code:
288 return kWasmF32;
289 case kF64Code:
290 return kWasmF64;
291 case kRefCode:
292 case kOptRefCode: {
293 Nullability nullability = code == kOptRefCode ? kNullable : kNonNullable;
294 if (!VALIDATE(enabled.has_typed_funcref())) {
295 DecodeError<validate>(decoder, pc,
296 "Invalid type '(ref%s <heaptype>)', enable with "
297 "--experimental-wasm-typed-funcref",
298 nullability == kNullable ? " null" : "");
299 return kWasmBottom;
300 }
301 HeapType heap_type =
302 read_heap_type<validate>(decoder, pc + 1, length, enabled);
303 *length += 1;
304 return heap_type.is_bottom() ? kWasmBottom
305 : ValueType::Ref(heap_type, nullability);
306 }
307 case kRttCode: {
308 if (!VALIDATE(enabled.has_gc())) {
309 DecodeError<validate>(
310 decoder, pc,
311 "invalid value type 'rtt', enable with --experimental-wasm-gc");
312 return kWasmBottom;
313 }
314 uint32_t depth_length;
315 uint32_t depth =
316 decoder->read_u32v<validate>(pc + 1, &depth_length, "depth");
317 if (!VALIDATE(depth <= kV8MaxRttSubtypingDepth)) {
318 DecodeError<validate>(
319 decoder, pc,
320 "subtyping depth %u is greater than the maximum depth "
321 "%u supported by V8",
322 depth, kV8MaxRttSubtypingDepth);
323 return kWasmBottom;
324 }
325 HeapType heap_type = read_heap_type<validate>(
326 decoder, pc + depth_length + 1, length, enabled);
327 *length += depth_length + 1;
328 return heap_type.is_bottom() ? kWasmBottom
329 : ValueType::Rtt(heap_type, depth);
330 }
331 case kS128Code: {
332 if (!VALIDATE(enabled.has_simd())) {
333 DecodeError<validate>(
334 decoder, pc,
335 "invalid value type 's128', enable with --experimental-wasm-simd");
336 return kWasmBottom;
337 }
338 return kWasmS128;
339 }
340 // Although these codes are included in ValueTypeCode, they technically
341 // do not correspond to value types and are only used in specific
342 // contexts. The caller of this function is responsible for handling them.
343 case kVoidCode:
344 case kI8Code:
345 case kI16Code:
346 return kWasmBottom;
347 }
348 // Anything that doesn't match an enumeration value is an invalid type code.
349 return kWasmBottom;
350 }
351 } // namespace value_type_reader
352
353 // Helpers for decoding different kinds of immediates which follow bytecodes.
354 template <Decoder::ValidateFlag validate>
355 struct LocalIndexImmediate {
356 uint32_t index;
357 uint32_t length;
358
LocalIndexImmediateLocalIndexImmediate359 inline LocalIndexImmediate(Decoder* decoder, const byte* pc) {
360 index = decoder->read_u32v<validate>(pc, &length, "local index");
361 }
362 };
363
364 template <Decoder::ValidateFlag validate>
365 struct ExceptionIndexImmediate {
366 uint32_t index;
367 const WasmException* exception = nullptr;
368 uint32_t length;
369
ExceptionIndexImmediateExceptionIndexImmediate370 inline ExceptionIndexImmediate(Decoder* decoder, const byte* pc) {
371 index = decoder->read_u32v<validate>(pc, &length, "exception index");
372 }
373 };
374
375 template <Decoder::ValidateFlag validate>
376 struct ImmI32Immediate {
377 int32_t value;
378 uint32_t length;
ImmI32ImmediateImmI32Immediate379 inline ImmI32Immediate(Decoder* decoder, const byte* pc) {
380 value = decoder->read_i32v<validate>(pc, &length, "immi32");
381 }
382 };
383
384 template <Decoder::ValidateFlag validate>
385 struct ImmI64Immediate {
386 int64_t value;
387 uint32_t length;
ImmI64ImmediateImmI64Immediate388 inline ImmI64Immediate(Decoder* decoder, const byte* pc) {
389 value = decoder->read_i64v<validate>(pc, &length, "immi64");
390 }
391 };
392
393 template <Decoder::ValidateFlag validate>
394 struct ImmF32Immediate {
395 float value;
396 uint32_t length = 4;
ImmF32ImmediateImmF32Immediate397 inline ImmF32Immediate(Decoder* decoder, const byte* pc) {
398 // We can't use bit_cast here because calling any helper function that
399 // returns a float would potentially flip NaN bits per C++ semantics, so we
400 // have to inline the memcpy call directly.
401 uint32_t tmp = decoder->read_u32<validate>(pc, "immf32");
402 memcpy(&value, &tmp, sizeof(value));
403 }
404 };
405
406 template <Decoder::ValidateFlag validate>
407 struct ImmF64Immediate {
408 double value;
409 uint32_t length = 8;
ImmF64ImmediateImmF64Immediate410 inline ImmF64Immediate(Decoder* decoder, const byte* pc) {
411 // Avoid bit_cast because it might not preserve the signalling bit of a NaN.
412 uint64_t tmp = decoder->read_u64<validate>(pc, "immf64");
413 memcpy(&value, &tmp, sizeof(value));
414 }
415 };
416
417 template <Decoder::ValidateFlag validate>
418 struct GlobalIndexImmediate {
419 uint32_t index;
420 ValueType type = kWasmStmt;
421 const WasmGlobal* global = nullptr;
422 uint32_t length;
423
GlobalIndexImmediateGlobalIndexImmediate424 inline GlobalIndexImmediate(Decoder* decoder, const byte* pc) {
425 index = decoder->read_u32v<validate>(pc, &length, "global index");
426 }
427 };
428
429 template <Decoder::ValidateFlag validate>
430 struct SelectTypeImmediate {
431 uint32_t length;
432 ValueType type;
433
SelectTypeImmediateSelectTypeImmediate434 inline SelectTypeImmediate(const WasmFeatures& enabled, Decoder* decoder,
435 const byte* pc) {
436 uint8_t num_types =
437 decoder->read_u32v<validate>(pc, &length, "number of select types");
438 if (!VALIDATE(num_types == 1)) {
439 DecodeError<validate>(
440 decoder, pc + 1,
441 "Invalid number of types. Select accepts exactly one type");
442 return;
443 }
444 uint32_t type_length;
445 type = value_type_reader::read_value_type<validate>(decoder, pc + length,
446 &type_length, enabled);
447 length += type_length;
448 if (!VALIDATE(type != kWasmBottom)) {
449 DecodeError<validate>(decoder, pc + 1, "invalid select type");
450 }
451 }
452 };
453
454 template <Decoder::ValidateFlag validate>
455 struct BlockTypeImmediate {
456 uint32_t length = 1;
457 ValueType type = kWasmStmt;
458 uint32_t sig_index = 0;
459 const FunctionSig* sig = nullptr;
460
BlockTypeImmediateBlockTypeImmediate461 inline BlockTypeImmediate(const WasmFeatures& enabled, Decoder* decoder,
462 const byte* pc) {
463 int64_t block_type =
464 decoder->read_i33v<validate>(pc, &length, "block type");
465 if (block_type < 0) {
466 constexpr int64_t kVoidCode_i64_extended = (~int64_t{0x7F}) | kVoidCode;
467 if (block_type == kVoidCode_i64_extended) return;
468 type = value_type_reader::read_value_type<validate>(decoder, pc, &length,
469 enabled);
470 if (!VALIDATE(type != kWasmBottom)) {
471 DecodeError<validate>(decoder, pc, "Invalid block type %" PRId64,
472 block_type);
473 }
474 } else {
475 if (!VALIDATE(enabled.has_mv())) {
476 DecodeError<validate>(decoder, pc,
477 "invalid block type %" PRId64
478 ", enable with --experimental-wasm-mv",
479 block_type);
480 return;
481 }
482 type = kWasmBottom;
483 sig_index = static_cast<uint32_t>(block_type);
484 }
485 }
486
in_arityBlockTypeImmediate487 uint32_t in_arity() const {
488 if (type != kWasmBottom) return 0;
489 return static_cast<uint32_t>(sig->parameter_count());
490 }
out_arityBlockTypeImmediate491 uint32_t out_arity() const {
492 if (type == kWasmStmt) return 0;
493 if (type != kWasmBottom) return 1;
494 return static_cast<uint32_t>(sig->return_count());
495 }
in_typeBlockTypeImmediate496 ValueType in_type(uint32_t index) {
497 DCHECK_EQ(kWasmBottom, type);
498 return sig->GetParam(index);
499 }
out_typeBlockTypeImmediate500 ValueType out_type(uint32_t index) {
501 if (type == kWasmBottom) return sig->GetReturn(index);
502 DCHECK_NE(kWasmStmt, type);
503 DCHECK_EQ(0, index);
504 return type;
505 }
506 };
507
508 template <Decoder::ValidateFlag validate>
509 struct BranchDepthImmediate {
510 uint32_t depth;
511 uint32_t length;
BranchDepthImmediateBranchDepthImmediate512 inline BranchDepthImmediate(Decoder* decoder, const byte* pc) {
513 depth = decoder->read_u32v<validate>(pc, &length, "branch depth");
514 }
515 };
516
517 template <Decoder::ValidateFlag validate>
518 struct BranchOnExceptionImmediate {
519 BranchDepthImmediate<validate> depth;
520 ExceptionIndexImmediate<validate> index;
521 uint32_t length = 0;
BranchOnExceptionImmediateBranchOnExceptionImmediate522 inline BranchOnExceptionImmediate(Decoder* decoder, const byte* pc)
523 : depth(BranchDepthImmediate<validate>(decoder, pc)),
524 index(ExceptionIndexImmediate<validate>(decoder, pc + depth.length)) {
525 length = depth.length + index.length;
526 }
527 };
528
529 template <Decoder::ValidateFlag validate>
530 struct FunctionIndexImmediate {
531 uint32_t index = 0;
532 uint32_t length = 1;
FunctionIndexImmediateFunctionIndexImmediate533 inline FunctionIndexImmediate(Decoder* decoder, const byte* pc) {
534 index = decoder->read_u32v<validate>(pc, &length, "function index");
535 }
536 };
537
538 template <Decoder::ValidateFlag validate>
539 struct MemoryIndexImmediate {
540 uint32_t index = 0;
541 uint32_t length = 1;
542 inline MemoryIndexImmediate() = default;
MemoryIndexImmediateMemoryIndexImmediate543 inline MemoryIndexImmediate(Decoder* decoder, const byte* pc) {
544 index = decoder->read_u8<validate>(pc, "memory index");
545 if (!VALIDATE(index == 0)) {
546 DecodeError<validate>(decoder, pc, "expected memory index 0, found %u",
547 index);
548 }
549 }
550 };
551
552 template <Decoder::ValidateFlag validate>
553 struct TableIndexImmediate {
554 uint32_t index = 0;
555 uint32_t length = 1;
556 inline TableIndexImmediate() = default;
TableIndexImmediateTableIndexImmediate557 inline TableIndexImmediate(Decoder* decoder, const byte* pc) {
558 index = decoder->read_u32v<validate>(pc, &length, "table index");
559 }
560 };
561
562 // TODO(jkummerow): Introduce a common superclass for StructIndexImmediate and
563 // ArrayIndexImmediate? Maybe even FunctionIndexImmediate too?
564 template <Decoder::ValidateFlag validate>
565 struct StructIndexImmediate {
566 uint32_t index = 0;
567 uint32_t length = 0;
568 const StructType* struct_type = nullptr;
StructIndexImmediateStructIndexImmediate569 inline StructIndexImmediate(Decoder* decoder, const byte* pc) {
570 index = decoder->read_u32v<validate>(pc, &length, "struct index");
571 }
572 };
573
574 template <Decoder::ValidateFlag validate>
575 struct FieldIndexImmediate {
576 StructIndexImmediate<validate> struct_index;
577 uint32_t index = 0;
578 uint32_t length = 0;
FieldIndexImmediateFieldIndexImmediate579 inline FieldIndexImmediate(Decoder* decoder, const byte* pc)
580 : struct_index(decoder, pc) {
581 index = decoder->read_u32v<validate>(pc + struct_index.length, &length,
582 "field index");
583 length += struct_index.length;
584 }
585 };
586
587 template <Decoder::ValidateFlag validate>
588 struct ArrayIndexImmediate {
589 uint32_t index = 0;
590 uint32_t length = 0;
591 const ArrayType* array_type = nullptr;
ArrayIndexImmediateArrayIndexImmediate592 inline ArrayIndexImmediate(Decoder* decoder, const byte* pc) {
593 index = decoder->read_u32v<validate>(pc, &length, "array index");
594 }
595 };
596
597 template <Decoder::ValidateFlag validate>
598 struct CallIndirectImmediate {
599 uint32_t table_index;
600 uint32_t sig_index;
601 const FunctionSig* sig = nullptr;
602 uint32_t length = 0;
CallIndirectImmediateCallIndirectImmediate603 inline CallIndirectImmediate(const WasmFeatures enabled, Decoder* decoder,
604 const byte* pc) {
605 uint32_t len = 0;
606 sig_index = decoder->read_u32v<validate>(pc, &len, "signature index");
607 TableIndexImmediate<validate> table(decoder, pc + len);
608 if (!VALIDATE((table.index == 0 && table.length == 1) ||
609 enabled.has_reftypes())) {
610 DecodeError<validate>(decoder, pc + len,
611 "expected table index 0, found %u", table.index);
612 }
613 table_index = table.index;
614 length = len + table.length;
615 }
616 };
617
618 template <Decoder::ValidateFlag validate>
619 struct CallFunctionImmediate {
620 uint32_t index;
621 const FunctionSig* sig = nullptr;
622 uint32_t length;
CallFunctionImmediateCallFunctionImmediate623 inline CallFunctionImmediate(Decoder* decoder, const byte* pc) {
624 index = decoder->read_u32v<validate>(pc, &length, "function index");
625 }
626 };
627
628 template <Decoder::ValidateFlag validate>
629 struct BranchTableImmediate {
630 uint32_t table_count;
631 const byte* start;
632 const byte* table;
BranchTableImmediateBranchTableImmediate633 inline BranchTableImmediate(Decoder* decoder, const byte* pc) {
634 start = pc;
635 uint32_t len = 0;
636 table_count = decoder->read_u32v<validate>(pc, &len, "table count");
637 table = pc + len;
638 }
639 };
640
641 // A helper to iterate over a branch table.
642 template <Decoder::ValidateFlag validate>
643 class BranchTableIterator {
644 public:
cur_index()645 uint32_t cur_index() { return index_; }
has_next()646 bool has_next() { return VALIDATE(decoder_->ok()) && index_ <= table_count_; }
next()647 uint32_t next() {
648 DCHECK(has_next());
649 index_++;
650 uint32_t length;
651 uint32_t result =
652 decoder_->read_u32v<validate>(pc_, &length, "branch table entry");
653 pc_ += length;
654 return result;
655 }
656 // length, including the length of the {BranchTableImmediate}, but not the
657 // opcode.
length()658 uint32_t length() {
659 while (has_next()) next();
660 return static_cast<uint32_t>(pc_ - start_);
661 }
pc()662 const byte* pc() { return pc_; }
663
BranchTableIterator(Decoder * decoder,const BranchTableImmediate<validate> & imm)664 BranchTableIterator(Decoder* decoder,
665 const BranchTableImmediate<validate>& imm)
666 : decoder_(decoder),
667 start_(imm.start),
668 pc_(imm.table),
669 table_count_(imm.table_count) {}
670
671 private:
672 Decoder* const decoder_;
673 const byte* start_;
674 const byte* pc_;
675 uint32_t index_ = 0; // the current index.
676 const uint32_t table_count_; // the count of entries, not including default.
677 };
678
679 template <Decoder::ValidateFlag validate>
680 struct MemoryAccessImmediate {
681 uint32_t alignment;
682 uint32_t offset;
683 uint32_t length = 0;
MemoryAccessImmediateMemoryAccessImmediate684 inline MemoryAccessImmediate(Decoder* decoder, const byte* pc,
685 uint32_t max_alignment) {
686 uint32_t alignment_length;
687 alignment =
688 decoder->read_u32v<validate>(pc, &alignment_length, "alignment");
689 if (!VALIDATE(alignment <= max_alignment)) {
690 DecodeError<validate>(
691 decoder, pc,
692 "invalid alignment; expected maximum alignment is %u, "
693 "actual alignment is %u",
694 max_alignment, alignment);
695 }
696 uint32_t offset_length;
697 offset = decoder->read_u32v<validate>(pc + alignment_length, &offset_length,
698 "offset");
699 length = alignment_length + offset_length;
700 }
701 };
702
703 // Immediate for SIMD lane operations.
704 template <Decoder::ValidateFlag validate>
705 struct SimdLaneImmediate {
706 uint8_t lane;
707 uint32_t length = 1;
708
SimdLaneImmediateSimdLaneImmediate709 inline SimdLaneImmediate(Decoder* decoder, const byte* pc) {
710 lane = decoder->read_u8<validate>(pc, "lane");
711 }
712 };
713
714 // Immediate for SIMD S8x16 shuffle operations.
715 template <Decoder::ValidateFlag validate>
716 struct Simd128Immediate {
717 uint8_t value[kSimd128Size] = {0};
718
Simd128ImmediateSimd128Immediate719 inline Simd128Immediate(Decoder* decoder, const byte* pc) {
720 for (uint32_t i = 0; i < kSimd128Size; ++i) {
721 value[i] = decoder->read_u8<validate>(pc + i, "value");
722 }
723 }
724 };
725
726 template <Decoder::ValidateFlag validate>
727 struct MemoryInitImmediate {
728 uint32_t data_segment_index = 0;
729 MemoryIndexImmediate<validate> memory;
730 unsigned length = 0;
731
MemoryInitImmediateMemoryInitImmediate732 inline MemoryInitImmediate(Decoder* decoder, const byte* pc) {
733 uint32_t len = 0;
734 data_segment_index =
735 decoder->read_u32v<validate>(pc, &len, "data segment index");
736 memory = MemoryIndexImmediate<validate>(decoder, pc + len);
737 length = len + memory.length;
738 }
739 };
740
741 template <Decoder::ValidateFlag validate>
742 struct DataDropImmediate {
743 uint32_t index;
744 unsigned length;
745
DataDropImmediateDataDropImmediate746 inline DataDropImmediate(Decoder* decoder, const byte* pc) {
747 index = decoder->read_u32v<validate>(pc, &length, "data segment index");
748 }
749 };
750
751 template <Decoder::ValidateFlag validate>
752 struct MemoryCopyImmediate {
753 MemoryIndexImmediate<validate> memory_src;
754 MemoryIndexImmediate<validate> memory_dst;
755 unsigned length = 0;
756
MemoryCopyImmediateMemoryCopyImmediate757 inline MemoryCopyImmediate(Decoder* decoder, const byte* pc) {
758 memory_src = MemoryIndexImmediate<validate>(decoder, pc);
759 memory_dst =
760 MemoryIndexImmediate<validate>(decoder, pc + memory_src.length);
761 length = memory_src.length + memory_dst.length;
762 }
763 };
764
765 template <Decoder::ValidateFlag validate>
766 struct TableInitImmediate {
767 uint32_t elem_segment_index = 0;
768 TableIndexImmediate<validate> table;
769 unsigned length = 0;
770
TableInitImmediateTableInitImmediate771 inline TableInitImmediate(Decoder* decoder, const byte* pc) {
772 uint32_t len = 0;
773 elem_segment_index =
774 decoder->read_u32v<validate>(pc, &len, "elem segment index");
775 table = TableIndexImmediate<validate>(decoder, pc + len);
776 length = len + table.length;
777 }
778 };
779
780 template <Decoder::ValidateFlag validate>
781 struct ElemDropImmediate {
782 uint32_t index;
783 unsigned length;
784
ElemDropImmediateElemDropImmediate785 inline ElemDropImmediate(Decoder* decoder, const byte* pc) {
786 index = decoder->read_u32v<validate>(pc, &length, "elem segment index");
787 }
788 };
789
790 template <Decoder::ValidateFlag validate>
791 struct TableCopyImmediate {
792 TableIndexImmediate<validate> table_dst;
793 TableIndexImmediate<validate> table_src;
794 unsigned length = 0;
795
TableCopyImmediateTableCopyImmediate796 inline TableCopyImmediate(Decoder* decoder, const byte* pc) {
797 table_dst = TableIndexImmediate<validate>(decoder, pc);
798 table_src = TableIndexImmediate<validate>(decoder, pc + table_dst.length);
799 length = table_src.length + table_dst.length;
800 }
801 };
802
803 template <Decoder::ValidateFlag validate>
804 struct HeapTypeImmediate {
805 uint32_t length = 1;
806 HeapType type = HeapType(HeapType::kBottom);
HeapTypeImmediateHeapTypeImmediate807 inline HeapTypeImmediate(const WasmFeatures& enabled, Decoder* decoder,
808 const byte* pc) {
809 type = value_type_reader::read_heap_type<validate>(decoder, pc, &length,
810 enabled);
811 }
812 };
813
814 template <Decoder::ValidateFlag validate>
815 struct PcForErrors {
PcForErrorsPcForErrors816 PcForErrors(const byte* /* pc */) {}
817
pcPcForErrors818 const byte* pc() const { return nullptr; }
819 };
820
821 template <>
822 struct PcForErrors<Decoder::kFullValidation> {
823 const byte* pc_for_errors = nullptr;
824
825 PcForErrors(const byte* pc) : pc_for_errors(pc) {}
826
827 const byte* pc() const { return pc_for_errors; }
828 };
829
830 // An entry on the value stack.
831 template <Decoder::ValidateFlag validate>
832 struct ValueBase : public PcForErrors<validate> {
833 ValueType type = kWasmStmt;
834
835 ValueBase(const byte* pc, ValueType type)
836 : PcForErrors<validate>(pc), type(type) {}
837 };
838
839 template <typename Value>
840 struct Merge {
841 uint32_t arity = 0;
842 union { // Either multiple values or a single value.
843 Value* array;
844 Value first;
845 } vals = {nullptr}; // Initialize {array} with {nullptr}.
846
847 // Tracks whether this merge was ever reached. Uses precise reachability, like
848 // Reachability::kReachable.
849 bool reached;
850
851 explicit Merge(bool reached = false) : reached(reached) {}
852
853 Value& operator[](uint32_t i) {
854 DCHECK_GT(arity, i);
855 return arity == 1 ? vals.first : vals.array[i];
856 }
857 };
858
// The kind of a construct on the control stack.
enum ControlKind : uint8_t {
  kControlIf,        // 'if' without an 'else' arm (so far).
  kControlIfElse,    // 'if' whose 'else' arm has been seen.
  kControlBlock,     // plain 'block'.
  kControlLoop,      // 'loop'.
  kControlLet,       // 'let' block introducing locals.
  kControlTry,       // 'try' without a 'catch' (so far).
  kControlTryCatch   // 'try' whose 'catch' has been seen.
};
868
// Reachability state of the code currently being decoded.
enum Reachability : uint8_t {
  // reachable code.
  kReachable,
  // reachable code in unreachable block (implies normal validation).
  kSpecOnlyReachable,
  // code unreachable in its own block (implies polymorphic validation).
  kUnreachable
};
877
// An entry on the control stack (i.e. if, block, loop, or try).
template <typename Value, Decoder::ValidateFlag validate>
struct ControlBase : public PcForErrors<validate> {
  ControlKind kind = kControlBlock;
  // Number of locals declared by this construct; non-zero only for 'let'
  // blocks (enforced by the DCHECK in the constructor).
  uint32_t locals_count = 0;
  uint32_t stack_depth = 0;  // stack height at the beginning of the construct.
  Reachability reachability = kReachable;

  // Values merged into the start or end of this control construct.
  Merge<Value> start_merge;
  Merge<Value> end_merge;

  MOVE_ONLY_NO_DEFAULT_CONSTRUCTOR(ControlBase);

  ControlBase(ControlKind kind, uint32_t locals_count, uint32_t stack_depth,
              const uint8_t* pc, Reachability reachability)
      : PcForErrors<validate>(pc),
        kind(kind),
        locals_count(locals_count),
        stack_depth(stack_depth),
        reachability(reachability),
        // The start merge is "reached" iff this construct itself is reachable.
        start_merge(reachability == kReachable) {
    // Only 'let' blocks may declare locals.
    DCHECK(kind == kControlLet || locals_count == 0);
  }

  // Check whether the current block is reachable.
  bool reachable() const { return reachability == kReachable; }

  // Check whether the rest of the block is unreachable.
  // Note that this is different from {!reachable()}, as there is also the
  // "indirect unreachable state", for which both {reachable()} and
  // {unreachable()} return false.
  bool unreachable() const { return reachability == kUnreachable; }

  // Return the reachability of new control structs started in this block.
  Reachability innerReachability() const {
    return reachability == kReachable ? kReachable : kSpecOnlyReachable;
  }

  // True for both one-armed ifs and if/else constructs.
  bool is_if() const { return is_onearmed_if() || is_if_else(); }
  bool is_onearmed_if() const { return kind == kControlIf; }
  bool is_if_else() const { return kind == kControlIfElse; }
  bool is_block() const { return kind == kControlBlock; }
  bool is_let() const { return kind == kControlLet; }
  bool is_loop() const { return kind == kControlLoop; }
  bool is_incomplete_try() const { return kind == kControlTry; }
  bool is_try_catch() const { return kind == kControlTryCatch; }
  bool is_try() const { return is_incomplete_try() || is_try_catch(); }

  // The merge branches target: the start of the construct for loops
  // (backward branch), the end for all other constructs (forward branch).
  inline Merge<Value>* br_merge() {
    return is_loop() ? &this->start_merge : &this->end_merge;
  }
};
931
// The list of callback functions that an interface for the WasmFullDecoder
// must implement. Each entry has the shape F(Name, args...), where `Name` is
// the name of the interface method and `args...` is its parameter list.
935 #define INTERFACE_FUNCTIONS(F) \
936 /* General: */ \
937 F(StartFunction) \
938 F(StartFunctionBody, Control* block) \
939 F(FinishFunction) \
940 F(OnFirstError) \
941 F(NextInstruction, WasmOpcode) \
942 /* Control: */ \
943 F(Block, Control* block) \
944 F(Loop, Control* block) \
945 F(Try, Control* block) \
946 F(Catch, Control* block, Value* exception) \
947 F(If, const Value& cond, Control* if_block) \
948 F(FallThruTo, Control* c) \
949 F(PopControl, Control* block) \
950 F(EndControl, Control* block) \
951 /* Instructions: */ \
952 F(UnOp, WasmOpcode opcode, const Value& value, Value* result) \
953 F(BinOp, WasmOpcode opcode, const Value& lhs, const Value& rhs, \
954 Value* result) \
955 F(I32Const, Value* result, int32_t value) \
956 F(I64Const, Value* result, int64_t value) \
957 F(F32Const, Value* result, float value) \
958 F(F64Const, Value* result, double value) \
959 F(RefNull, ValueType type, Value* result) \
960 F(RefFunc, uint32_t function_index, Value* result) \
961 F(RefAsNonNull, const Value& arg, Value* result) \
962 F(Drop, const Value& value) \
963 F(DoReturn, Vector<Value> values) \
964 F(LocalGet, Value* result, const LocalIndexImmediate<validate>& imm) \
965 F(LocalSet, const Value& value, const LocalIndexImmediate<validate>& imm) \
966 F(LocalTee, const Value& value, Value* result, \
967 const LocalIndexImmediate<validate>& imm) \
968 F(AllocateLocals, Vector<Value> local_values) \
969 F(DeallocateLocals, uint32_t count) \
970 F(GlobalGet, Value* result, const GlobalIndexImmediate<validate>& imm) \
971 F(GlobalSet, const Value& value, const GlobalIndexImmediate<validate>& imm) \
972 F(TableGet, const Value& index, Value* result, \
973 const TableIndexImmediate<validate>& imm) \
974 F(TableSet, const Value& index, const Value& value, \
975 const TableIndexImmediate<validate>& imm) \
976 F(Unreachable) \
977 F(Select, const Value& cond, const Value& fval, const Value& tval, \
978 Value* result) \
979 F(Br, Control* target) \
980 F(BrIf, const Value& cond, uint32_t depth) \
981 F(BrTable, const BranchTableImmediate<validate>& imm, const Value& key) \
982 F(Else, Control* if_block) \
983 F(LoadMem, LoadType type, const MemoryAccessImmediate<validate>& imm, \
984 const Value& index, Value* result) \
985 F(LoadTransform, LoadType type, LoadTransformationKind transform, \
986 const MemoryAccessImmediate<validate>& imm, const Value& index, \
987 Value* result) \
988 F(LoadLane, LoadType type, const Value& value, const Value& index, \
989 const MemoryAccessImmediate<validate>& imm, const uint8_t laneidx, \
990 Value* result) \
991 F(StoreMem, StoreType type, const MemoryAccessImmediate<validate>& imm, \
992 const Value& index, const Value& value) \
993 F(StoreLane, StoreType type, const MemoryAccessImmediate<validate>& imm, \
994 const Value& index, const Value& value, const uint8_t laneidx) \
995 F(CurrentMemoryPages, Value* result) \
996 F(MemoryGrow, const Value& value, Value* result) \
997 F(CallDirect, const CallFunctionImmediate<validate>& imm, \
998 const Value args[], Value returns[]) \
999 F(CallIndirect, const Value& index, \
1000 const CallIndirectImmediate<validate>& imm, const Value args[], \
1001 Value returns[]) \
1002 F(CallRef, const Value& func_ref, const FunctionSig* sig, \
1003 uint32_t sig_index, const Value args[], const Value returns[]) \
1004 F(ReturnCallRef, const Value& func_ref, const FunctionSig* sig, \
1005 uint32_t sig_index, const Value args[]) \
1006 F(ReturnCall, const CallFunctionImmediate<validate>& imm, \
1007 const Value args[]) \
1008 F(ReturnCallIndirect, const Value& index, \
1009 const CallIndirectImmediate<validate>& imm, const Value args[]) \
1010 F(BrOnNull, const Value& ref_object, uint32_t depth) \
1011 F(SimdOp, WasmOpcode opcode, Vector<Value> args, Value* result) \
1012 F(SimdLaneOp, WasmOpcode opcode, const SimdLaneImmediate<validate>& imm, \
1013 const Vector<Value> inputs, Value* result) \
1014 F(S128Const, const Simd128Immediate<validate>& imm, Value* result) \
1015 F(Simd8x16ShuffleOp, const Simd128Immediate<validate>& imm, \
1016 const Value& input0, const Value& input1, Value* result) \
1017 F(Throw, const ExceptionIndexImmediate<validate>& imm, \
1018 const Vector<Value>& args) \
1019 F(Rethrow, const Value& exception) \
1020 F(BrOnException, const Value& exception, \
1021 const ExceptionIndexImmediate<validate>& imm, uint32_t depth, \
1022 Vector<Value> values) \
1023 F(AtomicOp, WasmOpcode opcode, Vector<Value> args, \
1024 const MemoryAccessImmediate<validate>& imm, Value* result) \
1025 F(AtomicFence) \
1026 F(MemoryInit, const MemoryInitImmediate<validate>& imm, const Value& dst, \
1027 const Value& src, const Value& size) \
1028 F(DataDrop, const DataDropImmediate<validate>& imm) \
1029 F(MemoryCopy, const MemoryCopyImmediate<validate>& imm, const Value& dst, \
1030 const Value& src, const Value& size) \
1031 F(MemoryFill, const MemoryIndexImmediate<validate>& imm, const Value& dst, \
1032 const Value& value, const Value& size) \
1033 F(TableInit, const TableInitImmediate<validate>& imm, Vector<Value> args) \
1034 F(ElemDrop, const ElemDropImmediate<validate>& imm) \
1035 F(TableCopy, const TableCopyImmediate<validate>& imm, Vector<Value> args) \
1036 F(TableGrow, const TableIndexImmediate<validate>& imm, const Value& value, \
1037 const Value& delta, Value* result) \
1038 F(TableSize, const TableIndexImmediate<validate>& imm, Value* result) \
1039 F(TableFill, const TableIndexImmediate<validate>& imm, const Value& start, \
1040 const Value& value, const Value& count) \
1041 F(StructNewWithRtt, const StructIndexImmediate<validate>& imm, \
1042 const Value& rtt, const Value args[], Value* result) \
1043 F(StructNewDefault, const StructIndexImmediate<validate>& imm, \
1044 const Value& rtt, Value* result) \
1045 F(StructGet, const Value& struct_object, \
1046 const FieldIndexImmediate<validate>& field, bool is_signed, Value* result) \
1047 F(StructSet, const Value& struct_object, \
1048 const FieldIndexImmediate<validate>& field, const Value& field_value) \
1049 F(ArrayNewWithRtt, const ArrayIndexImmediate<validate>& imm, \
1050 const Value& length, const Value& initial_value, const Value& rtt, \
1051 Value* result) \
1052 F(ArrayNewDefault, const ArrayIndexImmediate<validate>& imm, \
1053 const Value& length, const Value& rtt, Value* result) \
1054 F(ArrayGet, const Value& array_obj, \
1055 const ArrayIndexImmediate<validate>& imm, const Value& index, \
1056 bool is_signed, Value* result) \
1057 F(ArraySet, const Value& array_obj, \
1058 const ArrayIndexImmediate<validate>& imm, const Value& index, \
1059 const Value& value) \
1060 F(ArrayLen, const Value& array_obj, Value* result) \
1061 F(I31New, const Value& input, Value* result) \
1062 F(I31GetS, const Value& input, Value* result) \
1063 F(I31GetU, const Value& input, Value* result) \
1064 F(RttCanon, const HeapTypeImmediate<validate>& imm, Value* result) \
1065 F(RttSub, const HeapTypeImmediate<validate>& imm, const Value& parent, \
1066 Value* result) \
1067 F(RefTest, const Value& obj, const Value& rtt, Value* result) \
1068 F(RefCast, const Value& obj, const Value& rtt, Value* result) \
1069 F(BrOnCast, const Value& obj, const Value& rtt, Value* result_on_branch, \
1070 uint32_t depth) \
1071 F(PassThrough, const Value& from, Value* to)
1072
1073 // Generic Wasm bytecode decoder with utilities for decoding immediates,
1074 // lengths, etc.
1075 template <Decoder::ValidateFlag validate>
1076 class WasmDecoder : public Decoder {
1077 public:
  // {zone} backs the local type list; {enabled} are the features allowed
  // during decoding, {detected} collects the features actually encountered.
  // {sig} is the signature of the decoded function; {start}, {end} and
  // {buffer_offset} are forwarded to the {Decoder} base class.
  WasmDecoder(Zone* zone, const WasmModule* module, const WasmFeatures& enabled,
              WasmFeatures* detected, const FunctionSig* sig, const byte* start,
              const byte* end, uint32_t buffer_offset = 0)
      : Decoder(start, end, buffer_offset),
        local_types_(zone),
        module_(module),
        enabled_(enabled),
        detected_(detected),
        sig_(sig) {}
1087
  // The zone the local type list (and its allocations) live in.
  Zone* zone() const { return local_types_.get_allocator().zone(); }

  // Number of locals, including parameters (kept in sync with local_types_).
  uint32_t num_locals() const {
    DCHECK_EQ(num_locals_, local_types_.size());
    return num_locals_;
  }

  // Type of the local (or parameter) at {index}; {index} < num_locals().
  ValueType local_type(uint32_t index) const { return local_types_[index]; }
1096
1097 void InitializeLocalsFromSig() {
1098 DCHECK_NOT_NULL(sig_);
1099 DCHECK_EQ(0, this->local_types_.size());
1100 local_types_.assign(sig_->parameters().begin(), sig_->parameters().end());
1101 num_locals_ = static_cast<uint32_t>(sig_->parameters().size());
1102 }
1103
  // Decodes local definitions in the current decoder.
  // Returns true iff locals are found.
  // Writes the total length of decoded locals in 'total_length'.
  // If insert_position is present, the decoded locals will be inserted into the
  // 'local_types_' of this decoder. Otherwise, this function is used just to
  // check validity and determine the encoding length of the locals in bytes.
  // The decoder's pc is not advanced. If no locals are found (i.e., no
  // compressed uint32 is found at pc), this will exit as 'false' and without an
  // error.
  bool DecodeLocals(const byte* pc, uint32_t* total_length,
                    const base::Optional<uint32_t> insert_position) {
    uint32_t length;
    *total_length = 0;

    // The 'else' value is useless, we pass it for convenience.
    auto insert_iterator = insert_position.has_value()
                               ? local_types_.begin() + insert_position.value()
                               : local_types_.begin();

    // Decode local declarations, if any.
    uint32_t entries =
        read_u32v<kFullValidation>(pc, &length, "local decls count");
    if (!VALIDATE(ok())) {
      DecodeError(pc + *total_length, "invalid local decls count");
      return false;
    }

    *total_length += length;
    TRACE("local decls count: %u\n", entries);

    // Each entry is a (count, type) pair declaring {count} locals of {type}.
    while (entries-- > 0) {
      if (!VALIDATE(more())) {
        DecodeError(end(),
                    "expected more local decls but reached end of input");
        return false;
      }
      uint32_t count = read_u32v<kFullValidation>(pc + *total_length, &length,
                                                  "local count");
      if (!VALIDATE(ok())) {
        DecodeError(pc + *total_length, "invalid local count");
        return false;
      }
      DCHECK_LE(local_types_.size(), kV8MaxWasmFunctionLocals);
      // Bound the total number of locals; the subtraction cannot underflow
      // because of the DCHECK above.
      if (!VALIDATE(count <= kV8MaxWasmFunctionLocals - local_types_.size())) {
        DecodeError(pc + *total_length, "local count too large");
        return false;
      }
      *total_length += length;

      ValueType type = value_type_reader::read_value_type<kFullValidation>(
          this, pc + *total_length, &length, enabled_);
      if (!VALIDATE(type != kWasmBottom)) {
        DecodeError(pc + *total_length, "invalid local type");
        return false;
      }
      *total_length += length;

      if (insert_position.has_value()) {
        // Move the insertion iterator to the end of the newly inserted locals.
        insert_iterator =
            local_types_.insert(insert_iterator, count, type) + count;
        num_locals_ += count;
      }
    }
    DCHECK(ok());
    return true;
  }
1171
  // Shorthand that forwards to the {DecodeError} functions above, passing our
  // {validate} flag. Taking {args} by value is intentional: the arguments are
  // cheap (pointers, integers, C strings) printf-style varargs.
  template <typename... Args>
  void DecodeError(Args... args) {
    wasm::DecodeError<validate>(this, std::forward<Args>(args)...);
  }
1178
  // Computes the set of locals assigned anywhere inside the loop starting at
  // {pc}. Returns nullptr if {pc} does not point at a loop opcode or if the
  // decoder reported an error while scanning.
  static BitVector* AnalyzeLoopAssignment(WasmDecoder* decoder, const byte* pc,
                                          uint32_t locals_count, Zone* zone) {
    if (pc >= decoder->end()) return nullptr;
    if (*pc != kExprLoop) return nullptr;

    // The number of locals_count is augmented by 2 so that 'locals_count - 2'
    // can be used to track mem_size, and 'locals_count - 1' to track mem_start.
    BitVector* assigned = zone->New<BitVector>(locals_count, zone);
    int depth = 0;
    // Iteratively process all AST nodes nested inside the loop.
    while (pc < decoder->end() && VALIDATE(decoder->ok())) {
      WasmOpcode opcode = static_cast<WasmOpcode>(*pc);
      uint32_t length = 1;
      switch (opcode) {
        case kExprLoop:
        case kExprIf:
        case kExprBlock:
        case kExprTry:
          // Entering a nested control construct.
          length = OpcodeLength(decoder, pc);
          depth++;
          break;
        case kExprLocalSet:  // fallthru
        case kExprLocalTee: {
          LocalIndexImmediate<validate> imm(decoder, pc + 1);
          if (assigned->length() > 0 &&
              imm.index < static_cast<uint32_t>(assigned->length())) {
            // Unverified code might have an out-of-bounds index.
            assigned->Add(imm.index);
          }
          length = 1 + imm.length;
          break;
        }
        case kExprMemoryGrow:
        case kExprCallFunction:
        case kExprCallIndirect:
        case kExprReturnCall:
        case kExprReturnCallIndirect:
          // Add instance cache nodes to the assigned set.
          // TODO(titzer): make this more clear.
          assigned->Add(locals_count - 1);
          length = OpcodeLength(decoder, pc);
          break;
        case kExprEnd:
          depth--;
          break;
        default:
          length = OpcodeLength(decoder, pc);
          break;
      }
      // depth == 0 means the 'end' matching the initial 'loop' was reached.
      if (depth <= 0) break;
      pc += length;
    }
    return VALIDATE(decoder->ok()) ? assigned : nullptr;
  }
1233
1234 inline bool Validate(const byte* pc, LocalIndexImmediate<validate>& imm) {
1235 if (!VALIDATE(imm.index < num_locals())) {
1236 DecodeError(pc, "invalid local index: %u", imm.index);
1237 return false;
1238 }
1239 return true;
1240 }
1241
1242 inline bool Complete(ExceptionIndexImmediate<validate>& imm) {
1243 if (!VALIDATE(imm.index < module_->exceptions.size())) return false;
1244 imm.exception = &module_->exceptions[imm.index];
1245 return true;
1246 }
1247
1248 inline bool Validate(const byte* pc, ExceptionIndexImmediate<validate>& imm) {
1249 if (!Complete(imm)) {
1250 DecodeError(pc, "Invalid exception index: %u", imm.index);
1251 return false;
1252 }
1253 return true;
1254 }
1255
1256 inline bool Validate(const byte* pc, GlobalIndexImmediate<validate>& imm) {
1257 if (!VALIDATE(imm.index < module_->globals.size())) {
1258 DecodeError(pc, "invalid global index: %u", imm.index);
1259 return false;
1260 }
1261 imm.global = &module_->globals[imm.index];
1262 imm.type = imm.global->type;
1263 return true;
1264 }
1265
1266 inline bool Complete(StructIndexImmediate<validate>& imm) {
1267 if (!VALIDATE(module_->has_struct(imm.index))) return false;
1268 imm.struct_type = module_->struct_type(imm.index);
1269 return true;
1270 }
1271
1272 inline bool Validate(const byte* pc, StructIndexImmediate<validate>& imm) {
1273 if (Complete(imm)) return true;
1274 DecodeError(pc, "invalid struct index: %u", imm.index);
1275 return false;
1276 }
1277
1278 inline bool Validate(const byte* pc, FieldIndexImmediate<validate>& imm) {
1279 if (!Validate(pc, imm.struct_index)) return false;
1280 if (!VALIDATE(imm.index < imm.struct_index.struct_type->field_count())) {
1281 DecodeError(pc + imm.struct_index.length, "invalid field index: %u",
1282 imm.index);
1283 return false;
1284 }
1285 return true;
1286 }
1287
1288 inline bool Complete(ArrayIndexImmediate<validate>& imm) {
1289 if (!VALIDATE(module_->has_array(imm.index))) return false;
1290 imm.array_type = module_->array_type(imm.index);
1291 return true;
1292 }
1293
1294 inline bool Validate(const byte* pc, ArrayIndexImmediate<validate>& imm) {
1295 if (!Complete(imm)) {
1296 DecodeError(pc, "invalid array index: %u", imm.index);
1297 return false;
1298 }
1299 return true;
1300 }
1301
1302 inline bool CanReturnCall(const FunctionSig* target_sig) {
1303 if (target_sig == nullptr) return false;
1304 size_t num_returns = sig_->return_count();
1305 if (num_returns != target_sig->return_count()) return false;
1306 for (size_t i = 0; i < num_returns; ++i) {
1307 if (sig_->GetReturn(i) != target_sig->GetReturn(i)) return false;
1308 }
1309 return true;
1310 }
1311
1312 inline bool Complete(CallFunctionImmediate<validate>& imm) {
1313 if (!VALIDATE(imm.index < module_->functions.size())) return false;
1314 imm.sig = module_->functions[imm.index].sig;
1315 if (imm.sig->return_count() > 1) {
1316 this->detected_->Add(kFeature_mv);
1317 }
1318 return true;
1319 }
1320
1321 inline bool Validate(const byte* pc, CallFunctionImmediate<validate>& imm) {
1322 if (!Complete(imm)) {
1323 DecodeError(pc, "invalid function index: %u", imm.index);
1324 return false;
1325 }
1326 return true;
1327 }
1328
1329 inline bool Complete(CallIndirectImmediate<validate>& imm) {
1330 if (!VALIDATE(module_->has_signature(imm.sig_index))) return false;
1331 imm.sig = module_->signature(imm.sig_index);
1332 if (imm.sig->return_count() > 1) {
1333 this->detected_->Add(kFeature_mv);
1334 }
1335 return true;
1336 }
1337
1338 inline bool Validate(const byte* pc, CallIndirectImmediate<validate>& imm) {
1339 if (!VALIDATE(imm.table_index < module_->tables.size())) {
1340 DecodeError(pc, "call_indirect: table index immediate out of bounds");
1341 return false;
1342 }
1343 ValueType table_type = module_->tables[imm.table_index].type;
1344 if (!VALIDATE(IsSubtypeOf(table_type, kWasmFuncRef, module_))) {
1345 DecodeError(
1346 pc, "call_indirect: immediate table #%u is not of a function type",
1347 imm.table_index);
1348 return false;
1349 }
1350 if (!Complete(imm)) {
1351 DecodeError(pc, "invalid signature index: #%u", imm.sig_index);
1352 return false;
1353 }
1354 // Check that the dynamic signature for this call is a subtype of the static
1355 // type of the table the function is defined in.
1356 ValueType immediate_type = ValueType::Ref(imm.sig_index, kNonNullable);
1357 if (!VALIDATE(IsSubtypeOf(immediate_type, table_type, module_))) {
1358 DecodeError(pc,
1359 "call_indirect: Immediate signature #%u is not a subtype of "
1360 "immediate table #%u",
1361 imm.sig_index, imm.table_index);
1362 }
1363 return true;
1364 }
1365
1366 inline bool Validate(const byte* pc, BranchDepthImmediate<validate>& imm,
1367 size_t control_depth) {
1368 if (!VALIDATE(imm.depth < control_depth)) {
1369 DecodeError(pc, "invalid branch depth: %u", imm.depth);
1370 return false;
1371 }
1372 return true;
1373 }
1374
1375 inline bool Validate(const byte* pc, BranchTableImmediate<validate>& imm,
1376 size_t block_depth) {
1377 if (!VALIDATE(imm.table_count <= kV8MaxWasmFunctionBrTableSize)) {
1378 DecodeError(pc, "invalid table count (> max br_table size): %u",
1379 imm.table_count);
1380 return false;
1381 }
1382 return checkAvailable(imm.table_count);
1383 }
1384
1385 inline bool Validate(const byte* pc,
1386 BranchOnExceptionImmediate<validate>& imm,
1387 size_t control_size) {
1388 return Validate(pc, imm.depth, control_size) &&
1389 Validate(pc + imm.depth.length, imm.index);
1390 }
1391
1392 inline bool Validate(const byte* pc, WasmOpcode opcode,
1393 SimdLaneImmediate<validate>& imm) {
1394 uint8_t num_lanes = 0;
1395 switch (opcode) {
1396 case kExprF64x2ExtractLane:
1397 case kExprF64x2ReplaceLane:
1398 case kExprI64x2ExtractLane:
1399 case kExprI64x2ReplaceLane:
1400 num_lanes = 2;
1401 break;
1402 case kExprF32x4ExtractLane:
1403 case kExprF32x4ReplaceLane:
1404 case kExprI32x4ExtractLane:
1405 case kExprI32x4ReplaceLane:
1406 num_lanes = 4;
1407 break;
1408 case kExprI16x8ExtractLaneS:
1409 case kExprI16x8ExtractLaneU:
1410 case kExprI16x8ReplaceLane:
1411 num_lanes = 8;
1412 break;
1413 case kExprI8x16ExtractLaneS:
1414 case kExprI8x16ExtractLaneU:
1415 case kExprI8x16ReplaceLane:
1416 num_lanes = 16;
1417 break;
1418 default:
1419 UNREACHABLE();
1420 break;
1421 }
1422 if (!VALIDATE(imm.lane >= 0 && imm.lane < num_lanes)) {
1423 DecodeError(pc, "invalid lane index");
1424 return false;
1425 } else {
1426 return true;
1427 }
1428 }
1429
1430 inline bool Validate(const byte* pc, Simd128Immediate<validate>& imm) {
1431 uint8_t max_lane = 0;
1432 for (uint32_t i = 0; i < kSimd128Size; ++i) {
1433 max_lane = std::max(max_lane, imm.value[i]);
1434 }
1435 // Shuffle indices must be in [0..31] for a 16 lane shuffle.
1436 if (!VALIDATE(max_lane < 2 * kSimd128Size)) {
1437 DecodeError(pc, "invalid shuffle mask");
1438 return false;
1439 }
1440 return true;
1441 }
1442
1443 inline bool Complete(BlockTypeImmediate<validate>& imm) {
1444 if (imm.type != kWasmBottom) return true;
1445 if (!VALIDATE(module_->has_signature(imm.sig_index))) return false;
1446 imm.sig = module_->signature(imm.sig_index);
1447 if (imm.sig->return_count() > 1) {
1448 this->detected_->Add(kFeature_mv);
1449 }
1450 return true;
1451 }
1452
1453 inline bool Validate(const byte* pc, BlockTypeImmediate<validate>& imm) {
1454 if (!Complete(imm)) {
1455 DecodeError(pc, "block type index %u out of bounds (%zu types)",
1456 imm.sig_index, module_->types.size());
1457 return false;
1458 }
1459 return true;
1460 }
1461
1462 inline bool Validate(const byte* pc, FunctionIndexImmediate<validate>& imm) {
1463 if (!VALIDATE(imm.index < module_->functions.size())) {
1464 DecodeError(pc, "invalid function index: %u", imm.index);
1465 return false;
1466 }
1467 if (!VALIDATE(module_->functions[imm.index].declared)) {
1468 DecodeError(pc, "undeclared reference to function #%u", imm.index);
1469 return false;
1470 }
1471 return true;
1472 }
1473
1474 inline bool Validate(const byte* pc, MemoryIndexImmediate<validate>& imm) {
1475 if (!VALIDATE(module_->has_memory)) {
1476 DecodeError(pc, "memory instruction with no memory");
1477 return false;
1478 }
1479 return true;
1480 }
1481
1482 inline bool Validate(const byte* pc, MemoryInitImmediate<validate>& imm) {
1483 if (!VALIDATE(imm.data_segment_index <
1484 module_->num_declared_data_segments)) {
1485 DecodeError(pc, "invalid data segment index: %u", imm.data_segment_index);
1486 return false;
1487 }
1488 if (!Validate(pc + imm.length - imm.memory.length, imm.memory))
1489 return false;
1490 return true;
1491 }
1492
1493 inline bool Validate(const byte* pc, DataDropImmediate<validate>& imm) {
1494 if (!VALIDATE(imm.index < module_->num_declared_data_segments)) {
1495 DecodeError(pc, "invalid data segment index: %u", imm.index);
1496 return false;
1497 }
1498 return true;
1499 }
1500
1501 inline bool Validate(const byte* pc, MemoryCopyImmediate<validate>& imm) {
1502 return Validate(pc, imm.memory_src) &&
1503 Validate(pc + imm.memory_src.length, imm.memory_dst);
1504 }
1505
1506 inline bool Validate(const byte* pc, TableIndexImmediate<validate>& imm) {
1507 if (!VALIDATE(imm.index < module_->tables.size())) {
1508 DecodeError(pc, "invalid table index: %u", imm.index);
1509 return false;
1510 }
1511 return true;
1512 }
1513
1514 inline bool Validate(const byte* pc, TableInitImmediate<validate>& imm) {
1515 if (!VALIDATE(imm.elem_segment_index < module_->elem_segments.size())) {
1516 DecodeError(pc, "invalid element segment index: %u",
1517 imm.elem_segment_index);
1518 return false;
1519 }
1520 if (!Validate(pc + imm.length - imm.table.length, imm.table)) {
1521 return false;
1522 }
1523 ValueType elem_type = module_->elem_segments[imm.elem_segment_index].type;
1524 if (!VALIDATE(IsSubtypeOf(elem_type, module_->tables[imm.table.index].type,
1525 module_))) {
1526 DecodeError(pc, "table %u is not a super-type of %s", imm.table.index,
1527 elem_type.name().c_str());
1528 return false;
1529 }
1530 return true;
1531 }
1532
1533 inline bool Validate(const byte* pc, ElemDropImmediate<validate>& imm) {
1534 if (!VALIDATE(imm.index < module_->elem_segments.size())) {
1535 DecodeError(pc, "invalid element segment index: %u", imm.index);
1536 return false;
1537 }
1538 return true;
1539 }
1540
1541 inline bool Validate(const byte* pc, TableCopyImmediate<validate>& imm) {
1542 if (!Validate(pc, imm.table_src)) return false;
1543 if (!Validate(pc + imm.table_src.length, imm.table_dst)) return false;
1544 ValueType src_type = module_->tables[imm.table_src.index].type;
1545 if (!VALIDATE(IsSubtypeOf(
1546 src_type, module_->tables[imm.table_dst.index].type, module_))) {
1547 DecodeError(pc, "table %u is not a super-type of %s", imm.table_dst.index,
1548 src_type.name().c_str());
1549 return false;
1550 }
1551 return true;
1552 }
1553
1554 inline bool Validate(const byte* pc, HeapTypeImmediate<validate>& imm) {
1555 if (!VALIDATE(!imm.type.is_bottom())) {
1556 DecodeError(pc, "invalid heap type");
1557 return false;
1558 }
1559 if (!VALIDATE(imm.type.is_generic() ||
1560 module_->has_type(imm.type.ref_index()))) {
1561 DecodeError(pc, "Type index %u is out of bounds", imm.type.ref_index());
1562 return false;
1563 }
1564 return true;
1565 }
1566
  // Computes the total byte length (opcode bytes plus immediates) of the
  // instruction starting at {pc}. Invalid prefixed opcodes are reported as
  // decode errors on {decoder}; the prefix length is returned in that case.
  static uint32_t OpcodeLength(WasmDecoder* decoder, const byte* pc) {
    WasmOpcode opcode = static_cast<WasmOpcode>(*pc);
    switch (opcode) {
#define DECLARE_OPCODE_CASE(name, opcode, sig) case kExpr##name:
      FOREACH_LOAD_MEM_OPCODE(DECLARE_OPCODE_CASE)
      FOREACH_STORE_MEM_OPCODE(DECLARE_OPCODE_CASE)
#undef DECLARE_OPCODE_CASE
      {
        MemoryAccessImmediate<validate> imm(decoder, pc + 1, UINT32_MAX);
        return 1 + imm.length;
      }
      case kExprBr:
      case kExprBrIf: {
        BranchDepthImmediate<validate> imm(decoder, pc + 1);
        return 1 + imm.length;
      }
      case kExprGlobalGet:
      case kExprGlobalSet: {
        GlobalIndexImmediate<validate> imm(decoder, pc + 1);
        return 1 + imm.length;
      }
      case kExprTableGet:
      case kExprTableSet: {
        TableIndexImmediate<validate> imm(decoder, pc + 1);
        return 1 + imm.length;
      }
      case kExprCallFunction:
      case kExprReturnCall: {
        CallFunctionImmediate<validate> imm(decoder, pc + 1);
        return 1 + imm.length;
      }
      case kExprCallIndirect:
      case kExprReturnCallIndirect: {
        CallIndirectImmediate<validate> imm(WasmFeatures::All(), decoder,
                                            pc + 1);
        return 1 + imm.length;
      }

      case kExprTry:
      case kExprIf:  // fall through
      case kExprLoop:
      case kExprBlock: {
        BlockTypeImmediate<validate> imm(WasmFeatures::All(), decoder, pc + 1);
        return 1 + imm.length;
      }

      case kExprLet: {
        // A 'let' carries a block type followed by local declarations.
        BlockTypeImmediate<validate> imm(WasmFeatures::All(), decoder, pc + 1);
        uint32_t locals_length;
        bool locals_result =
            decoder->DecodeLocals(decoder->pc() + 1 + imm.length,
                                  &locals_length, base::Optional<uint32_t>());
        return 1 + imm.length + (locals_result ? locals_length : 0);
      }

      case kExprThrow: {
        ExceptionIndexImmediate<validate> imm(decoder, pc + 1);
        return 1 + imm.length;
      }

      case kExprBrOnExn: {
        BranchOnExceptionImmediate<validate> imm(decoder, pc + 1);
        return 1 + imm.length;
      }

      case kExprBrOnNull: {
        BranchDepthImmediate<validate> imm(decoder, pc + 1);
        return 1 + imm.length;
      }

      case kExprLocalGet:
      case kExprLocalSet:
      case kExprLocalTee: {
        LocalIndexImmediate<validate> imm(decoder, pc + 1);
        return 1 + imm.length;
      }
      case kExprSelectWithType: {
        SelectTypeImmediate<validate> imm(WasmFeatures::All(), decoder, pc + 1);
        return 1 + imm.length;
      }
      case kExprBrTable: {
        // br_table has a variable-length list of branch targets.
        BranchTableImmediate<validate> imm(decoder, pc + 1);
        BranchTableIterator<validate> iterator(decoder, imm);
        return 1 + iterator.length();
      }
      case kExprI32Const: {
        ImmI32Immediate<validate> imm(decoder, pc + 1);
        return 1 + imm.length;
      }
      case kExprI64Const: {
        ImmI64Immediate<validate> imm(decoder, pc + 1);
        return 1 + imm.length;
      }
      case kExprRefNull: {
        HeapTypeImmediate<validate> imm(WasmFeatures::All(), decoder, pc + 1);
        return 1 + imm.length;
      }
      case kExprRefIsNull: {
        return 1;
      }
      case kExprRefFunc: {
        FunctionIndexImmediate<validate> imm(decoder, pc + 1);
        return 1 + imm.length;
      }
      case kExprMemoryGrow:
      case kExprMemorySize: {
        MemoryIndexImmediate<validate> imm(decoder, pc + 1);
        return 1 + imm.length;
      }
      case kExprF32Const:
        return 5;
      case kExprF64Const:
        return 9;
      case kNumericPrefix: {
        // Prefixed opcodes: {length} is the size of the (multi-byte) opcode.
        uint32_t length = 0;
        opcode = decoder->read_prefixed_opcode<validate>(pc, &length);
        switch (opcode) {
          case kExprI32SConvertSatF32:
          case kExprI32UConvertSatF32:
          case kExprI32SConvertSatF64:
          case kExprI32UConvertSatF64:
          case kExprI64SConvertSatF32:
          case kExprI64UConvertSatF32:
          case kExprI64SConvertSatF64:
          case kExprI64UConvertSatF64:
            return length;
          case kExprMemoryInit: {
            MemoryInitImmediate<validate> imm(decoder, pc + length);
            return length + imm.length;
          }
          case kExprDataDrop: {
            DataDropImmediate<validate> imm(decoder, pc + length);
            return length + imm.length;
          }
          case kExprMemoryCopy: {
            MemoryCopyImmediate<validate> imm(decoder, pc + length);
            return length + imm.length;
          }
          case kExprMemoryFill: {
            MemoryIndexImmediate<validate> imm(decoder, pc + length);
            return length + imm.length;
          }
          case kExprTableInit: {
            TableInitImmediate<validate> imm(decoder, pc + length);
            return length + imm.length;
          }
          case kExprElemDrop: {
            ElemDropImmediate<validate> imm(decoder, pc + length);
            return length + imm.length;
          }
          case kExprTableCopy: {
            TableCopyImmediate<validate> imm(decoder, pc + length);
            return length + imm.length;
          }
          case kExprTableGrow:
          case kExprTableSize:
          case kExprTableFill: {
            TableIndexImmediate<validate> imm(decoder, pc + length);
            return length + imm.length;
          }
          default:
            decoder->DecodeError(pc, "invalid numeric opcode");
            return length;
        }
      }
      case kSimdPrefix: {
        uint32_t length = 0;
        opcode = decoder->read_prefixed_opcode<validate>(pc, &length);
        switch (opcode) {
#define DECLARE_OPCODE_CASE(name, opcode, sig) case kExpr##name:
          FOREACH_SIMD_0_OPERAND_OPCODE(DECLARE_OPCODE_CASE)
#undef DECLARE_OPCODE_CASE
          return length;
#define DECLARE_OPCODE_CASE(name, opcode, sig) case kExpr##name:
          FOREACH_SIMD_1_OPERAND_OPCODE(DECLARE_OPCODE_CASE)
#undef DECLARE_OPCODE_CASE
          // 1 extra byte for the lane index immediate.
          return length + 1;
#define DECLARE_OPCODE_CASE(name, opcode, sig) case kExpr##name:
          FOREACH_SIMD_MEM_OPCODE(DECLARE_OPCODE_CASE)
#undef DECLARE_OPCODE_CASE
          {
            MemoryAccessImmediate<validate> imm(decoder, pc + length,
                                                UINT32_MAX);
            return length + imm.length;
          }
          case kExprS128Load8Lane:
          case kExprS128Load16Lane:
          case kExprS128Load32Lane:
          case kExprS128Load64Lane:
          case kExprS128Store8Lane:
          case kExprS128Store16Lane:
          case kExprS128Store32Lane:
          case kExprS128Store64Lane: {
            MemoryAccessImmediate<validate> imm(decoder, pc + length,
                                                UINT32_MAX);
            // 1 more byte for lane index immediate.
            return length + imm.length + 1;
          }
          // Shuffles require a byte per lane, or 16 immediate bytes.
          case kExprS128Const:
          case kExprI8x16Shuffle:
            return length + kSimd128Size;
          default:
            decoder->DecodeError(pc, "invalid SIMD opcode");
            return length;
        }
      }
      case kAtomicPrefix: {
        uint32_t length = 0;
        opcode = decoder->read_prefixed_opcode<validate>(pc, &length,
                                                         "atomic_index");
        switch (opcode) {
#define DECLARE_OPCODE_CASE(name, opcode, sig) case kExpr##name:
          FOREACH_ATOMIC_OPCODE(DECLARE_OPCODE_CASE)
#undef DECLARE_OPCODE_CASE
          {
            MemoryAccessImmediate<validate> imm(decoder, pc + length,
                                                UINT32_MAX);
            return length + imm.length;
          }
#define DECLARE_OPCODE_CASE(name, opcode, sig) case kExpr##name:
          FOREACH_ATOMIC_0_OPERAND_OPCODE(DECLARE_OPCODE_CASE)
#undef DECLARE_OPCODE_CASE
          {
            return length + 1;
          }
          default:
            decoder->DecodeError(pc, "invalid Atomics opcode");
            return length;
        }
      }
      case kGCPrefix: {
        uint32_t length = 0;
        opcode =
            decoder->read_prefixed_opcode<validate>(pc, &length, "gc_index");
        switch (opcode) {
          case kExprStructNewWithRtt:
          case kExprStructNewDefault: {
            StructIndexImmediate<validate> imm(decoder, pc + length);
            return length + imm.length;
          }
          case kExprStructGet:
          case kExprStructGetS:
          case kExprStructGetU:
          case kExprStructSet: {
            FieldIndexImmediate<validate> imm(decoder, pc + length);
            return length + imm.length;
          }
          case kExprArrayNewWithRtt:
          case kExprArrayNewDefault:
          case kExprArrayGet:
          case kExprArrayGetS:
          case kExprArrayGetU:
          case kExprArraySet:
          case kExprArrayLen: {
            ArrayIndexImmediate<validate> imm(decoder, pc + length);
            return length + imm.length;
          }
          case kExprBrOnCast: {
            BranchDepthImmediate<validate> imm(decoder, pc + length);
            return length + imm.length;
          }
          case kExprRttCanon:
          case kExprRttSub: {
            // TODO(7748): Account for rtt.sub's additional immediates if
            // they stick.
            HeapTypeImmediate<validate> imm(WasmFeatures::All(), decoder,
                                            pc + length);
            return length + imm.length;
          }

          case kExprI31New:
          case kExprI31GetS:
          case kExprI31GetU:
            return length;
          case kExprRefTest:
          case kExprRefCast: {
            // Two consecutive heap type immediates.
            HeapTypeImmediate<validate> ht1(WasmFeatures::All(), decoder,
                                            pc + length);
            HeapTypeImmediate<validate> ht2(WasmFeatures::All(), decoder,
                                            pc + length + ht1.length);
            return length + ht1.length + ht2.length;
          }

          default:
            // This is unreachable except for malformed modules.
            decoder->DecodeError(pc, "invalid gc opcode");
            return length;
        }
      }
      default:
        // All remaining opcodes carry no immediates.
        return 1;
    }
  }
1861
1862 // TODO(clemensb): This is only used by the interpreter; move there.
// Returns the stack effect {inputs popped, outputs pushed} of the single
// opcode at {pc}. Opcodes with a fixed signature are answered from the
// opcode table; the rest are enumerated explicitly below. CHECK/UNREACHABLE
// is used (not graceful errors) since callers pass already-validated code.
V8_EXPORT_PRIVATE std::pair<uint32_t, uint32_t> StackEffect(const byte* pc) {
  WasmOpcode opcode = static_cast<WasmOpcode>(*pc);
  // Handle "simple" opcodes with a fixed signature first.
  const FunctionSig* sig = WasmOpcodes::Signature(opcode);
  if (!sig) sig = WasmOpcodes::AsmjsSignature(opcode);
  if (sig) return {sig->parameter_count(), sig->return_count()};

#define DECLARE_OPCODE_CASE(name, opcode, sig) case kExpr##name:
  // clang-format off
  switch (opcode) {
    case kExprSelect:
    case kExprSelectWithType:
      return {3, 1};
    case kExprTableSet:
    FOREACH_STORE_MEM_OPCODE(DECLARE_OPCODE_CASE)
      return {2, 0};
    FOREACH_LOAD_MEM_OPCODE(DECLARE_OPCODE_CASE)
    case kExprTableGet:
    case kExprLocalTee:
    case kExprMemoryGrow:
    case kExprRefAsNonNull:
    case kExprBrOnNull:
    case kExprRefIsNull:
      return {1, 1};
    case kExprLocalSet:
    case kExprGlobalSet:
    case kExprDrop:
    case kExprBrIf:
    case kExprBrTable:
    case kExprIf:
    case kExprRethrow:
      return {1, 0};
    case kExprLocalGet:
    case kExprGlobalGet:
    case kExprI32Const:
    case kExprI64Const:
    case kExprF32Const:
    case kExprF64Const:
    case kExprRefNull:
    case kExprRefFunc:
    case kExprMemorySize:
      return {0, 1};
    case kExprCallFunction: {
      // The callee's signature determines the stack effect.
      CallFunctionImmediate<validate> imm(this, pc + 1);
      CHECK(Complete(imm));
      return {imm.sig->parameter_count(), imm.sig->return_count()};
    }
    case kExprCallIndirect: {
      CallIndirectImmediate<validate> imm(this->enabled_, this, pc + 1);
      CHECK(Complete(imm));
      // Indirect calls pop an additional argument for the table index.
      return {imm.sig->parameter_count() + 1,
              imm.sig->return_count()};
    }
    case kExprThrow: {
      // Throw pops the exception's parameters; it never pushes.
      ExceptionIndexImmediate<validate> imm(this, pc + 1);
      CHECK(Complete(imm));
      DCHECK_EQ(0, imm.exception->sig->return_count());
      return {imm.exception->sig->parameter_count(), 0};
    }
    case kExprBr:
    case kExprBlock:
    case kExprLoop:
    case kExprEnd:
    case kExprElse:
    case kExprTry:
    case kExprCatch:
    case kExprBrOnExn:
    case kExprNop:
    case kExprReturn:
    case kExprReturnCall:
    case kExprReturnCallIndirect:
    case kExprUnreachable:
      return {0, 0};
    case kExprLet:
      // TODO(7748): Implement
      return {0, 0};
    case kNumericPrefix:
    case kAtomicPrefix:
    case kSimdPrefix: {
      // For prefixed opcodes, read the second byte and retry the signature
      // lookup; only the listed SIMD groups lack a fixed signature.
      opcode = this->read_prefixed_opcode<validate>(pc);
      switch (opcode) {
        FOREACH_SIMD_1_OPERAND_1_PARAM_OPCODE(DECLARE_OPCODE_CASE)
          return {1, 1};
        FOREACH_SIMD_1_OPERAND_2_PARAM_OPCODE(DECLARE_OPCODE_CASE)
        FOREACH_SIMD_MASK_OPERAND_OPCODE(DECLARE_OPCODE_CASE)
          return {2, 1};
        FOREACH_SIMD_CONST_OPCODE(DECLARE_OPCODE_CASE)
          return {0, 1};
        default: {
          sig = WasmOpcodes::Signature(opcode);
          if (sig) {
            return {sig->parameter_count(), sig->return_count()};
          } else {
            UNREACHABLE();
          }
        }
      }
    }
    case kGCPrefix: {
      opcode = this->read_prefixed_opcode<validate>(pc);
      switch (opcode) {
        case kExprStructNewDefault:
        case kExprStructGet:
        case kExprStructGetS:
        case kExprStructGetU:
        case kExprI31New:
        case kExprI31GetS:
        case kExprI31GetU:
        case kExprArrayLen:
        case kExprRttSub:
          return {1, 1};
        case kExprStructSet:
          return {2, 0};
        case kExprArrayNewDefault:
        case kExprArrayGet:
        case kExprArrayGetS:
        case kExprArrayGetU:
        case kExprRefTest:
        case kExprRefCast:
        case kExprBrOnCast:
          return {2, 1};
        case kExprArraySet:
          return {3, 0};
        case kExprRttCanon:
          return {0, 1};
        case kExprArrayNewWithRtt:
          return {3, 1};
        case kExprStructNewWithRtt: {
          // Pops one value per struct field, plus the rtt.
          StructIndexImmediate<validate> imm(this, this->pc_ + 2);
          this->Complete(imm);
          return {imm.struct_type->field_count() + 1, 1};
        }
        default:
          UNREACHABLE();
      }
    }
    default:
      FATAL("unimplemented opcode: %x (%s)", opcode,
            WasmOpcodes::OpcodeName(opcode));
      return {0, 0};
  }
#undef DECLARE_OPCODE_CASE
  // clang-format on
}
2008
// The {Zone} is implicitly stored in the {ZoneAllocator} which is part of
// this {ZoneVector}. Hence save one field and just get it from there if
// needed (see {zone()} accessor below).
ZoneVector<ValueType> local_types_;

// Cached value, for speed (yes, it's measurably faster to load this value
// than to load the start and end pointer from a vector, subtract and shift).
uint32_t num_locals_ = 0;

const WasmModule* module_;    // module the decoded function belongs to
const WasmFeatures enabled_;  // features enabled for this decode
WasmFeatures* detected_;      // accumulates features actually encountered
const FunctionSig* sig_;      // signature of the decoded function
2022 };
2023
// Forwards an event to the interface unconditionally; use only where
// reachability has already been established (or does not matter).
#define CALL_INTERFACE(name, ...) interface_.name(this, ##__VA_ARGS__)
// Forwards an event to the interface only while the current code is
// reachable; unreachable code is type-checked but generates no calls.
#define CALL_INTERFACE_IF_REACHABLE(name, ...)     \
  do {                                             \
    DCHECK(!control_.empty());                     \
    DCHECK_EQ(current_code_reachable_,             \
              this->ok() && control_.back().reachable()); \
    if (current_code_reachable_) {                 \
      interface_.name(this, ##__VA_ARGS__);        \
    }                                              \
  } while (false)
// Like above, but keyed on the reachability of the *enclosing* block; used
// by constructs (else/catch) that can be reached even when the current arm
// ended unreachable.
#define CALL_INTERFACE_IF_PARENT_REACHABLE(name, ...)       \
  do {                                                      \
    DCHECK(!control_.empty());                              \
    if (VALIDATE(this->ok()) &&                             \
        (control_.size() == 1 || control_at(1)->reachable())) { \
      interface_.name(this, ##__VA_ARGS__);                 \
    }                                                       \
  } while (false)
2042
2043 template <Decoder::ValidateFlag validate, typename Interface>
2044 class WasmFullDecoder : public WasmDecoder<validate> {
2045 using Value = typename Interface::Value;
2046 using Control = typename Interface::Control;
2047 using ArgVector = base::SmallVector<Value, 8>;
2048
2049 // All Value types should be trivially copyable for performance. We push, pop,
2050 // and store them in local variables.
2051 ASSERT_TRIVIALLY_COPYABLE(Value);
2052
2053 public:
// Constructs a full decoder for {body}; extra arguments are forwarded to
// the embedded {Interface} instance.
template <typename... InterfaceArgs>
WasmFullDecoder(Zone* zone, const WasmModule* module,
                const WasmFeatures& enabled, WasmFeatures* detected,
                const FunctionBody& body, InterfaceArgs&&... interface_args)
    : WasmDecoder<validate>(zone, module, enabled, detected, body.sig,
                            body.start, body.end, body.offset),
      interface_(std::forward<InterfaceArgs>(interface_args)...),
      control_(zone) {}
2062
2063 Interface& interface() { return interface_; }
2064
// Decodes (and, with validation enabled, validates) the whole function
// body, forwarding events to the interface. Returns true on success.
bool Decode() {
  DCHECK_EQ(stack_end_, stack_);
  DCHECK(control_.empty());
  DCHECK_LE(this->pc_, this->end_);
  DCHECK_EQ(this->num_locals(), 0);

  // Locals = signature parameters followed by the declared locals.
  this->InitializeLocalsFromSig();
  uint32_t params_count = static_cast<uint32_t>(this->num_locals());
  uint32_t locals_length;
  this->DecodeLocals(this->pc(), &locals_length, params_count);
  this->consume_bytes(locals_length);
  // Declared (non-parameter) locals must be default-initializable.
  for (uint32_t index = params_count; index < this->num_locals(); index++) {
    if (!VALIDATE(this->local_type(index).is_defaultable())) {
      this->DecodeError(
          "Cannot define function-level local of non-defaultable type %s",
          this->local_type(index).name().c_str());
      return this->TraceFailed();
    }
  }

  CALL_INTERFACE(StartFunction);
  DecodeFunctionBody();
  if (this->failed()) return TraceFailed();

  // All control structures must have been closed by a matching "end".
  if (!VALIDATE(control_.empty())) {
    if (control_.size() > 1) {
      this->DecodeError(control_.back().pc(),
                        "unterminated control structure");
    } else {
      this->DecodeError("function body must end with \"end\" opcode");
    }
    return TraceFailed();
  }
  CALL_INTERFACE(FinishFunction);
  if (this->failed()) return TraceFailed();

  TRACE("wasm-decode ok\n\n");
  return true;
}
2104
// Traces the recorded decode error (if tracing is on) and returns false so
// callers can {return TraceFailed();} on failure paths.
bool TraceFailed() {
  if (this->error_.offset()) {
    TRACE("wasm-error module+%-6d func+%d: %s\n\n", this->error_.offset(),
          this->GetBufferRelativeOffset(this->error_.offset()),
          this->error_.message().c_str());
  } else {
    TRACE("wasm-error: %s\n\n", this->error_.message().c_str());
  }
  return false;
}
2115
2116 const char* SafeOpcodeNameAt(const byte* pc) {
2117 if (!pc) return "<null>";
2118 if (pc >= this->end_) return "<end>";
2119 WasmOpcode opcode = static_cast<WasmOpcode>(*pc);
2120 if (!WasmOpcodes::IsPrefixOpcode(opcode)) {
2121 return WasmOpcodes::OpcodeName(static_cast<WasmOpcode>(opcode));
2122 }
2123 opcode = this->template read_prefixed_opcode<Decoder::kFullValidation>(pc);
2124 return WasmOpcodes::OpcodeName(opcode);
2125 }
2126
// Current position relative to the function start, as a code position.
inline WasmCodePosition position() {
  int offset = static_cast<int>(this->pc_ - this->start_);
  DCHECK_EQ(this->pc_ - this->start_, offset);  // overflows cannot happen
  return offset;
}
2132
// Number of control structures currently open (incl. the function block).
inline uint32_t control_depth() const {
  return static_cast<uint32_t>(control_.size());
}
2136
// Control entry {depth} levels below the innermost one (0 == innermost).
inline Control* control_at(uint32_t depth) {
  DCHECK_GT(control_.size(), depth);
  return &control_.back() - depth;
}
2141
// Number of values currently on the operand stack.
inline uint32_t stack_size() const {
  DCHECK_GE(stack_end_, stack_);
  DCHECK_GE(kMaxUInt32, stack_end_ - stack_);
  return static_cast<uint32_t>(stack_end_ - stack_);
}
2147
// Value {depth} positions below the top of the stack (1 == top).
inline Value* stack_value(uint32_t depth) {
  DCHECK_LT(0, depth);
  DCHECK_GE(stack_size(), depth);
  return stack_end_ - depth;
}
2153
// Marks the rest of the current block as unreachable at runtime while
// keeping it reachable per the spec (so validation rules still apply).
void SetSucceedingCodeDynamicallyUnreachable() {
  Control* current = &control_.back();
  if (current->reachable()) {
    current->reachability = kSpecOnlyReachable;
    current_code_reachable_ = false;
  }
}
2161
private:
Interface interface_;  // consumer of decode events (e.g. graph builder)

// The value stack, stored as individual pointers for maximum performance.
Value* stack_ = nullptr;           // base of the stack allocation
Value* stack_end_ = nullptr;       // one past the topmost value
Value* stack_capacity_end_ = nullptr;  // end of the allocated storage
ASSERT_TRIVIALLY_COPYABLE(Value);

// stack of blocks, loops, and ifs.
ZoneVector<Control> control_;

// Controls whether code should be generated for the current block (basically
// a cache for {ok() && control_.back().reachable()}).
bool current_code_reachable_ = true;
2177
// Placeholder value (bottom type) pushed in unreachable code.
static Value UnreachableValue(const uint8_t* pc) {
  return Value{pc, kWasmBottom};
}
2181
// Validates that the module declares a memory; reports an error (at the
// opcode byte, hence {pc_ - 1}) otherwise.
bool CheckHasMemory() {
  if (!VALIDATE(this->module_->has_memory)) {
    this->DecodeError(this->pc_ - 1, "memory instruction with no memory");
    return false;
  }
  return true;
}
2189
// Rejects post-MVP SIMD opcodes unless enabled via --wasm-simd-post-mvp.
bool CheckSimdPostMvp(WasmOpcode opcode) {
  if (!FLAG_wasm_simd_post_mvp && WasmOpcodes::IsSimdPostMvpOpcode(opcode)) {
    this->DecodeError(
        "simd opcode not available, enable with --wasm-simd-post-mvp");
    return false;
  }
  return true;
}
2198
#ifdef DEBUG
// Accumulates one line of decoder trace output (opcode, control stack,
// operand stack) and prints it on destruction when tracing is enabled.
class TraceLine {
 public:
  explicit TraceLine(WasmFullDecoder* decoder) : decoder_(decoder) {
    WasmOpcode opcode = static_cast<WasmOpcode>(*decoder->pc());
    // Prefixed opcodes are appended later, once the full opcode is known.
    if (!WasmOpcodes::IsPrefixOpcode(opcode)) AppendOpcode(opcode);
  }

  // Appends the position and name of the (fully read) opcode.
  void AppendOpcode(WasmOpcode opcode) {
    DCHECK(!WasmOpcodes::IsPrefixOpcode(opcode));
    Append(TRACE_INST_FORMAT, decoder_->startrel(decoder_->pc_),
           WasmOpcodes::OpcodeName(opcode));
  }

  ~TraceLine() {
    if (!FLAG_trace_wasm_decoder) return;
    AppendStackState();
    PrintF("%.*s\n", len_, buffer_);
  }

  // Appends a formatted string.
  PRINTF_FORMAT(2, 3)
  void Append(const char* format, ...) {
    if (!FLAG_trace_wasm_decoder) return;
    va_list va_args;
    va_start(va_args, format);
    size_t remaining_len = kMaxLen - len_;
    Vector<char> remaining_msg_space(buffer_ + len_, remaining_len);
    int len = VSNPrintF(remaining_msg_space, format, va_args);
    va_end(va_args);
    // A negative result means the buffer is full; mark it exhausted.
    len_ += len < 0 ? remaining_len : len;
  }

 private:
  // Appends a one-letter-per-entry rendering of the control stack followed
  // by the operand stack types.
  void AppendStackState() {
    DCHECK(FLAG_trace_wasm_decoder);
    Append(" ");
    for (Control& c : decoder_->control_) {
      switch (c.kind) {
        case kControlIf:
          Append("I");
          break;
        case kControlBlock:
          Append("B");
          break;
        case kControlLoop:
          Append("L");
          break;
        case kControlTry:
          Append("T");
          break;
        case kControlIfElse:
        case kControlTryCatch:
        case kControlLet:  // TODO(7748): Implement
          break;
      }
      if (c.start_merge.arity) Append("%u-", c.start_merge.arity);
      Append("%u", c.end_merge.arity);
      if (!c.reachable()) Append("%c", c.unreachable() ? '*' : '#');
    }
    Append(" | ");
    for (size_t i = 0; i < decoder_->stack_size(); ++i) {
      Value& val = decoder_->stack_[i];
      Append(" %c", val.type.short_name());
    }
  }

  static constexpr int kMaxLen = 512;

  char buffer_[kMaxLen];
  int len_ = 0;
  WasmFullDecoder* const decoder_;
};
#else
// Release builds: tracing compiles away to no-ops.
class TraceLine {
 public:
  explicit TraceLine(WasmFullDecoder*) {}

  void AppendOpcode(WasmOpcode) {}

  PRINTF_FORMAT(2, 3)
  void Append(const char* format, ...) {}
};
#endif
2283
// Declares a static dispatch wrapper (sets up tracing) plus the actual
// handler {Decode<name>Impl}; handlers return the instruction length in
// bytes, or 0 on error.
#define DECODE(name)                                                     \
  static int Decode##name(WasmFullDecoder* decoder, WasmOpcode opcode) { \
    TraceLine trace_msg(decoder);                                        \
    return decoder->Decode##name##Impl(&trace_msg, opcode);              \
  }                                                                      \
  V8_INLINE int Decode##name##Impl(TraceLine* trace_msg, WasmOpcode opcode)
2290
2291 DECODE(Nop) { return 1; }
2292
// Generates one handler per fixed-signature ("simple") opcode, delegating
// to the matching BuildSimpleOperator_<sig> helper.
#define BUILD_SIMPLE_OPCODE(op, _, sig) \
  DECODE(op) { return BuildSimpleOperator_##sig(kExpr##op); }
FOREACH_SIMPLE_OPCODE(BUILD_SIMPLE_OPCODE)
#undef BUILD_SIMPLE_OPCODE
2297
// block: pops the block's inputs, opens a control entry, re-pushes the
// inputs as the block's start merge.
DECODE(Block) {
  BlockTypeImmediate<validate> imm(this->enabled_, this, this->pc_ + 1);
  if (!this->Validate(this->pc_ + 1, imm)) return 0;
  ArgVector args = PopArgs(imm.sig);
  Control* block = PushControl(kControlBlock);
  SetBlockType(block, imm, args.begin());
  CALL_INTERFACE_IF_REACHABLE(Block, block);
  PushMergeValues(block, &block->start_merge);
  return 1 + imm.length;
}
2308
// rethrow (exception handling proposal): pops an exnref and rethrows it;
// code afterwards is unreachable.
DECODE(Rethrow) {
  CHECK_PROTOTYPE_OPCODE(eh);
  Value exception = Pop(0, kWasmExnRef);
  CALL_INTERFACE_IF_REACHABLE(Rethrow, exception);
  EndControl();
  return 1;
}
2316
// throw (exception handling proposal): pops the exception's payload values
// and throws; code afterwards is unreachable.
DECODE(Throw) {
  CHECK_PROTOTYPE_OPCODE(eh);
  ExceptionIndexImmediate<validate> imm(this, this->pc_ + 1);
  if (!this->Validate(this->pc_ + 1, imm)) return 0;
  ArgVector args = PopArgs(imm.exception->ToFunctionSig());
  CALL_INTERFACE_IF_REACHABLE(Throw, imm, VectorOf(args));
  EndControl();
  return 1 + imm.length;
}
2326
// try (exception handling proposal): like block, but opens a try scope
// that a later catch completes.
DECODE(Try) {
  CHECK_PROTOTYPE_OPCODE(eh);
  BlockTypeImmediate<validate> imm(this->enabled_, this, this->pc_ + 1);
  if (!this->Validate(this->pc_ + 1, imm)) return 0;
  ArgVector args = PopArgs(imm.sig);
  Control* try_block = PushControl(kControlTry);
  SetBlockType(try_block, imm, args.begin());
  CALL_INTERFACE_IF_REACHABLE(Try, try_block);
  PushMergeValues(try_block, &try_block->start_merge);
  return 1 + imm.length;
}
2338
// catch (exception handling proposal): closes the try arm, resets the
// operand stack to the try's entry depth, and pushes the caught exnref.
DECODE(Catch) {
  CHECK_PROTOTYPE_OPCODE(eh);
  if (!VALIDATE(!control_.empty())) {
    this->DecodeError("catch does not match any try");
    return 0;
  }
  Control* c = &control_.back();
  if (!VALIDATE(c->is_try())) {
    this->DecodeError("catch does not match any try");
    return 0;
  }
  if (!VALIDATE(c->is_incomplete_try())) {
    this->DecodeError("catch already present for try");
    return 0;
  }
  c->kind = kControlTryCatch;
  FallThruTo(c);
  // Drop whatever the try arm left on the stack.
  DCHECK_LE(stack_ + c->stack_depth, stack_end_);
  stack_end_ = stack_ + c->stack_depth;
  // The catch arm's reachability derives from the enclosing block.
  c->reachability = control_at(1)->innerReachability();
  current_code_reachable_ = this->ok() && c->reachable();
  Value* exception = Push(kWasmExnRef);
  CALL_INTERFACE_IF_PARENT_REACHABLE(Catch, c, exception);
  return 1;
}
2364
// br_on_exn (exception handling proposal): branches with the unpacked
// exception values if the exnref matches the given exception tag;
// otherwise leaves the exnref on the stack.
DECODE(BrOnExn) {
  CHECK_PROTOTYPE_OPCODE(eh);
  BranchOnExceptionImmediate<validate> imm(this, this->pc_ + 1);
  if (!this->Validate(this->pc() + 1, imm, control_.size())) return 0;
  Control* c = control_at(imm.depth.depth);
  Value exception = Pop(0, kWasmExnRef);
  const WasmExceptionSig* sig = imm.index.exception->sig;
  int value_count = static_cast<int>(sig->parameter_count());
  // TODO(wasm): This operand stack mutation is an ugly hack to make
  // both type checking here as well as environment merging in the
  // graph builder interface work out of the box. We should introduce
  // special handling for both and do minimal/no stack mutation here.
  EnsureStackSpace(value_count);
  for (int i = 0; i < value_count; ++i) Push(sig->GetParam(i));
  Vector<Value> values(stack_ + c->stack_depth, value_count);
  TypeCheckBranchResult check_result = TypeCheckBranch(c, true);
  if (this->failed()) return 0;
  if (V8_LIKELY(check_result == kReachableBranch)) {
    CALL_INTERFACE(BrOnException, exception, imm.index, imm.depth.depth,
                   values);
    c->br_merge()->reached = true;
  } else if (check_result == kInvalidStack) {
    return 0;
  }
  // Undo the temporary pushes and restore the exnref for the no-match path.
  for (int i = value_count - 1; i >= 0; i--) Pop(i);
  Value* pexception = Push(kWasmExnRef);
  *pexception = exception;
  return 1 + imm.length;
}
2394
// br_on_null (typed function references proposal): branches if the
// reference is null; otherwise pushes it back with non-nullable type.
DECODE(BrOnNull) {
  CHECK_PROTOTYPE_OPCODE(typed_funcref);
  BranchDepthImmediate<validate> imm(this, this->pc_ + 1);
  if (!this->Validate(this->pc_ + 1, imm, control_.size())) return 0;
  Value ref_object = Pop(0);
  if (this->failed()) return 0;
  Control* c = control_at(imm.depth);
  TypeCheckBranchResult check_result = TypeCheckBranch(c, true);
  if (V8_LIKELY(check_result == kReachableBranch)) {
    switch (ref_object.type.kind()) {
      case ValueType::kBottom:
        // We are in unreachable code, just forward the bottom value.
      case ValueType::kRef: {
        // A non-nullable reference can never take the branch.
        Value* result = Push(ref_object.type);
        CALL_INTERFACE(PassThrough, ref_object, result);
        break;
      }
      case ValueType::kOptRef: {
        // We need to Push the result value after calling BrOnNull on
        // the interface. Therefore we must sync the ref_object and
        // result nodes afterwards (in PassThrough).
        CALL_INTERFACE(BrOnNull, ref_object, imm.depth);
        Value* result =
            Push(ValueType::Ref(ref_object.type.heap_type(), kNonNullable));
        CALL_INTERFACE(PassThrough, ref_object, result);
        c->br_merge()->reached = true;
        break;
      }
      default:
        this->DecodeError("invalid argument type to br_on_null");
        return 0;
    }
  } else if (check_result == kInvalidStack) {
    return 0;
  }
  return 1 + imm.length;
}
2432
// let (typed function references proposal): opens a block that prepends
// fresh locals, initialized from values popped off the stack; the locals
// are removed again by the matching end (see DecodeEnd).
DECODE(Let) {
  CHECK_PROTOTYPE_OPCODE(typed_funcref);
  BlockTypeImmediate<validate> imm(this->enabled_, this, this->pc_ + 1);
  if (!this->Validate(this->pc_ + 1, imm)) return 0;
  uint32_t old_local_count = this->num_locals();
  // Temporarily add the let-defined values to the beginning of the function
  // locals.
  uint32_t locals_length;
  if (!this->DecodeLocals(this->pc() + 1 + imm.length, &locals_length, 0)) {
    return 0;
  }
  uint32_t num_added_locals = this->num_locals() - old_local_count;
  // Pop the initial values for the new locals (typed by the new locals).
  ArgVector let_local_values =
      PopArgs(static_cast<uint32_t>(imm.in_arity()),
              VectorOf(this->local_types_.data(), num_added_locals));
  ArgVector args = PopArgs(imm.sig);
  Control* let_block = PushControl(kControlLet, num_added_locals);
  SetBlockType(let_block, imm, args.begin());
  CALL_INTERFACE_IF_REACHABLE(Block, let_block);
  PushMergeValues(let_block, &let_block->start_merge);
  CALL_INTERFACE_IF_REACHABLE(AllocateLocals, VectorOf(let_local_values));
  return 1 + imm.length + locals_length;
}
2456
2457 DECODE(Loop) {
2458 BlockTypeImmediate<validate> imm(this->enabled_, this, this->pc_ + 1);
2459 if (!this->Validate(this->pc_ + 1, imm)) return 0;
2460 ArgVector args = PopArgs(imm.sig);
2461 Control* block = PushControl(kControlLoop);
2462 SetBlockType(&control_.back(), imm, args.begin());
2463 CALL_INTERFACE_IF_REACHABLE(Loop, block);
2464 PushMergeValues(block, &block->start_merge);
2465 return 1 + imm.length;
2466 }
2467
// if: pops the i32 condition and the block inputs, opens an if scope.
DECODE(If) {
  BlockTypeImmediate<validate> imm(this->enabled_, this, this->pc_ + 1);
  if (!this->Validate(this->pc_ + 1, imm)) return 0;
  Value cond = Pop(0, kWasmI32);
  ArgVector args = PopArgs(imm.sig);
  if (!VALIDATE(this->ok())) return 0;
  Control* if_block = PushControl(kControlIf);
  SetBlockType(if_block, imm, args.begin());
  CALL_INTERFACE_IF_REACHABLE(If, cond, if_block);
  PushMergeValues(if_block, &if_block->start_merge);
  return 1 + imm.length;
}
2480
// else: type-checks the then-arm's fallthrough, then restarts the block
// from its start merge for the else-arm.
DECODE(Else) {
  if (!VALIDATE(!control_.empty())) {
    this->DecodeError("else does not match any if");
    return 0;
  }
  Control* c = &control_.back();
  if (!VALIDATE(c->is_if())) {
    this->DecodeError("else does not match an if");
    return 0;
  }
  if (!VALIDATE(c->is_onearmed_if())) {
    this->DecodeError("else already present for if");
    return 0;
  }
  if (!TypeCheckFallThru()) return 0;
  c->kind = kControlIfElse;
  CALL_INTERFACE_IF_PARENT_REACHABLE(Else, c);
  if (c->reachable()) c->end_merge.reached = true;
  PushMergeValues(c, &c->start_merge);
  // The else arm's reachability derives from the enclosing block.
  c->reachability = control_at(1)->innerReachability();
  current_code_reachable_ = this->ok() && c->reachable();
  return 1;
}
2504
// end: closes the innermost control structure; the final (implicit
// function-level) end performs the implicit return.
DECODE(End) {
  if (!VALIDATE(!control_.empty())) {
    this->DecodeError("end does not match any if, try, or block");
    return 0;
  }
  Control* c = &control_.back();
  if (!VALIDATE(!c->is_incomplete_try())) {
    this->DecodeError("missing catch or catch-all in try");
    return 0;
  }
  if (c->is_onearmed_if()) {
    // A one-armed if acts as an empty else; its arities must agree.
    if (!VALIDATE(c->end_merge.arity == c->start_merge.arity)) {
      this->DecodeError(
          c->pc(), "start-arity and end-arity of one-armed if must match");
      return 0;
    }
    if (!TypeCheckOneArmedIf(c)) return 0;
  }
  if (c->is_let()) {
    // Remove the locals the let-block prepended (see DecodeLet).
    this->local_types_.erase(this->local_types_.begin(),
                             this->local_types_.begin() + c->locals_count);
    this->num_locals_ -= c->locals_count;
    CALL_INTERFACE_IF_REACHABLE(DeallocateLocals, c->locals_count);
  }
  if (!TypeCheckFallThru()) return 0;

  if (control_.size() == 1) {
    // If at the last (implicit) control, check we are at end.
    if (!VALIDATE(this->pc_ + 1 == this->end_)) {
      this->DecodeError(this->pc_ + 1, "trailing code after function end");
      return 0;
    }
    // The result of the block is the return value.
    trace_msg->Append("\n" TRACE_INST_FORMAT, startrel(this->pc_),
                      "(implicit) return");
    DoReturn();
    control_.clear();
    return 1;
  }
  PopControl(c);
  return 1;
}
2547
// select (untyped): valid only for numeric (non-reference) types; use
// select-with-type for references.
DECODE(Select) {
  Value cond = Pop(2, kWasmI32);
  Value fval = Pop(1);
  Value tval = Pop(0, fval.type);
  // In unreachable code {tval} may be bottom; fall back to {fval}'s type.
  ValueType type = tval.type == kWasmBottom ? fval.type : tval.type;
  if (!VALIDATE(!type.is_reference_type())) {
    this->DecodeError(
        "select without type is only valid for value type inputs");
    return 0;
  }
  Value* result = Push(type);
  CALL_INTERFACE_IF_REACHABLE(Select, cond, fval, tval, result);
  return 1;
}
2562
// select t (reference types proposal): select with explicit type
// annotation, usable for reference types as well.
DECODE(SelectWithType) {
  CHECK_PROTOTYPE_OPCODE(reftypes);
  SelectTypeImmediate<validate> imm(this->enabled_, this, this->pc_ + 1);
  if (this->failed()) return 0;
  Value cond = Pop(2, kWasmI32);
  Value fval = Pop(1, imm.type);
  Value tval = Pop(0, imm.type);
  Value* result = Push(imm.type);
  CALL_INTERFACE_IF_REACHABLE(Select, cond, fval, tval, result);
  return 1 + imm.length;
}
2574
// br: unconditional branch; a branch to the function-level block is a
// return. Code afterwards is unreachable.
DECODE(Br) {
  BranchDepthImmediate<validate> imm(this, this->pc_ + 1);
  if (!this->Validate(this->pc_ + 1, imm, control_.size())) return 0;
  Control* c = control_at(imm.depth);
  TypeCheckBranchResult check_result = TypeCheckBranch(c, false);
  if (V8_LIKELY(check_result == kReachableBranch)) {
    if (imm.depth == control_.size() - 1) {
      DoReturn();
    } else {
      CALL_INTERFACE(Br, c);
      c->br_merge()->reached = true;
    }
  } else if (check_result == kInvalidStack) {
    return 0;
  }
  EndControl();
  return 1 + imm.length;
}
2593
// br_if: conditional branch; the fall-through path keeps the branch values
// on the stack, so no EndControl here.
DECODE(BrIf) {
  BranchDepthImmediate<validate> imm(this, this->pc_ + 1);
  if (!this->Validate(this->pc_ + 1, imm, control_.size())) return 0;
  Value cond = Pop(0, kWasmI32);
  if (this->failed()) return 0;
  Control* c = control_at(imm.depth);
  TypeCheckBranchResult check_result = TypeCheckBranch(c, true);
  if (V8_LIKELY(check_result == kReachableBranch)) {
    CALL_INTERFACE(BrIf, cond, imm.depth);
    c->br_merge()->reached = true;
  } else if (check_result == kInvalidStack) {
    return 0;
  }
  return 1 + imm.length;
}
2609
// br_table: multi-way branch on an i32 key; all targets must agree on the
// branch value types. Code afterwards is unreachable.
DECODE(BrTable) {
  BranchTableImmediate<validate> imm(this, this->pc_ + 1);
  BranchTableIterator<validate> iterator(this, imm);
  Value key = Pop(0, kWasmI32);
  if (this->failed()) return 0;
  if (!this->Validate(this->pc_ + 1, imm, control_.size())) return 0;

  // Cache the branch targets during the iteration, so that we can set
  // all branch targets as reachable after the {CALL_INTERFACE} call.
  std::vector<bool> br_targets(control_.size());

  // The result types of the br_table instruction. We have to check the
  // stack against these types. Only needed during validation.
  std::vector<ValueType> result_types;

  while (iterator.has_next()) {
    const uint32_t index = iterator.cur_index();
    const byte* pos = iterator.pc();
    uint32_t target = iterator.next();
    if (!VALIDATE(ValidateBrTableTarget(target, pos, index))) return 0;
    // Avoid redundant branch target checks.
    if (br_targets[target]) continue;
    br_targets[target] = true;

    if (validate) {
      if (index == 0) {
        // With the first branch target, initialize the result types.
        result_types = InitializeBrTableResultTypes(target);
      } else if (!UpdateBrTableResultTypes(&result_types, target, pos,
                                           index)) {
        return 0;
      }
    }
  }

  if (!VALIDATE(TypeCheckBrTable(result_types))) return 0;

  DCHECK(this->ok());

  if (current_code_reachable_) {
    CALL_INTERFACE(BrTable, imm, key);

    // Mark every distinct target's merge as reached.
    for (int i = 0, e = control_depth(); i < e; ++i) {
      if (!br_targets[i]) continue;
      control_at(i)->br_merge()->reached = true;
    }
  }

  EndControl();
  return 1 + iterator.length();
}
2661
// return: type-checks and emits a return; code afterwards is unreachable.
DECODE(Return) {
  if (V8_LIKELY(current_code_reachable_)) {
    if (!VALIDATE(TypeCheckReturn())) return 0;
    DoReturn();
  } else {
    // We pop all return values from the stack to check their type.
    // Since we deal with unreachable code, we do not have to keep the
    // values.
    int num_returns = static_cast<int>(this->sig_->return_count());
    for (int i = num_returns - 1; i >= 0; --i) {
      Pop(i, this->sig_->GetReturn(i));
    }
  }

  EndControl();
  return 1;
}
2679
// unreachable: traps at runtime; code afterwards is unreachable.
DECODE(Unreachable) {
  CALL_INTERFACE_IF_REACHABLE(Unreachable);
  EndControl();
  return 1;
}
2685
// i32.const: pushes the immediate as an i32 value.
DECODE(I32Const) {
  ImmI32Immediate<validate> imm(this, this->pc_ + 1);
  Value* value = Push(kWasmI32);
  CALL_INTERFACE_IF_REACHABLE(I32Const, value, imm.value);
  return 1 + imm.length;
}
2692
// i64.const: pushes the immediate as an i64 value.
DECODE(I64Const) {
  ImmI64Immediate<validate> imm(this, this->pc_ + 1);
  Value* value = Push(kWasmI64);
  CALL_INTERFACE_IF_REACHABLE(I64Const, value, imm.value);
  return 1 + imm.length;
}
2699
// f32.const: pushes the immediate as an f32 value.
DECODE(F32Const) {
  ImmF32Immediate<validate> imm(this, this->pc_ + 1);
  Value* value = Push(kWasmF32);
  CALL_INTERFACE_IF_REACHABLE(F32Const, value, imm.value);
  return 1 + imm.length;
}
2706
// f64.const: pushes the immediate as an f64 value.
DECODE(F64Const) {
  ImmF64Immediate<validate> imm(this, this->pc_ + 1);
  Value* value = Push(kWasmF64);
  CALL_INTERFACE_IF_REACHABLE(F64Const, value, imm.value);
  return 1 + imm.length;
}
2713
// ref.null (reference types proposal): pushes a null of the given heap
// type, as a nullable reference.
DECODE(RefNull) {
  CHECK_PROTOTYPE_OPCODE(reftypes);
  HeapTypeImmediate<validate> imm(this->enabled_, this, this->pc_ + 1);
  if (!this->Validate(this->pc_ + 1, imm)) return 0;
  ValueType type = ValueType::Ref(imm.type, kNullable);
  Value* value = Push(type);
  CALL_INTERFACE_IF_REACHABLE(RefNull, type, value);
  return 1 + imm.length;
}
2723
// ref.is_null (reference types proposal): pops a reference, pushes an i32
// null-check result.
DECODE(RefIsNull) {
  CHECK_PROTOTYPE_OPCODE(reftypes);
  Value value = Pop(0);
  Value* result = Push(kWasmI32);
  switch (value.type.kind()) {
    case ValueType::kOptRef:
      CALL_INTERFACE_IF_REACHABLE(UnOp, kExprRefIsNull, value, result);
      return 1;
    case ValueType::kBottom:
      // We are in unreachable code, the return value does not matter.
    case ValueType::kRef:
      // For non-nullable references, the result is always false.
      CALL_INTERFACE_IF_REACHABLE(I32Const, result, 0);
      return 1;
    default:
      if (validate) {
        this->DecodeError(
            "invalid argument type to ref.is_null. Expected reference type, "
            "got %s",
            value.type.name().c_str());
        return 0;
      }
      UNREACHABLE();
  }
}
2749
// ref.func (reference types proposal): pushes a non-null reference to the
// given function; with typed_funcref the reference carries the function's
// precise signature type, otherwise the generic funcref type.
DECODE(RefFunc) {
  CHECK_PROTOTYPE_OPCODE(reftypes);
  FunctionIndexImmediate<validate> imm(this, this->pc_ + 1);
  if (!this->Validate(this->pc_ + 1, imm)) return 0;
  HeapType heap_type(this->enabled_.has_typed_funcref()
                         ? this->module_->functions[imm.index].sig_index
                         : HeapType::kFunc);
  Value* value = Push(ValueType::Ref(heap_type, kNonNullable));
  CALL_INTERFACE_IF_REACHABLE(RefFunc, imm.index, value);
  return 1 + imm.length;
}
2761
2762 DECODE(RefAsNonNull) {
2763 CHECK_PROTOTYPE_OPCODE(typed_funcref);
2764 Value value = Pop(0);
2765 switch (value.type.kind()) {
2766 case ValueType::kBottom:
2767 // We are in unreachable code. Forward the bottom value.
2768 case ValueType::kRef: {
2769 Value* result = Push(value.type);
2770 CALL_INTERFACE_IF_REACHABLE(PassThrough, value, result);
2771 return 1;
2772 }
2773 case ValueType::kOptRef: {
2774 Value* result =
2775 Push(ValueType::Ref(value.type.heap_type(), kNonNullable));
2776 CALL_INTERFACE_IF_REACHABLE(RefAsNonNull, value, result);
2777 return 1;
2778 }
2779 default:
2780 if (validate) {
2781 this->DecodeError(
2782 "invalid agrument type to ref.as_non_null: Expected reference "
2783 "type, got %s",
2784 value.type.name().c_str());
2785 }
2786 return 0;
2787 }
2788 }
2789
  // local.get: push the value of the indexed local onto the stack.
  DECODE(LocalGet) {
    LocalIndexImmediate<validate> imm(this, this->pc_ + 1);
    if (!this->Validate(this->pc_ + 1, imm)) return 0;
    Value* value = Push(this->local_type(imm.index));
    CALL_INTERFACE_IF_REACHABLE(LocalGet, value, imm);
    return 1 + imm.length;
  }
2797
  // local.set: pop a value (type-checked against the local's declared type)
  // and store it into the indexed local.
  DECODE(LocalSet) {
    LocalIndexImmediate<validate> imm(this, this->pc_ + 1);
    if (!this->Validate(this->pc_ + 1, imm)) return 0;
    Value value = Pop(0, this->local_type(imm.index));
    CALL_INTERFACE_IF_REACHABLE(LocalSet, value, imm);
    return 1 + imm.length;
  }
2805
  // local.tee: like local.set, but the assigned value stays on the stack.
  DECODE(LocalTee) {
    LocalIndexImmediate<validate> imm(this, this->pc_ + 1);
    if (!this->Validate(this->pc_ + 1, imm)) return 0;
    Value value = Pop(0, this->local_type(imm.index));
    Value* result = Push(value.type);
    CALL_INTERFACE_IF_REACHABLE(LocalTee, value, result, imm);
    return 1 + imm.length;
  }
2814
  // drop: pop and discard the top stack value (any type).
  DECODE(Drop) {
    Value value = Pop(0);
    CALL_INTERFACE_IF_REACHABLE(Drop, value);
    return 1;
  }
2820
  // global.get: push the value of the indexed global.
  DECODE(GlobalGet) {
    GlobalIndexImmediate<validate> imm(this, this->pc_ + 1);
    if (!this->Validate(this->pc_ + 1, imm)) return 0;
    Value* result = Push(imm.type);
    CALL_INTERFACE_IF_REACHABLE(GlobalGet, result, imm);
    return 1 + imm.length;
  }
2828
  // global.set: pop a value and store it into the indexed global. Assigning
  // to an immutable global is a validation error.
  DECODE(GlobalSet) {
    GlobalIndexImmediate<validate> imm(this, this->pc_ + 1);
    if (!this->Validate(this->pc_ + 1, imm)) return 0;
    if (!VALIDATE(imm.global->mutability)) {
      this->DecodeError("immutable global #%u cannot be assigned", imm.index);
      return 0;
    }
    Value value = Pop(0, imm.type);
    CALL_INTERFACE_IF_REACHABLE(GlobalSet, value, imm);
    return 1 + imm.length;
  }
2840
  // table.get: pop an i32 index and push the element from the indexed table;
  // the result has the table's declared element type.
  DECODE(TableGet) {
    CHECK_PROTOTYPE_OPCODE(reftypes);
    TableIndexImmediate<validate> imm(this, this->pc_ + 1);
    if (!this->Validate(this->pc_ + 1, imm)) return 0;
    Value index = Pop(0, kWasmI32);
    Value* result = Push(this->module_->tables[imm.index].type);
    CALL_INTERFACE_IF_REACHABLE(TableGet, index, result, imm);
    return 1 + imm.length;
  }
2850
  // table.set: pop a value (typed by the table's element type) and an i32
  // index, and store the value into the indexed table.
  DECODE(TableSet) {
    CHECK_PROTOTYPE_OPCODE(reftypes);
    TableIndexImmediate<validate> imm(this, this->pc_ + 1);
    if (!this->Validate(this->pc_ + 1, imm)) return 0;
    // Note: value is popped before index (reverse of stack push order).
    Value value = Pop(1, this->module_->tables[imm.index].type);
    Value index = Pop(0, kWasmI32);
    CALL_INTERFACE_IF_REACHABLE(TableSet, index, value, imm);
    return 1 + imm.length;
  }
2860
  // Shared handler for all plain load opcodes: maps the opcode to its
  // LoadType via a dense lookup table indexed by opcode value.
  DECODE(LoadMem) {
    // Hard-code the list of load types. The opcodes are highly unlikely to
    // ever change, and we have some checks here to guard against that.
    static_assert(sizeof(LoadType) == sizeof(uint8_t), "LoadType is compact");
    static constexpr uint8_t kMinOpcode = kExprI32LoadMem;
    static constexpr uint8_t kMaxOpcode = kExprI64LoadMem32U;
    // Order must match the opcode numbering from kMinOpcode to kMaxOpcode.
    static constexpr LoadType kLoadTypes[] = {
        LoadType::kI32Load, LoadType::kI64Load, LoadType::kF32Load,
        LoadType::kF64Load, LoadType::kI32Load8S, LoadType::kI32Load8U,
        LoadType::kI32Load16S, LoadType::kI32Load16U, LoadType::kI64Load8S,
        LoadType::kI64Load8U, LoadType::kI64Load16S, LoadType::kI64Load16U,
        LoadType::kI64Load32S, LoadType::kI64Load32U};
    STATIC_ASSERT(arraysize(kLoadTypes) == kMaxOpcode - kMinOpcode + 1);
    DCHECK_LE(kMinOpcode, opcode);
    DCHECK_GE(kMaxOpcode, opcode);
    return DecodeLoadMem(kLoadTypes[opcode - kMinOpcode]);
  }
2878
  // Shared handler for all plain store opcodes: maps the opcode to its
  // StoreType via a dense lookup table indexed by opcode value.
  DECODE(StoreMem) {
    // Hard-code the list of store types. The opcodes are highly unlikely to
    // ever change, and we have some checks here to guard against that.
    static_assert(sizeof(StoreType) == sizeof(uint8_t), "StoreType is compact");
    static constexpr uint8_t kMinOpcode = kExprI32StoreMem;
    static constexpr uint8_t kMaxOpcode = kExprI64StoreMem32;
    // Order must match the opcode numbering from kMinOpcode to kMaxOpcode.
    static constexpr StoreType kStoreTypes[] = {
        StoreType::kI32Store, StoreType::kI64Store, StoreType::kF32Store,
        StoreType::kF64Store, StoreType::kI32Store8, StoreType::kI32Store16,
        StoreType::kI64Store8, StoreType::kI64Store16, StoreType::kI64Store32};
    STATIC_ASSERT(arraysize(kStoreTypes) == kMaxOpcode - kMinOpcode + 1);
    DCHECK_LE(kMinOpcode, opcode);
    DCHECK_GE(kMaxOpcode, opcode);
    return DecodeStoreMem(kStoreTypes[opcode - kMinOpcode]);
  }
2894
  // memory.grow: pop an i32 page count, push the old size in pages (or -1 on
  // failure). Not available for asm.js-origin modules.
  DECODE(MemoryGrow) {
    if (!CheckHasMemory()) return 0;
    // The memory index immediate is decoded only for its byte length; with a
    // single memory its value is the reserved zero byte.
    MemoryIndexImmediate<validate> imm(this, this->pc_ + 1);
    if (!VALIDATE(this->module_->origin == kWasmOrigin)) {
      this->DecodeError("grow_memory is not supported for asmjs modules");
      return 0;
    }
    Value value = Pop(0, kWasmI32);
    Value* result = Push(kWasmI32);
    CALL_INTERFACE_IF_REACHABLE(MemoryGrow, value, result);
    return 1 + imm.length;
  }
2907
  // memory.size: push the current memory size in pages as an i32.
  DECODE(MemorySize) {
    if (!CheckHasMemory()) return 0;
    // The memory index immediate is decoded only for its byte length.
    MemoryIndexImmediate<validate> imm(this, this->pc_ + 1);
    Value* result = Push(kWasmI32);
    CALL_INTERFACE_IF_REACHABLE(CurrentMemoryPages, result);
    return 1 + imm.length;
  }
2915
  // call: direct call to the function named by the immediate. Pops the
  // arguments per the callee's signature and pushes its returns.
  DECODE(CallFunction) {
    CallFunctionImmediate<validate> imm(this, this->pc_ + 1);
    if (!this->Validate(this->pc_ + 1, imm)) return 0;
    ArgVector args = PopArgs(imm.sig);
    Value* returns = PushReturns(imm.sig);
    CALL_INTERFACE_IF_REACHABLE(CallDirect, imm, args.begin(), returns);
    return 1 + imm.length;
  }
2924
  // call_indirect: pop an i32 table index, then the arguments per the
  // signature named by the immediate, and push the returns.
  DECODE(CallIndirect) {
    CallIndirectImmediate<validate> imm(this->enabled_, this, this->pc_ + 1);
    if (!this->Validate(this->pc_ + 1, imm)) return 0;
    Value index = Pop(0, kWasmI32);
    ArgVector args = PopArgs(imm.sig);
    Value* returns = PushReturns(imm.sig);
    CALL_INTERFACE_IF_REACHABLE(CallIndirect, index, imm, args.begin(),
                                returns);
    return 1 + imm.length;
  }
2935
  // return_call: tail call to a function by index. The callee's return types
  // must match the current function's; control ends after the call.
  DECODE(ReturnCall) {
    CHECK_PROTOTYPE_OPCODE(return_call);
    CallFunctionImmediate<validate> imm(this, this->pc_ + 1);
    if (!this->Validate(this->pc_ + 1, imm)) return 0;
    if (!VALIDATE(this->CanReturnCall(imm.sig))) {
      this->DecodeError("%s: %s", WasmOpcodes::OpcodeName(kExprReturnCall),
                        "tail call return types mismatch");
      return 0;
    }
    ArgVector args = PopArgs(imm.sig);
    CALL_INTERFACE_IF_REACHABLE(ReturnCall, imm, args.begin());
    // A tail call never falls through; the rest of the block is unreachable.
    EndControl();
    return 1 + imm.length;
  }
2950
  // return_call_indirect: indirect tail call through a table. The callee's
  // return types must match the current function's; control ends afterwards.
  DECODE(ReturnCallIndirect) {
    CHECK_PROTOTYPE_OPCODE(return_call);
    CallIndirectImmediate<validate> imm(this->enabled_, this, this->pc_ + 1);
    if (!this->Validate(this->pc_ + 1, imm)) return 0;
    if (!VALIDATE(this->CanReturnCall(imm.sig))) {
      this->DecodeError("%s: %s",
                        WasmOpcodes::OpcodeName(kExprReturnCallIndirect),
                        "tail call return types mismatch");
      return 0;
    }
    Value index = Pop(0, kWasmI32);
    ArgVector args = PopArgs(imm.sig);
    CALL_INTERFACE_IF_REACHABLE(ReturnCallIndirect, index, imm, args.begin());
    // A tail call never falls through; the rest of the block is unreachable.
    EndControl();
    return 1 + imm.length;
  }
2967
  // call_ref: pop a typed function reference and call it. The reference's
  // static type must be an indexed function type in this module. No
  // immediates, so the opcode is always 1 byte.
  DECODE(CallRef) {
    CHECK_PROTOTYPE_OPCODE(typed_funcref);
    Value func_ref = Pop(0);
    ValueType func_type = func_ref.type;
    if (func_type == kWasmBottom) {
      // We are in unreachable code, maintain the polymorphic stack.
      return 1;
    }
    if (!VALIDATE(func_type.is_object_reference_type() &&
                  func_type.has_index() &&
                  this->module_->has_signature(func_type.ref_index()))) {
      this->DecodeError(
          "call_ref: Expected function reference on top of stack, found %s of "
          "type %s instead",
          SafeOpcodeNameAt(func_ref.pc()), func_type.name().c_str());
      return 0;
    }
    const FunctionSig* sig = this->module_->signature(func_type.ref_index());
    ArgVector args = PopArgs(sig);
    Value* returns = PushReturns(sig);
    CALL_INTERFACE_IF_REACHABLE(CallRef, func_ref, sig, func_type.ref_index(),
                                args.begin(), returns);
    return 1;
  }
2992
  // return_call_ref: tail call through a typed function reference. Requires
  // both the typed_funcref and return_call features; control ends afterwards.
  DECODE(ReturnCallRef) {
    CHECK_PROTOTYPE_OPCODE(typed_funcref);
    CHECK_PROTOTYPE_OPCODE(return_call);
    Value func_ref = Pop(0);
    ValueType func_type = func_ref.type;
    if (func_type == kWasmBottom) {
      // We are in unreachable code, maintain the polymorphic stack.
      return 1;
    }
    if (!VALIDATE(func_type.is_object_reference_type() &&
                  func_type.has_index() &&
                  this->module_->has_signature(func_type.ref_index()))) {
      this->DecodeError(
          "return_call_ref: Expected function reference on top of stack, found "
          "%s of type %s instead",
          SafeOpcodeNameAt(func_ref.pc()), func_type.name().c_str());
      return 0;
    }
    const FunctionSig* sig = this->module_->signature(func_type.ref_index());
    ArgVector args = PopArgs(sig);
    CALL_INTERFACE_IF_REACHABLE(ReturnCallRef, func_ref, sig,
                                func_type.ref_index(), args.begin());
    // A tail call never falls through; the rest of the block is unreachable.
    EndControl();
    return 1;
  }
3018
  // Numeric prefix (0xfc): read the full prefixed opcode, gate it on the
  // feature it belongs to, and dispatch to DecodeNumericOpcode.
  DECODE(Numeric) {
    uint32_t opcode_length = 0;
    WasmOpcode full_opcode = this->template read_prefixed_opcode<validate>(
        this->pc_, &opcode_length, "numeric index");
    // table.grow/size/fill belong to reftypes; everything from memory.init
    // upwards belongs to bulk memory.
    if (full_opcode == kExprTableGrow || full_opcode == kExprTableSize ||
        full_opcode == kExprTableFill) {
      CHECK_PROTOTYPE_OPCODE(reftypes);
    } else if (full_opcode >= kExprMemoryInit) {
      CHECK_PROTOTYPE_OPCODE(bulk_memory);
    }
    trace_msg->AppendOpcode(full_opcode);
    return DecodeNumericOpcode(full_opcode, opcode_length);
  }
3032
  // SIMD prefix (0xfd): read the full prefixed opcode and dispatch to
  // DecodeSimdOpcode.
  DECODE(Simd) {
    CHECK_PROTOTYPE_OPCODE(simd);
    uint32_t opcode_length = 0;
    WasmOpcode full_opcode = this->template read_prefixed_opcode<validate>(
        this->pc_, &opcode_length);
    // Bail out if reading the prefixed opcode itself already failed.
    if (!VALIDATE(this->ok())) return 0;
    trace_msg->AppendOpcode(full_opcode);
    return DecodeSimdOpcode(full_opcode, opcode_length);
  }
3042
  // Atomic prefix (0xfe): read the full prefixed opcode and dispatch to
  // DecodeAtomicOpcode. Requires the threads feature.
  DECODE(Atomic) {
    CHECK_PROTOTYPE_OPCODE(threads);
    uint32_t opcode_length = 0;
    WasmOpcode full_opcode = this->template read_prefixed_opcode<validate>(
        this->pc_, &opcode_length, "atomic index");
    trace_msg->AppendOpcode(full_opcode);
    return DecodeAtomicOpcode(full_opcode, opcode_length);
  }
3051
  // GC prefix (0xfb): read the full prefixed opcode and dispatch to
  // DecodeGCOpcode. Requires the gc feature.
  DECODE(GC) {
    CHECK_PROTOTYPE_OPCODE(gc);
    uint32_t opcode_length = 0;
    WasmOpcode full_opcode = this->template read_prefixed_opcode<validate>(
        this->pc_, &opcode_length, "gc index");
    trace_msg->AppendOpcode(full_opcode);
    return DecodeGCOpcode(full_opcode, opcode_length);
  }
3060
// Generate one trivial Decode{name} handler per simple prototype opcode, all
// forwarding to BuildSimplePrototypeOperator.
#define SIMPLE_PROTOTYPE_CASE(name, opc, sig) \
  DECODE(name) { return BuildSimplePrototypeOperator(opcode); }
  FOREACH_SIMPLE_PROTOTYPE_OPCODE(SIMPLE_PROTOTYPE_CASE)
#undef SIMPLE_PROTOTYPE_CASE
3065
  // Fallback handler for any opcode without a dedicated entry: valid only for
  // asm.js-origin modules, where it decodes the asm.js-specific opcodes.
  DECODE(UnknownOrAsmJs) {
    // Deal with special asmjs opcodes.
    if (!VALIDATE(is_asmjs_module(this->module_))) {
      this->DecodeError("Invalid opcode 0x%x", opcode);
      return 0;
    }
    const FunctionSig* sig = WasmOpcodes::AsmjsSignature(opcode);
    DCHECK_NOT_NULL(sig);
    return BuildSimpleOperator(opcode, sig);
  }
3076
3077 #undef DECODE
3078
3079 using OpcodeHandler = int (*)(WasmFullDecoder*, WasmOpcode);
3080
3081 // Ideally we would use template specialization for the different opcodes, but
3082 // GCC does not allow to specialize templates in class scope
3083 // (https://gcc.gnu.org/bugzilla/show_bug.cgi?id=85282), and specializing
3084 // outside the class is not allowed for non-specialized classes.
3085 // Hence just list all implementations explicitly here, which also gives more
3086 // freedom to use the same implementation for different opcodes.
3087 #define DECODE_IMPL(opcode) DECODE_IMPL2(kExpr##opcode, opcode)
3088 #define DECODE_IMPL2(opcode, name) \
3089 if (idx == opcode) return &WasmFullDecoder::Decode##name
3090
3091 static constexpr OpcodeHandler GetOpcodeHandlerTableEntry(size_t idx) {
3092 DECODE_IMPL(Nop);
3093 #define BUILD_SIMPLE_OPCODE(op, _, sig) DECODE_IMPL(op);
3094 FOREACH_SIMPLE_OPCODE(BUILD_SIMPLE_OPCODE)
3095 #undef BUILD_SIMPLE_OPCODE
3096 DECODE_IMPL(Block);
3097 DECODE_IMPL(Rethrow);
3098 DECODE_IMPL(Throw);
3099 DECODE_IMPL(Try);
3100 DECODE_IMPL(Catch);
3101 DECODE_IMPL(BrOnExn);
3102 DECODE_IMPL(BrOnNull);
3103 DECODE_IMPL(Let);
3104 DECODE_IMPL(Loop);
3105 DECODE_IMPL(If);
3106 DECODE_IMPL(Else);
3107 DECODE_IMPL(End);
3108 DECODE_IMPL(Select);
3109 DECODE_IMPL(SelectWithType);
3110 DECODE_IMPL(Br);
3111 DECODE_IMPL(BrIf);
3112 DECODE_IMPL(BrTable);
3113 DECODE_IMPL(Return);
3114 DECODE_IMPL(Unreachable);
3115 DECODE_IMPL(I32Const);
3116 DECODE_IMPL(I64Const);
3117 DECODE_IMPL(F32Const);
3118 DECODE_IMPL(F64Const);
3119 DECODE_IMPL(RefNull);
3120 DECODE_IMPL(RefIsNull);
3121 DECODE_IMPL(RefFunc);
3122 DECODE_IMPL(RefAsNonNull);
3123 DECODE_IMPL(LocalGet);
3124 DECODE_IMPL(LocalSet);
3125 DECODE_IMPL(LocalTee);
3126 DECODE_IMPL(Drop);
3127 DECODE_IMPL(GlobalGet);
3128 DECODE_IMPL(GlobalSet);
3129 DECODE_IMPL(TableGet);
3130 DECODE_IMPL(TableSet);
3131 #define DECODE_LOAD_MEM(op, ...) DECODE_IMPL2(kExpr##op, LoadMem);
3132 FOREACH_LOAD_MEM_OPCODE(DECODE_LOAD_MEM)
3133 #undef DECODE_LOAD_MEM
3134 #define DECODE_STORE_MEM(op, ...) DECODE_IMPL2(kExpr##op, StoreMem);
3135 FOREACH_STORE_MEM_OPCODE(DECODE_STORE_MEM)
3136 #undef DECODE_LOAD_MEM
3137 DECODE_IMPL(MemoryGrow);
3138 DECODE_IMPL(MemorySize);
3139 DECODE_IMPL(CallFunction);
3140 DECODE_IMPL(CallIndirect);
3141 DECODE_IMPL(ReturnCall);
3142 DECODE_IMPL(ReturnCallIndirect);
3143 DECODE_IMPL(CallRef);
3144 DECODE_IMPL(ReturnCallRef);
3145 DECODE_IMPL2(kNumericPrefix, Numeric);
3146 DECODE_IMPL2(kSimdPrefix, Simd);
3147 DECODE_IMPL2(kAtomicPrefix, Atomic);
3148 DECODE_IMPL2(kGCPrefix, GC);
3149 #define SIMPLE_PROTOTYPE_CASE(name, opc, sig) DECODE_IMPL(name);
3150 FOREACH_SIMPLE_PROTOTYPE_OPCODE(SIMPLE_PROTOTYPE_CASE)
3151 #undef SIMPLE_PROTOTYPE_CASE
3152 return &WasmFullDecoder::DecodeUnknownOrAsmJs;
3153 }
3154
3155 #undef DECODE_IMPL
3156 #undef DECODE_IMPL2
3157
  // Returns the handler for a single-byte opcode, using a table built once at
  // compile time from GetOpcodeHandlerTableEntry.
  OpcodeHandler GetOpcodeHandler(uint8_t opcode) {
    static constexpr std::array<OpcodeHandler, 256> kOpcodeHandlers =
        base::make_array<256>(GetOpcodeHandlerTableEntry);
    return kOpcodeHandlers[opcode];
  }
3163
  // Main decode loop: sets up the implicit function-level block, then decodes
  // one opcode at a time via the handler table until the end of the body.
  void DecodeFunctionBody() {
    TRACE("wasm-decode %p...%p (module+%u, %d bytes)\n", this->start(),
          this->end(), this->pc_offset(),
          static_cast<int>(this->end() - this->start()));

    // Set up initial function block.
    {
      Control* c = PushControl(kControlBlock);
      // The function block has no inputs; its end-merge carries the
      // function's return types.
      InitMerge(&c->start_merge, 0, [](uint32_t) -> Value { UNREACHABLE(); });
      InitMerge(&c->end_merge,
                static_cast<uint32_t>(this->sig_->return_count()),
                [&](uint32_t i) {
                  return Value{this->pc_, this->sig_->GetReturn(i)};
                });
      CALL_INTERFACE(StartFunctionBody, c);
    }

    // Decode the function body.
    while (this->pc_ < this->end_) {
      // Most operations only grow the stack by at least one element (unary and
      // binary operations, local.get, constants, ...). Thus check that there is
      // enough space for those operations centrally, and avoid any bounds
      // checks in those operations.
      EnsureStackSpace(1);
      uint8_t first_byte = *this->pc_;
      WasmOpcode opcode = static_cast<WasmOpcode>(first_byte);
      CALL_INTERFACE_IF_REACHABLE(NextInstruction, opcode);
      OpcodeHandler handler = GetOpcodeHandler(first_byte);
      // Handlers return the full instruction length, or 0 on decode error (in
      // which case the loop exits via the pc_/end_ comparison and this->ok()).
      int len = (*handler)(this, opcode);
      this->pc_ += len;
    }

    // A well-formed body ends exactly at end_ (with a final `end` opcode).
    if (!VALIDATE(this->pc_ == this->end_)) {
      this->DecodeError("Beyond end of code");
    }
  }
3200
  // Marks the current control block as unreachable (e.g. after br, return, or
  // a tail call) and truncates the value stack back to the block's base.
  void EndControl() {
    DCHECK(!control_.empty());
    Control* current = &control_.back();
    DCHECK_LE(stack_ + current->stack_depth, stack_end_);
    stack_end_ = stack_ + current->stack_depth;
    CALL_INTERFACE_IF_REACHABLE(EndControl, current);
    current->reachability = kUnreachable;
    current_code_reachable_ = false;
  }
3210
  // Initializes a Merge of the given arity, producing each value via
  // {get_val}. Arity 1 uses the inline storage; larger arities allocate an
  // array in the decoder's zone.
  template <typename func>
  void InitMerge(Merge<Value>* merge, uint32_t arity, func get_val) {
    merge->arity = arity;
    if (arity == 1) {
      merge->vals.first = get_val(0);
    } else if (arity > 1) {
      merge->vals.array = this->zone()->template NewArray<Value>(arity);
      for (uint32_t i = 0; i < arity; i++) {
        merge->vals.array[i] = get_val(i);
      }
    }
  }
3223
  // Initializes a control block's start/end merges from its block-type
  // immediate: outputs come from the immediate's out types, inputs from the
  // already-popped {args}.
  void SetBlockType(Control* c, BlockTypeImmediate<validate>& imm,
                    Value* args) {
    const byte* pc = this->pc_;
    InitMerge(&c->end_merge, imm.out_arity(), [pc, &imm](uint32_t i) {
      return Value{pc, imm.out_type(i)};
    });
    InitMerge(&c->start_merge, imm.in_arity(),
              [args](uint32_t i) { return args[i]; });
  }
3233
  // Pops arguments as required by signature. Pops in reverse so that
  // args[0]..args[n-1] end up in declaration order; a null signature means
  // zero arguments.
  V8_INLINE ArgVector PopArgs(const FunctionSig* sig) {
    int count = sig ? static_cast<int>(sig->parameter_count()) : 0;
    ArgVector args(count);
    for (int i = count - 1; i >= 0; --i) {
      args[i] = Pop(i, sig->GetParam(i));
    }
    return args;
  }
3243
  // Pops one argument per struct field, in reverse stack order, type-checking
  // each against the field's unpacked type (i8/i16 fields take i32 values).
  V8_INLINE ArgVector PopArgs(const StructType* type) {
    int count = static_cast<int>(type->field_count());
    ArgVector args(count);
    for (int i = count - 1; i >= 0; i--) {
      args[i] = Pop(i, type->field(i).Unpacked());
    }
    return args;
  }
3252
  // Pops one argument per entry of {arg_types}, in reverse stack order.
  // {base_index} offsets the operand index reported in error messages.
  V8_INLINE ArgVector PopArgs(uint32_t base_index,
                              Vector<ValueType> arg_types) {
    ArgVector args(arg_types.size());
    for (int i = static_cast<int>(arg_types.size()) - 1; i >= 0; i--) {
      args[i] = Pop(base_index + i, arg_types[i]);
    }
    return args;
  }
3261
3262 ValueType GetReturnType(const FunctionSig* sig) {
3263 DCHECK_GE(1, sig->return_count());
3264 return sig->return_count() == 0 ? kWasmStmt : sig->GetReturn();
3265 }
3266
  // Pushes a new control block of the given kind. The new block inherits
  // reachability from its parent (spec-only-reachable propagates inward).
  Control* PushControl(ControlKind kind, uint32_t locals_count = 0) {
    Reachability reachability =
        control_.empty() ? kReachable : control_.back().innerReachability();
    control_.emplace_back(kind, locals_count, stack_size(), this->pc_,
                          reachability);
    current_code_reachable_ = this->ok() && reachability == kReachable;
    return &control_.back();
  }
3275
  // Pops the innermost control block, pushing its merge values onto the value
  // stack (except for loops, which leave their values in place) and updating
  // reachability of the enclosing block.
  void PopControl(Control* c) {
    DCHECK_EQ(c, &control_.back());
    CALL_INTERFACE_IF_PARENT_REACHABLE(PopControl, c);

    // A loop just leaves the values on the stack.
    if (!c->is_loop()) PushMergeValues(c, &c->end_merge);

    bool parent_reached =
        c->reachable() || c->end_merge.reached || c->is_onearmed_if();
    control_.pop_back();
    // If the parent block was reachable before, but the popped control does not
    // return to here, this block becomes "spec only reachable".
    if (!parent_reached) SetSucceedingCodeDynamicallyUnreachable();
    current_code_reachable_ = control_.back().reachable();
  }
3291
  // Decodes a memory load: reads the alignment/offset immediate, pops the
  // index (i64 for memory64, i32 otherwise), and pushes the loaded value.
  // Returns the total instruction length, or 0 on error.
  int DecodeLoadMem(LoadType type, int prefix_len = 1) {
    if (!CheckHasMemory()) return 0;
    MemoryAccessImmediate<validate> imm(this, this->pc_ + prefix_len,
                                        type.size_log_2());
    ValueType index_type = this->module_->is_memory64 ? kWasmI64 : kWasmI32;
    Value index = Pop(0, index_type);
    Value* result = Push(type.value_type());
    CALL_INTERFACE_IF_REACHABLE(LoadMem, type, imm, index, result);
    return prefix_len + imm.length;
  }
3302
  // Decodes a SIMD load-transform (splat / extend / zero-extend): pops an
  // index, loads with the given transformation, and pushes an s128.
  int DecodeLoadTransformMem(LoadType type, LoadTransformationKind transform,
                             uint32_t opcode_length) {
    if (!CheckHasMemory()) return 0;
    // Load extends always load 64-bits.
    uint32_t max_alignment =
        transform == LoadTransformationKind::kExtend ? 3 : type.size_log_2();
    MemoryAccessImmediate<validate> imm(this, this->pc_ + opcode_length,
                                        max_alignment);
    ValueType index_type = this->module_->is_memory64 ? kWasmI64 : kWasmI32;
    Value index = Pop(0, index_type);
    Value* result = Push(kWasmS128);
    CALL_INTERFACE_IF_REACHABLE(LoadTransform, type, transform, imm, index,
                                result);
    return opcode_length + imm.length;
  }
3318
  // Decodes a SIMD load-lane: loads a scalar from memory into one lane of an
  // s128. Immediates are the memory access followed by the lane index; the
  // operands are the input vector (top of stack) and the memory index.
  int DecodeLoadLane(LoadType type, uint32_t opcode_length) {
    if (!CheckHasMemory()) return 0;
    MemoryAccessImmediate<validate> mem_imm(this, this->pc_ + opcode_length,
                                            type.size_log_2());
    SimdLaneImmediate<validate> lane_imm(
        this, this->pc_ + opcode_length + mem_imm.length);
    Value v128 = Pop(1, kWasmS128);
    Value index = Pop(0, kWasmI32);

    Value* result = Push(kWasmS128);
    CALL_INTERFACE_IF_REACHABLE(LoadLane, type, v128, index, mem_imm,
                                lane_imm.lane, result);
    return opcode_length + mem_imm.length + lane_imm.length;
  }
3333
  // Decodes a SIMD store-lane: stores one lane of an s128 to memory. Same
  // immediate/operand layout as DecodeLoadLane, but pushes no result.
  int DecodeStoreLane(StoreType type, uint32_t opcode_length) {
    if (!CheckHasMemory()) return 0;
    MemoryAccessImmediate<validate> mem_imm(this, this->pc_ + opcode_length,
                                            type.size_log_2());
    SimdLaneImmediate<validate> lane_imm(
        this, this->pc_ + opcode_length + mem_imm.length);
    Value v128 = Pop(1, kWasmS128);
    Value index = Pop(0, kWasmI32);

    CALL_INTERFACE_IF_REACHABLE(StoreLane, type, mem_imm, index, v128,
                                lane_imm.lane);
    return opcode_length + mem_imm.length + lane_imm.length;
  }
3347
  // Decodes a memory store: reads the alignment/offset immediate, pops the
  // value then the index (i64 for memory64, i32 otherwise). Returns the total
  // instruction length, or 0 on error.
  int DecodeStoreMem(StoreType store, int prefix_len = 1) {
    if (!CheckHasMemory()) return 0;
    MemoryAccessImmediate<validate> imm(this, this->pc_ + prefix_len,
                                        store.size_log_2());
    Value value = Pop(1, store.value_type());
    ValueType index_type = this->module_->is_memory64 ? kWasmI64 : kWasmI32;
    Value index = Pop(0, index_type);
    CALL_INTERFACE_IF_REACHABLE(StoreMem, store, imm, index, value);
    return prefix_len + imm.length;
  }
3358
  // Checks that a br_table entry's branch depth is within the current control
  // stack; {index} is the entry's position, used only for the error message.
  bool ValidateBrTableTarget(uint32_t target, const byte* pos, int index) {
    if (!VALIDATE(target < this->control_.size())) {
      this->DecodeError(pos, "improper branch in br_table target %u (depth %u)",
                        index, target);
      return false;
    }
    return true;
  }
3367
3368 std::vector<ValueType> InitializeBrTableResultTypes(uint32_t target) {
3369 Merge<Value>* merge = control_at(target)->br_merge();
3370 int br_arity = merge->arity;
3371 std::vector<ValueType> result(br_arity);
3372 for (int i = 0; i < br_arity; ++i) {
3373 result[i] = (*merge)[i].type;
3374 }
3375 return result;
3376 }
3377
  // Merges the result types of one more br_table target into {result_types}.
  // With reftypes the per-slot type is narrowed to the common subtype of all
  // targets; without reftypes all targets must have identical types. Returns
  // false (with a decode error) on any mismatch.
  bool UpdateBrTableResultTypes(std::vector<ValueType>* result_types,
                                uint32_t target, const byte* pos, int index) {
    Merge<Value>* merge = control_at(target)->br_merge();
    int br_arity = merge->arity;
    // First we check if the arities match.
    if (!VALIDATE(br_arity == static_cast<int>(result_types->size()))) {
      this->DecodeError(pos,
                        "inconsistent arity in br_table target %u (previous "
                        "was %zu, this one is %u)",
                        index, result_types->size(), br_arity);
      return false;
    }

    for (int i = 0; i < br_arity; ++i) {
      if (this->enabled_.has_reftypes()) {
        // The expected type is the biggest common sub type of all targets.
        ValueType type = (*result_types)[i];
        (*result_types)[i] =
            CommonSubtype((*result_types)[i], (*merge)[i].type, this->module_);
        // kWasmBottom signals that no common subtype exists.
        if (!VALIDATE((*result_types)[i] != kWasmBottom)) {
          this->DecodeError(pos,
                            "inconsistent type in br_table target %u (previous "
                            "was %s, this one is %s)",
                            index, type.name().c_str(),
                            (*merge)[i].type.name().c_str());
          return false;
        }
      } else {
        // All targets must have the same signature.
        if (!VALIDATE((*result_types)[i] == (*merge)[i].type)) {
          this->DecodeError(pos,
                            "inconsistent type in br_table target %u (previous "
                            "was %s, this one is %s)",
                            index, (*result_types)[i].name().c_str(),
                            (*merge)[i].type.name().c_str());
          return false;
        }
      }
    }
    return true;
  }
3419
  // Type-checks the stack against the merged br_table result types. In
  // reachable code the top {br_arity} stack values must be subtypes of the
  // expected types; in unreachable code values are popped polymorphically.
  bool TypeCheckBrTable(const std::vector<ValueType>& result_types) {
    int br_arity = static_cast<int>(result_types.size());
    if (V8_LIKELY(!control_.back().unreachable())) {
      int available =
          static_cast<int>(stack_size()) - control_.back().stack_depth;
      // There have to be enough values on the stack.
      if (!VALIDATE(available >= br_arity)) {
        this->DecodeError(
            "expected %u elements on the stack for branch to @%d, found %u",
            br_arity, startrel(control_.back().pc()), available);
        return false;
      }
      Value* stack_values = stack_end_ - br_arity;
      // Type-check the topmost br_arity values on the stack.
      for (int i = 0; i < br_arity; ++i) {
        Value& val = stack_values[i];
        if (!VALIDATE(IsSubtypeOf(val.type, result_types[i], this->module_))) {
          this->DecodeError("type error in merge[%u] (expected %s, got %s)", i,
                            result_types[i].name().c_str(),
                            val.type.name().c_str());
          return false;
        }
      }
    } else {  // !control_.back().reachable()
      // Pop values from the stack, according to the expected signature.
      for (int i = 0; i < br_arity; ++i) Pop(i + 1, result_types[i]);
    }
    return this->ok();
  }
3449
  // v128.const: reads a 16-byte literal immediate and pushes it as an s128.
  uint32_t SimdConstOp(uint32_t opcode_length) {
    Simd128Immediate<validate> imm(this, this->pc_ + opcode_length);
    auto* result = Push(kWasmS128);
    CALL_INTERFACE_IF_REACHABLE(S128Const, imm, result);
    return opcode_length + kSimd128Size;
  }
3456
  // Shared handler for all extract_lane opcodes: pops an s128 and pushes the
  // scalar lane value of the given {type}. On an invalid lane index, only the
  // consumed length is returned (the error was already reported).
  uint32_t SimdExtractLane(WasmOpcode opcode, ValueType type,
                           uint32_t opcode_length) {
    SimdLaneImmediate<validate> imm(this, this->pc_ + opcode_length);
    if (this->Validate(this->pc_ + opcode_length, opcode, imm)) {
      Value inputs[] = {Pop(0, kWasmS128)};
      Value* result = Push(type);
      CALL_INTERFACE_IF_REACHABLE(SimdLaneOp, opcode, imm, ArrayVector(inputs),
                                  result);
    }
    return opcode_length + imm.length;
  }
3468
  // Shared handler for all replace_lane opcodes: pops a scalar of {type} and
  // an s128, and pushes the vector with the lane replaced.
  uint32_t SimdReplaceLane(WasmOpcode opcode, ValueType type,
                           uint32_t opcode_length) {
    SimdLaneImmediate<validate> imm(this, this->pc_ + opcode_length);
    if (this->Validate(this->pc_ + opcode_length, opcode, imm)) {
      // Pop in reverse order into a pre-initialized array so both slots are
      // valid even if a Pop fails in unreachable code.
      Value inputs[2] = {UnreachableValue(this->pc_),
                         UnreachableValue(this->pc_)};
      inputs[1] = Pop(1, type);
      inputs[0] = Pop(0, kWasmS128);
      Value* result = Push(kWasmS128);
      CALL_INTERFACE_IF_REACHABLE(SimdLaneOp, opcode, imm, ArrayVector(inputs),
                                  result);
    }
    return opcode_length + imm.length;
  }
3483
3484 uint32_t Simd8x16ShuffleOp(uint32_t opcode_length) {
3485 Simd128Immediate<validate> imm(this, this->pc_ + opcode_length);
3486 if (this->Validate(this->pc_ + opcode_length, imm)) {
3487 Value input1 = Pop(1, kWasmS128);
3488 Value input0 = Pop(0, kWasmS128);
3489 Value* result = Push(kWasmS128);
3490 CALL_INTERFACE_IF_REACHABLE(Simd8x16ShuffleOp, imm, input0, input1,
3491 result);
3492 }
3493 return opcode_length + 16;
3494 }
3495
  // Dispatches a fully-read SIMD opcode to its specific handler (lane ops,
  // loads/stores, constants) or, by default, to the generic signature-driven
  // path. Returns the total instruction length, or 0 on error.
  uint32_t DecodeSimdOpcode(WasmOpcode opcode, uint32_t opcode_length) {
    // opcode_length is the number of bytes that this SIMD-specific opcode takes
    // up in the LEB128 encoded form.
    switch (opcode) {
      case kExprF64x2ExtractLane:
        return SimdExtractLane(opcode, kWasmF64, opcode_length);
      case kExprF32x4ExtractLane:
        return SimdExtractLane(opcode, kWasmF32, opcode_length);
      case kExprI64x2ExtractLane:
        return SimdExtractLane(opcode, kWasmI64, opcode_length);
      case kExprI32x4ExtractLane:
      case kExprI16x8ExtractLaneS:
      case kExprI16x8ExtractLaneU:
      case kExprI8x16ExtractLaneS:
      case kExprI8x16ExtractLaneU:
        return SimdExtractLane(opcode, kWasmI32, opcode_length);
      case kExprF64x2ReplaceLane:
        return SimdReplaceLane(opcode, kWasmF64, opcode_length);
      case kExprF32x4ReplaceLane:
        return SimdReplaceLane(opcode, kWasmF32, opcode_length);
      case kExprI64x2ReplaceLane:
        return SimdReplaceLane(opcode, kWasmI64, opcode_length);
      case kExprI32x4ReplaceLane:
      case kExprI16x8ReplaceLane:
      case kExprI8x16ReplaceLane:
        return SimdReplaceLane(opcode, kWasmI32, opcode_length);
      case kExprI8x16Shuffle:
        return Simd8x16ShuffleOp(opcode_length);
      case kExprS128LoadMem:
        return DecodeLoadMem(LoadType::kS128Load, opcode_length);
      case kExprS128StoreMem:
        return DecodeStoreMem(StoreType::kS128Store, opcode_length);
      case kExprS128Load32Zero:
        return DecodeLoadTransformMem(LoadType::kI32Load,
                                      LoadTransformationKind::kZeroExtend,
                                      opcode_length);
      case kExprS128Load64Zero:
        return DecodeLoadTransformMem(LoadType::kI64Load,
                                      LoadTransformationKind::kZeroExtend,
                                      opcode_length);
      case kExprS128Load8Splat:
        return DecodeLoadTransformMem(LoadType::kI32Load8S,
                                      LoadTransformationKind::kSplat,
                                      opcode_length);
      case kExprS128Load16Splat:
        return DecodeLoadTransformMem(LoadType::kI32Load16S,
                                      LoadTransformationKind::kSplat,
                                      opcode_length);
      case kExprS128Load32Splat:
        return DecodeLoadTransformMem(
            LoadType::kI32Load, LoadTransformationKind::kSplat, opcode_length);
      case kExprS128Load64Splat:
        return DecodeLoadTransformMem(
            LoadType::kI64Load, LoadTransformationKind::kSplat, opcode_length);
      case kExprS128Load8x8S:
        return DecodeLoadTransformMem(LoadType::kI32Load8S,
                                      LoadTransformationKind::kExtend,
                                      opcode_length);
      case kExprS128Load8x8U:
        return DecodeLoadTransformMem(LoadType::kI32Load8U,
                                      LoadTransformationKind::kExtend,
                                      opcode_length);
      case kExprS128Load16x4S:
        return DecodeLoadTransformMem(LoadType::kI32Load16S,
                                      LoadTransformationKind::kExtend,
                                      opcode_length);
      case kExprS128Load16x4U:
        return DecodeLoadTransformMem(LoadType::kI32Load16U,
                                      LoadTransformationKind::kExtend,
                                      opcode_length);
      case kExprS128Load32x2S:
        return DecodeLoadTransformMem(LoadType::kI64Load32S,
                                      LoadTransformationKind::kExtend,
                                      opcode_length);
      case kExprS128Load32x2U:
        return DecodeLoadTransformMem(LoadType::kI64Load32U,
                                      LoadTransformationKind::kExtend,
                                      opcode_length);
      case kExprS128Load8Lane: {
        return DecodeLoadLane(LoadType::kI32Load8S, opcode_length);
      }
      case kExprS128Load16Lane: {
        return DecodeLoadLane(LoadType::kI32Load16S, opcode_length);
      }
      case kExprS128Load32Lane: {
        return DecodeLoadLane(LoadType::kI32Load, opcode_length);
      }
      case kExprS128Load64Lane: {
        return DecodeLoadLane(LoadType::kI64Load, opcode_length);
      }
      case kExprS128Store8Lane: {
        return DecodeStoreLane(StoreType::kI32Store8, opcode_length);
      }
      case kExprS128Store16Lane: {
        return DecodeStoreLane(StoreType::kI32Store16, opcode_length);
      }
      case kExprS128Store32Lane: {
        return DecodeStoreLane(StoreType::kI32Store, opcode_length);
      }
      case kExprS128Store64Lane: {
        return DecodeStoreLane(StoreType::kI64Store, opcode_length);
      }
      case kExprS128Const:
        return SimdConstOp(opcode_length);
      default: {
        // Generic path: pop arguments and push the (single) result according
        // to the opcode's static signature.
        if (!CheckSimdPostMvp(opcode)) {
          return 0;
        }
        const FunctionSig* sig = WasmOpcodes::Signature(opcode);
        if (!VALIDATE(sig != nullptr)) {
          this->DecodeError("invalid simd opcode");
          return 0;
        }
        ArgVector args = PopArgs(sig);
        Value* results =
            sig->return_count() == 0 ? nullptr : Push(GetReturnType(sig));
        CALL_INTERFACE_IF_REACHABLE(SimdOp, opcode, VectorOf(args), results);
        return opcode_length;
      }
    }
  }
3617
  // Decodes one opcode from the GC proposal: struct/array allocation and
  // access, i31 packing, RTT creation, and runtime casts/tests.
  // {opcode_length} is the byte length of the (prefixed) opcode itself; the
  // return value additionally includes the length of any immediates.
  // Returns 0 on validation failure.
  int DecodeGCOpcode(WasmOpcode opcode, uint32_t opcode_length) {
    switch (opcode) {
      case kExprStructNewWithRtt: {
        StructIndexImmediate<validate> imm(this, this->pc_ + opcode_length);
        if (!this->Validate(this->pc_ + opcode_length, imm)) return 0;
        // The RTT sits on top of the stack, above the field initializers.
        Value rtt = Pop(imm.struct_type->field_count());
        if (!VALIDATE(rtt.type.is_rtt() || rtt.type.is_bottom())) {
          this->DecodeError(
              "struct.new_with_rtt expected rtt, found %s of type %s",
              SafeOpcodeNameAt(rtt.pc()), rtt.type.name().c_str());
          return 0;
        }
        // TODO(7748): Drop this check if {imm} is dropped from the proposal
        // à la https://github.com/WebAssembly/function-references/pull/31.
        if (!VALIDATE(rtt.type.is_bottom() ||
                      rtt.type.heap_representation() == imm.index)) {
          this->DecodeError(
              "struct.new_with_rtt expected rtt for type %d, found rtt for "
              "type %s",
              imm.index, rtt.type.heap_type().name().c_str());
          return 0;
        }
        ArgVector args = PopArgs(imm.struct_type);
        Value* value = Push(ValueType::Ref(imm.index, kNonNullable));
        CALL_INTERFACE_IF_REACHABLE(StructNewWithRtt, imm, rtt, args.begin(),
                                    value);
        return opcode_length + imm.length;
      }
      case kExprStructNewDefault: {
        StructIndexImmediate<validate> imm(this, this->pc_ + opcode_length);
        if (!this->Validate(this->pc_ + opcode_length, imm)) return 0;
        if (validate) {
          // Default-initialization requires every field type to have a
          // default value (e.g. no non-nullable references).
          for (uint32_t i = 0; i < imm.struct_type->field_count(); i++) {
            ValueType ftype = imm.struct_type->field(i);
            if (!VALIDATE(ftype.is_defaultable())) {
              this->DecodeError(
                  "struct.new_default_with_rtt: struct type %d has "
                  "non-defaultable type %s for field %d",
                  imm.index, ftype.name().c_str(), i);
              return 0;
            }
          }
        }
        Value rtt = Pop(0);
        if (!VALIDATE(rtt.type.is_rtt() || rtt.type.is_bottom())) {
          this->DecodeError(
              "struct.new_default_with_rtt expected rtt, found %s of type %s",
              SafeOpcodeNameAt(rtt.pc()), rtt.type.name().c_str());
          return 0;
        }
        // TODO(7748): Drop this check if {imm} is dropped from the proposal
        // à la https://github.com/WebAssembly/function-references/pull/31.
        if (!VALIDATE(rtt.type.is_bottom() ||
                      rtt.type.heap_representation() == imm.index)) {
          this->DecodeError(
              "struct.new_default_with_rtt expected rtt for type %d, found rtt "
              "for type %s",
              imm.index, rtt.type.heap_type().name().c_str());
          return 0;
        }
        Value* value = Push(ValueType::Ref(imm.index, kNonNullable));
        CALL_INTERFACE_IF_REACHABLE(StructNewDefault, imm, rtt, value);
        return opcode_length + imm.length;
      }
      case kExprStructGet: {
        FieldIndexImmediate<validate> field(this, this->pc_ + opcode_length);
        if (!this->Validate(this->pc_ + opcode_length, field)) return 0;
        ValueType field_type =
            field.struct_index.struct_type->field(field.index);
        // Packed (i8/i16) fields need an explicit sign-extension mode.
        if (!VALIDATE(!field_type.is_packed())) {
          this->DecodeError(
              "struct.get used with a field of packed type. Use struct.get_s "
              "or struct.get_u instead.");
          return 0;
        }
        Value struct_obj =
            Pop(0, ValueType::Ref(field.struct_index.index, kNullable));
        Value* value = Push(field_type);
        CALL_INTERFACE_IF_REACHABLE(StructGet, struct_obj, field, true, value);
        return opcode_length + field.length;
      }
      case kExprStructGetU:
      case kExprStructGetS: {
        FieldIndexImmediate<validate> field(this, this->pc_ + opcode_length);
        if (!this->Validate(this->pc_ + opcode_length, field)) return 0;
        ValueType field_type =
            field.struct_index.struct_type->field(field.index);
        if (!VALIDATE(field_type.is_packed())) {
          this->DecodeError(
              "%s is only valid for packed struct fields. Use struct.get "
              "instead.",
              WasmOpcodes::OpcodeName(opcode));
          return 0;
        }
        Value struct_obj =
            Pop(0, ValueType::Ref(field.struct_index.index, kNullable));
        // The loaded value is widened to the unpacked representation.
        Value* value = Push(field_type.Unpacked());
        CALL_INTERFACE_IF_REACHABLE(StructGet, struct_obj, field,
                                    opcode == kExprStructGetS, value);
        return opcode_length + field.length;
      }
      case kExprStructSet: {
        FieldIndexImmediate<validate> field(this, this->pc_ + opcode_length);
        if (!this->Validate(this->pc_ + opcode_length, field)) return 0;
        const StructType* struct_type = field.struct_index.struct_type;
        if (!VALIDATE(struct_type->mutability(field.index))) {
          this->DecodeError("setting immutable struct field");
          return 0;
        }
        Value field_value = Pop(1, struct_type->field(field.index).Unpacked());
        Value struct_obj =
            Pop(0, ValueType::Ref(field.struct_index.index, kNullable));
        CALL_INTERFACE_IF_REACHABLE(StructSet, struct_obj, field, field_value);
        return opcode_length + field.length;
      }
      case kExprArrayNewWithRtt: {
        ArrayIndexImmediate<validate> imm(this, this->pc_ + opcode_length);
        if (!this->Validate(this->pc_ + opcode_length, imm)) return 0;
        // Stack layout (top first): rtt, length, initial value.
        Value rtt = Pop(2);
        if (!VALIDATE(rtt.type.is_rtt() || rtt.type.is_bottom())) {
          this->DecodeError(
              this->pc_ + opcode_length,
              "array.new_with_rtt expected rtt, found %s of type %s",
              SafeOpcodeNameAt(rtt.pc()), rtt.type.name().c_str());
          return 0;
        }
        // TODO(7748): Drop this check if {imm} is dropped from the proposal
        // à la https://github.com/WebAssembly/function-references/pull/31.
        if (!VALIDATE(rtt.type.is_bottom() ||
                      rtt.type.heap_representation() == imm.index)) {
          this->DecodeError(
              this->pc_ + opcode_length,
              "array.new_with_rtt expected rtt for type %d, found "
              "rtt for type %s",
              imm.index, rtt.type.heap_type().name().c_str());
          return 0;
        }
        Value length = Pop(1, kWasmI32);
        Value initial_value = Pop(0, imm.array_type->element_type().Unpacked());
        Value* value = Push(ValueType::Ref(imm.index, kNonNullable));
        CALL_INTERFACE_IF_REACHABLE(ArrayNewWithRtt, imm, length, initial_value,
                                    rtt, value);
        return opcode_length + imm.length;
      }
      case kExprArrayNewDefault: {
        ArrayIndexImmediate<validate> imm(this, this->pc_ + opcode_length);
        if (!this->Validate(this->pc_ + opcode_length, imm)) return 0;
        if (!VALIDATE(imm.array_type->element_type().is_defaultable())) {
          this->DecodeError(
              "array.new_default_with_rtt: array type %d has "
              "non-defaultable element type %s",
              imm.index, imm.array_type->element_type().name().c_str());
          return 0;
        }
        Value rtt = Pop(1);
        if (!VALIDATE(rtt.type.is_rtt() || rtt.type.is_bottom())) {
          this->DecodeError(
              this->pc_ + opcode_length,
              "array.new_default_with_rtt expected rtt, found %s of type %s",
              SafeOpcodeNameAt(rtt.pc()), rtt.type.name().c_str());
          return 0;
        }
        // TODO(7748): Drop this check if {imm} is dropped from the proposal
        // à la https://github.com/WebAssembly/function-references/pull/31.
        if (!VALIDATE(rtt.type.is_bottom() ||
                      rtt.type.heap_representation() == imm.index)) {
          this->DecodeError(this->pc_ + opcode_length,
                            "array.new_default_with_rtt expected rtt for type "
                            "%d, found rtt for type %s",
                            imm.index, rtt.type.heap_type().name().c_str());
          return 0;
        }
        Value length = Pop(0, kWasmI32);
        Value* value = Push(ValueType::Ref(imm.index, kNonNullable));
        CALL_INTERFACE_IF_REACHABLE(ArrayNewDefault, imm, length, rtt, value);
        return opcode_length + imm.length;
      }
      case kExprArrayGetS:
      case kExprArrayGetU: {
        ArrayIndexImmediate<validate> imm(this, this->pc_ + opcode_length);
        if (!this->Validate(this->pc_ + opcode_length, imm)) return 0;
        if (!VALIDATE(imm.array_type->element_type().is_packed())) {
          this->DecodeError(
              "%s is only valid for packed arrays. Use array.get instead.",
              WasmOpcodes::OpcodeName(opcode));
          return 0;
        }
        Value index = Pop(1, kWasmI32);
        Value array_obj = Pop(0, ValueType::Ref(imm.index, kNullable));
        // The loaded element is widened to the unpacked representation.
        Value* value = Push(imm.array_type->element_type().Unpacked());
        CALL_INTERFACE_IF_REACHABLE(ArrayGet, array_obj, imm, index,
                                    opcode == kExprArrayGetS, value);
        return opcode_length + imm.length;
      }
      case kExprArrayGet: {
        ArrayIndexImmediate<validate> imm(this, this->pc_ + opcode_length);
        if (!this->Validate(this->pc_ + opcode_length, imm)) return 0;
        if (!VALIDATE(!imm.array_type->element_type().is_packed())) {
          this->DecodeError(
              "array.get used with a field of packed type. Use array.get_s or "
              "array.get_u instead.");
          return 0;
        }
        Value index = Pop(1, kWasmI32);
        Value array_obj = Pop(0, ValueType::Ref(imm.index, kNullable));
        Value* value = Push(imm.array_type->element_type());
        CALL_INTERFACE_IF_REACHABLE(ArrayGet, array_obj, imm, index, true,
                                    value);
        return opcode_length + imm.length;
      }
      case kExprArraySet: {
        ArrayIndexImmediate<validate> imm(this, this->pc_ + opcode_length);
        if (!this->Validate(this->pc_ + opcode_length, imm)) return 0;
        if (!VALIDATE(imm.array_type->mutability())) {
          this->DecodeError("setting element of immutable array");
          return 0;
        }
        Value value = Pop(2, imm.array_type->element_type().Unpacked());
        Value index = Pop(1, kWasmI32);
        Value array_obj = Pop(0, ValueType::Ref(imm.index, kNullable));
        CALL_INTERFACE_IF_REACHABLE(ArraySet, array_obj, imm, index, value);
        return opcode_length + imm.length;
      }
      case kExprArrayLen: {
        ArrayIndexImmediate<validate> imm(this, this->pc_ + opcode_length);
        if (!this->Validate(this->pc_ + opcode_length, imm)) return 0;
        Value array_obj = Pop(0, ValueType::Ref(imm.index, kNullable));
        Value* value = Push(kWasmI32);
        CALL_INTERFACE_IF_REACHABLE(ArrayLen, array_obj, value);
        return opcode_length + imm.length;
      }
      case kExprI31New: {
        // Packs the low 31 bits of an i32 into an i31ref.
        Value input = Pop(0, kWasmI32);
        Value* value = Push(kWasmI31Ref);
        CALL_INTERFACE_IF_REACHABLE(I31New, input, value);
        return opcode_length;
      }
      case kExprI31GetS: {
        Value i31 = Pop(0, kWasmI31Ref);
        Value* value = Push(kWasmI32);
        CALL_INTERFACE_IF_REACHABLE(I31GetS, i31, value);
        return opcode_length;
      }
      case kExprI31GetU: {
        Value i31 = Pop(0, kWasmI31Ref);
        Value* value = Push(kWasmI32);
        CALL_INTERFACE_IF_REACHABLE(I31GetU, i31, value);
        return opcode_length;
      }
      case kExprRttCanon: {
        HeapTypeImmediate<validate> imm(this->enabled_, this,
                                        this->pc_ + opcode_length);
        if (!this->Validate(this->pc_ + opcode_length, imm)) return 0;
        // Canonical RTTs have depth 1.
        Value* value = Push(ValueType::Rtt(imm.type, 1));
        CALL_INTERFACE_IF_REACHABLE(RttCanon, imm, value);
        return opcode_length + imm.length;
      }
      case kExprRttSub: {
        // TODO(7748): The proposal currently includes additional immediates
        // here: the subtyping depth <n> and the "parent type", see:
        // https://github.com/WebAssembly/gc/commit/20a80e34 .
        // If these immediates don't get dropped (in the spirit of
        // https://github.com/WebAssembly/function-references/pull/31 ),
        // implement them here.
        HeapTypeImmediate<validate> imm(this->enabled_, this,
                                        this->pc_ + opcode_length);
        if (!this->Validate(this->pc_ + opcode_length, imm)) return 0;
        Value parent = Pop(0);
        if (parent.type.is_bottom()) {
          // Unreachable code: propagate bottom instead of type-checking.
          Push(kWasmBottom);
        } else {
          // TODO(7748): Consider exposing "IsSubtypeOfHeap(HeapType t1, t2)" so
          // we can avoid creating (ref heaptype) wrappers here.
          if (!VALIDATE(parent.type.is_rtt() &&
                        IsSubtypeOf(ValueType::Ref(imm.type, kNonNullable),
                                    ValueType::Ref(parent.type.heap_type(),
                                                   kNonNullable),
                                    this->module_))) {
            this->DecodeError("rtt.sub requires a supertype rtt on stack");
            return 0;
          }
          // The result RTT is one level deeper than its parent.
          Value* value =
              Push(ValueType::Rtt(imm.type, parent.type.depth() + 1));
          CALL_INTERFACE_IF_REACHABLE(RttSub, imm, parent, value);
        }
        return opcode_length + imm.length;
      }
      case kExprRefTest: {
        // "Tests whether {obj}'s runtime type is a runtime subtype of {rtt}."
        HeapTypeImmediate<validate> obj_type(this->enabled_, this,
                                             this->pc_ + opcode_length);
        if (!this->Validate(this->pc_ + opcode_length, obj_type)) return 0;
        int len = opcode_length + obj_type.length;
        HeapTypeImmediate<validate> rtt_type(this->enabled_, this,
                                             this->pc_ + len);
        if (!this->Validate(this->pc_ + len, rtt_type)) return 0;
        len += rtt_type.length;
        // The static type of {obj} must be a supertype of the {rtt}'s type.
        if (!VALIDATE(IsSubtypeOf(ValueType::Ref(rtt_type.type, kNonNullable),
                                  ValueType::Ref(obj_type.type, kNonNullable),
                                  this->module_))) {
          this->DecodeError(
              "ref.test: rtt type must be subtype of object type");
          return 0;
        }
        Value rtt = Pop(1);
        if (!VALIDATE(
                (rtt.type.is_rtt() && rtt.type.heap_type() == rtt_type.type) ||
                rtt.type == kWasmBottom)) {
          this->DecodeError("ref.test: expected rtt for type %s but got %s",
                            rtt_type.type.name().c_str(),
                            rtt.type.name().c_str());
          return 0;
        }
        Value obj = Pop(0, ValueType::Ref(obj_type.type, kNullable));
        Value* value = Push(kWasmI32);
        CALL_INTERFACE_IF_REACHABLE(RefTest, obj, rtt, value);
        return len;
      }
      case kExprRefCast: {
        HeapTypeImmediate<validate> obj_type(this->enabled_, this,
                                             this->pc_ + opcode_length);
        if (!this->Validate(this->pc_ + opcode_length, obj_type)) return 0;
        int len = opcode_length + obj_type.length;
        HeapTypeImmediate<validate> rtt_type(this->enabled_, this,
                                             this->pc_ + len);
        if (!this->Validate(this->pc_ + len, rtt_type)) return 0;
        len += rtt_type.length;
        if (!VALIDATE(IsSubtypeOf(ValueType::Ref(rtt_type.type, kNonNullable),
                                  ValueType::Ref(obj_type.type, kNonNullable),
                                  this->module_))) {
          this->DecodeError(
              "ref.cast: rtt type must be subtype of object type");
          return 0;
        }
        Value rtt = Pop(1);
        if (!VALIDATE(
                (rtt.type.is_rtt() && rtt.type.heap_type() == rtt_type.type) ||
                rtt.type == kWasmBottom)) {
          this->DecodeError("ref.cast: expected rtt for type %s but got %s",
                            rtt_type.type.name().c_str(),
                            rtt.type.name().c_str());
          return 0;
        }
        Value obj = Pop(0, ValueType::Ref(obj_type.type, kNullable));
        // A successful cast produces a non-nullable reference to the target.
        Value* value = Push(ValueType::Ref(rtt_type.type, kNonNullable));
        CALL_INTERFACE_IF_REACHABLE(RefCast, obj, rtt, value);
        return len;
      }
      case kExprBrOnCast: {
        BranchDepthImmediate<validate> branch_depth(this,
                                                    this->pc_ + opcode_length);
        if (!this->Validate(this->pc_ + opcode_length, branch_depth,
                            control_.size())) {
          return 0;
        }
        // TODO(7748): If the heap type immediates remain in the spec, read
        // them here.
        Value rtt = Pop(1);
        if (!VALIDATE(rtt.type.is_rtt() || rtt.type.is_bottom())) {
          this->DecodeError("br_on_cast[1]: expected rtt on stack");
          return 0;
        }
        Value obj = Pop(0);
        if (!VALIDATE(obj.type.is_object_reference_type() ||
                      rtt.type.is_bottom())) {
          this->DecodeError("br_on_cast[0]: expected reference on stack");
          return 0;
        }
        // The static type of {obj} must be a supertype of {rtt}'s type.
        if (!VALIDATE(
                rtt.type.is_bottom() || obj.type.is_bottom() ||
                IsSubtypeOf(ValueType::Ref(rtt.type.heap_type(), kNonNullable),
                            ValueType::Ref(obj.type.heap_type(), kNonNullable),
                            this->module_))) {
          this->DecodeError(
              "br_on_cast: rtt type must be a subtype of object type");
          return 0;
        }
        Control* c = control_at(branch_depth.depth);
        // On the branch-taken path the object has the cast target type.
        Value* result_on_branch =
            Push(rtt.type.is_bottom()
                     ? kWasmBottom
                     : ValueType::Ref(rtt.type.heap_type(), kNonNullable));
        TypeCheckBranchResult check_result = TypeCheckBranch(c, true);
        if (V8_LIKELY(check_result == kReachableBranch)) {
          CALL_INTERFACE(BrOnCast, obj, rtt, result_on_branch,
                         branch_depth.depth);
          c->br_merge()->reached = true;
        } else if (check_result == kInvalidStack) {
          return 0;
        }
        Pop(0);  // Drop {result_on_branch}, restore original value.
        // On fall-through the object keeps its original static type.
        Value* result_on_fallthrough = Push(obj.type);
        *result_on_fallthrough = obj;
        return opcode_length + branch_depth.length;
      }
      default:
        this->DecodeError("invalid gc opcode");
        return 0;
    }
  }
4020
4021 uint32_t DecodeAtomicOpcode(WasmOpcode opcode, uint32_t opcode_length) {
4022 ValueType ret_type;
4023 const FunctionSig* sig = WasmOpcodes::Signature(opcode);
4024 if (!VALIDATE(sig != nullptr)) {
4025 this->DecodeError("invalid atomic opcode");
4026 return 0;
4027 }
4028 MachineType memtype;
4029 switch (opcode) {
4030 #define CASE_ATOMIC_STORE_OP(Name, Type) \
4031 case kExpr##Name: { \
4032 memtype = MachineType::Type(); \
4033 ret_type = kWasmStmt; \
4034 break; /* to generic mem access code below */ \
4035 }
4036 ATOMIC_STORE_OP_LIST(CASE_ATOMIC_STORE_OP)
4037 #undef CASE_ATOMIC_OP
4038 #define CASE_ATOMIC_OP(Name, Type) \
4039 case kExpr##Name: { \
4040 memtype = MachineType::Type(); \
4041 ret_type = GetReturnType(sig); \
4042 break; /* to generic mem access code below */ \
4043 }
4044 ATOMIC_OP_LIST(CASE_ATOMIC_OP)
4045 #undef CASE_ATOMIC_OP
4046 case kExprAtomicFence: {
4047 byte zero =
4048 this->template read_u8<validate>(this->pc_ + opcode_length, "zero");
4049 if (!VALIDATE(zero == 0)) {
4050 this->DecodeError(this->pc_ + opcode_length,
4051 "invalid atomic operand");
4052 return 0;
4053 }
4054 CALL_INTERFACE_IF_REACHABLE(AtomicFence);
4055 return 1 + opcode_length;
4056 }
4057 default:
4058 this->DecodeError("invalid atomic opcode");
4059 return 0;
4060 }
4061 if (!CheckHasMemory()) return 0;
4062 MemoryAccessImmediate<validate> imm(
4063 this, this->pc_ + opcode_length,
4064 ElementSizeLog2Of(memtype.representation()));
4065 // TODO(10949): Fix this for memory64 (index type should be kWasmI64
4066 // then).
4067 CHECK(!this->module_->is_memory64);
4068 ArgVector args = PopArgs(sig);
4069 Value* result = ret_type == kWasmStmt ? nullptr : Push(GetReturnType(sig));
4070 CALL_INTERFACE_IF_REACHABLE(AtomicOp, opcode, VectorOf(args), imm, result);
4071 return opcode_length + imm.length;
4072 }
4073
  // Decodes one opcode from the "numeric" prefix: saturating float-to-int
  // conversions plus the bulk-memory and table operations.
  // {opcode_length} is the byte length of the (prefixed) opcode itself; the
  // return value additionally includes the length of any immediates.
  // Returns 0 on validation failure.
  unsigned DecodeNumericOpcode(WasmOpcode opcode, uint32_t opcode_length) {
    const FunctionSig* sig = WasmOpcodes::Signature(opcode);
    if (!VALIDATE(sig != nullptr)) {
      this->DecodeError("invalid numeric opcode");
      return 0;
    }
    switch (opcode) {
      // Saturating conversions are plain value operators without immediates.
      case kExprI32SConvertSatF32:
      case kExprI32UConvertSatF32:
      case kExprI32SConvertSatF64:
      case kExprI32UConvertSatF64:
      case kExprI64SConvertSatF32:
      case kExprI64UConvertSatF32:
      case kExprI64SConvertSatF64:
      case kExprI64UConvertSatF64: {
        BuildSimpleOperator(opcode, sig);
        return opcode_length;
      }
      case kExprMemoryInit: {
        MemoryInitImmediate<validate> imm(this, this->pc_ + opcode_length);
        if (!this->Validate(this->pc_ + opcode_length, imm)) return 0;
        // Stack (top first): size, source offset, destination offset.
        Value size = Pop(2, sig->GetParam(2));
        Value src = Pop(1, sig->GetParam(1));
        Value dst = Pop(0, sig->GetParam(0));
        CALL_INTERFACE_IF_REACHABLE(MemoryInit, imm, dst, src, size);
        return opcode_length + imm.length;
      }
      case kExprDataDrop: {
        DataDropImmediate<validate> imm(this, this->pc_ + opcode_length);
        if (!this->Validate(this->pc_ + opcode_length, imm)) return 0;
        CALL_INTERFACE_IF_REACHABLE(DataDrop, imm);
        return opcode_length + imm.length;
      }
      case kExprMemoryCopy: {
        MemoryCopyImmediate<validate> imm(this, this->pc_ + opcode_length);
        if (!this->Validate(this->pc_ + opcode_length, imm)) return 0;
        Value size = Pop(2, sig->GetParam(2));
        Value src = Pop(1, sig->GetParam(1));
        Value dst = Pop(0, sig->GetParam(0));
        CALL_INTERFACE_IF_REACHABLE(MemoryCopy, imm, dst, src, size);
        return opcode_length + imm.length;
      }
      case kExprMemoryFill: {
        MemoryIndexImmediate<validate> imm(this, this->pc_ + opcode_length);
        if (!this->Validate(this->pc_ + opcode_length, imm)) return 0;
        Value size = Pop(2, sig->GetParam(2));
        Value value = Pop(1, sig->GetParam(1));
        Value dst = Pop(0, sig->GetParam(0));
        CALL_INTERFACE_IF_REACHABLE(MemoryFill, imm, dst, value, size);
        return opcode_length + imm.length;
      }
      case kExprTableInit: {
        TableInitImmediate<validate> imm(this, this->pc_ + opcode_length);
        if (!this->Validate(this->pc_ + opcode_length, imm)) return 0;
        ArgVector args = PopArgs(sig);
        CALL_INTERFACE_IF_REACHABLE(TableInit, imm, VectorOf(args));
        return opcode_length + imm.length;
      }
      case kExprElemDrop: {
        ElemDropImmediate<validate> imm(this, this->pc_ + opcode_length);
        if (!this->Validate(this->pc_ + opcode_length, imm)) return 0;
        CALL_INTERFACE_IF_REACHABLE(ElemDrop, imm);
        return opcode_length + imm.length;
      }
      case kExprTableCopy: {
        TableCopyImmediate<validate> imm(this, this->pc_ + opcode_length);
        if (!this->Validate(this->pc_ + opcode_length, imm)) return 0;
        ArgVector args = PopArgs(sig);
        CALL_INTERFACE_IF_REACHABLE(TableCopy, imm, VectorOf(args));
        return opcode_length + imm.length;
      }
      case kExprTableGrow: {
        TableIndexImmediate<validate> imm(this, this->pc_ + opcode_length);
        if (!this->Validate(this->pc_ + opcode_length, imm)) return 0;
        Value delta = Pop(1, sig->GetParam(1));
        // The initial value must match the table's element type, which the
        // generic signature cannot express.
        Value value = Pop(0, this->module_->tables[imm.index].type);
        Value* result = Push(kWasmI32);
        CALL_INTERFACE_IF_REACHABLE(TableGrow, imm, value, delta, result);
        return opcode_length + imm.length;
      }
      case kExprTableSize: {
        TableIndexImmediate<validate> imm(this, this->pc_ + opcode_length);
        if (!this->Validate(this->pc_ + opcode_length, imm)) return 0;
        Value* result = Push(kWasmI32);
        CALL_INTERFACE_IF_REACHABLE(TableSize, imm, result);
        return opcode_length + imm.length;
      }
      case kExprTableFill: {
        TableIndexImmediate<validate> imm(this, this->pc_ + opcode_length);
        if (!this->Validate(this->pc_ + opcode_length, imm)) return 0;
        Value count = Pop(2, sig->GetParam(2));
        // The fill value must match the table's element type.
        Value value = Pop(1, this->module_->tables[imm.index].type);
        Value start = Pop(0, sig->GetParam(0));
        CALL_INTERFACE_IF_REACHABLE(TableFill, imm, start, value, count);
        return opcode_length + imm.length;
      }
      default:
        this->DecodeError("invalid numeric opcode");
        return 0;
    }
  }
4175
4176 void DoReturn() {
4177 size_t return_count = this->sig_->return_count();
4178 if (return_count > 1) {
4179 this->detected_->Add(kFeature_mv);
4180 }
4181 DCHECK_GE(stack_size(), return_count);
4182 Vector<Value> return_values =
4183 Vector<Value>{stack_end_ - return_count, return_count};
4184
4185 CALL_INTERFACE_IF_REACHABLE(DoReturn, return_values);
4186 }
4187
4188 V8_INLINE void EnsureStackSpace(int slots_needed) {
4189 if (V8_LIKELY(stack_capacity_end_ - stack_end_ >= slots_needed)) return;
4190 GrowStackSpace(slots_needed);
4191 }
4192
4193 V8_NOINLINE void GrowStackSpace(int slots_needed) {
4194 size_t new_stack_capacity =
4195 std::max(size_t{8},
4196 base::bits::RoundUpToPowerOfTwo(stack_size() + slots_needed));
4197 Value* new_stack =
4198 this->zone()->template NewArray<Value>(new_stack_capacity);
4199 if (stack_) {
4200 std::copy(stack_, stack_end_, new_stack);
4201 this->zone()->DeleteArray(stack_, stack_capacity_end_ - stack_);
4202 }
4203 stack_end_ = new_stack + (stack_end_ - stack_);
4204 stack_ = new_stack;
4205 stack_capacity_end_ = new_stack + new_stack_capacity;
4206 }
4207
4208 V8_INLINE Value* Push(ValueType type) {
4209 DCHECK_NE(kWasmStmt, type);
4210 // {EnsureStackSpace} should have been called before, either in the central
4211 // decoding loop, or individually if more than one element is pushed.
4212 DCHECK_GT(stack_capacity_end_, stack_end_);
4213 *stack_end_ = Value{this->pc_, type};
4214 ++stack_end_;
4215 return stack_end_ - 1;
4216 }
4217
  // Resets the stack to the depth recorded in {c}, then re-pushes the values
  // stored in {merge} (which must be {c}'s start or end merge). Used when
  // entering/leaving a block to restore the block's declared value types.
  void PushMergeValues(Control* c, Merge<Value>* merge) {
    DCHECK_EQ(c, &control_.back());
    DCHECK(merge == &c->start_merge || merge == &c->end_merge);
    DCHECK_LE(stack_ + c->stack_depth, stack_end_);
    // Truncate the stack back to the control's entry depth.
    stack_end_ = stack_ + c->stack_depth;
    if (merge->arity == 1) {
      // Single values are stored inline ({vals.first}, not the array).
      // {EnsureStackSpace} should have been called before in the central
      // decoding loop.
      DCHECK_GT(stack_capacity_end_, stack_end_);
      *stack_end_++ = merge->vals.first;
    } else {
      EnsureStackSpace(merge->arity);
      for (uint32_t i = 0; i < merge->arity; i++) {
        *stack_end_++ = merge->vals.array[i];
      }
    }
    DCHECK_EQ(c->stack_depth + merge->arity, stack_size());
  }
4236
4237 Value* PushReturns(const FunctionSig* sig) {
4238 size_t return_count = sig->return_count();
4239 EnsureStackSpace(static_cast<int>(return_count));
4240 for (size_t i = 0; i < return_count; ++i) {
4241 Push(sig->GetReturn(i));
4242 }
4243 return stack_end_ - return_count;
4244 }
4245
4246 // We do not inline these functions because doing so causes a large binary
4247 // size increase. Not inlining them should not create a performance
4248 // degradation, because their invocations are guarded by V8_LIKELY.
  // Reports a type mismatch for the value popped at stack slot {index}:
  // {val} was found where a subtype of {expected} was required.
  V8_NOINLINE void PopTypeError(int index, Value val, ValueType expected) {
    this->DecodeError(val.pc(), "%s[%d] expected type %s, found %s of type %s",
                      SafeOpcodeNameAt(this->pc_), index,
                      expected.name().c_str(), SafeOpcodeNameAt(val.pc()),
                      val.type.name().c_str());
  }
4255
  // Reports that the current opcode popped past the values available in the
  // current block; {index} is the 0-based slot that was missing.
  V8_NOINLINE void NotEnoughArgumentsError(int index) {
    this->DecodeError(
        "not enough arguments on the stack for %s, expected %d more",
        SafeOpcodeNameAt(this->pc_), index + 1);
  }
4261
4262 V8_INLINE Value Pop(int index, ValueType expected) {
4263 Value val = Pop(index);
4264 if (!VALIDATE(IsSubtypeOf(val.type, expected, this->module_) ||
4265 val.type == kWasmBottom || expected == kWasmBottom)) {
4266 PopTypeError(index, val, expected);
4267 }
4268 return val;
4269 }
4270
4271 V8_INLINE Value Pop(int index) {
4272 DCHECK(!control_.empty());
4273 uint32_t limit = control_.back().stack_depth;
4274 if (stack_size() <= limit) {
4275 // Popping past the current control start in reachable code.
4276 if (!VALIDATE(control_.back().unreachable())) {
4277 NotEnoughArgumentsError(index);
4278 }
4279 return UnreachableValue(this->pc_);
4280 }
4281 DCHECK_LT(stack_, stack_end_);
4282 stack_end_--;
4283 return *stack_end_;
4284 }
4285
4286 // Pops values from the stack, as defined by {merge}. Thereby we type-check
4287 // unreachable merges. Afterwards the values are pushed again on the stack
4288 // according to the signature in {merge}. This is done so follow-up validation
4289 // is possible.
4290 bool TypeCheckUnreachableMerge(Merge<Value>& merge, bool conditional_branch) {
4291 int arity = merge.arity;
4292 // For conditional branches, stack value '0' is the condition of the branch,
4293 // and the result values start at index '1'.
4294 int index_offset = conditional_branch ? 1 : 0;
4295 for (int i = arity - 1; i >= 0; --i) Pop(index_offset + i, merge[i].type);
4296 // Push values of the correct type back on the stack.
4297 EnsureStackSpace(arity);
4298 for (int i = 0; i < arity; ++i) Push(merge[i].type);
4299 return this->ok();
4300 }
4301
4302 int startrel(const byte* ptr) { return static_cast<int>(ptr - this->start_); }
4303
4304 void FallThruTo(Control* c) {
4305 DCHECK_EQ(c, &control_.back());
4306 if (!TypeCheckFallThru()) return;
4307 if (!c->reachable()) return;
4308
4309 if (!c->is_loop()) CALL_INTERFACE(FallThruTo, c);
4310 c->end_merge.reached = true;
4311 }
4312
  // Type-checks the topmost {merge->arity} stack values against the types
  // recorded in {merge}. Returns false (after reporting an error) on the
  // first mismatch.
  bool TypeCheckMergeValues(Control* c, Merge<Value>* merge) {
    // {validate} is a constexpr template parameter; the static_assert makes
    // instantiating this function with validate == false a compile error,
    // since it must only be called within VALIDATE.
    static_assert(validate, "Call this function only within VALIDATE");
    DCHECK(merge == &c->start_merge || merge == &c->end_merge);
    DCHECK_GE(stack_size(), c->stack_depth + merge->arity);
    // The computation of {stack_values} is only valid if {merge->arity} is >0.
    DCHECK_LT(0, merge->arity);
    Value* stack_values = stack_end_ - merge->arity;
    // Typecheck the topmost {merge->arity} values on the stack.
    for (uint32_t i = 0; i < merge->arity; ++i) {
      Value& val = stack_values[i];
      Value& old = (*merge)[i];
      if (!VALIDATE(IsSubtypeOf(val.type, old.type, this->module_))) {
        this->DecodeError("type error in merge[%u] (expected %s, got %s)", i,
                          old.type.name().c_str(), val.type.name().c_str());
        return false;
      }
    }

    return true;
  }
4335
4336 bool TypeCheckOneArmedIf(Control* c) {
4337 static_assert(validate, "Call this function only within VALIDATE");
4338 DCHECK(c->is_onearmed_if());
4339 DCHECK_EQ(c->start_merge.arity, c->end_merge.arity);
4340 for (uint32_t i = 0; i < c->start_merge.arity; ++i) {
4341 Value& start = c->start_merge[i];
4342 Value& end = c->end_merge[i];
4343 if (!VALIDATE(IsSubtypeOf(start.type, end.type, this->module_))) {
4344 this->DecodeError("type error in merge[%u] (expected %s, got %s)", i,
4345 end.type.name().c_str(), start.type.name().c_str());
4346 return false;
4347 }
4348 }
4349
4350 return true;
4351 }
4352
  // Type-checks falling through to the end of the current block. In
  // reachable code the stack must contain exactly the end-merge arity of
  // values; in unreachable code missing values are synthesized so follow-up
  // validation can proceed. Returns false if an error was reported.
  bool TypeCheckFallThru() {
    static_assert(validate, "Call this function only within VALIDATE");
    Control& c = control_.back();
    if (V8_LIKELY(c.reachable())) {
      uint32_t expected = c.end_merge.arity;
      DCHECK_GE(stack_size(), c.stack_depth);
      uint32_t actual = stack_size() - c.stack_depth;
      // Fallthrus must match the arity of the control exactly.
      if (!VALIDATE(actual == expected)) {
        this->DecodeError(
            "expected %u elements on the stack for fallthru to @%d, found %u",
            expected, startrel(c.pc()), actual);
        return false;
      }
      if (expected == 0) return true;  // Fast path.

      return TypeCheckMergeValues(&c, &c.end_merge);
    }

    // Type-check an unreachable fallthru. First we do an arity check, then a
    // type check. Note that type-checking may require an adjustment of the
    // stack, if some stack values are missing to match the block signature.
    Merge<Value>& merge = c.end_merge;
    int arity = static_cast<int>(merge.arity);
    int available = static_cast<int>(stack_size()) - c.stack_depth;
    // For fallthrus, not more than the needed values should be available.
    if (!VALIDATE(available <= arity)) {
      this->DecodeError(
          "expected %u elements on the stack for fallthru to @%d, found %u",
          arity, startrel(c.pc()), available);
      return false;
    }
    // Pop all values from the stack for type checking of existing stack
    // values.
    return TypeCheckUnreachableMerge(merge, false);
  }
4389
  // Result of {TypeCheckBranch}.
  enum TypeCheckBranchResult {
    // The branch is in reachable code and type-checked successfully.
    kReachableBranch,
    // The branch is in unreachable code; only the merge types were checked.
    kUnreachableBranch,
    // Type checking failed; an error has already been reported.
    kInvalidStack,
  };
4395
  // Type-checks a branch to control {c}. {conditional_branch} indicates that
  // stack slot 0 holds the branch condition (relevant for index reporting in
  // unreachable merges). See {TypeCheckBranchResult} for outcomes.
  TypeCheckBranchResult TypeCheckBranch(Control* c, bool conditional_branch) {
    if (V8_LIKELY(control_.back().reachable())) {
      // We only do type-checking here. This is only needed during validation.
      if (!validate) return kReachableBranch;

      // Branches must have at least the number of values expected; can have
      // more.
      uint32_t expected = c->br_merge()->arity;
      if (expected == 0) return kReachableBranch;  // Fast path.
      DCHECK_GE(stack_size(), control_.back().stack_depth);
      uint32_t actual =
          static_cast<uint32_t>(stack_size()) - control_.back().stack_depth;
      if (!VALIDATE(actual >= expected)) {
        this->DecodeError(
            "expected %u elements on the stack for br to @%d, found %u",
            expected, startrel(c->pc()), actual);
        return kInvalidStack;
      }
      return TypeCheckMergeValues(c, c->br_merge()) ? kReachableBranch
                                                    : kInvalidStack;
    }

    // Unreachable code: only check (and possibly synthesize) merge values.
    return TypeCheckUnreachableMerge(*c->br_merge(), conditional_branch)
               ? kUnreachableBranch
               : kInvalidStack;
  }
4422
4423 bool TypeCheckReturn() {
4424 int num_returns = static_cast<int>(this->sig_->return_count());
4425 // No type checking is needed if there are no returns.
4426 if (num_returns == 0) return true;
4427
4428 // Returns must have at least the number of values expected; can have more.
4429 int num_available =
4430 static_cast<int>(stack_size()) - control_.back().stack_depth;
4431 if (!VALIDATE(num_available >= num_returns)) {
4432 this->DecodeError(
4433 "expected %u elements on the stack for return, found %u", num_returns,
4434 num_available);
4435 return false;
4436 }
4437
4438 // Typecheck the topmost {num_returns} values on the stack.
4439 // This line requires num_returns > 0.
4440 Value* stack_values = stack_end_ - num_returns;
4441 for (int i = 0; i < num_returns; ++i) {
4442 Value& val = stack_values[i];
4443 ValueType expected_type = this->sig_->GetReturn(i);
4444 if (!VALIDATE(IsSubtypeOf(val.type, expected_type, this->module_))) {
4445 this->DecodeError("type error in return[%u] (expected %s, got %s)", i,
4446 expected_type.name().c_str(),
4447 val.type.name().c_str());
4448 return false;
4449 }
4450 }
4451 return true;
4452 }
4453
4454 void onFirstError() override {
4455 this->end_ = this->pc_; // Terminate decoding loop.
4456 this->current_code_reachable_ = false;
4457 TRACE(" !%s\n", this->error_.message().c_str());
4458 CALL_INTERFACE(OnFirstError);
4459 }
4460
4461 int BuildSimplePrototypeOperator(WasmOpcode opcode) {
4462 if (opcode == kExprRefEq) {
4463 CHECK_PROTOTYPE_OPCODE(gc);
4464 }
4465 const FunctionSig* sig = WasmOpcodes::Signature(opcode);
4466 return BuildSimpleOperator(opcode, sig);
4467 }
4468
4469 int BuildSimpleOperator(WasmOpcode opcode, const FunctionSig* sig) {
4470 DCHECK_GE(1, sig->return_count());
4471 ValueType ret = sig->return_count() == 0 ? kWasmStmt : sig->GetReturn(0);
4472 if (sig->parameter_count() == 1) {
4473 return BuildSimpleOperator(opcode, ret, sig->GetParam(0));
4474 } else {
4475 DCHECK_EQ(2, sig->parameter_count());
4476 return BuildSimpleOperator(opcode, ret, sig->GetParam(0),
4477 sig->GetParam(1));
4478 }
4479 }
4480
4481 int BuildSimpleOperator(WasmOpcode opcode, ValueType return_type,
4482 ValueType arg_type) {
4483 Value val = Pop(0, arg_type);
4484 Value* ret = return_type == kWasmStmt ? nullptr : Push(return_type);
4485 CALL_INTERFACE_IF_REACHABLE(UnOp, opcode, val, ret);
4486 return 1;
4487 }
4488
4489 int BuildSimpleOperator(WasmOpcode opcode, ValueType return_type,
4490 ValueType lhs_type, ValueType rhs_type) {
4491 Value rval = Pop(1, rhs_type);
4492 Value lval = Pop(0, lhs_type);
4493 Value* ret = return_type == kWasmStmt ? nullptr : Push(return_type);
4494 CALL_INTERFACE_IF_REACHABLE(BinOp, opcode, lval, rval, ret);
4495 return 1;
4496 }
4497
// Defines one BuildSimpleOperator_<sig> member per simple signature (e.g.
// i_ii), each forwarding to the BuildSimpleOperator overload that matches
// the signature's value types.
#define DEFINE_SIMPLE_SIG_OPERATOR(sig, ...)         \
  int BuildSimpleOperator_##sig(WasmOpcode opcode) { \
    return BuildSimpleOperator(opcode, __VA_ARGS__); \
  }
  FOREACH_SIGNATURE(DEFINE_SIMPLE_SIG_OPERATOR)
#undef DEFINE_SIMPLE_SIG_OPERATOR
4504 };
4505
4506 #undef CALL_INTERFACE
4507 #undef CALL_INTERFACE_IF_REACHABLE
4508 #undef CALL_INTERFACE_IF_PARENT_REACHABLE
4509
// A decoder interface whose callbacks are all no-ops: every INTERFACE_FUNCTIONS
// entry is expanded into an empty member function. Instantiating
// WasmFullDecoder with this interface performs full validation without
// producing any output.
class EmptyInterface {
 public:
  static constexpr Decoder::ValidateFlag validate = Decoder::kFullValidation;
  using Value = ValueBase<validate>;
  using Control = ControlBase<Value, validate>;
  using FullDecoder = WasmFullDecoder<validate, EmptyInterface>;

// Expands to an empty callback for each interface function.
#define DEFINE_EMPTY_CALLBACK(name, ...) \
  void name(FullDecoder* decoder, ##__VA_ARGS__) {}
  INTERFACE_FUNCTIONS(DEFINE_EMPTY_CALLBACK)
#undef DEFINE_EMPTY_CALLBACK
};
4522
4523 #undef TRACE
4524 #undef TRACE_INST_FORMAT
4525 #undef VALIDATE
4526 #undef CHECK_PROTOTYPE_OPCODE
4527
4528 } // namespace wasm
4529 } // namespace internal
4530 } // namespace v8
4531
4532 #endif // V8_WASM_FUNCTION_BODY_DECODER_IMPL_H_
4533