1 // Copyright 2015 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4
5 #include "src/signature.h"
6
7 #include "src/base/platform/elapsed-timer.h"
8 #include "src/bit-vector.h"
9 #include "src/flags.h"
10 #include "src/handles.h"
11 #include "src/objects-inl.h"
12 #include "src/zone/zone-containers.h"
13
14 #include "src/wasm/decoder.h"
15 #include "src/wasm/function-body-decoder-impl.h"
16 #include "src/wasm/function-body-decoder.h"
17 #include "src/wasm/wasm-limits.h"
18 #include "src/wasm/wasm-module.h"
19 #include "src/wasm/wasm-opcodes.h"
20
21 #include "src/ostreams.h"
22
23 #include "src/compiler/wasm-compiler.h"
24
25 namespace v8 {
26 namespace internal {
27 namespace wasm {
28
// Debug-only tracing: prints via PrintF when --trace-wasm-decoder is set.
// Compiles away entirely in release builds.
#if DEBUG
#define TRACE(...)                                    \
  do {                                                \
    if (FLAG_trace_wasm_decoder) PrintF(__VA_ARGS__); \
  } while (false)
#else
#define TRACE(...)
#endif
37
// Guard for opcodes gated behind a prototype flag. Records an error for
// asm.js-origin modules (prototype opcodes are not available there), and
// records an error and breaks out of the enclosing opcode-dispatch switch
// when the flag is not enabled. Only usable inside a switch/loop ("break").
#define CHECK_PROTOTYPE_OPCODE(flag)                           \
  if (module_ != nullptr && module_->origin == kAsmJsOrigin) { \
    error("Opcode not supported for asmjs modules");           \
  }                                                            \
  if (!FLAG_##flag) {                                          \
    error("Invalid opcode (enable with --" #flag ")");         \
    break;                                                     \
  }
46
47 // An SsaEnv environment carries the current local variable renaming
48 // as well as the current effect and control dependency in the TF graph.
49 // It maintains a control state that tracks whether the environment
50 // is reachable, has reached a control end, or has been merged.
51 struct SsaEnv {
52 enum State { kControlEnd, kUnreachable, kReached, kMerged };
53
54 State state;
55 TFNode* control;
56 TFNode* effect;
57 TFNode** locals;
58
gov8::internal::wasm::SsaEnv59 bool go() { return state >= kReached; }
Killv8::internal::wasm::SsaEnv60 void Kill(State new_state = kControlEnd) {
61 state = new_state;
62 locals = nullptr;
63 control = nullptr;
64 effect = nullptr;
65 }
SetNotMergedv8::internal::wasm::SsaEnv66 void SetNotMerged() {
67 if (state == kMerged) state = kReached;
68 }
69 };
70
// An entry on the value stack.
struct Value {
  const byte* pc;   // bytecode position that produced this value.
  TFNode* node;     // graph node; null when no graph is being built.
  ValueType type;
};
77
78 struct TryInfo : public ZoneObject {
79 SsaEnv* catch_env;
80 TFNode* exception;
81
TryInfov8::internal::wasm::TryInfo82 explicit TryInfo(SsaEnv* c) : catch_env(c), exception(nullptr) {}
83 };
84
85 struct MergeValues {
86 uint32_t arity;
87 union {
88 Value* array;
89 Value first;
90 } vals; // Either multiple values or a single value.
91
operator []v8::internal::wasm::MergeValues92 Value& operator[](size_t i) {
93 DCHECK_GT(arity, i);
94 return arity == 1 ? vals.first : vals.array[i];
95 }
96 };
97
// Placeholder used when brace-initializing an empty MergeValues.
static Value* NO_VALUE = nullptr;

// The kind of construct a Control entry represents.
enum ControlKind { kControlIf, kControlBlock, kControlLoop, kControlTry };
101
102 // An entry on the control stack (i.e. if, block, loop).
103 struct Control {
104 const byte* pc;
105 ControlKind kind;
106 size_t stack_depth; // stack height at the beginning of the construct.
107 SsaEnv* end_env; // end environment for the construct.
108 SsaEnv* false_env; // false environment (only for if).
109 TryInfo* try_info; // Information used for compiling try statements.
110 int32_t previous_catch; // The previous Control (on the stack) with a catch.
111 bool unreachable; // The current block has been ended.
112
113 // Values merged into the end of this control construct.
114 MergeValues merge;
115
is_ifv8::internal::wasm::Control116 inline bool is_if() const { return kind == kControlIf; }
is_blockv8::internal::wasm::Control117 inline bool is_block() const { return kind == kControlBlock; }
is_loopv8::internal::wasm::Control118 inline bool is_loop() const { return kind == kControlLoop; }
is_tryv8::internal::wasm::Control119 inline bool is_try() const { return kind == kControlTry; }
120
121 // Named constructors.
Blockv8::internal::wasm::Control122 static Control Block(const byte* pc, size_t stack_depth, SsaEnv* end_env,
123 int32_t previous_catch) {
124 return {pc, kControlBlock, stack_depth, end_env, nullptr,
125 nullptr, previous_catch, false, {0, {NO_VALUE}}};
126 }
127
Ifv8::internal::wasm::Control128 static Control If(const byte* pc, size_t stack_depth, SsaEnv* end_env,
129 SsaEnv* false_env, int32_t previous_catch) {
130 return {pc, kControlIf, stack_depth, end_env, false_env,
131 nullptr, previous_catch, false, {0, {NO_VALUE}}};
132 }
133
Loopv8::internal::wasm::Control134 static Control Loop(const byte* pc, size_t stack_depth, SsaEnv* end_env,
135 int32_t previous_catch) {
136 return {pc, kControlLoop, stack_depth, end_env, nullptr,
137 nullptr, previous_catch, false, {0, {NO_VALUE}}};
138 }
139
Tryv8::internal::wasm::Control140 static Control Try(const byte* pc, size_t stack_depth, SsaEnv* end_env,
141 Zone* zone, SsaEnv* catch_env, int32_t previous_catch) {
142 DCHECK_NOT_NULL(catch_env);
143 TryInfo* try_info = new (zone) TryInfo(catch_env);
144 return {pc, kControlTry, stack_depth, end_env, nullptr,
145 try_info, previous_catch, false, {0, {NO_VALUE}}};
146 }
147 };
148
// Macros that build nodes only if there is a graph and the current SSA
// environment is reachable from start. This avoids problems with malformed
// TF graphs when decoding inputs that have unreachable code.
// BUILD forwards arguments to the builder method; BUILD0 is the no-argument
// form. Both route the result through CheckForException.
#define BUILD(func, ...) \
  (build() ? CheckForException(builder_->func(__VA_ARGS__)) : nullptr)
#define BUILD0(func) (build() ? CheckForException(builder_->func()) : nullptr)
155
156 // Generic Wasm bytecode decoder with utilities for decoding operands,
157 // lengths, etc.
class WasmDecoder : public Decoder {
 public:
  // Decodes the byte range [start, end). |module| and |sig| may describe the
  // enclosing module/function; both are allowed to be null for isolated use.
  WasmDecoder(const WasmModule* module, FunctionSig* sig, const byte* start,
              const byte* end)
      : Decoder(start, end),
        module_(module),
        sig_(sig),
        local_types_(nullptr) {}
  const WasmModule* module_;  // enclosing module; may be null.
  FunctionSig* sig_;          // signature of the decoded function.

  // Types of parameters followed by declared locals; null until installed.
  ZoneVector<ValueType>* local_types_;

  // Number of locals including parameters; 0 when local_types_ is unset.
  size_t total_locals() const {
    return local_types_ == nullptr ? 0 : local_types_->size();
  }

  // Decodes the local declarations at the decoder's current position into
  // |type_list|, after seeding it with the parameter types of |sig| (if
  // non-null). Returns true on success; on malformed input, records an error
  // on |decoder| and returns false.
  static bool DecodeLocals(Decoder* decoder, const FunctionSig* sig,
                           ZoneVector<ValueType>* type_list) {
    DCHECK_NOT_NULL(type_list);
    // Initialize from signature.
    if (sig != nullptr) {
      type_list->reserve(sig->parameter_count());
      for (size_t i = 0; i < sig->parameter_count(); ++i) {
        type_list->push_back(sig->GetParam(i));
      }
    }
    // Decode local declarations, if any.
    uint32_t entries = decoder->consume_u32v("local decls count");
    if (decoder->failed()) return false;

    TRACE("local decls count: %u\n", entries);
    // Each entry is a (count, type) pair declaring a run of locals.
    while (entries-- > 0 && decoder->ok() && decoder->more()) {
      uint32_t count = decoder->consume_u32v("local count");
      if (decoder->failed()) return false;

      // Enforce the engine-wide cap on the total number of locals.
      if ((count + type_list->size()) > kV8MaxWasmFunctionLocals) {
        decoder->error(decoder->pc() - 1, "local count too large");
        return false;
      }
      byte code = decoder->consume_u8("local type");
      if (decoder->failed()) return false;

      ValueType type;
      switch (code) {
        case kLocalI32:
          type = kWasmI32;
          break;
        case kLocalI64:
          type = kWasmI64;
          break;
        case kLocalF32:
          type = kWasmF32;
          break;
        case kLocalF64:
          type = kWasmF64;
          break;
        case kLocalS128:
          type = kWasmS128;
          break;
        case kLocalS1x4:
          type = kWasmS1x4;
          break;
        case kLocalS1x8:
          type = kWasmS1x8;
          break;
        case kLocalS1x16:
          type = kWasmS1x16;
          break;
        default:
          decoder->error(decoder->pc() - 1, "invalid local type");
          return false;
      }
      type_list->insert(type_list->end(), count, type);
    }
    DCHECK(decoder->ok());
    return true;
  }

  // Computes which locals are assigned (set_local/tee_local) anywhere inside
  // the loop whose kExprLoop opcode is at |pc|. Returns a zone-allocated
  // BitVector of size |locals_count|, or null if |pc| does not point at a
  // loop or decoding fails.
  static BitVector* AnalyzeLoopAssignment(Decoder* decoder, const byte* pc,
                                          int locals_count, Zone* zone) {
    if (pc >= decoder->end()) return nullptr;
    if (*pc != kExprLoop) return nullptr;

    BitVector* assigned = new (zone) BitVector(locals_count, zone);
    int depth = 0;
    // Iteratively process all AST nodes nested inside the loop.
    while (pc < decoder->end() && decoder->ok()) {
      WasmOpcode opcode = static_cast<WasmOpcode>(*pc);
      unsigned length = 1;
      switch (opcode) {
        case kExprLoop:
        case kExprIf:
        case kExprBlock:
        case kExprTry:
          length = OpcodeLength(decoder, pc);
          depth++;
          break;
        case kExprSetLocal:  // fallthru
        case kExprTeeLocal: {
          LocalIndexOperand operand(decoder, pc);
          if (assigned->length() > 0 &&
              operand.index < static_cast<uint32_t>(assigned->length())) {
            // Unverified code might have an out-of-bounds index.
            assigned->Add(operand.index);
          }
          length = 1 + operand.length;
          break;
        }
        case kExprEnd:
          // Leaving a nested construct; depth 0 means the loop itself ended.
          depth--;
          break;
        default:
          length = OpcodeLength(decoder, pc);
          break;
      }
      if (depth <= 0) break;
      pc += length;
    }
    return decoder->ok() ? assigned : nullptr;
  }

  // Validates a local index and fills in the operand's type from
  // local_types_ (kWasmStmt when local types are not installed).
  inline bool Validate(const byte* pc, LocalIndexOperand& operand) {
    if (operand.index < total_locals()) {
      if (local_types_) {
        operand.type = local_types_->at(operand.index);
      } else {
        operand.type = kWasmStmt;
      }
      return true;
    }
    error(pc, pc + 1, "invalid local index: %u", operand.index);
    return false;
  }

  // Validates a global index and fills in the operand's global and type.
  inline bool Validate(const byte* pc, GlobalIndexOperand& operand) {
    if (module_ != nullptr && operand.index < module_->globals.size()) {
      operand.global = &module_->globals[operand.index];
      operand.type = operand.global->type;
      return true;
    }
    error(pc, pc + 1, "invalid global index: %u", operand.index);
    return false;
  }

  // Fills in the callee's signature if the function index is valid; does not
  // record an error on failure (see Validate below for that).
  inline bool Complete(const byte* pc, CallFunctionOperand& operand) {
    if (module_ != nullptr && operand.index < module_->functions.size()) {
      operand.sig = module_->functions[operand.index].sig;
      return true;
    }
    return false;
  }

  // Validates a direct-call operand; records an error on a bad index.
  inline bool Validate(const byte* pc, CallFunctionOperand& operand) {
    if (Complete(pc, operand)) {
      return true;
    }
    error(pc, pc + 1, "invalid function index: %u", operand.index);
    return false;
  }

  // Fills in the signature for an indirect call if the signature index is
  // valid; does not record an error on failure.
  inline bool Complete(const byte* pc, CallIndirectOperand& operand) {
    if (module_ != nullptr && operand.index < module_->signatures.size()) {
      operand.sig = module_->signatures[operand.index];
      return true;
    }
    return false;
  }

  // Validates an indirect-call operand: requires a function table to exist
  // and the signature index to be in range; records errors otherwise.
  inline bool Validate(const byte* pc, CallIndirectOperand& operand) {
    if (module_ == nullptr || module_->function_tables.empty()) {
      error("function table has to exist to execute call_indirect");
      return false;
    }
    if (Complete(pc, operand)) {
      return true;
    }
    error(pc, pc + 1, "invalid signature index: #%u", operand.index);
    return false;
  }

  // Validates a break depth against the current control stack and resolves
  // the operand's target Control entry.
  inline bool Validate(const byte* pc, BreakDepthOperand& operand,
                       ZoneVector<Control>& control) {
    if (operand.depth < control.size()) {
      operand.target = &control[control.size() - operand.depth - 1];
      return true;
    }
    error(pc, pc + 1, "invalid break depth: %u", operand.depth);
    return false;
  }

  // br_table targets are validated individually during decoding; this hook
  // currently accepts everything.
  bool Validate(const byte* pc, BranchTableOperand& operand,
                size_t block_depth) {
    // TODO(titzer): add extra redundant validation for br_table here?
    return true;
  }

  // Validates a SIMD lane index against the lane count of the opcode.
  inline bool Validate(const byte* pc, WasmOpcode opcode,
                       SimdLaneOperand& operand) {
    uint8_t num_lanes = 0;
    switch (opcode) {
      case kExprF32x4ExtractLane:
      case kExprF32x4ReplaceLane:
      case kExprI32x4ExtractLane:
      case kExprI32x4ReplaceLane:
        num_lanes = 4;
        break;
      case kExprI16x8ExtractLane:
      case kExprI16x8ReplaceLane:
        num_lanes = 8;
        break;
      case kExprI8x16ExtractLane:
      case kExprI8x16ReplaceLane:
        num_lanes = 16;
        break;
      default:
        UNREACHABLE();
        break;
    }
    if (operand.lane < 0 || operand.lane >= num_lanes) {
      error(pc_, pc_ + 2, "invalid lane index");
      return false;
    } else {
      return true;
    }
  }

  // Validates a SIMD shift amount against the element width of the opcode.
  inline bool Validate(const byte* pc, WasmOpcode opcode,
                       SimdShiftOperand& operand) {
    uint8_t max_shift = 0;
    switch (opcode) {
      case kExprI32x4Shl:
      case kExprI32x4ShrS:
      case kExprI32x4ShrU:
        max_shift = 32;
        break;
      case kExprI16x8Shl:
      case kExprI16x8ShrS:
      case kExprI16x8ShrU:
        max_shift = 16;
        break;
      case kExprI8x16Shl:
      case kExprI8x16ShrS:
      case kExprI8x16ShrU:
        max_shift = 8;
        break;
      default:
        UNREACHABLE();
        break;
    }
    if (operand.shift < 0 || operand.shift >= max_shift) {
      error(pc_, pc_ + 2, "invalid shift amount");
      return false;
    } else {
      return true;
    }
  }

  // Returns the total byte length of the instruction at |pc|, including its
  // opcode (and prefix byte for SIMD) and all immediate operands.
  static unsigned OpcodeLength(Decoder* decoder, const byte* pc) {
    switch (static_cast<byte>(*pc)) {
#define DECLARE_OPCODE_CASE(name, opcode, sig) case kExpr##name:
      FOREACH_LOAD_MEM_OPCODE(DECLARE_OPCODE_CASE)
      FOREACH_STORE_MEM_OPCODE(DECLARE_OPCODE_CASE)
#undef DECLARE_OPCODE_CASE
      {
        MemoryAccessOperand operand(decoder, pc, UINT32_MAX);
        return 1 + operand.length;
      }
      case kExprBr:
      case kExprBrIf: {
        BreakDepthOperand operand(decoder, pc);
        return 1 + operand.length;
      }
      case kExprSetGlobal:
      case kExprGetGlobal: {
        GlobalIndexOperand operand(decoder, pc);
        return 1 + operand.length;
      }

      case kExprCallFunction: {
        CallFunctionOperand operand(decoder, pc);
        return 1 + operand.length;
      }
      case kExprCallIndirect: {
        CallIndirectOperand operand(decoder, pc);
        return 1 + operand.length;
      }

      case kExprTry:
      case kExprIf:  // fall thru
      case kExprLoop:
      case kExprBlock: {
        BlockTypeOperand operand(decoder, pc);
        return 1 + operand.length;
      }

      case kExprSetLocal:
      case kExprTeeLocal:
      case kExprGetLocal:
      case kExprCatch: {
        LocalIndexOperand operand(decoder, pc);
        return 1 + operand.length;
      }
      case kExprBrTable: {
        // br_table has a variable-length target list; iterate to its end.
        BranchTableOperand operand(decoder, pc);
        BranchTableIterator iterator(decoder, operand);
        return 1 + iterator.length();
      }
      case kExprI32Const: {
        ImmI32Operand operand(decoder, pc);
        return 1 + operand.length;
      }
      case kExprI64Const: {
        ImmI64Operand operand(decoder, pc);
        return 1 + operand.length;
      }
      case kExprGrowMemory:
      case kExprMemorySize: {
        MemoryIndexOperand operand(decoder, pc);
        return 1 + operand.length;
      }
      case kExprF32Const:
        // Opcode plus fixed 4-byte immediate.
        return 5;
      case kExprF64Const:
        // Opcode plus fixed 8-byte immediate.
        return 9;
      case kSimdPrefix: {
        byte simd_index = decoder->checked_read_u8(pc, 1, "simd_index");
        WasmOpcode opcode =
            static_cast<WasmOpcode>(kSimdPrefix << 8 | simd_index);
        switch (opcode) {
#define DECLARE_OPCODE_CASE(name, opcode, sig) case kExpr##name:
          FOREACH_SIMD_0_OPERAND_OPCODE(DECLARE_OPCODE_CASE)
#undef DECLARE_OPCODE_CASE
          {
            // Prefix byte plus SIMD opcode byte, no operands.
            return 2;
          }
#define DECLARE_OPCODE_CASE(name, opcode, sig) case kExpr##name:
          FOREACH_SIMD_1_OPERAND_OPCODE(DECLARE_OPCODE_CASE)
#undef DECLARE_OPCODE_CASE
          {
            // Prefix byte, SIMD opcode byte, one operand byte.
            return 3;
          }
          default:
            decoder->error(pc, "invalid SIMD opcode");
            return 2;
        }
      }
      default:
        return 1;
    }
  }
};
510
511 static const int32_t kNullCatch = -1;
512
513 // The full WASM decoder for bytecode. Verifies bytecode and, optionally,
514 // generates a TurboFan IR graph.
515 class WasmFullDecoder : public WasmDecoder {
516 public:
  // Validation-only decoder: no TurboFan builder, so no graph is produced.
  WasmFullDecoder(Zone* zone, const wasm::WasmModule* module,
                  const FunctionBody& body)
      : WasmFullDecoder(zone, module, nullptr, body) {}
520
  // Graph-building decoder: the module is taken from the builder's module
  // environment, which may be absent (then the module is null).
  WasmFullDecoder(Zone* zone, TFBuilder* builder, const FunctionBody& body)
      : WasmFullDecoder(zone, builder->module_env() == nullptr
                                  ? nullptr
                                  : builder->module_env()->module,
                        builder, body) {}
526
  // Decodes the whole function body: locals, then the instruction stream.
  // Returns true on success; on failure the error has been recorded (and
  // traced via TraceFailed where applicable).
  bool Decode() {
    if (FLAG_wasm_code_fuzzer_gen_test) {
      PrintRawWasmCode(start_, end_);
    }
    base::ElapsedTimer decode_timer;
    if (FLAG_trace_wasm_decode_time) {
      decode_timer.Start();
    }
    // Reset per-decode state in case this decoder is reused.
    stack_.clear();
    control_.clear();

    if (end_ < pc_) {
      error("function body end < start");
      return false;
    }

    DCHECK_EQ(0, local_types_->size());
    WasmDecoder::DecodeLocals(this, sig_, local_types_);
    InitSsaEnv();
    DecodeFunctionBody();

    if (failed()) return TraceFailed();

    if (!control_.empty()) {
      // Generate a better error message whether the unterminated control
      // structure is the function body block or an inner structure.
      if (control_.size() > 1) {
        error(pc_, control_.back().pc, "unterminated control structure");
      } else {
        error("function body must end with \"end\" opcode.");
      }
      return TraceFailed();
    }

    // The body must terminate with the "end" of the implicit function block.
    if (!last_end_found_) {
      error("function body must end with \"end\" opcode.");
      return false;
    }

    if (FLAG_trace_wasm_decode_time) {
      double ms = decode_timer.Elapsed().InMillisecondsF();
      PrintF("wasm-decode %s (%0.3f ms)\n\n", ok() ? "ok" : "failed", ms);
    } else {
      TRACE("wasm-decode %s\n\n", ok() ? "ok" : "failed");
    }

    return true;
  }
575
  // Traces the recorded error with module- and function-relative offsets.
  // Always returns false so callers can write "return TraceFailed();".
  bool TraceFailed() {
    TRACE("wasm-error module+%-6d func+%d: %s\n\n", baserel(error_pc_),
          startrel(error_pc_), error_msg_.get());
    return false;
  }
581
582 private:
  // Shared delegate constructor: initializes all decoder state and points
  // the base class's local_types_ at this decoder's own zone vector.
  WasmFullDecoder(Zone* zone, const wasm::WasmModule* module,
                  TFBuilder* builder, const FunctionBody& body)
      : WasmDecoder(module, body.sig, body.start, body.end),
        zone_(zone),
        builder_(builder),
        base_(body.base),
        local_type_vec_(zone),
        stack_(zone),
        control_(zone),
        last_end_found_(false),
        current_catch_(kNullCatch) {
    local_types_ = &local_type_vec_;
  }
596
  // Buffer size for formatted error messages (use not visible in this file
  // region — presumably shared with error formatting helpers).
  static const size_t kErrorMsgSize = 128;

  Zone* zone_;
  TFBuilder* builder_;  // null when only validating (no graph is built).
  const byte* base_;    // copied from FunctionBody::base.

  SsaEnv* ssa_env_;  // current SSA environment.

  ZoneVector<ValueType> local_type_vec_;  // types of local variables.
  ZoneVector<Value> stack_;               // stack of values.
  ZoneVector<Control> control_;           // stack of blocks, loops, and ifs.
  bool last_end_found_;  // whether the implicit function "end" was decoded.

  // Index into control_ of the innermost try with a catch, or kNullCatch.
  int32_t current_catch_;

  TryInfo* current_try_info() { return control_[current_catch_].try_info; }

  // Graph nodes are built only when there is a builder and the current SSA
  // environment is still reachable.
  inline bool build() { return builder_ && ssa_env_->go(); }
615
  // Creates and installs the initial SSA environment. When building a graph,
  // parameters are wired to Param nodes and the remaining locals are given
  // type-appropriate default values; a prologue stack check is emitted.
  void InitSsaEnv() {
    TFNode* start = nullptr;
    SsaEnv* ssa_env = reinterpret_cast<SsaEnv*>(zone_->New(sizeof(SsaEnv)));
    size_t size = sizeof(TFNode*) * EnvironmentCount();
    ssa_env->state = SsaEnv::kReached;
    ssa_env->locals =
        size > 0 ? reinterpret_cast<TFNode**>(zone_->New(size)) : nullptr;

    if (builder_) {
      // One extra input on Start for the implicit context/closure slot —
      // TODO(review): confirm against TFBuilder::Start.
      start = builder_->Start(static_cast<int>(sig_->parameter_count() + 1));
      // Initialize local variables.
      uint32_t index = 0;
      while (index < sig_->parameter_count()) {
        ssa_env->locals[index] = builder_->Param(index);
        index++;
      }
      while (index < local_type_vec_.size()) {
        ValueType type = local_type_vec_[index];
        TFNode* node = DefaultValue(type);
        while (index < local_type_vec_.size() &&
               local_type_vec_[index] == type) {
          // Do a whole run of like-typed locals at a time.
          ssa_env->locals[index++] = node;
        }
      }
    }
    ssa_env->control = start;
    ssa_env->effect = start;
    SetEnv("initial", ssa_env);
    if (builder_) {
      // The function-prologue stack check is associated with position 0, which
      // is never a position of any instruction in the function.
      builder_->StackCheck(0);
    }
  }
651
DefaultValue(ValueType type)652 TFNode* DefaultValue(ValueType type) {
653 switch (type) {
654 case kWasmI32:
655 return builder_->Int32Constant(0);
656 case kWasmI64:
657 return builder_->Int64Constant(0);
658 case kWasmF32:
659 return builder_->Float32Constant(0);
660 case kWasmF64:
661 return builder_->Float64Constant(0);
662 case kWasmS128:
663 return builder_->CreateS128Value(0);
664 default:
665 UNREACHABLE();
666 return nullptr;
667 }
668 }
669
indentation()670 char* indentation() {
671 static const int kMaxIndent = 64;
672 static char bytes[kMaxIndent + 1];
673 for (int i = 0; i < kMaxIndent; ++i) bytes[i] = ' ';
674 bytes[kMaxIndent] = 0;
675 if (stack_.size() < kMaxIndent / 2) {
676 bytes[stack_.size() * 2] = 0;
677 }
678 return bytes;
679 }
680
CheckHasMemory()681 bool CheckHasMemory() {
682 if (!module_->has_memory) {
683 error(pc_ - 1, "memory instruction with no memory");
684 }
685 return module_->has_memory;
686 }
687
688 // Decodes the body of a function.
DecodeFunctionBody()689 void DecodeFunctionBody() {
690 TRACE("wasm-decode %p...%p (module+%d, %d bytes) %s\n",
691 reinterpret_cast<const void*>(start_),
692 reinterpret_cast<const void*>(end_), baserel(pc_),
693 static_cast<int>(end_ - start_), builder_ ? "graph building" : "");
694
695 {
696 // Set up initial function block.
697 SsaEnv* break_env = ssa_env_;
698 SetEnv("initial env", Steal(break_env));
699 PushBlock(break_env);
700 Control* c = &control_.back();
701 c->merge.arity = static_cast<uint32_t>(sig_->return_count());
702
703 if (c->merge.arity == 1) {
704 c->merge.vals.first = {pc_, nullptr, sig_->GetReturn(0)};
705 } else if (c->merge.arity > 1) {
706 c->merge.vals.array = zone_->NewArray<Value>(c->merge.arity);
707 for (unsigned i = 0; i < c->merge.arity; i++) {
708 c->merge.vals.array[i] = {pc_, nullptr, sig_->GetReturn(i)};
709 }
710 }
711 }
712
713 while (pc_ < end_) { // decoding loop.
714 unsigned len = 1;
715 WasmOpcode opcode = static_cast<WasmOpcode>(*pc_);
716 #if DEBUG
717 if (FLAG_trace_wasm_decoder && !WasmOpcodes::IsPrefixOpcode(opcode)) {
718 TRACE(" @%-8d #%-20s|", startrel(pc_),
719 WasmOpcodes::OpcodeName(opcode));
720 }
721 #endif
722
723 FunctionSig* sig = WasmOpcodes::Signature(opcode);
724 if (sig) {
725 BuildSimpleOperator(opcode, sig);
726 } else {
727 // Complex bytecode.
728 switch (opcode) {
729 case kExprNop:
730 break;
731 case kExprBlock: {
732 // The break environment is the outer environment.
733 BlockTypeOperand operand(this, pc_);
734 SsaEnv* break_env = ssa_env_;
735 PushBlock(break_env);
736 SetEnv("block:start", Steal(break_env));
737 SetBlockType(&control_.back(), operand);
738 len = 1 + operand.length;
739 break;
740 }
741 case kExprThrow: {
742 CHECK_PROTOTYPE_OPCODE(wasm_eh_prototype);
743 Value value = Pop(0, kWasmI32);
744 BUILD(Throw, value.node);
745 // TODO(titzer): Throw should end control, but currently we build a
746 // (reachable) runtime call instead of connecting it directly to
747 // end.
748 // EndControl();
749 break;
750 }
751 case kExprTry: {
752 CHECK_PROTOTYPE_OPCODE(wasm_eh_prototype);
753 BlockTypeOperand operand(this, pc_);
754 SsaEnv* outer_env = ssa_env_;
755 SsaEnv* try_env = Steal(outer_env);
756 SsaEnv* catch_env = UnreachableEnv();
757 PushTry(outer_env, catch_env);
758 SetEnv("try_catch:start", try_env);
759 SetBlockType(&control_.back(), operand);
760 len = 1 + operand.length;
761 break;
762 }
763 case kExprCatch: {
764 CHECK_PROTOTYPE_OPCODE(wasm_eh_prototype);
765 LocalIndexOperand operand(this, pc_);
766 len = 1 + operand.length;
767
768 if (control_.empty()) {
769 error("catch does not match any try");
770 break;
771 }
772
773 Control* c = &control_.back();
774 if (!c->is_try()) {
775 error("catch does not match any try");
776 break;
777 }
778
779 if (c->try_info->catch_env == nullptr) {
780 error(pc_, "catch already present for try with catch");
781 break;
782 }
783
784 FallThruTo(c);
785 stack_.resize(c->stack_depth);
786
787 DCHECK_NOT_NULL(c->try_info);
788 SsaEnv* catch_env = c->try_info->catch_env;
789 c->try_info->catch_env = nullptr;
790 SetEnv("catch:begin", catch_env);
791 current_catch_ = c->previous_catch;
792
793 if (Validate(pc_, operand)) {
794 if (ssa_env_->locals) {
795 TFNode* exception_as_i32 =
796 BUILD(Catch, c->try_info->exception, position());
797 ssa_env_->locals[operand.index] = exception_as_i32;
798 }
799 }
800
801 break;
802 }
803 case kExprLoop: {
804 BlockTypeOperand operand(this, pc_);
805 SsaEnv* finish_try_env = Steal(ssa_env_);
806 // The continue environment is the inner environment.
807 SsaEnv* loop_body_env = PrepareForLoop(pc_, finish_try_env);
808 SetEnv("loop:start", loop_body_env);
809 ssa_env_->SetNotMerged();
810 PushLoop(finish_try_env);
811 SetBlockType(&control_.back(), operand);
812 len = 1 + operand.length;
813 break;
814 }
815 case kExprIf: {
816 // Condition on top of stack. Split environments for branches.
817 BlockTypeOperand operand(this, pc_);
818 Value cond = Pop(0, kWasmI32);
819 TFNode* if_true = nullptr;
820 TFNode* if_false = nullptr;
821 BUILD(BranchNoHint, cond.node, &if_true, &if_false);
822 SsaEnv* end_env = ssa_env_;
823 SsaEnv* false_env = Split(ssa_env_);
824 false_env->control = if_false;
825 SsaEnv* true_env = Steal(ssa_env_);
826 true_env->control = if_true;
827 PushIf(end_env, false_env);
828 SetEnv("if:true", true_env);
829 SetBlockType(&control_.back(), operand);
830 len = 1 + operand.length;
831 break;
832 }
833 case kExprElse: {
834 if (control_.empty()) {
835 error("else does not match any if");
836 break;
837 }
838 Control* c = &control_.back();
839 if (!c->is_if()) {
840 error(pc_, c->pc, "else does not match an if");
841 break;
842 }
843 if (c->false_env == nullptr) {
844 error(pc_, c->pc, "else already present for if");
845 break;
846 }
847 FallThruTo(c);
848 stack_.resize(c->stack_depth);
849 // Switch to environment for false branch.
850 SetEnv("if_else:false", c->false_env);
851 c->false_env = nullptr; // record that an else is already seen
852 break;
853 }
854 case kExprEnd: {
855 if (control_.empty()) {
856 error("end does not match any if, try, or block");
857 return;
858 }
859 const char* name = "block:end";
860 Control* c = &control_.back();
861 if (c->is_loop()) {
862 // A loop just leaves the values on the stack.
863 TypeCheckFallThru(c);
864 if (c->unreachable) PushEndValues(c);
865 PopControl();
866 SetEnv("loop:end", ssa_env_);
867 break;
868 }
869 if (c->is_if()) {
870 if (c->false_env != nullptr) {
871 // End the true branch of a one-armed if.
872 Goto(c->false_env, c->end_env);
873 if (!c->unreachable && stack_.size() != c->stack_depth) {
874 error("end of if expected empty stack");
875 stack_.resize(c->stack_depth);
876 }
877 if (c->merge.arity > 0) {
878 error("non-void one-armed if");
879 }
880 name = "if:merge";
881 } else {
882 // End the false branch of a two-armed if.
883 name = "if_else:merge";
884 }
885 } else if (c->is_try()) {
886 name = "try:end";
887
888 // validate that catch was seen.
889 if (c->try_info->catch_env != nullptr) {
890 error(pc_, "missing catch in try");
891 break;
892 }
893 }
894 FallThruTo(c);
895 SetEnv(name, c->end_env);
896 PushEndValues(c);
897
898 if (control_.size() == 1) {
899 // If at the last (implicit) control, check we are at end.
900 if (pc_ + 1 != end_) {
901 error(pc_, pc_ + 1, "trailing code after function end");
902 break;
903 }
904 last_end_found_ = true;
905 if (ssa_env_->go()) {
906 // The result of the block is the return value.
907 TRACE(" @%-8d #xx:%-20s|", startrel(pc_), "(implicit) return");
908 DoReturn();
909 TRACE("\n");
910 } else {
911 TypeCheckFallThru(c);
912 }
913 }
914 PopControl();
915 break;
916 }
917 case kExprSelect: {
918 Value cond = Pop(2, kWasmI32);
919 Value fval = Pop();
920 Value tval = Pop(0, fval.type);
921 if (build()) {
922 TFNode* controls[2];
923 builder_->BranchNoHint(cond.node, &controls[0], &controls[1]);
924 TFNode* merge = builder_->Merge(2, controls);
925 TFNode* vals[2] = {tval.node, fval.node};
926 TFNode* phi = builder_->Phi(tval.type, 2, vals, merge);
927 Push(tval.type, phi);
928 ssa_env_->control = merge;
929 } else {
930 Push(tval.type == kWasmVar ? fval.type : tval.type, nullptr);
931 }
932 break;
933 }
934 case kExprBr: {
935 BreakDepthOperand operand(this, pc_);
936 if (Validate(pc_, operand, control_)) {
937 BreakTo(operand.depth);
938 }
939 len = 1 + operand.length;
940 EndControl();
941 break;
942 }
943 case kExprBrIf: {
944 BreakDepthOperand operand(this, pc_);
945 Value cond = Pop(0, kWasmI32);
946 if (ok() && Validate(pc_, operand, control_)) {
947 SsaEnv* fenv = ssa_env_;
948 SsaEnv* tenv = Split(fenv);
949 fenv->SetNotMerged();
950 BUILD(BranchNoHint, cond.node, &tenv->control, &fenv->control);
951 ssa_env_ = tenv;
952 BreakTo(operand.depth);
953 ssa_env_ = fenv;
954 }
955 len = 1 + operand.length;
956 break;
957 }
958 case kExprBrTable: {
959 BranchTableOperand operand(this, pc_);
960 BranchTableIterator iterator(this, operand);
961 if (Validate(pc_, operand, control_.size())) {
962 Value key = Pop(0, kWasmI32);
963 if (failed()) break;
964
965 SsaEnv* break_env = ssa_env_;
966 if (operand.table_count > 0) {
967 // Build branches to the various blocks based on the table.
968 TFNode* sw = BUILD(Switch, operand.table_count + 1, key.node);
969
970 SsaEnv* copy = Steal(break_env);
971 ssa_env_ = copy;
972 MergeValues* merge = nullptr;
973 while (ok() && iterator.has_next()) {
974 uint32_t i = iterator.cur_index();
975 const byte* pos = iterator.pc();
976 uint32_t target = iterator.next();
977 if (target >= control_.size()) {
978 error(pos, "improper branch in br_table");
979 break;
980 }
981 ssa_env_ = Split(copy);
982 ssa_env_->control = (i == operand.table_count)
983 ? BUILD(IfDefault, sw)
984 : BUILD(IfValue, i, sw);
985 BreakTo(target);
986
987 // Check that label types match up.
988 Control* c = &control_[control_.size() - target - 1];
989 if (i == 0) {
990 merge = &c->merge;
991 } else if (merge->arity != c->merge.arity) {
992 error(pos, pos, "inconsistent arity in br_table target %d"
993 " (previous was %u, this one %u)",
994 i, merge->arity, c->merge.arity);
995 } else if (control_.back().unreachable) {
996 for (uint32_t j = 0; ok() && j < merge->arity; ++j) {
997 if ((*merge)[j].type != c->merge[j].type) {
998 error(pos, pos,
999 "type error in br_table target %d operand %d"
1000 " (previous expected %s, this one %s)", i, j,
1001 WasmOpcodes::TypeName((*merge)[j].type),
1002 WasmOpcodes::TypeName(c->merge[j].type));
1003 }
1004 }
1005 }
1006 }
1007 if (failed()) break;
1008 } else {
1009 // Only a default target. Do the equivalent of br.
1010 const byte* pos = iterator.pc();
1011 uint32_t target = iterator.next();
1012 if (target >= control_.size()) {
1013 error(pos, "improper branch in br_table");
1014 break;
1015 }
1016 BreakTo(target);
1017 }
1018 // br_table ends the control flow like br.
1019 ssa_env_ = break_env;
1020 }
1021 len = 1 + iterator.length();
1022 EndControl();
1023 break;
1024 }
1025 case kExprReturn: {
1026 DoReturn();
1027 break;
1028 }
1029 case kExprUnreachable: {
1030 BUILD(Unreachable, position());
1031 EndControl();
1032 break;
1033 }
1034 case kExprI32Const: {
1035 ImmI32Operand operand(this, pc_);
1036 Push(kWasmI32, BUILD(Int32Constant, operand.value));
1037 len = 1 + operand.length;
1038 break;
1039 }
1040 case kExprI64Const: {
1041 ImmI64Operand operand(this, pc_);
1042 Push(kWasmI64, BUILD(Int64Constant, operand.value));
1043 len = 1 + operand.length;
1044 break;
1045 }
1046 case kExprF32Const: {
1047 ImmF32Operand operand(this, pc_);
1048 Push(kWasmF32, BUILD(Float32Constant, operand.value));
1049 len = 1 + operand.length;
1050 break;
1051 }
1052 case kExprF64Const: {
1053 ImmF64Operand operand(this, pc_);
1054 Push(kWasmF64, BUILD(Float64Constant, operand.value));
1055 len = 1 + operand.length;
1056 break;
1057 }
1058 case kExprGetLocal: {
1059 LocalIndexOperand operand(this, pc_);
1060 if (Validate(pc_, operand)) {
1061 if (build()) {
1062 Push(operand.type, ssa_env_->locals[operand.index]);
1063 } else {
1064 Push(operand.type, nullptr);
1065 }
1066 }
1067 len = 1 + operand.length;
1068 break;
1069 }
1070 case kExprSetLocal: {
1071 LocalIndexOperand operand(this, pc_);
1072 if (Validate(pc_, operand)) {
1073 Value val = Pop(0, local_type_vec_[operand.index]);
1074 if (ssa_env_->locals) ssa_env_->locals[operand.index] = val.node;
1075 }
1076 len = 1 + operand.length;
1077 break;
1078 }
1079 case kExprTeeLocal: {
1080 LocalIndexOperand operand(this, pc_);
1081 if (Validate(pc_, operand)) {
1082 Value val = Pop(0, local_type_vec_[operand.index]);
1083 if (ssa_env_->locals) ssa_env_->locals[operand.index] = val.node;
1084 Push(val.type, val.node);
1085 }
1086 len = 1 + operand.length;
1087 break;
1088 }
1089 case kExprDrop: {
1090 Pop();
1091 break;
1092 }
1093 case kExprGetGlobal: {
1094 GlobalIndexOperand operand(this, pc_);
1095 if (Validate(pc_, operand)) {
1096 Push(operand.type, BUILD(GetGlobal, operand.index));
1097 }
1098 len = 1 + operand.length;
1099 break;
1100 }
1101 case kExprSetGlobal: {
1102 GlobalIndexOperand operand(this, pc_);
1103 if (Validate(pc_, operand)) {
1104 if (operand.global->mutability) {
1105 Value val = Pop(0, operand.type);
1106 BUILD(SetGlobal, operand.index, val.node);
1107 } else {
1108 error(pc_, pc_ + 1, "immutable global #%u cannot be assigned",
1109 operand.index);
1110 }
1111 }
1112 len = 1 + operand.length;
1113 break;
1114 }
1115 case kExprI32LoadMem8S:
1116 len = DecodeLoadMem(kWasmI32, MachineType::Int8());
1117 break;
1118 case kExprI32LoadMem8U:
1119 len = DecodeLoadMem(kWasmI32, MachineType::Uint8());
1120 break;
1121 case kExprI32LoadMem16S:
1122 len = DecodeLoadMem(kWasmI32, MachineType::Int16());
1123 break;
1124 case kExprI32LoadMem16U:
1125 len = DecodeLoadMem(kWasmI32, MachineType::Uint16());
1126 break;
1127 case kExprI32LoadMem:
1128 len = DecodeLoadMem(kWasmI32, MachineType::Int32());
1129 break;
1130 case kExprI64LoadMem8S:
1131 len = DecodeLoadMem(kWasmI64, MachineType::Int8());
1132 break;
1133 case kExprI64LoadMem8U:
1134 len = DecodeLoadMem(kWasmI64, MachineType::Uint8());
1135 break;
1136 case kExprI64LoadMem16S:
1137 len = DecodeLoadMem(kWasmI64, MachineType::Int16());
1138 break;
1139 case kExprI64LoadMem16U:
1140 len = DecodeLoadMem(kWasmI64, MachineType::Uint16());
1141 break;
1142 case kExprI64LoadMem32S:
1143 len = DecodeLoadMem(kWasmI64, MachineType::Int32());
1144 break;
1145 case kExprI64LoadMem32U:
1146 len = DecodeLoadMem(kWasmI64, MachineType::Uint32());
1147 break;
1148 case kExprI64LoadMem:
1149 len = DecodeLoadMem(kWasmI64, MachineType::Int64());
1150 break;
1151 case kExprF32LoadMem:
1152 len = DecodeLoadMem(kWasmF32, MachineType::Float32());
1153 break;
1154 case kExprF64LoadMem:
1155 len = DecodeLoadMem(kWasmF64, MachineType::Float64());
1156 break;
1157 case kExprI32StoreMem8:
1158 len = DecodeStoreMem(kWasmI32, MachineType::Int8());
1159 break;
1160 case kExprI32StoreMem16:
1161 len = DecodeStoreMem(kWasmI32, MachineType::Int16());
1162 break;
1163 case kExprI32StoreMem:
1164 len = DecodeStoreMem(kWasmI32, MachineType::Int32());
1165 break;
1166 case kExprI64StoreMem8:
1167 len = DecodeStoreMem(kWasmI64, MachineType::Int8());
1168 break;
1169 case kExprI64StoreMem16:
1170 len = DecodeStoreMem(kWasmI64, MachineType::Int16());
1171 break;
1172 case kExprI64StoreMem32:
1173 len = DecodeStoreMem(kWasmI64, MachineType::Int32());
1174 break;
1175 case kExprI64StoreMem:
1176 len = DecodeStoreMem(kWasmI64, MachineType::Int64());
1177 break;
1178 case kExprF32StoreMem:
1179 len = DecodeStoreMem(kWasmF32, MachineType::Float32());
1180 break;
1181 case kExprF64StoreMem:
1182 len = DecodeStoreMem(kWasmF64, MachineType::Float64());
1183 break;
1184 case kExprGrowMemory: {
1185 if (!CheckHasMemory()) break;
1186 MemoryIndexOperand operand(this, pc_);
1187 DCHECK_NOT_NULL(module_);
1188 if (module_->origin != kAsmJsOrigin) {
1189 Value val = Pop(0, kWasmI32);
1190 Push(kWasmI32, BUILD(GrowMemory, val.node));
1191 } else {
1192 error("grow_memory is not supported for asmjs modules");
1193 }
1194 len = 1 + operand.length;
1195 break;
1196 }
1197 case kExprMemorySize: {
1198 if (!CheckHasMemory()) break;
1199 MemoryIndexOperand operand(this, pc_);
1200 Push(kWasmI32, BUILD(CurrentMemoryPages));
1201 len = 1 + operand.length;
1202 break;
1203 }
1204 case kExprCallFunction: {
1205 CallFunctionOperand operand(this, pc_);
1206 if (Validate(pc_, operand)) {
1207 TFNode** buffer = PopArgs(operand.sig);
1208 TFNode** rets = nullptr;
1209 BUILD(CallDirect, operand.index, buffer, &rets, position());
1210 PushReturns(operand.sig, rets);
1211 }
1212 len = 1 + operand.length;
1213 break;
1214 }
1215 case kExprCallIndirect: {
1216 CallIndirectOperand operand(this, pc_);
1217 if (Validate(pc_, operand)) {
1218 Value index = Pop(0, kWasmI32);
1219 TFNode** buffer = PopArgs(operand.sig);
1220 if (buffer) buffer[0] = index.node;
1221 TFNode** rets = nullptr;
1222 BUILD(CallIndirect, operand.index, buffer, &rets, position());
1223 PushReturns(operand.sig, rets);
1224 }
1225 len = 1 + operand.length;
1226 break;
1227 }
1228 case kSimdPrefix: {
1229 CHECK_PROTOTYPE_OPCODE(wasm_simd_prototype);
1230 len++;
1231 byte simd_index = checked_read_u8(pc_, 1, "simd index");
1232 opcode = static_cast<WasmOpcode>(opcode << 8 | simd_index);
1233 TRACE(" @%-4d #%-20s|", startrel(pc_),
1234 WasmOpcodes::OpcodeName(opcode));
1235 len += DecodeSimdOpcode(opcode);
1236 break;
1237 }
1238 case kAtomicPrefix: {
1239 if (module_ == nullptr || module_->origin != kAsmJsOrigin) {
1240 error("Atomics are allowed only in AsmJs modules");
1241 break;
1242 }
1243 if (!FLAG_wasm_atomics_prototype) {
1244 error("Invalid opcode (enable with --wasm_atomics_prototype)");
1245 break;
1246 }
1247 len = 2;
1248 byte atomic_opcode = checked_read_u8(pc_, 1, "atomic index");
1249 opcode = static_cast<WasmOpcode>(opcode << 8 | atomic_opcode);
1250 sig = WasmOpcodes::AtomicSignature(opcode);
1251 if (sig) {
1252 BuildAtomicOperator(opcode);
1253 }
1254 break;
1255 }
1256 default: {
1257 // Deal with special asmjs opcodes.
1258 if (module_ != nullptr && module_->origin == kAsmJsOrigin) {
1259 sig = WasmOpcodes::AsmjsSignature(opcode);
1260 if (sig) {
1261 BuildSimpleOperator(opcode, sig);
1262 }
1263 } else {
1264 error("Invalid opcode");
1265 return;
1266 }
1267 }
1268 }
1269 }
1270
1271 #if DEBUG
1272 if (FLAG_trace_wasm_decoder) {
1273 PrintF(" ");
1274 for (size_t i = 0; i < control_.size(); ++i) {
1275 Control* c = &control_[i];
1276 enum ControlKind {
1277 kControlIf,
1278 kControlBlock,
1279 kControlLoop,
1280 kControlTry
1281 };
1282 switch (c->kind) {
1283 case kControlIf:
1284 PrintF("I");
1285 break;
1286 case kControlBlock:
1287 PrintF("B");
1288 break;
1289 case kControlLoop:
1290 PrintF("L");
1291 break;
1292 case kControlTry:
1293 PrintF("T");
1294 break;
1295 default:
1296 break;
1297 }
1298 PrintF("%u", c->merge.arity);
1299 if (c->unreachable) PrintF("*");
1300 }
1301 PrintF(" | ");
1302 for (size_t i = 0; i < stack_.size(); ++i) {
1303 Value& val = stack_[i];
1304 WasmOpcode opcode = static_cast<WasmOpcode>(*val.pc);
1305 if (WasmOpcodes::IsPrefixOpcode(opcode)) {
1306 opcode = static_cast<WasmOpcode>(opcode << 8 | *(val.pc + 1));
1307 }
1308 PrintF(" %c@%d:%s", WasmOpcodes::ShortNameOf(val.type),
1309 static_cast<int>(val.pc - start_),
1310 WasmOpcodes::OpcodeName(opcode));
1311 switch (opcode) {
1312 case kExprI32Const: {
1313 ImmI32Operand operand(this, val.pc);
1314 PrintF("[%d]", operand.value);
1315 break;
1316 }
1317 case kExprGetLocal: {
1318 LocalIndexOperand operand(this, val.pc);
1319 PrintF("[%u]", operand.index);
1320 break;
1321 }
1322 case kExprSetLocal: // fallthru
1323 case kExprTeeLocal: {
1324 LocalIndexOperand operand(this, val.pc);
1325 PrintF("[%u]", operand.index);
1326 break;
1327 }
1328 default:
1329 break;
1330 }
1331 if (val.node == nullptr) PrintF("?");
1332 }
1333 PrintF("\n");
1334 }
1335 #endif
1336 pc_ += len;
1337 } // end decode loop
1338 if (pc_ > end_ && ok()) error("Beyond end of code");
1339 }
1340
EndControl()1341 void EndControl() {
1342 ssa_env_->Kill(SsaEnv::kControlEnd);
1343 if (!control_.empty()) {
1344 stack_.resize(control_.back().stack_depth);
1345 control_.back().unreachable = true;
1346 }
1347 }
1348
SetBlockType(Control * c,BlockTypeOperand & operand)1349 void SetBlockType(Control* c, BlockTypeOperand& operand) {
1350 c->merge.arity = operand.arity;
1351 if (c->merge.arity == 1) {
1352 c->merge.vals.first = {pc_, nullptr, operand.read_entry(0)};
1353 } else if (c->merge.arity > 1) {
1354 c->merge.vals.array = zone_->NewArray<Value>(c->merge.arity);
1355 for (unsigned i = 0; i < c->merge.arity; i++) {
1356 c->merge.vals.array[i] = {pc_, nullptr, operand.read_entry(i)};
1357 }
1358 }
1359 }
1360
PopArgs(FunctionSig * sig)1361 TFNode** PopArgs(FunctionSig* sig) {
1362 if (build()) {
1363 int count = static_cast<int>(sig->parameter_count());
1364 TFNode** buffer = builder_->Buffer(count + 1);
1365 buffer[0] = nullptr; // reserved for code object or function index.
1366 for (int i = count - 1; i >= 0; i--) {
1367 buffer[i + 1] = Pop(i, sig->GetParam(i)).node;
1368 }
1369 return buffer;
1370 } else {
1371 int count = static_cast<int>(sig->parameter_count());
1372 for (int i = count - 1; i >= 0; i--) {
1373 Pop(i, sig->GetParam(i));
1374 }
1375 return nullptr;
1376 }
1377 }
1378
GetReturnType(FunctionSig * sig)1379 ValueType GetReturnType(FunctionSig* sig) {
1380 return sig->return_count() == 0 ? kWasmStmt : sig->GetReturn();
1381 }
1382
  // Pushes a block scope onto the control stack, recording the current
  // value-stack depth and the innermost catch handler.
  void PushBlock(SsaEnv* end_env) {
    control_.emplace_back(
        Control::Block(pc_, stack_.size(), end_env, current_catch_));
  }
1387
  // Pushes a loop scope onto the control stack, recording the current
  // value-stack depth and the innermost catch handler.
  void PushLoop(SsaEnv* end_env) {
    control_.emplace_back(
        Control::Loop(pc_, stack_.size(), end_env, current_catch_));
  }
1392
  // Pushes an if scope onto the control stack; {false_env} is the
  // environment for the (possibly implicit) else branch.
  void PushIf(SsaEnv* end_env, SsaEnv* false_env) {
    control_.emplace_back(
        Control::If(pc_, stack_.size(), end_env, false_env, current_catch_));
  }
1397
  // Pushes a try scope onto the control stack and makes it the innermost
  // catch handler (exceptions thrown inside route to {catch_env}).
  void PushTry(SsaEnv* end_env, SsaEnv* catch_env) {
    control_.emplace_back(Control::Try(pc_, stack_.size(), end_env, zone_,
                                       catch_env, current_catch_));
    current_catch_ = static_cast<int32_t>(control_.size() - 1);
  }
1403
PopControl()1404 void PopControl() { control_.pop_back(); }
1405
  // Decodes a memory-load opcode: pops an i32 index, emits a load of
  // {mem_type}, and pushes the result with wasm type {type}.
  // Returns the number of bytes consumed beyond the opcode itself,
  // or 0 if the module has no memory.
  int DecodeLoadMem(ValueType type, MachineType mem_type) {
    if (!CheckHasMemory()) return 0;
    MemoryAccessOperand operand(this, pc_,
                                ElementSizeLog2Of(mem_type.representation()));

    Value index = Pop(0, kWasmI32);
    TFNode* node = BUILD(LoadMem, type, mem_type, index.node, operand.offset,
                         operand.alignment, position());
    Push(type, node);
    return 1 + operand.length;
  }
1417
  // Decodes a memory-store opcode: pops the value (operand 1) then the i32
  // index (operand 0) and emits a store of {mem_type}. Returns the number of
  // bytes consumed beyond the opcode itself, or 0 if there is no memory.
  int DecodeStoreMem(ValueType type, MachineType mem_type) {
    if (!CheckHasMemory()) return 0;
    MemoryAccessOperand operand(this, pc_,
                                ElementSizeLog2Of(mem_type.representation()));
    Value val = Pop(1, type);
    Value index = Pop(0, kWasmI32);
    BUILD(StoreMem, mem_type, index.node, operand.offset, operand.alignment,
          val.node, position());
    return 1 + operand.length;
  }
1428
  // Decodes a SIMD extract-lane opcode: pops a s128 value and pushes the
  // extracted lane as {type}. Returns the operand length in bytes.
  unsigned SimdExtractLane(WasmOpcode opcode, ValueType type) {
    SimdLaneOperand operand(this, pc_);
    if (Validate(pc_, opcode, operand)) {
      compiler::NodeVector inputs(1, zone_);
      inputs[0] = Pop(0, ValueType::kSimd128).node;
      TFNode* node = BUILD(SimdLaneOp, opcode, operand.lane, inputs);
      Push(type, node);
    }
    return operand.length;
  }
1439
  // Decodes a SIMD replace-lane opcode: pops the replacement scalar of
  // {type} (operand 1) and the s128 value (operand 0), pushing the updated
  // s128. Returns the operand length in bytes.
  unsigned SimdReplaceLane(WasmOpcode opcode, ValueType type) {
    SimdLaneOperand operand(this, pc_);
    if (Validate(pc_, opcode, operand)) {
      compiler::NodeVector inputs(2, zone_);
      inputs[1] = Pop(1, type).node;
      inputs[0] = Pop(0, ValueType::kSimd128).node;
      TFNode* node = BUILD(SimdLaneOp, opcode, operand.lane, inputs);
      Push(ValueType::kSimd128, node);
    }
    return operand.length;
  }
1451
  // Decodes a SIMD shift opcode with an immediate shift amount: pops a s128
  // value and pushes the shifted s128. Returns the operand length in bytes.
  unsigned SimdShiftOp(WasmOpcode opcode) {
    SimdShiftOperand operand(this, pc_);
    if (Validate(pc_, opcode, operand)) {
      compiler::NodeVector inputs(1, zone_);
      inputs[0] = Pop(0, ValueType::kSimd128).node;
      TFNode* node = BUILD(SimdShiftOp, opcode, operand.shift, inputs);
      Push(ValueType::kSimd128, node);
    }
    return operand.length;
  }
1462
  // Dispatches a (prefixed) SIMD opcode to the matching decode helper.
  // Lane/shift opcodes have extra immediate bytes; all others are simple
  // operators driven by their signature. Returns the number of extra
  // immediate bytes consumed (0 for simple operators).
  unsigned DecodeSimdOpcode(WasmOpcode opcode) {
    unsigned len = 0;
    switch (opcode) {
      // Extract-lane: float result.
      case kExprF32x4ExtractLane: {
        len = SimdExtractLane(opcode, ValueType::kFloat32);
        break;
      }
      // Extract-lane: integer result (narrow lanes widen to i32).
      case kExprI32x4ExtractLane:
      case kExprI16x8ExtractLane:
      case kExprI8x16ExtractLane: {
        len = SimdExtractLane(opcode, ValueType::kWord32);
        break;
      }
      // Replace-lane: float replacement value.
      case kExprF32x4ReplaceLane: {
        len = SimdReplaceLane(opcode, ValueType::kFloat32);
        break;
      }
      // Replace-lane: integer replacement value.
      case kExprI32x4ReplaceLane:
      case kExprI16x8ReplaceLane:
      case kExprI8x16ReplaceLane: {
        len = SimdReplaceLane(opcode, ValueType::kWord32);
        break;
      }
      // Shifts with an immediate shift amount.
      case kExprI32x4Shl:
      case kExprI32x4ShrS:
      case kExprI32x4ShrU:
      case kExprI16x8Shl:
      case kExprI16x8ShrS:
      case kExprI16x8ShrU:
      case kExprI8x16Shl:
      case kExprI8x16ShrS:
      case kExprI8x16ShrU: {
        len = SimdShiftOp(opcode);
        break;
      }
      default: {
        // Simple operator: pop the signature's parameters (in reverse) and
        // push the result.
        FunctionSig* sig = WasmOpcodes::Signature(opcode);
        if (sig != nullptr) {
          compiler::NodeVector inputs(sig->parameter_count(), zone_);
          for (size_t i = sig->parameter_count(); i > 0; i--) {
            Value val = Pop(static_cast<int>(i - 1), sig->GetParam(i - 1));
            inputs[i - 1] = val.node;
          }
          TFNode* node = BUILD(SimdOp, opcode, inputs);
          Push(GetReturnType(sig), node);
        } else {
          error("invalid simd opcode");
        }
      }
    }
    return len;
  }
1515
BuildAtomicOperator(WasmOpcode opcode)1516 void BuildAtomicOperator(WasmOpcode opcode) { UNIMPLEMENTED(); }
1517
DoReturn()1518 void DoReturn() {
1519 int count = static_cast<int>(sig_->return_count());
1520 TFNode** buffer = nullptr;
1521 if (build()) buffer = builder_->Buffer(count);
1522
1523 // Pop return values off the stack in reverse order.
1524 for (int i = count - 1; i >= 0; i--) {
1525 Value val = Pop(i, sig_->GetReturn(i));
1526 if (buffer) buffer[i] = val.node;
1527 }
1528
1529 BUILD(Return, count, buffer);
1530 EndControl();
1531 }
1532
Push(ValueType type,TFNode * node)1533 void Push(ValueType type, TFNode* node) {
1534 if (type != kWasmStmt) {
1535 stack_.push_back({pc_, node, type});
1536 }
1537 }
1538
  // Restores the stack to the depth of block {c} and pushes the block's
  // merged result value(s) as the block's final products.
  void PushEndValues(Control* c) {
    DCHECK_EQ(c, &control_.back());
    stack_.resize(c->stack_depth);
    if (c->merge.arity == 1) {
      // Single result stored inline in the merge.
      stack_.push_back(c->merge.vals.first);
    } else {
      // Multiple results stored in the zone-allocated array.
      for (unsigned i = 0; i < c->merge.arity; i++) {
        stack_.push_back(c->merge.vals.array[i]);
      }
    }
    DCHECK_EQ(c->stack_depth + c->merge.arity, stack_.size());
  }
1551
PushReturns(FunctionSig * sig,TFNode ** rets)1552 void PushReturns(FunctionSig* sig, TFNode** rets) {
1553 for (size_t i = 0; i < sig->return_count(); i++) {
1554 // When verifying only, then {rets} will be null, so push null.
1555 Push(sig->GetReturn(i), rets ? rets[i] : nullptr);
1556 }
1557 }
1558
SafeOpcodeNameAt(const byte * pc)1559 const char* SafeOpcodeNameAt(const byte* pc) {
1560 if (pc >= end_) return "<end>";
1561 return WasmOpcodes::OpcodeName(static_cast<WasmOpcode>(*pc));
1562 }
1563
  // Pops a value expected to have type {expected}; reports a type error
  // naming operand {index} unless either side is the unknown type kWasmVar
  // (which arises in unreachable code).
  Value Pop(int index, ValueType expected) {
    Value val = Pop();
    if (val.type != expected && val.type != kWasmVar && expected != kWasmVar) {
      error(pc_, val.pc, "%s[%d] expected type %s, found %s of type %s",
            SafeOpcodeNameAt(pc_), index, WasmOpcodes::TypeName(expected),
            SafeOpcodeNameAt(val.pc), WasmOpcodes::TypeName(val.type));
    }
    return val;
  }
1573
Pop()1574 Value Pop() {
1575 size_t limit = control_.empty() ? 0 : control_.back().stack_depth;
1576 if (stack_.size() <= limit) {
1577 // Popping past the current control start in reachable code.
1578 Value val = {pc_, nullptr, kWasmVar};
1579 if (!control_.back().unreachable) {
1580 error(pc_, pc_, "%s found empty stack", SafeOpcodeNameAt(pc_));
1581 }
1582 return val;
1583 }
1584 Value val = stack_.back();
1585 stack_.pop_back();
1586 return val;
1587 }
1588
baserel(const byte * ptr)1589 int baserel(const byte* ptr) {
1590 return base_ ? static_cast<int>(ptr - base_) : 0;
1591 }
1592
startrel(const byte * ptr)1593 int startrel(const byte* ptr) { return static_cast<int>(ptr - start_); }
1594
  // Implements a branch to the control block {depth} levels up the control
  // stack (the target of br / br_if / br_table).
  void BreakTo(unsigned depth) {
    Control* c = &control_[control_.size() - depth - 1];
    if (c->is_loop()) {
      // This is the inner loop block, which does not have a value.
      Goto(ssa_env_, c->end_env);
    } else {
      // Merge the value(s) into the end of the block.
      size_t expected = control_.back().stack_depth + c->merge.arity;
      // A shortfall is only an error in reachable code; unreachable code may
      // legitimately have fewer values available.
      if (stack_.size() < expected && !control_.back().unreachable) {
        error(
            pc_, pc_,
            "expected at least %u values on the stack for br to @%d, found %d",
            c->merge.arity, startrel(c->pc),
            static_cast<int>(stack_.size() - c->stack_depth));
        return;
      }
      MergeValuesInto(c);
    }
  }
1614
  // Merges the values falling off the end of block {c} into its merge
  // region. The stack must hold exactly the block's results, except in
  // unreachable code where fewer are allowed.
  void FallThruTo(Control* c) {
    DCHECK_EQ(c, &control_.back());
    // Merge the value(s) into the end of the block.
    size_t expected = c->stack_depth + c->merge.arity;
    if (stack_.size() == expected ||
        (stack_.size() < expected && c->unreachable)) {
      MergeValuesInto(c);
      c->unreachable = false;
      return;
    }
    error(pc_, pc_, "expected %u elements on the stack for fallthru to @%d",
          c->merge.arity, startrel(c->pc));
  }
1628
  // Returns the i-th merge value for block {c}, taken from the topmost
  // {c->merge.arity} values on the stack.
  inline Value& GetMergeValueFromStack(Control* c, size_t i) {
    return stack_[stack_.size() - c->merge.arity + i];
  }
1632
  // Type-checks (without building graph nodes) the values falling off the
  // end of block {c} against the block's declared result types.
  void TypeCheckFallThru(Control* c) {
    DCHECK_EQ(c, &control_.back());
    // Fallthru must match arity exactly.
    int arity = static_cast<int>(c->merge.arity);
    // Too many values is always an error; too few only in reachable code.
    if (c->stack_depth + arity < stack_.size() ||
        (c->stack_depth + arity != stack_.size() && !c->unreachable)) {
      error(pc_, pc_, "expected %d elements on the stack for fallthru to @%d",
            arity, startrel(c->pc));
      return;
    }
    // Typecheck the values left on the stack. In unreachable code, only the
    // values actually present (the last {avail}) can be checked.
    size_t avail = stack_.size() - c->stack_depth;
    for (size_t i = avail >= c->merge.arity ? 0 : c->merge.arity - avail;
         i < c->merge.arity; i++) {
      Value& val = GetMergeValueFromStack(c, i);
      Value& old = c->merge[i];
      if (val.type != old.type) {
        error(pc_, pc_, "type error in merge[%zu] (expected %s, got %s)", i,
              WasmOpcodes::TypeName(old.type), WasmOpcodes::TypeName(val.type));
        return;
      }
    }
  }
1656
  // Merges the current SSA environment and the stack values into the end of
  // block {c}: type-checks each value against the block's declared results
  // and, when building a reachable graph, merges the nodes via phis.
  void MergeValuesInto(Control* c) {
    SsaEnv* target = c->end_env;
    // First edge into {target}: node values can be taken over directly
    // instead of going through a phi.
    bool first = target->state == SsaEnv::kUnreachable;
    bool reachable = ssa_env_->go();
    Goto(ssa_env_, target);

    // In unreachable code fewer values than the merge arity may be present;
    // only merge the ones that are.
    size_t avail = stack_.size() - control_.back().stack_depth;
    for (size_t i = avail >= c->merge.arity ? 0 : c->merge.arity - avail;
         i < c->merge.arity; i++) {
      Value& val = GetMergeValueFromStack(c, i);
      Value& old = c->merge[i];
      if (val.type != old.type && val.type != kWasmVar) {
        error(pc_, pc_, "type error in merge[%zu] (expected %s, got %s)", i,
              WasmOpcodes::TypeName(old.type), WasmOpcodes::TypeName(val.type));
        return;
      }
      if (builder_ && reachable) {
        DCHECK_NOT_NULL(val.node);
        old.node =
            first ? val.node : CreateOrMergeIntoPhi(old.type, target->control,
                                                    old.node, val.node);
      }
    }
  }
1681
  // Switches the decoder to SSA environment {env}, pointing the graph
  // builder's control/effect at it. {reason} is only used for tracing.
  void SetEnv(const char* reason, SsaEnv* env) {
#if DEBUG
    if (FLAG_trace_wasm_decoder) {
      // One-letter state tag for the trace output.
      char state = 'X';
      if (env) {
        switch (env->state) {
          case SsaEnv::kReached:
            state = 'R';
            break;
          case SsaEnv::kUnreachable:
            state = 'U';
            break;
          case SsaEnv::kMerged:
            state = 'M';
            break;
          case SsaEnv::kControlEnd:
            state = 'E';
            break;
        }
      }
      PrintF("{set_env = %p, state = %c, reason = %s", static_cast<void*>(env),
             state, reason);
      if (env && env->control) {
        PrintF(", control = ");
        compiler::WasmGraphBuilder::PrintDebugName(env->control);
      }
      PrintF("}");
    }
#endif
    ssa_env_ = env;
    if (builder_) {
      builder_->set_control_ptr(&env->control);
      builder_->set_effect_ptr(&env->effect);
    }
  }
1717
  // If {node} can throw and we are inside a try scope, splits control into a
  // success path and an exception path; the exception path is merged into
  // the enclosing try's catch environment. Returns {node} unchanged.
  TFNode* CheckForException(TFNode* node) {
    if (node == nullptr) {
      return nullptr;
    }

    const bool inside_try_scope = current_catch_ != kNullCatch;

    // Outside a try, exceptions simply propagate; nothing to wire up.
    if (!inside_try_scope) {
      return node;
    }

    TFNode* if_success = nullptr;
    TFNode* if_exception = nullptr;
    if (!builder_->ThrowsException(node, &if_success, &if_exception)) {
      return node;
    }

    // Continue decoding on the success projection; route the exception
    // projection to the catch environment.
    SsaEnv* success_env = Steal(ssa_env_);
    success_env->control = if_success;

    SsaEnv* exception_env = Split(success_env);
    exception_env->control = if_exception;
    TryInfo* try_info = current_try_info();
    Goto(exception_env, try_info->catch_env);
    TFNode* exception = try_info->exception;
    if (exception == nullptr) {
      // First exceptional edge into the catch block.
      DCHECK_EQ(SsaEnv::kReached, try_info->catch_env->state);
      try_info->exception = if_exception;
    } else {
      // Additional exceptional edge: merge the exception values via a phi.
      DCHECK_EQ(SsaEnv::kMerged, try_info->catch_env->state);
      try_info->exception =
          CreateOrMergeIntoPhi(kWasmI32, try_info->catch_env->control,
                               try_info->exception, if_exception);
    }

    SetEnv("if_success", success_env);
    return node;
  }
1756
Goto(SsaEnv * from,SsaEnv * to)1757 void Goto(SsaEnv* from, SsaEnv* to) {
1758 DCHECK_NOT_NULL(to);
1759 if (!from->go()) return;
1760 switch (to->state) {
1761 case SsaEnv::kUnreachable: { // Overwrite destination.
1762 to->state = SsaEnv::kReached;
1763 to->locals = from->locals;
1764 to->control = from->control;
1765 to->effect = from->effect;
1766 break;
1767 }
1768 case SsaEnv::kReached: { // Create a new merge.
1769 to->state = SsaEnv::kMerged;
1770 if (!builder_) break;
1771 // Merge control.
1772 TFNode* controls[] = {to->control, from->control};
1773 TFNode* merge = builder_->Merge(2, controls);
1774 to->control = merge;
1775 // Merge effects.
1776 if (from->effect != to->effect) {
1777 TFNode* effects[] = {to->effect, from->effect, merge};
1778 to->effect = builder_->EffectPhi(2, effects, merge);
1779 }
1780 // Merge SSA values.
1781 for (int i = EnvironmentCount() - 1; i >= 0; i--) {
1782 TFNode* a = to->locals[i];
1783 TFNode* b = from->locals[i];
1784 if (a != b) {
1785 TFNode* vals[] = {a, b};
1786 to->locals[i] = builder_->Phi(local_type_vec_[i], 2, vals, merge);
1787 }
1788 }
1789 break;
1790 }
1791 case SsaEnv::kMerged: {
1792 if (!builder_) break;
1793 TFNode* merge = to->control;
1794 // Extend the existing merge.
1795 builder_->AppendToMerge(merge, from->control);
1796 // Merge effects.
1797 if (builder_->IsPhiWithMerge(to->effect, merge)) {
1798 builder_->AppendToPhi(to->effect, from->effect);
1799 } else if (to->effect != from->effect) {
1800 uint32_t count = builder_->InputCount(merge);
1801 TFNode** effects = builder_->Buffer(count);
1802 for (uint32_t j = 0; j < count - 1; j++) {
1803 effects[j] = to->effect;
1804 }
1805 effects[count - 1] = from->effect;
1806 to->effect = builder_->EffectPhi(count, effects, merge);
1807 }
1808 // Merge locals.
1809 for (int i = EnvironmentCount() - 1; i >= 0; i--) {
1810 TFNode* tnode = to->locals[i];
1811 TFNode* fnode = from->locals[i];
1812 if (builder_->IsPhiWithMerge(tnode, merge)) {
1813 builder_->AppendToPhi(tnode, fnode);
1814 } else if (tnode != fnode) {
1815 uint32_t count = builder_->InputCount(merge);
1816 TFNode** vals = builder_->Buffer(count);
1817 for (uint32_t j = 0; j < count - 1; j++) {
1818 vals[j] = tnode;
1819 }
1820 vals[count - 1] = fnode;
1821 to->locals[i] =
1822 builder_->Phi(local_type_vec_[i], count, vals, merge);
1823 }
1824 }
1825 break;
1826 }
1827 default:
1828 UNREACHABLE();
1829 }
1830 return from->Kill();
1831 }
1832
  // Merges {fnode} into {tnode} at {merge}: appends to an existing phi on
  // {merge}, creates a new phi if the nodes differ, or returns {tnode}
  // unchanged when both nodes are already the same.
  TFNode* CreateOrMergeIntoPhi(ValueType type, TFNode* merge, TFNode* tnode,
                               TFNode* fnode) {
    DCHECK_NOT_NULL(builder_);
    if (builder_->IsPhiWithMerge(tnode, merge)) {
      builder_->AppendToPhi(tnode, fnode);
    } else if (tnode != fnode) {
      uint32_t count = builder_->InputCount(merge);
      TFNode** vals = builder_->Buffer(count);
      // All earlier merge inputs carried {tnode}; only the newest edge
      // carries {fnode}.
      for (uint32_t j = 0; j < count - 1; j++) vals[j] = tnode;
      vals[count - 1] = fnode;
      return builder_->Phi(type, count, vals, merge);
    }
    return tnode;
  }
1847
PrepareForLoop(const byte * pc,SsaEnv * env)1848 SsaEnv* PrepareForLoop(const byte* pc, SsaEnv* env) {
1849 if (!builder_) return Split(env);
1850 if (!env->go()) return Split(env);
1851 env->state = SsaEnv::kMerged;
1852
1853 env->control = builder_->Loop(env->control);
1854 env->effect = builder_->EffectPhi(1, &env->effect, env->control);
1855 builder_->Terminate(env->effect, env->control);
1856 if (FLAG_wasm_loop_assignment_analysis) {
1857 BitVector* assigned = AnalyzeLoopAssignment(
1858 this, pc, static_cast<int>(total_locals()), zone_);
1859 if (failed()) return env;
1860 if (assigned != nullptr) {
1861 // Only introduce phis for variables assigned in this loop.
1862 for (int i = EnvironmentCount() - 1; i >= 0; i--) {
1863 if (!assigned->Contains(i)) continue;
1864 env->locals[i] = builder_->Phi(local_type_vec_[i], 1, &env->locals[i],
1865 env->control);
1866 }
1867 SsaEnv* loop_body_env = Split(env);
1868 builder_->StackCheck(position(), &(loop_body_env->effect),
1869 &(loop_body_env->control));
1870 return loop_body_env;
1871 }
1872 }
1873
1874 // Conservatively introduce phis for all local variables.
1875 for (int i = EnvironmentCount() - 1; i >= 0; i--) {
1876 env->locals[i] =
1877 builder_->Phi(local_type_vec_[i], 1, &env->locals[i], env->control);
1878 }
1879
1880 SsaEnv* loop_body_env = Split(env);
1881 builder_->StackCheck(position(), &(loop_body_env->effect),
1882 &(loop_body_env->control));
1883 return loop_body_env;
1884 }
1885
1886 // Create a complete copy of the {from}.
Split(SsaEnv * from)1887 SsaEnv* Split(SsaEnv* from) {
1888 DCHECK_NOT_NULL(from);
1889 SsaEnv* result = reinterpret_cast<SsaEnv*>(zone_->New(sizeof(SsaEnv)));
1890 size_t size = sizeof(TFNode*) * EnvironmentCount();
1891 result->control = from->control;
1892 result->effect = from->effect;
1893
1894 if (from->go()) {
1895 result->state = SsaEnv::kReached;
1896 result->locals =
1897 size > 0 ? reinterpret_cast<TFNode**>(zone_->New(size)) : nullptr;
1898 memcpy(result->locals, from->locals, size);
1899 } else {
1900 result->state = SsaEnv::kUnreachable;
1901 result->locals = nullptr;
1902 }
1903
1904 return result;
1905 }
1906
  // Create a copy of {from} that steals its state and leaves {from}
  // unreachable.
  SsaEnv* Steal(SsaEnv* from) {
    DCHECK_NOT_NULL(from);
    if (!from->go()) return UnreachableEnv();
    SsaEnv* result = reinterpret_cast<SsaEnv*>(zone_->New(sizeof(SsaEnv)));
    result->state = SsaEnv::kReached;
    // Takes over the locals array; {from} is killed below and must not
    // touch it again.
    result->locals = from->locals;
    result->control = from->control;
    result->effect = from->effect;
    from->Kill(SsaEnv::kUnreachable);
    return result;
  }
1920
1921 // Create an unreachable environment.
UnreachableEnv()1922 SsaEnv* UnreachableEnv() {
1923 SsaEnv* result = reinterpret_cast<SsaEnv*>(zone_->New(sizeof(SsaEnv)));
1924 result->state = SsaEnv::kUnreachable;
1925 result->control = nullptr;
1926 result->effect = nullptr;
1927 result->locals = nullptr;
1928 return result;
1929 }
1930
EnvironmentCount()1931 int EnvironmentCount() {
1932 if (builder_) return static_cast<int>(local_type_vec_.size());
1933 return 0; // if we aren't building a graph, don't bother with SSA renaming.
1934 }
1935
  // Invoked on the first decode error: stops the decode loop and disables
  // further graph building.
  virtual void onFirstError() {
    end_ = start_; // Terminate decoding loop.
    builder_ = nullptr; // Don't build any more nodes.
    TRACE(" !%s\n", error_msg_.get());
  }
1941
  // Current byte offset into the function body, used as the source position
  // for generated graph nodes.
  inline wasm::WasmCodePosition position() {
    int offset = static_cast<int>(pc_ - start_);
    DCHECK_EQ(pc_ - start_, offset); // overflows cannot happen
    return offset;
  }
1947
  // Decodes a simple (fixed-signature) unary or binary operator: pops the
  // operand(s) per {sig}, emits the node, and pushes the result.
  inline void BuildSimpleOperator(WasmOpcode opcode, FunctionSig* sig) {
    TFNode* node;
    switch (sig->parameter_count()) {
      case 1: {
        Value val = Pop(0, sig->GetParam(0));
        node = BUILD(Unop, opcode, val.node, position());
        break;
      }
      case 2: {
        // Pop right-hand side first so errors name the correct operand.
        Value rval = Pop(1, sig->GetParam(1));
        Value lval = Pop(0, sig->GetParam(0));
        node = BUILD(Binop, opcode, lval.node, rval.node, position());
        break;
      }
      default:
        // Simple operators always take one or two parameters.
        UNREACHABLE();
        node = nullptr;
        break;
    }
    Push(GetReturnType(sig), node);
  }
1969 };
1970
DecodeLocalDecls(BodyLocalDecls * decls,const byte * start,const byte * end)1971 bool DecodeLocalDecls(BodyLocalDecls* decls, const byte* start,
1972 const byte* end) {
1973 Decoder decoder(start, end);
1974 if (WasmDecoder::DecodeLocals(&decoder, nullptr, &decls->type_list)) {
1975 DCHECK(decoder.ok());
1976 decls->encoded_size = decoder.pc_offset();
1977 return true;
1978 }
1979 return false;
1980 }
1981
BytecodeIterator(const byte * start,const byte * end,BodyLocalDecls * decls)1982 BytecodeIterator::BytecodeIterator(const byte* start, const byte* end,
1983 BodyLocalDecls* decls)
1984 : Decoder(start, end) {
1985 if (decls != nullptr) {
1986 if (DecodeLocalDecls(decls, start, end)) {
1987 pc_ += decls->encoded_size;
1988 if (pc_ > end_) pc_ = end_;
1989 }
1990 }
1991 }
1992
// Decodes and validates {body} against {module} without building a TurboFan
// graph; returns the (possibly failed) decode result.
DecodeResult VerifyWasmCode(AccountingAllocator* allocator,
                            const wasm::WasmModule* module,
                            FunctionBody& body) {
  Zone zone(allocator, ZONE_NAME);
  WasmFullDecoder decoder(&zone, module, body);
  decoder.Decode();
  return decoder.toResult<DecodeStruct*>(nullptr);
}
2001
// Decodes {body} while building the TurboFan graph through {builder};
// returns the (possibly failed) decode result.
DecodeResult BuildTFGraph(AccountingAllocator* allocator, TFBuilder* builder,
                          FunctionBody& body) {
  Zone zone(allocator, ZONE_NAME);
  WasmFullDecoder decoder(&zone, builder, body);
  decoder.Decode();
  return decoder.toResult<DecodeStruct*>(nullptr);
}
2009
// Returns the total length in bytes of the opcode at {pc}, including its
// immediate operands.
unsigned OpcodeLength(const byte* pc, const byte* end) {
  Decoder decoder(pc, end);
  return WasmDecoder::OpcodeLength(&decoder, pc);
}
2014
PrintRawWasmCode(const byte * start,const byte * end)2015 void PrintRawWasmCode(const byte* start, const byte* end) {
2016 AccountingAllocator allocator;
2017 PrintRawWasmCode(&allocator, FunctionBodyForTesting(start, end), nullptr);
2018 }
2019
2020 namespace {
RawOpcodeName(WasmOpcode opcode)2021 const char* RawOpcodeName(WasmOpcode opcode) {
2022 switch (opcode) {
2023 #define DECLARE_NAME_CASE(name, opcode, sig) \
2024 case kExpr##name: \
2025 return "kExpr" #name;
2026 FOREACH_OPCODE(DECLARE_NAME_CASE)
2027 #undef DECLARE_NAME_CASE
2028 default:
2029 break;
2030 }
2031 return "Unknown";
2032 }
2033 } // namespace
2034
PrintRawWasmCode(AccountingAllocator * allocator,const FunctionBody & body,const wasm::WasmModule * module)2035 bool PrintRawWasmCode(AccountingAllocator* allocator, const FunctionBody& body,
2036 const wasm::WasmModule* module) {
2037 OFStream os(stdout);
2038 Zone zone(allocator, ZONE_NAME);
2039 WasmFullDecoder decoder(&zone, module, body);
2040 int line_nr = 0;
2041
2042 // Print the function signature.
2043 if (body.sig) {
2044 os << "// signature: " << *body.sig << std::endl;
2045 ++line_nr;
2046 }
2047
2048 // Print the local declarations.
2049 BodyLocalDecls decls(&zone);
2050 BytecodeIterator i(body.start, body.end, &decls);
2051 if (body.start != i.pc() && !FLAG_wasm_code_fuzzer_gen_test) {
2052 os << "// locals: ";
2053 if (!decls.type_list.empty()) {
2054 ValueType type = decls.type_list[0];
2055 uint32_t count = 0;
2056 for (size_t pos = 0; pos < decls.type_list.size(); ++pos) {
2057 if (decls.type_list[pos] == type) {
2058 ++count;
2059 } else {
2060 os << " " << count << " " << WasmOpcodes::TypeName(type);
2061 type = decls.type_list[pos];
2062 count = 1;
2063 }
2064 }
2065 }
2066 os << std::endl;
2067 ++line_nr;
2068
2069 for (const byte* locals = body.start; locals < i.pc(); locals++) {
2070 os << (locals == body.start ? "0x" : " 0x") << AsHex(*locals, 2) << ",";
2071 }
2072 os << std::endl;
2073 ++line_nr;
2074 }
2075
2076 os << "// body: " << std::endl;
2077 ++line_nr;
2078 unsigned control_depth = 0;
2079 for (; i.has_next(); i.next()) {
2080 unsigned length = WasmDecoder::OpcodeLength(&decoder, i.pc());
2081
2082 WasmOpcode opcode = i.current();
2083 if (opcode == kExprElse) control_depth--;
2084
2085 int num_whitespaces = control_depth < 32 ? 2 * control_depth : 64;
2086
2087 // 64 whitespaces
2088 const char* padding =
2089 " ";
2090 os.write(padding, num_whitespaces);
2091
2092 os << RawOpcodeName(opcode) << ",";
2093
2094 for (size_t j = 1; j < length; ++j) {
2095 os << " 0x" << AsHex(i.pc()[j], 2) << ",";
2096 }
2097
2098 switch (opcode) {
2099 case kExprElse:
2100 os << " // @" << i.pc_offset();
2101 control_depth++;
2102 break;
2103 case kExprLoop:
2104 case kExprIf:
2105 case kExprBlock:
2106 case kExprTry: {
2107 BlockTypeOperand operand(&i, i.pc());
2108 os << " // @" << i.pc_offset();
2109 for (unsigned i = 0; i < operand.arity; i++) {
2110 os << " " << WasmOpcodes::TypeName(operand.read_entry(i));
2111 }
2112 control_depth++;
2113 break;
2114 }
2115 case kExprEnd:
2116 os << " // @" << i.pc_offset();
2117 control_depth--;
2118 break;
2119 case kExprBr: {
2120 BreakDepthOperand operand(&i, i.pc());
2121 os << " // depth=" << operand.depth;
2122 break;
2123 }
2124 case kExprBrIf: {
2125 BreakDepthOperand operand(&i, i.pc());
2126 os << " // depth=" << operand.depth;
2127 break;
2128 }
2129 case kExprBrTable: {
2130 BranchTableOperand operand(&i, i.pc());
2131 os << " // entries=" << operand.table_count;
2132 break;
2133 }
2134 case kExprCallIndirect: {
2135 CallIndirectOperand operand(&i, i.pc());
2136 os << " // sig #" << operand.index;
2137 if (decoder.Complete(i.pc(), operand)) {
2138 os << ": " << *operand.sig;
2139 }
2140 break;
2141 }
2142 case kExprCallFunction: {
2143 CallFunctionOperand operand(&i, i.pc());
2144 os << " // function #" << operand.index;
2145 if (decoder.Complete(i.pc(), operand)) {
2146 os << ": " << *operand.sig;
2147 }
2148 break;
2149 }
2150 default:
2151 break;
2152 }
2153 os << std::endl;
2154 ++line_nr;
2155 }
2156
2157 return decoder.ok();
2158 }
2159
AnalyzeLoopAssignmentForTesting(Zone * zone,size_t num_locals,const byte * start,const byte * end)2160 BitVector* AnalyzeLoopAssignmentForTesting(Zone* zone, size_t num_locals,
2161 const byte* start, const byte* end) {
2162 Decoder decoder(start, end);
2163 return WasmDecoder::AnalyzeLoopAssignment(&decoder, start,
2164 static_cast<int>(num_locals), zone);
2165 }
2166
2167 } // namespace wasm
2168 } // namespace internal
2169 } // namespace v8
2170