// (Non-source navigation chrome from the code-search viewer removed:
//  Home / Line# / Scopes# / Navigate / Raw / Download.)
1 // Copyright 2018 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4 
5 #include "src/wasm/graph-builder-interface.h"
6 
7 #include "src/compiler/wasm-compiler.h"
8 #include "src/flags/flags.h"
9 #include "src/handles/handles.h"
10 #include "src/objects/objects-inl.h"
11 #include "src/utils/ostreams.h"
12 #include "src/wasm/branch-hint-map.h"
13 #include "src/wasm/decoder.h"
14 #include "src/wasm/function-body-decoder-impl.h"
15 #include "src/wasm/function-body-decoder.h"
16 #include "src/wasm/value-type.h"
17 #include "src/wasm/wasm-limits.h"
18 #include "src/wasm/wasm-linkage.h"
19 #include "src/wasm/wasm-module.h"
20 #include "src/wasm/wasm-opcodes-inl.h"
21 
22 namespace v8 {
23 namespace internal {
24 namespace wasm {
25 
26 namespace {
27 
// An SsaEnv environment carries the current local variable renaming
// as well as the current effect and control dependency in the TF graph.
// It maintains a control state that tracks whether the environment
// is reachable, has reached a control end, or has been merged.
struct SsaEnv : public ZoneObject {
  enum State { kUnreachable, kReached, kMerged };

  State state;
  TFNode* control;
  TFNode* effect;
  compiler::WasmInstanceCacheNodes instance_cache;
  ZoneVector<TFNode*> locals;

  // Creates an environment with {locals_size} local slots allocated in
  // {zone}; the slots are not initialized here.
  SsaEnv(Zone* zone, State state, TFNode* control, TFNode* effect,
         uint32_t locals_size)
      : state(state),
        control(control),
        effect(effect),
        locals(locals_size, zone) {}

  SsaEnv(const SsaEnv& other) V8_NOEXCEPT = default;
  // Move constructor; kills {other} so the moved-from environment cannot
  // be used by accident afterwards.
  SsaEnv(SsaEnv&& other) V8_NOEXCEPT : state(other.state),
                                       control(other.control),
                                       effect(other.effect),
                                       instance_cache(other.instance_cache),
                                       locals(std::move(other.locals)) {
    other.Kill();
  }

  // Marks the environment unreachable and clears all node references
  // (locals, control, effect, and the instance cache).
  void Kill() {
    state = kUnreachable;
    for (TFNode*& local : locals) {
      local = nullptr;
    }
    control = nullptr;
    effect = nullptr;
    instance_cache = {};
  }
  // Downgrades a merged environment back to plain "reached".
  void SetNotMerged() {
    if (state == kMerged) state = kReached;
  }
};
70 
71 class WasmGraphBuildingInterface {
72  public:
73   static constexpr Decoder::ValidateFlag validate = Decoder::kFullValidation;
74   using FullDecoder = WasmFullDecoder<validate, WasmGraphBuildingInterface>;
75   using CheckForNull = compiler::WasmGraphBuilder::CheckForNull;
76 
  struct Value : public ValueBase<validate> {
    TFNode* node = nullptr;  // TF graph node representing this value.

    // Forwards all constructor arguments to ValueBase.
    template <typename... Args>
    explicit Value(Args&&... args) V8_NOEXCEPT
        : ValueBase(std::forward<Args>(args)...) {}
  };
84   using ValueVector = base::SmallVector<Value, 8>;
85   using NodeVector = base::SmallVector<TFNode*, 8>;
86 
  struct TryInfo : public ZoneObject {
    SsaEnv* catch_env;            // environment entered on a caught exception.
    TFNode* exception = nullptr;  // non-null once a throwing op was compiled.

    // Whether any potentially-throwing instruction was seen in this try.
    bool might_throw() const { return exception != nullptr; }

    MOVE_ONLY_NO_DEFAULT_CONSTRUCTOR(TryInfo);

    explicit TryInfo(SsaEnv* c) : catch_env(c) {}
  };
97 
  struct Control : public ControlBase<Value, validate> {
    SsaEnv* merge_env = nullptr;  // merge environment for the construct.
    SsaEnv* false_env = nullptr;  // false environment (only for if).
    TryInfo* try_info = nullptr;  // information about try statements.
    int32_t previous_catch = -1;  // previous Control with a catch.
    BitVector* loop_assignments = nullptr;  // locals assigned in this loop.
    TFNode* loop_node = nullptr;            // loop header of this loop.
    MOVE_ONLY_NO_DEFAULT_CONSTRUCTOR(Control);

    // Forwards all constructor arguments to ControlBase.
    template <typename... Args>
    explicit Control(Args&&... args) V8_NOEXCEPT
        : ControlBase(std::forward<Args>(args)...) {}
  };
111 
  // {builder} must outlive this interface. {inlined_status} distinguishes
  // top-level function compilation from inlined-body compilation.
  WasmGraphBuildingInterface(compiler::WasmGraphBuilder* builder,
                             int func_index, InlinedStatus inlined_status)
      : builder_(builder),
        func_index_(func_index),
        inlined_status_(inlined_status) {}
117 
  // Builds the graph prologue: fetches branch hints and type feedback for
  // this function (if present), creates the Start node and the initial SSA
  // environment, initializes all locals, and loads the instance cache.
  void StartFunction(FullDecoder* decoder) {
    // Get the branch hints map and type feedback for this function (if
    // available).
    if (decoder->module_) {
      auto branch_hints_it = decoder->module_->branch_hints.find(func_index_);
      if (branch_hints_it != decoder->module_->branch_hints.end()) {
        branch_hints_ = &branch_hints_it->second;
      }
      TypeFeedbackStorage& feedbacks = decoder->module_->type_feedback;
      base::MutexGuard mutex_guard(&feedbacks.mutex);
      auto feedback = feedbacks.feedback_for_function.find(func_index_);
      if (feedback != feedbacks.feedback_for_function.end()) {
        type_feedback_ = feedback->second.feedback_vector;
        // We need to keep the feedback in the module to inline later. However,
        // this means we are stuck with it forever.
        // TODO(jkummerow): Reconsider our options here.
      }
    }
    // The first '+ 1' is needed by TF Start node, the second '+ 1' is for the
    // instance parameter.
    builder_->Start(static_cast<int>(decoder->sig_->parameter_count() + 1 + 1));
    uint32_t num_locals = decoder->num_locals();
    // NOTE(review): {effect()} and {control()} are passed in the opposite
    // order of the SsaEnv(zone, state, control, effect, ...) parameter list.
    // Immediately after Start() both appear to refer to the start node, so
    // this looks benign — confirm.
    SsaEnv* ssa_env = decoder->zone()->New<SsaEnv>(
        decoder->zone(), SsaEnv::kReached, effect(), control(), num_locals);
    SetEnv(ssa_env);

    // Initialize local variables. Parameters are shifted by 1 because of
    // the instance parameter.
    uint32_t index = 0;
    for (; index < decoder->sig_->parameter_count(); ++index) {
      ssa_env->locals[index] = builder_->Param(index + 1);
    }
    while (index < num_locals) {
      ValueType type = decoder->local_type(index);
      TFNode* node;
      if ((decoder->enabled_.has_nn_locals() ||
           decoder->enabled_.has_unsafe_nn_locals()) &&
          !type.is_defaultable()) {
        DCHECK(type.is_reference());
        // TODO(jkummerow): Consider using "the hole" instead, to make any
        // illegal uses more obvious.
        node = builder_->RefNull();
      } else {
        node = DefaultValue(type);
      }
      while (index < num_locals && decoder->local_type(index) == type) {
        // Do a whole run of like-typed locals at a time.
        ssa_env->locals[index++] = node;
      }
    }
    LoadContextIntoSsa(ssa_env);

    if (FLAG_trace_wasm && inlined_status_ == kRegularFunction) {
      builder_->TraceFunctionEntry(decoder->position());
    }
  }
174 
  // Reload the instance cache entries into the Ssa Environment.
  // No-op when {ssa_env} is null.
  void LoadContextIntoSsa(SsaEnv* ssa_env) {
    if (ssa_env) builder_->InitInstanceCache(&ssa_env->instance_cache);
  }
179 
StartFunctionBody(FullDecoder * decoder,Control * block)180   void StartFunctionBody(FullDecoder* decoder, Control* block) {}
181 
  // Finalizes graph building; inlined bodies skip the stack-check patching.
  void FinishFunction(FullDecoder*) {
    if (inlined_status_ == kRegularFunction) {
      builder_->PatchInStackCheckIfNeeded();
    }
  }
187 
OnFirstError(FullDecoder *)188   void OnFirstError(FullDecoder*) {}
189 
NextInstruction(FullDecoder *,WasmOpcode)190   void NextInstruction(FullDecoder*, WasmOpcode) {}
191 
  // Begins a block: the outer environment becomes the merge environment,
  // and decoding continues in a stolen copy of it.
  void Block(FullDecoder* decoder, Control* block) {
    // The branch environment is the outer environment.
    block->merge_env = ssa_env_;
    SetEnv(Steal(decoder->zone(), ssa_env_));
  }
197 
  // Begins a loop: builds the loop header node, introduces phis for every
  // local assigned inside the loop (found by a pre-pass over the body),
  // emits a stack check, and records nesting info for loop unrolling.
  void Loop(FullDecoder* decoder, Control* block) {
    // This is the merge environment at the beginning of the loop.
    SsaEnv* merge_env = Steal(decoder->zone(), ssa_env_);
    block->merge_env = merge_env;
    SetEnv(merge_env);

    ssa_env_->state = SsaEnv::kMerged;

    TFNode* loop_node = builder_->Loop(control());

    if (emit_loop_exits()) {
      // Count how many enclosing loops this loop is nested in.
      uint32_t nesting_depth = 0;
      for (uint32_t depth = 1; depth < decoder->control_depth(); depth++) {
        if (decoder->control_at(depth)->is_loop()) {
          nesting_depth++;
        }
      }
      // If this loop is nested, the parent loop's can_be_innermost field needs
      // to be false. If the last loop in loop_infos_ has less depth, it has to
      // be the parent loop. If it does not, it means another loop has been
      // found within the parent loop, and that loop will have set the parent's
      // can_be_innermost to false, so we do not need to do anything.
      if (nesting_depth > 0 &&
          loop_infos_.back().nesting_depth < nesting_depth) {
        loop_infos_.back().can_be_innermost = false;
      }
      loop_infos_.emplace_back(loop_node, nesting_depth, true);
    }

    builder_->SetControl(loop_node);
    decoder->control_at(0)->loop_node = loop_node;

    TFNode* effect_inputs[] = {effect(), control()};
    builder_->SetEffect(builder_->EffectPhi(1, effect_inputs));
    builder_->TerminateLoop(effect(), control());
    // Doing a preprocessing pass to analyze loop assignments seems to pay off
    // compared to reallocating Nodes when rearranging Phis in Goto.
    BitVector* assigned = WasmDecoder<validate>::AnalyzeLoopAssignment(
        decoder, decoder->pc(), decoder->num_locals(), decoder->zone());
    if (decoder->failed()) return;
    int instance_cache_index = decoder->num_locals();
    // If the module has shared memory, the stack guard might reallocate the
    // shared memory. We have to assume the instance cache will be updated.
    if (decoder->module_->has_shared_memory) {
      assigned->Add(instance_cache_index);
    }
    DCHECK_NOT_NULL(assigned);
    decoder->control_at(0)->loop_assignments = assigned;

    // Only introduce phis for variables assigned in this loop.
    for (int i = decoder->num_locals() - 1; i >= 0; i--) {
      if (!assigned->Contains(i)) continue;
      TFNode* inputs[] = {ssa_env_->locals[i], control()};
      ssa_env_->locals[i] = builder_->Phi(decoder->local_type(i), 1, inputs);
    }
    // Introduce phis for instance cache pointers if necessary.
    if (assigned->Contains(instance_cache_index)) {
      builder_->PrepareInstanceCacheForLoop(&ssa_env_->instance_cache,
                                            control());
    }

    // Now we setup a new environment for the inside of the loop.
    SetEnv(Split(decoder->zone(), ssa_env_));
    builder_->StackCheck(decoder->module_->has_shared_memory
                             ? &ssa_env_->instance_cache
                             : nullptr,
                         decoder->position());
    ssa_env_->SetNotMerged();

    // Wrap input merge into phis.
    for (uint32_t i = 0; i < block->start_merge.arity; ++i) {
      Value& val = block->start_merge[i];
      TFNode* inputs[] = {val.node, block->merge_env->control};
      val.node = builder_->Phi(val.type, 1, inputs);
    }
  }
274 
  // Begins a try block: splits off a catch environment for exceptional
  // control flow, then continues decoding the try body in a stolen copy of
  // the outer environment.
  void Try(FullDecoder* decoder, Control* block) {
    SsaEnv* outer_env = ssa_env_;
    SsaEnv* catch_env = Split(decoder->zone(), outer_env);
    // Mark catch environment as unreachable, since it is only accessible
    // through catch unwinding (i.e. landing pads).
    catch_env->state = SsaEnv::kUnreachable;
    SsaEnv* try_env = Steal(decoder->zone(), outer_env);
    SetEnv(try_env);
    TryInfo* try_info = decoder->zone()->New<TryInfo>(catch_env);
    block->merge_env = outer_env;
    block->try_info = try_info;
  }
287 
  // Compiles an if: branches on {cond}, honoring a branch hint when one is
  // registered for this offset, and sets up true/false environments. The
  // true branch is decoded first.
  void If(FullDecoder* decoder, const Value& cond, Control* if_block) {
    TFNode* if_true = nullptr;
    TFNode* if_false = nullptr;
    WasmBranchHint hint = WasmBranchHint::kNoHint;
    if (branch_hints_) {
      hint = branch_hints_->GetHintFor(decoder->pc_relative_offset());
    }
    switch (hint) {
      case WasmBranchHint::kNoHint:
        builder_->BranchNoHint(cond.node, &if_true, &if_false);
        break;
      case WasmBranchHint::kUnlikely:
        builder_->BranchExpectFalse(cond.node, &if_true, &if_false);
        break;
      case WasmBranchHint::kLikely:
        builder_->BranchExpectTrue(cond.node, &if_true, &if_false);
        break;
    }
    SsaEnv* merge_env = ssa_env_;
    SsaEnv* false_env = Split(decoder->zone(), ssa_env_);
    false_env->control = if_false;
    SsaEnv* true_env = Steal(decoder->zone(), ssa_env_);
    true_env->control = if_true;
    if_block->merge_env = merge_env;
    if_block->false_env = false_env;
    SetEnv(true_env);
  }
315 
  // Merges the current values into {c}'s end merge when falling off the end
  // of a non-loop construct.
  void FallThruTo(FullDecoder* decoder, Control* c) {
    DCHECK(!c->is_loop());
    MergeValuesInto(decoder, c, &c->end_merge);
  }
320 
  // Ends a control construct: loops continue in the end environment (with
  // loop exits built if unrolling is enabled), one-armed ifs merge their
  // implicit else branch, and all other blocks fall through to the parent.
  void PopControl(FullDecoder* decoder, Control* block) {
    // A loop just continues with the end environment. There is no merge.
    // However, if loop unrolling is enabled, we must create a loop exit and
    // wrap the fallthru values on the stack.
    if (block->is_loop()) {
      if (emit_loop_exits() && block->reachable()) {
        BuildLoopExits(decoder, block);
        WrapLocalsAtLoopExit(decoder, block);
        uint32_t arity = block->end_merge.arity;
        if (arity > 0) {
          Value* stack_base = decoder->stack_value(arity);
          for (uint32_t i = 0; i < arity; i++) {
            Value* val = stack_base + i;
            val->node = builder_->LoopExitValue(
                val->node, val->type.machine_representation());
          }
        }
      }
      return;
    }
    // Any other block falls through to the parent block.
    if (block->reachable()) FallThruTo(decoder, block);
    if (block->is_onearmed_if()) {
      // Merge the else branch into the end merge.
      SetEnv(block->false_env);
      DCHECK_EQ(block->start_merge.arity, block->end_merge.arity);
      Value* values =
          block->start_merge.arity > 0 ? &block->start_merge[0] : nullptr;
      MergeValuesInto(decoder, block, &block->end_merge, values);
    }
    // Now continue with the merged environment.
    SetEnv(block->merge_env);
  }
354 
  // Emits a unary operation on {value}.
  void UnOp(FullDecoder* decoder, WasmOpcode opcode, const Value& value,
            Value* result) {
    result->node = builder_->Unop(opcode, value.node, decoder->position());
  }
359 
BinOp(FullDecoder * decoder,WasmOpcode opcode,const Value & lhs,const Value & rhs,Value * result)360   void BinOp(FullDecoder* decoder, WasmOpcode opcode, const Value& lhs,
361              const Value& rhs, Value* result) {
362     TFNode* node =
363         builder_->Binop(opcode, lhs.node, rhs.node, decoder->position());
364     if (result) result->node = node;
365   }
366 
  // i32.const: materializes a 32-bit integer constant.
  void I32Const(FullDecoder* decoder, Value* result, int32_t value) {
    result->node = builder_->Int32Constant(value);
  }
370 
  // i64.const: materializes a 64-bit integer constant.
  void I64Const(FullDecoder* decoder, Value* result, int64_t value) {
    result->node = builder_->Int64Constant(value);
  }
374 
  // f32.const: materializes a 32-bit float constant.
  void F32Const(FullDecoder* decoder, Value* result, float value) {
    result->node = builder_->Float32Constant(value);
  }
378 
  // f64.const: materializes a 64-bit float constant.
  void F64Const(FullDecoder* decoder, Value* result, double value) {
    result->node = builder_->Float64Constant(value);
  }
382 
  // v128.const: materializes a 128-bit SIMD constant from the immediate.
  void S128Const(FullDecoder* decoder, const Simd128Immediate<validate>& imm,
                 Value* result) {
    result->node = builder_->Simd128Constant(imm.value);
  }
387 
  // ref.null: materializes the null reference.
  void RefNull(FullDecoder* decoder, ValueType type, Value* result) {
    result->node = builder_->RefNull();
  }
391 
  // ref.func: materializes a reference to the function at {function_index}.
  void RefFunc(FullDecoder* decoder, uint32_t function_index, Value* result) {
    result->node = builder_->RefFunc(function_index);
  }
395 
  // ref.as_non_null: converts {arg} to a non-null reference (traps on null).
  void RefAsNonNull(FullDecoder* decoder, const Value& arg, Value* result) {
    result->node = builder_->RefAsNonNull(arg.node, decoder->position());
  }
399 
Drop(FullDecoder * decoder)400   void Drop(FullDecoder* decoder) {}
401 
  // local.get: reads the current SSA node of the local.
  void LocalGet(FullDecoder* decoder, Value* result,
                const IndexImmediate<validate>& imm) {
    result->node = ssa_env_->locals[imm.index];
  }
406 
  // local.set: renames the local to {value}'s node in the SSA environment.
  void LocalSet(FullDecoder* decoder, const Value& value,
                const IndexImmediate<validate>& imm) {
    ssa_env_->locals[imm.index] = value.node;
  }
411 
LocalTee(FullDecoder * decoder,const Value & value,Value * result,const IndexImmediate<validate> & imm)412   void LocalTee(FullDecoder* decoder, const Value& value, Value* result,
413                 const IndexImmediate<validate>& imm) {
414     result->node = value.node;
415     ssa_env_->locals[imm.index] = value.node;
416   }
417 
AllocateLocals(FullDecoder * decoder,base::Vector<Value> local_values)418   void AllocateLocals(FullDecoder* decoder, base::Vector<Value> local_values) {
419     ZoneVector<TFNode*>* locals = &ssa_env_->locals;
420     locals->insert(locals->begin(), local_values.size(), nullptr);
421     for (uint32_t i = 0; i < local_values.size(); i++) {
422       (*locals)[i] = local_values[i].node;
423     }
424   }
425 
  // Removes the first {count} locals again (end of a let-block scope).
  void DeallocateLocals(FullDecoder* decoder, uint32_t count) {
    ZoneVector<TFNode*>* locals = &ssa_env_->locals;
    locals->erase(locals->begin(), locals->begin() + count);
  }
430 
  // global.get: loads the global at {imm.index}.
  void GlobalGet(FullDecoder* decoder, Value* result,
                 const GlobalIndexImmediate<validate>& imm) {
    result->node = builder_->GlobalGet(imm.index);
  }
435 
  // global.set: stores {value} into the global at {imm.index}.
  void GlobalSet(FullDecoder* decoder, const Value& value,
                 const GlobalIndexImmediate<validate>& imm) {
    builder_->GlobalSet(imm.index, value.node);
  }
440 
  // table.get: loads the element at {index} from table {imm.index}.
  void TableGet(FullDecoder* decoder, const Value& index, Value* result,
                const IndexImmediate<validate>& imm) {
    result->node =
        builder_->TableGet(imm.index, index.node, decoder->position());
  }
446 
  // table.set: stores {value} at {index} in table {imm.index}.
  void TableSet(FullDecoder* decoder, const Value& index, const Value& value,
                const IndexImmediate<validate>& imm) {
    builder_->TableSet(imm.index, index.node, value.node, decoder->position());
  }
451 
  // unreachable (and friends): emits an unconditional trap.
  void Trap(FullDecoder* decoder, TrapReason reason) {
    builder_->Trap(reason, decoder->position());
  }
455 
  // Traps with kTrapIllegalCast unless {obj} is null; passes {obj} through
  // otherwise.
  void AssertNull(FullDecoder* decoder, const Value& obj, Value* result) {
    builder_->TrapIfFalse(
        wasm::TrapReason::kTrapIllegalCast,
        builder_->Binop(kExprRefEq, obj.node, builder_->RefNull(),
                        decoder->position()),
        decoder->position());
    result->node = obj.node;
  }
464 
NopForTestingUnsupportedInLiftoff(FullDecoder * decoder)465   void NopForTestingUnsupportedInLiftoff(FullDecoder* decoder) {}
466 
Select(FullDecoder * decoder,const Value & cond,const Value & fval,const Value & tval,Value * result)467   void Select(FullDecoder* decoder, const Value& cond, const Value& fval,
468               const Value& tval, Value* result) {
469     result->node =
470       builder_->Select(cond.node, tval.node, fval.node, result->type);
471   }
472 
CopyStackValues(FullDecoder * decoder,uint32_t count,uint32_t drop_values)473   ValueVector CopyStackValues(FullDecoder* decoder, uint32_t count,
474                               uint32_t drop_values) {
475     Value* stack_base =
476         count > 0 ? decoder->stack_value(count + drop_values) : nullptr;
477     ValueVector stack_values(count);
478     for (uint32_t i = 0; i < count; i++) {
479       stack_values[i] = stack_base[i];
480     }
481     return stack_values;
482   }
483 
  // Emits a function return of the top {return_count} stack values, after
  // skipping {drop_values} entries above them. When loop unrolling is on,
  // the return values are first routed through loop exits in a split
  // environment; the original environment is restored afterwards.
  void DoReturn(FullDecoder* decoder, uint32_t drop_values) {
    uint32_t ret_count = static_cast<uint32_t>(decoder->sig_->return_count());
    NodeVector values(ret_count);
    SsaEnv* internal_env = ssa_env_;
    if (emit_loop_exits()) {
      SsaEnv* exit_env = Split(decoder->zone(), ssa_env_);
      SetEnv(exit_env);
      auto stack_values = CopyStackValues(decoder, ret_count, drop_values);
      BuildNestedLoopExits(decoder, decoder->control_depth() - 1, false,
                           stack_values);
      GetNodes(values.begin(), base::VectorOf(stack_values));
    } else {
      Value* stack_base = ret_count == 0
                              ? nullptr
                              : decoder->stack_value(ret_count + drop_values);
      GetNodes(values.begin(), stack_base, ret_count);
    }
    if (FLAG_trace_wasm && inlined_status_ == kRegularFunction) {
      builder_->TraceFunctionExit(base::VectorOf(values), decoder->position());
    }
    builder_->Return(base::VectorOf(values));
    SetEnv(internal_env);
  }
507 
  // Branches to the control construct {depth} levels up, or returns when
  // {depth} targets the function block. With loop unrolling, the branch
  // values are routed through loop exits in a split environment first.
  void BrOrRet(FullDecoder* decoder, uint32_t depth, uint32_t drop_values) {
    if (depth == decoder->control_depth() - 1) {
      DoReturn(decoder, drop_values);
    } else {
      Control* target = decoder->control_at(depth);
      if (emit_loop_exits()) {
        SsaEnv* internal_env = ssa_env_;
        SsaEnv* exit_env = Split(decoder->zone(), ssa_env_);
        SetEnv(exit_env);
        uint32_t value_count = target->br_merge()->arity;
        auto stack_values = CopyStackValues(decoder, value_count, drop_values);
        BuildNestedLoopExits(decoder, depth, true, stack_values);
        MergeValuesInto(decoder, target, target->br_merge(),
                        stack_values.data());
        SetEnv(internal_env);
      } else {
        MergeValuesInto(decoder, target, target->br_merge(), drop_values);
      }
    }
  }
528 
  // br_if: conditionally branches to {depth}, honoring branch hints. The
  // taken branch gets a split environment; decoding then resumes in the
  // fallthrough environment.
  void BrIf(FullDecoder* decoder, const Value& cond, uint32_t depth) {
    SsaEnv* fenv = ssa_env_;
    SsaEnv* tenv = Split(decoder->zone(), fenv);
    fenv->SetNotMerged();
    WasmBranchHint hint = WasmBranchHint::kNoHint;
    if (branch_hints_) {
      hint = branch_hints_->GetHintFor(decoder->pc_relative_offset());
    }
    switch (hint) {
      case WasmBranchHint::kNoHint:
        builder_->BranchNoHint(cond.node, &tenv->control, &fenv->control);
        break;
      case WasmBranchHint::kUnlikely:
        builder_->BranchExpectFalse(cond.node, &tenv->control, &fenv->control);
        break;
      case WasmBranchHint::kLikely:
        builder_->BranchExpectTrue(cond.node, &tenv->control, &fenv->control);
        break;
    }
    builder_->SetControl(fenv->control);
    SetEnv(tenv);
    // Drop 1 value: the branch condition.
    BrOrRet(decoder, depth, 1);
    SetEnv(fenv);
  }
553 
  // br_table: builds a switch on {key} with one case per table entry plus
  // the default target; each case branches in its own split environment.
  void BrTable(FullDecoder* decoder, const BranchTableImmediate<validate>& imm,
               const Value& key) {
    if (imm.table_count == 0) {
      // Only a default target. Do the equivalent of br.
      uint32_t target = BranchTableIterator<validate>(decoder, imm).next();
      BrOrRet(decoder, target, 1);
      return;
    }

    SsaEnv* branch_env = ssa_env_;
    // Build branches to the various blocks based on the table.
    TFNode* sw = builder_->Switch(imm.table_count + 1, key.node);

    SsaEnv* copy = Steal(decoder->zone(), branch_env);
    SetEnv(copy);
    BranchTableIterator<validate> iterator(decoder, imm);
    while (iterator.has_next()) {
      uint32_t i = iterator.cur_index();
      uint32_t target = iterator.next();
      SetEnv(Split(decoder->zone(), copy));
      // Index == table_count designates the default case of the switch.
      builder_->SetControl(i == imm.table_count ? builder_->IfDefault(sw)
                                                : builder_->IfValue(i, sw));
      BrOrRet(decoder, target, 1);
    }
    DCHECK(decoder->ok());
    SetEnv(branch_env);
  }
581 
  // Switches from the then-branch to the else-branch of an if: merges the
  // (still reachable) then results, then enters the false environment.
  void Else(FullDecoder* decoder, Control* if_block) {
    if (if_block->reachable()) {
      // Merge the if branch into the end merge.
      MergeValuesInto(decoder, if_block, &if_block->end_merge);
    }
    SetEnv(if_block->false_env);
  }
589 
  // Memory load at {index} + {imm.offset} with the given type/alignment.
  void LoadMem(FullDecoder* decoder, LoadType type,
               const MemoryAccessImmediate<validate>& imm, const Value& index,
               Value* result) {
    result->node =
        builder_->LoadMem(type.value_type(), type.mem_type(), index.node,
                          imm.offset, imm.alignment, decoder->position());
  }
597 
  // SIMD load with transformation (splat/extend/zero-extend) from memory.
  void LoadTransform(FullDecoder* decoder, LoadType type,
                     LoadTransformationKind transform,
                     const MemoryAccessImmediate<validate>& imm,
                     const Value& index, Value* result) {
    result->node = builder_->LoadTransform(type.value_type(), type.mem_type(),
                                           transform, index.node, imm.offset,
                                           imm.alignment, decoder->position());
  }
606 
  // SIMD load of a single lane {laneidx} into vector {value}.
  void LoadLane(FullDecoder* decoder, LoadType type, const Value& value,
                const Value& index, const MemoryAccessImmediate<validate>& imm,
                const uint8_t laneidx, Value* result) {
    result->node = builder_->LoadLane(
        type.value_type(), type.mem_type(), value.node, index.node, imm.offset,
        imm.alignment, laneidx, decoder->position());
  }
614 
  // Memory store of {value} at {index} + {imm.offset}.
  void StoreMem(FullDecoder* decoder, StoreType type,
                const MemoryAccessImmediate<validate>& imm, const Value& index,
                const Value& value) {
    builder_->StoreMem(type.mem_rep(), index.node, imm.offset, imm.alignment,
                       value.node, decoder->position(), type.value_type());
  }
621 
  // SIMD store of lane {laneidx} of vector {value} to memory.
  void StoreLane(FullDecoder* decoder, StoreType type,
                 const MemoryAccessImmediate<validate>& imm, const Value& index,
                 const Value& value, const uint8_t laneidx) {
    builder_->StoreLane(type.mem_rep(), index.node, imm.offset, imm.alignment,
                        value.node, laneidx, decoder->position(),
                        type.value_type());
  }
629 
  // memory.size: returns the current memory size in pages.
  void CurrentMemoryPages(FullDecoder* decoder, Value* result) {
    result->node = builder_->CurrentMemoryPages();
  }
633 
  // memory.grow: grows memory by {value} pages; returns old size or -1.
  void MemoryGrow(FullDecoder* decoder, const Value& value, Value* result) {
    result->node = builder_->MemoryGrow(value.node);
    // Always reload the instance cache after growing memory.
    LoadContextIntoSsa(ssa_env_);
  }
639 
  // call: direct call to the function at {imm.index}.
  void CallDirect(FullDecoder* decoder,
                  const CallFunctionImmediate<validate>& imm,
                  const Value args[], Value returns[]) {
    if (FLAG_wasm_speculative_inlining && type_feedback_.size() > 0) {
      // Advance past this call's feedback slot; direct calls do not use the
      // recorded target (presumably only call_ref does — see CallRef).
      DCHECK_LT(feedback_instruction_index_, type_feedback_.size());
      feedback_instruction_index_++;
    }
    DoCall(decoder, CallInfo::CallDirect(imm.index), imm.sig, args, returns);
  }
649 
  // return_call: direct tail call to the function at {imm.index}.
  void ReturnCall(FullDecoder* decoder,
                  const CallFunctionImmediate<validate>& imm,
                  const Value args[]) {
    if (FLAG_wasm_speculative_inlining && type_feedback_.size() > 0) {
      // Advance past this call's feedback slot (kept in sync; the recorded
      // target is not consumed here).
      DCHECK_LT(feedback_instruction_index_, type_feedback_.size());
      feedback_instruction_index_++;
    }
    DoReturnCall(decoder, CallInfo::CallDirect(imm.index), imm.sig, args);
  }
659 
  // call_indirect: call through table {imm.table_imm.index} at {index},
  // checked against signature {imm.sig_imm.index}.
  void CallIndirect(FullDecoder* decoder, const Value& index,
                    const CallIndirectImmediate<validate>& imm,
                    const Value args[], Value returns[]) {
    DoCall(
        decoder,
        CallInfo::CallIndirect(index, imm.table_imm.index, imm.sig_imm.index),
        imm.sig, args, returns);
  }
668 
  // Tail-call variant of CallIndirect (return_call_indirect).
  void ReturnCallIndirect(FullDecoder* decoder, const Value& index,
                          const CallIndirectImmediate<validate>& imm,
                          const Value args[]) {
    DoReturnCall(
        decoder,
        CallInfo::CallIndirect(index, imm.table_imm.index, imm.sig_imm.index),
        imm.sig, args);
  }
677 
  // Compiles call_ref. If speculative inlining collected a likely callee for
  // this call site, emits a guarded direct call to that function with a
  // generic call_ref fallback and merges the two paths; otherwise emits a
  // plain call_ref.
  void CallRef(FullDecoder* decoder, const Value& func_ref,
               const FunctionSig* sig, uint32_t sig_index, const Value args[],
               Value returns[]) {
    // Function index of the speculated call target, or -1 if none.
    int maybe_feedback = -1;
    if (FLAG_wasm_speculative_inlining && type_feedback_.size() > 0) {
      DCHECK_LT(feedback_instruction_index_, type_feedback_.size());
      maybe_feedback =
          type_feedback_[feedback_instruction_index_].function_index;
      feedback_instruction_index_++;
    }
    if (maybe_feedback == -1) {
      // No usable feedback: emit a regular call_ref.
      DoCall(decoder, CallInfo::CallRef(func_ref, NullCheckFor(func_ref.type)),
             sig, args, returns);
      return;
    }

    // Check for equality against a function at a specific index, and if
    // successful, just emit a direct call.
    DCHECK_GE(maybe_feedback, 0);
    const uint32_t expected_function_index = maybe_feedback;

    if (FLAG_trace_wasm_speculative_inlining) {
      PrintF("[Function #%d call #%d: graph support for inlining target #%d]\n",
             func_index_, feedback_instruction_index_ - 1,
             expected_function_index);
    }

    // Branch on whether {func_ref} is exactly the expected function.
    TFNode* success_control;
    TFNode* failure_control;
    builder_->CompareToInternalFunctionAtIndex(
        func_ref.node, expected_function_index, &success_control,
        &failure_control);
    TFNode* initial_effect = effect();

    // Fast path: direct call to the expected function.
    builder_->SetControl(success_control);
    ssa_env_->control = success_control;
    Value* returns_direct =
        decoder->zone()->NewArray<Value>(sig->return_count());
    DoCall(decoder, CallInfo::CallDirect(expected_function_index),
           decoder->module_->signature(sig_index), args, returns_direct);
    TFNode* control_direct = control();
    TFNode* effect_direct = effect();

    // Slow path: generic call_ref, restarting from the effect before the
    // direct call.
    builder_->SetEffectControl(initial_effect, failure_control);
    ssa_env_->effect = initial_effect;
    ssa_env_->control = failure_control;
    Value* returns_ref = decoder->zone()->NewArray<Value>(sig->return_count());
    DoCall(decoder, CallInfo::CallRef(func_ref, NullCheckFor(func_ref.type)),
           sig, args, returns_ref);

    TFNode* control_ref = control();
    TFNode* effect_ref = effect();

    // Merge control and effect of the two paths...
    TFNode* control_args[] = {control_direct, control_ref};
    TFNode* control = builder_->Merge(2, control_args);

    TFNode* effect_args[] = {effect_direct, effect_ref, control};
    TFNode* effect = builder_->EffectPhi(2, effect_args);

    ssa_env_->control = control;
    ssa_env_->effect = effect;
    builder_->SetEffectControl(effect, control);

    // ...and phi the return values of the two calls together.
    for (uint32_t i = 0; i < sig->return_count(); i++) {
      TFNode* phi_args[] = {returns_direct[i].node, returns_ref[i].node,
                            control};
      returns[i].node = builder_->Phi(sig->GetReturn(i), 2, phi_args);
    }
  }
747 
  // Tail-call variant of CallRef (return_call_ref). With feedback, emits a
  // guarded direct tail call and a call_ref tail call on the failure path;
  // no merge is needed since both paths leave the function.
  void ReturnCallRef(FullDecoder* decoder, const Value& func_ref,
                     const FunctionSig* sig, uint32_t sig_index,
                     const Value args[]) {
    // Function index of the speculated call target, or -1 if none.
    int maybe_feedback = -1;
    if (FLAG_wasm_speculative_inlining && type_feedback_.size() > 0) {
      DCHECK_LT(feedback_instruction_index_, type_feedback_.size());
      maybe_feedback =
          type_feedback_[feedback_instruction_index_].function_index;
      feedback_instruction_index_++;
    }
    if (maybe_feedback == -1) {
      // No usable feedback: emit a regular return_call_ref.
      DoReturnCall(decoder,
                   CallInfo::CallRef(func_ref, NullCheckFor(func_ref.type)),
                   sig, args);
      return;
    }

    // Check for equality against a function at a specific index, and if
    // successful, just emit a direct call.
    DCHECK_GE(maybe_feedback, 0);
    const uint32_t expected_function_index = maybe_feedback;

    if (FLAG_trace_wasm_speculative_inlining) {
      PrintF("[Function #%d call #%d: graph support for inlining target #%d]\n",
             func_index_, feedback_instruction_index_ - 1,
             expected_function_index);
    }

    // Branch on whether {func_ref} is exactly the expected function.
    TFNode* success_control;
    TFNode* failure_control;
    builder_->CompareToInternalFunctionAtIndex(
        func_ref.node, expected_function_index, &success_control,
        &failure_control);
    TFNode* initial_effect = effect();

    // Fast path: direct tail call to the expected function.
    builder_->SetControl(success_control);
    ssa_env_->control = success_control;
    DoReturnCall(decoder, CallInfo::CallDirect(expected_function_index), sig,
                 args);

    // Slow path: generic tail call through the funcref, restarting from the
    // effect before the direct call.
    builder_->SetEffectControl(initial_effect, failure_control);
    ssa_env_->effect = initial_effect;
    ssa_env_->control = failure_control;
    DoReturnCall(decoder,
                 CallInfo::CallRef(func_ref, NullCheckFor(func_ref.type)), sig,
                 args);
  }
795 
  // Implements br_on_null: branches to {depth} when {ref_object} is null,
  // otherwise falls through with the reference.
  void BrOnNull(FullDecoder* decoder, const Value& ref_object, uint32_t depth,
                bool pass_null_along_branch, Value* result_on_fallthrough) {
    SsaEnv* false_env = ssa_env_;
    SsaEnv* true_env = Split(decoder->zone(), false_env);
    false_env->SetNotMerged();
    builder_->BrOnNull(ref_object.node, &true_env->control,
                       &false_env->control);
    builder_->SetControl(false_env->control);
    // Emit the branch in the null ("true") environment; drop the null value
    // unless it is passed along the branch.
    SetEnv(true_env);
    BrOrRet(decoder, depth, pass_null_along_branch ? 0 : 1);
    // Continue in the non-null fallthrough environment.
    SetEnv(false_env);
    result_on_fallthrough->node = ref_object.node;
  }
809 
  // Implements br_on_non_null: branches to {depth} when {ref_object} is not
  // null. Note the null-check targets are swapped relative to BrOnNull, so
  // here the "true" environment is the non-null branch.
  void BrOnNonNull(FullDecoder* decoder, const Value& ref_object,
                   uint32_t depth) {
    SsaEnv* false_env = ssa_env_;
    SsaEnv* true_env = Split(decoder->zone(), false_env);
    false_env->SetNotMerged();
    builder_->BrOnNull(ref_object.node, &false_env->control,
                       &true_env->control);
    builder_->SetControl(false_env->control);
    SetEnv(true_env);
    BrOrRet(decoder, depth, 0);
    SetEnv(false_env);
  }
822 
SimdOp(FullDecoder * decoder,WasmOpcode opcode,base::Vector<Value> args,Value * result)823   void SimdOp(FullDecoder* decoder, WasmOpcode opcode, base::Vector<Value> args,
824               Value* result) {
825     NodeVector inputs(args.size());
826     GetNodes(inputs.begin(), args);
827     TFNode* node = builder_->SimdOp(opcode, inputs.begin());
828     if (result) result->node = node;
829   }
830 
SimdLaneOp(FullDecoder * decoder,WasmOpcode opcode,const SimdLaneImmediate<validate> & imm,base::Vector<Value> inputs,Value * result)831   void SimdLaneOp(FullDecoder* decoder, WasmOpcode opcode,
832                   const SimdLaneImmediate<validate>& imm,
833                   base::Vector<Value> inputs, Value* result) {
834     NodeVector nodes(inputs.size());
835     GetNodes(nodes.begin(), inputs);
836     result->node = builder_->SimdLaneOp(opcode, imm.lane, nodes.begin());
837   }
838 
Simd8x16ShuffleOp(FullDecoder * decoder,const Simd128Immediate<validate> & imm,const Value & input0,const Value & input1,Value * result)839   void Simd8x16ShuffleOp(FullDecoder* decoder,
840                          const Simd128Immediate<validate>& imm,
841                          const Value& input0, const Value& input1,
842                          Value* result) {
843     TFNode* input_nodes[] = {input0.node, input1.node};
844     result->node = builder_->Simd8x16ShuffleOp(imm.value, input_nodes);
845   }
846 
Throw(FullDecoder * decoder,const TagIndexImmediate<validate> & imm,const base::Vector<Value> & value_args)847   void Throw(FullDecoder* decoder, const TagIndexImmediate<validate>& imm,
848              const base::Vector<Value>& value_args) {
849     int count = value_args.length();
850     ZoneVector<TFNode*> args(count, decoder->zone());
851     for (int i = 0; i < count; ++i) {
852       args[i] = value_args[i].node;
853     }
854     CheckForException(decoder,
855                       builder_->Throw(imm.index, imm.tag, base::VectorOf(args),
856                                       decoder->position()));
857     builder_->TerminateThrow(effect(), control());
858   }
859 
  // Implements `rethrow`: re-throws the exception caught by {block}.
  void Rethrow(FullDecoder* decoder, Control* block) {
    DCHECK(block->is_try_catchall() || block->is_try_catch());
    TFNode* exception = block->try_info->exception;
    DCHECK_NOT_NULL(exception);
    // The rethrow may itself be caught by an enclosing handler.
    CheckForException(decoder, builder_->Rethrow(exception));
    builder_->TerminateThrow(effect(), control());
  }
867 
  // Compiles a `catch` clause: compares the caught exception's tag against
  // the expected tag {imm} and, on a match, unpacks the exception's payload
  // into {values}; on a mismatch, control flows to the next catch clause.
  void CatchException(FullDecoder* decoder,
                      const TagIndexImmediate<validate>& imm, Control* block,
                      base::Vector<Value> values) {
    DCHECK(block->is_try_catch());
    // The catch block is unreachable if no possible throws in the try block
    // exist. We only build a landing pad if some node in the try block can
    // (possibly) throw. Otherwise the catch environments remain empty.
    if (!block->try_info->might_throw()) {
      block->reachability = kSpecOnlyReachable;
      return;
    }

    TFNode* exception = block->try_info->exception;
    SetEnv(block->try_info->catch_env);

    TFNode* if_catch = nullptr;
    TFNode* if_no_catch = nullptr;

    // Get the exception tag and see if it matches the expected one.
    TFNode* caught_tag = builder_->GetExceptionTag(exception);
    TFNode* exception_tag = builder_->LoadTagFromTable(imm.index);
    TFNode* compare = builder_->ExceptionTagEqual(caught_tag, exception_tag);
    builder_->BranchNoHint(compare, &if_catch, &if_no_catch);

    // If the tags don't match we continue with the next tag by setting the
    // false environment as the new {TryInfo::catch_env} here.
    SsaEnv* if_no_catch_env = Split(decoder->zone(), ssa_env_);
    if_no_catch_env->control = if_no_catch;
    SsaEnv* if_catch_env = Steal(decoder->zone(), ssa_env_);
    if_catch_env->control = if_catch;
    block->try_info->catch_env = if_no_catch_env;

    // If the tags match we extract the values from the exception object and
    // push them onto the operand stack using the passed {values} vector.
    SetEnv(if_catch_env);
    NodeVector caught_values(values.size());
    base::Vector<TFNode*> caught_vector = base::VectorOf(caught_values);
    builder_->GetExceptionValues(exception, imm.tag, caught_vector);
    for (size_t i = 0, e = values.size(); i < e; ++i) {
      values[i].node = caught_values[i];
    }
  }
910 
  // Compiles `delegate`: forwards any exception thrown inside {block} to the
  // handler of the try block {depth} levels up, or rethrows to the caller
  // when {depth} refers past the outermost control.
  void Delegate(FullDecoder* decoder, uint32_t depth, Control* block) {
    DCHECK_EQ(decoder->control_at(0), block);
    DCHECK(block->is_incomplete_try());

    if (block->try_info->might_throw()) {
      // Merge the current env into the target handler's env.
      SetEnv(block->try_info->catch_env);
      if (depth == decoder->control_depth() - 1) {
        // We just throw to the caller here, so no need to generate IfSuccess
        // and IfFailure nodes.
        builder_->Rethrow(block->try_info->exception);
        builder_->TerminateThrow(effect(), control());
        return;
      }
      DCHECK(decoder->control_at(depth)->is_try());
      TryInfo* target_try = decoder->control_at(depth)->try_info;
      if (emit_loop_exits()) {
        // Leaving intervening loops requires explicit loop-exit nodes for
        // the exception value as well.
        ValueVector stack_values;
        BuildNestedLoopExits(decoder, depth, true, stack_values,
                             &block->try_info->exception);
      }
      Goto(decoder, target_try->catch_env);

      // Create or merge the exception.
      if (target_try->catch_env->state == SsaEnv::kReached) {
        // First arrival at the target handler: just adopt the exception.
        target_try->exception = block->try_info->exception;
      } else {
        DCHECK_EQ(target_try->catch_env->state, SsaEnv::kMerged);
        target_try->exception = builder_->CreateOrMergeIntoPhi(
            MachineRepresentation::kTagged, target_try->catch_env->control,
            target_try->exception, block->try_info->exception);
      }
    }
  }
945 
  // Compiles a `catch_all` clause: switches code generation into the try
  // block's catch environment (no tag check is needed).
  void CatchAll(FullDecoder* decoder, Control* block) {
    DCHECK(block->is_try_catchall() || block->is_try_catch());
    DCHECK_EQ(decoder->control_at(0), block);

    // The catch block is unreachable if no possible throws in the try block
    // exist. We only build a landing pad if some node in the try block can
    // (possibly) throw. Otherwise the catch environments remain empty.
    if (!block->try_info->might_throw()) {
      decoder->SetSucceedingCodeDynamicallyUnreachable();
      return;
    }

    SetEnv(block->try_info->catch_env);
  }
960 
AtomicOp(FullDecoder * decoder,WasmOpcode opcode,base::Vector<Value> args,const MemoryAccessImmediate<validate> & imm,Value * result)961   void AtomicOp(FullDecoder* decoder, WasmOpcode opcode,
962                 base::Vector<Value> args,
963                 const MemoryAccessImmediate<validate>& imm, Value* result) {
964     NodeVector inputs(args.size());
965     GetNodes(inputs.begin(), args);
966     TFNode* node = builder_->AtomicOp(opcode, inputs.begin(), imm.alignment,
967                                       imm.offset, decoder->position());
968     if (result) result->node = node;
969   }
970 
  // Implements atomic.fence; produces no value.
  void AtomicFence(FullDecoder* decoder) { builder_->AtomicFence(); }
972 
  // Implements memory.init: copies {size} bytes from passive data segment
  // {imm.data_segment.index} at {src} into linear memory at {dst}.
  void MemoryInit(FullDecoder* decoder,
                  const MemoryInitImmediate<validate>& imm, const Value& dst,
                  const Value& src, const Value& size) {
    builder_->MemoryInit(imm.data_segment.index, dst.node, src.node, size.node,
                         decoder->position());
  }
979 
  // Implements data.drop for the passive data segment {imm.index}.
  void DataDrop(FullDecoder* decoder, const IndexImmediate<validate>& imm) {
    builder_->DataDrop(imm.index, decoder->position());
  }
983 
  // Implements memory.copy: copies {size} bytes within linear memory from
  // {src} to {dst}.
  void MemoryCopy(FullDecoder* decoder,
                  const MemoryCopyImmediate<validate>& imm, const Value& dst,
                  const Value& src, const Value& size) {
    builder_->MemoryCopy(dst.node, src.node, size.node, decoder->position());
  }
989 
  // Implements memory.fill: writes {size} copies of the byte {value} to
  // linear memory starting at {dst}.
  void MemoryFill(FullDecoder* decoder,
                  const MemoryIndexImmediate<validate>& imm, const Value& dst,
                  const Value& value, const Value& size) {
    builder_->MemoryFill(dst.node, value.node, size.node, decoder->position());
  }
995 
  // Implements table.init; {args} are the stack operands (dst, src, size).
  void TableInit(FullDecoder* decoder, const TableInitImmediate<validate>& imm,
                 base::Vector<Value> args) {
    builder_->TableInit(imm.table.index, imm.element_segment.index,
                        args[0].node, args[1].node, args[2].node,
                        decoder->position());
  }
1002 
  // Implements elem.drop for the passive element segment {imm.index}.
  void ElemDrop(FullDecoder* decoder, const IndexImmediate<validate>& imm) {
    builder_->ElemDrop(imm.index, decoder->position());
  }
1006 
  // Implements table.copy; {args} are the stack operands (dst, src, size).
  void TableCopy(FullDecoder* decoder, const TableCopyImmediate<validate>& imm,
                 base::Vector<Value> args) {
    builder_->TableCopy(imm.table_dst.index, imm.table_src.index, args[0].node,
                        args[1].node, args[2].node, decoder->position());
  }
1012 
  // Implements table.grow; {result} receives the builder's result node.
  void TableGrow(FullDecoder* decoder, const IndexImmediate<validate>& imm,
                 const Value& value, const Value& delta, Value* result) {
    result->node = builder_->TableGrow(imm.index, value.node, delta.node);
  }
1017 
  // Implements table.size for table {imm.index}.
  void TableSize(FullDecoder* decoder, const IndexImmediate<validate>& imm,
                 Value* result) {
    result->node = builder_->TableSize(imm.index);
  }
1022 
  // Implements table.fill: writes {count} copies of {value} into table
  // {imm.index} starting at {start}.
  void TableFill(FullDecoder* decoder, const IndexImmediate<validate>& imm,
                 const Value& start, const Value& value, const Value& count) {
    builder_->TableFill(imm.index, start.node, value.node, count.node);
  }
1027 
StructNewWithRtt(FullDecoder * decoder,const StructIndexImmediate<validate> & imm,const Value & rtt,const Value args[],Value * result)1028   void StructNewWithRtt(FullDecoder* decoder,
1029                         const StructIndexImmediate<validate>& imm,
1030                         const Value& rtt, const Value args[], Value* result) {
1031     uint32_t field_count = imm.struct_type->field_count();
1032     NodeVector arg_nodes(field_count);
1033     for (uint32_t i = 0; i < field_count; i++) {
1034       arg_nodes[i] = args[i].node;
1035     }
1036     result->node = builder_->StructNewWithRtt(
1037         imm.index, imm.struct_type, rtt.node, base::VectorOf(arg_nodes));
1038   }
StructNewDefault(FullDecoder * decoder,const StructIndexImmediate<validate> & imm,const Value & rtt,Value * result)1039   void StructNewDefault(FullDecoder* decoder,
1040                         const StructIndexImmediate<validate>& imm,
1041                         const Value& rtt, Value* result) {
1042     uint32_t field_count = imm.struct_type->field_count();
1043     NodeVector arg_nodes(field_count);
1044     for (uint32_t i = 0; i < field_count; i++) {
1045       arg_nodes[i] = DefaultValue(imm.struct_type->field(i));
1046     }
1047     result->node = builder_->StructNewWithRtt(
1048         imm.index, imm.struct_type, rtt.node, base::VectorOf(arg_nodes));
1049   }
1050 
  // Implements struct.get / struct.get_s / struct.get_u, with a null check
  // as required by {struct_object}'s type.
  void StructGet(FullDecoder* decoder, const Value& struct_object,
                 const FieldImmediate<validate>& field, bool is_signed,
                 Value* result) {
    result->node = builder_->StructGet(
        struct_object.node, field.struct_imm.struct_type, field.field_imm.index,
        NullCheckFor(struct_object.type), is_signed, decoder->position());
  }
1058 
  // Implements struct.set, with a null check as required by
  // {struct_object}'s type.
  void StructSet(FullDecoder* decoder, const Value& struct_object,
                 const FieldImmediate<validate>& field,
                 const Value& field_value) {
    builder_->StructSet(struct_object.node, field.struct_imm.struct_type,
                        field.field_imm.index, field_value.node,
                        NullCheckFor(struct_object.type), decoder->position());
  }
1066 
  // Implements array.new_with_rtt: allocates an array of {length} elements,
  // each initialized to {initial_value}.
  void ArrayNewWithRtt(FullDecoder* decoder,
                       const ArrayIndexImmediate<validate>& imm,
                       const Value& length, const Value& initial_value,
                       const Value& rtt, Value* result) {
    result->node = builder_->ArrayNewWithRtt(imm.index, imm.array_type,
                                             length.node, initial_value.node,
                                             rtt.node, decoder->position());
    // array.new_with_rtt introduces a loop. Therefore, we have to mark the
    // immediately nesting loop (if any) as non-innermost.
    if (!loop_infos_.empty()) loop_infos_.back().can_be_innermost = false;
  }
1078 
ArrayNewDefault(FullDecoder * decoder,const ArrayIndexImmediate<validate> & imm,const Value & length,const Value & rtt,Value * result)1079   void ArrayNewDefault(FullDecoder* decoder,
1080                        const ArrayIndexImmediate<validate>& imm,
1081                        const Value& length, const Value& rtt, Value* result) {
1082     // This will cause the default value to be chosen automatically based
1083     // on the element type.
1084     TFNode* initial_value = nullptr;
1085     result->node =
1086         builder_->ArrayNewWithRtt(imm.index, imm.array_type, length.node,
1087                                   initial_value, rtt.node, decoder->position());
1088   }
1089 
  // Implements array.get / array.get_s / array.get_u, with a null check as
  // required by {array_obj}'s type.
  void ArrayGet(FullDecoder* decoder, const Value& array_obj,
                const ArrayIndexImmediate<validate>& imm, const Value& index,
                bool is_signed, Value* result) {
    result->node = builder_->ArrayGet(array_obj.node, imm.array_type,
                                      index.node, NullCheckFor(array_obj.type),
                                      is_signed, decoder->position());
  }
1097 
  // Implements array.set, with a null check as required by {array_obj}'s
  // type.
  void ArraySet(FullDecoder* decoder, const Value& array_obj,
                const ArrayIndexImmediate<validate>& imm, const Value& index,
                const Value& value) {
    builder_->ArraySet(array_obj.node, imm.array_type, index.node, value.node,
                       NullCheckFor(array_obj.type), decoder->position());
  }
1104 
  // Implements array.len, with a null check as required by {array_obj}'s
  // type.
  void ArrayLen(FullDecoder* decoder, const Value& array_obj, Value* result) {
    result->node = builder_->ArrayLen(
        array_obj.node, NullCheckFor(array_obj.type), decoder->position());
  }
1109 
  // Implements array.copy: copies {length} elements from {src} at
  // {src_index} to {dst} at {dst_index}, with null checks on both arrays.
  void ArrayCopy(FullDecoder* decoder, const Value& dst, const Value& dst_index,
                 const Value& src, const Value& src_index,
                 const Value& length) {
    builder_->ArrayCopy(dst.node, dst_index.node, NullCheckFor(dst.type),
                        src.node, src_index.node, NullCheckFor(src.type),
                        length.node, decoder->position());
  }
1117 
ArrayInit(FullDecoder * decoder,const ArrayIndexImmediate<validate> & imm,const base::Vector<Value> & elements,const Value & rtt,Value * result)1118   void ArrayInit(FullDecoder* decoder, const ArrayIndexImmediate<validate>& imm,
1119                  const base::Vector<Value>& elements, const Value& rtt,
1120                  Value* result) {
1121     NodeVector element_nodes(elements.size());
1122     for (uint32_t i = 0; i < elements.size(); i++) {
1123       element_nodes[i] = elements[i].node;
1124     }
1125     result->node =
1126         builder_->ArrayInit(imm.array_type, rtt.node, VectorOf(element_nodes));
1127   }
1128 
  // Implements array.init_from_data: allocates an array of {length} elements
  // initialized from passive data segment {data_segment.index} at {offset}.
  void ArrayInitFromData(FullDecoder* decoder,
                         const ArrayIndexImmediate<validate>& array_imm,
                         const IndexImmediate<validate>& data_segment,
                         const Value& offset, const Value& length,
                         const Value& rtt, Value* result) {
    result->node = builder_->ArrayInitFromData(
        array_imm.array_type, data_segment.index, offset.node, length.node,
        rtt.node, decoder->position());
  }
1138 
  // Implements i31.new.
  void I31New(FullDecoder* decoder, const Value& input, Value* result) {
    result->node = builder_->I31New(input.node);
  }
1142 
  // Implements i31.get_s (sign-extending extraction).
  void I31GetS(FullDecoder* decoder, const Value& input, Value* result) {
    result->node = builder_->I31GetS(input.node);
  }
1146 
  // Implements i31.get_u (zero-extending extraction).
  void I31GetU(FullDecoder* decoder, const Value& input, Value* result) {
    result->node = builder_->I31GetU(input.node);
  }
1150 
  // Implements rtt.canon: produces the canonical RTT for {type_index}.
  void RttCanon(FullDecoder* decoder, uint32_t type_index, Value* result) {
    result->node = builder_->RttCanon(type_index);
  }
1154 
1155   using StaticKnowledge = compiler::WasmGraphBuilder::ObjectReferenceKnowledge;
1156 
ComputeStaticKnowledge(ValueType object_type,ValueType rtt_type,const WasmModule * module)1157   StaticKnowledge ComputeStaticKnowledge(ValueType object_type,
1158                                          ValueType rtt_type,
1159                                          const WasmModule* module) {
1160     StaticKnowledge result;
1161     result.object_can_be_null = object_type.is_nullable();
1162     DCHECK(object_type.is_object_reference());  // Checked by validation.
1163     // In the bottom case, the result is irrelevant.
1164     result.rtt_depth = rtt_type.is_bottom()
1165                            ? 0 /* unused */
1166                            : static_cast<uint8_t>(GetSubtypingDepth(
1167                                  module, rtt_type.ref_index()));
1168     return result;
1169   }
1170 
  // Implements ref.test: checks {object}'s runtime type against {rtt} and
  // produces an i32 result node.
  void RefTest(FullDecoder* decoder, const Value& object, const Value& rtt,
               Value* result) {
    StaticKnowledge config =
        ComputeStaticKnowledge(object.type, rtt.type, decoder->module_);
    result->node = builder_->RefTest(object.node, rtt.node, config);
  }
1177 
  // Implements ref.cast: checks {object}'s runtime type against {rtt};
  // decoder->position() is passed so the builder can attribute a failure to
  // this instruction.
  void RefCast(FullDecoder* decoder, const Value& object, const Value& rtt,
               Value* result) {
    StaticKnowledge config =
        ComputeStaticKnowledge(object.type, rtt.type, decoder->module_);
    result->node =
        builder_->RefCast(object.node, rtt.node, config, decoder->position());
  }
1185 
  // Common lowering for all br_on_cast-style instructions. {branch_function}
  // is the graph-builder member that emits the actual type check;
  // {branch_on_match} selects whether the matching or the non-matching path
  // takes the branch to {br_depth}.
  template <void (compiler::WasmGraphBuilder::*branch_function)(
      TFNode*, TFNode*, StaticKnowledge, TFNode**, TFNode**, TFNode**,
      TFNode**)>
  void BrOnCastAbs(FullDecoder* decoder, const Value& object, const Value& rtt,
                   Value* forwarding_value, uint32_t br_depth,
                   bool branch_on_match) {
    StaticKnowledge config =
        ComputeStaticKnowledge(object.type, rtt.type, decoder->module_);
    SsaEnv* branch_env = Split(decoder->zone(), ssa_env_);
    SsaEnv* no_branch_env = Steal(decoder->zone(), ssa_env_);
    no_branch_env->SetNotMerged();
    // Map the check's match/no-match outputs onto branch/fallthrough
    // according to {branch_on_match}.
    SsaEnv* match_env = branch_on_match ? branch_env : no_branch_env;
    SsaEnv* no_match_env = branch_on_match ? no_branch_env : branch_env;
    (builder_->*branch_function)(object.node, rtt.node, config,
                                 &match_env->control, &match_env->effect,
                                 &no_match_env->control, &no_match_env->effect);
    builder_->SetControl(no_branch_env->control);
    SetEnv(branch_env);
    forwarding_value->node = object.node;
    // Currently, br_on_* instructions modify the value stack before calling
    // the interface function, so we don't need to drop any values here.
    BrOrRet(decoder, br_depth, 0);
    // Continue code generation on the fallthrough path.
    SetEnv(no_branch_env);
  }
1210 
  // Implements br_on_cast: branches when the cast succeeds.
  void BrOnCast(FullDecoder* decoder, const Value& object, const Value& rtt,
                Value* value_on_branch, uint32_t br_depth) {
    BrOnCastAbs<&compiler::WasmGraphBuilder::BrOnCast>(
        decoder, object, rtt, value_on_branch, br_depth, true);
  }
1216 
  // Implements br_on_cast_fail: reuses the BrOnCast check with
  // {branch_on_match} == false, so the branch is taken when the cast fails.
  void BrOnCastFail(FullDecoder* decoder, const Value& object, const Value& rtt,
                    Value* value_on_fallthrough, uint32_t br_depth) {
    BrOnCastAbs<&compiler::WasmGraphBuilder::BrOnCast>(
        decoder, object, rtt, value_on_fallthrough, br_depth, false);
  }
1222 
  // Implements ref.is_data.
  void RefIsData(FullDecoder* decoder, const Value& object, Value* result) {
    result->node = builder_->RefIsData(object.node, object.type.is_nullable());
  }
1226 
  // Implements ref.as_data; decoder->position() is passed so the builder can
  // attribute a failure to this instruction.
  void RefAsData(FullDecoder* decoder, const Value& object, Value* result) {
    result->node = builder_->RefAsData(object.node, object.type.is_nullable(),
                                       decoder->position());
  }
1231 
  // Implements br_on_data. A dummy {nullptr, kWasmBottom} value stands in
  // for the RTT argument of BrOnCastAbs, since this abstract-type check
  // takes no RTT operand.
  void BrOnData(FullDecoder* decoder, const Value& object,
                Value* value_on_branch, uint32_t br_depth) {
    BrOnCastAbs<&compiler::WasmGraphBuilder::BrOnData>(
        decoder, object, Value{nullptr, kWasmBottom}, value_on_branch, br_depth,
        true);
  }
1238 
  // br_on_non_data: branch if {object} is NOT a data object; the typed value
  // flows to the fallthrough. No rtt operand (dummy bottom value passed).
  void BrOnNonData(FullDecoder* decoder, const Value& object,
                   Value* value_on_fallthrough, uint32_t br_depth) {
    BrOnCastAbs<&compiler::WasmGraphBuilder::BrOnData>(
        decoder, object, Value{nullptr, kWasmBottom}, value_on_fallthrough,
        br_depth, /*branch_on_match=*/false);
  }
1245 
RefIsFunc(FullDecoder * decoder,const Value & object,Value * result)1246   void RefIsFunc(FullDecoder* decoder, const Value& object, Value* result) {
1247     result->node = builder_->RefIsFunc(object.node, object.type.is_nullable());
1248   }
1249 
RefAsFunc(FullDecoder * decoder,const Value & object,Value * result)1250   void RefAsFunc(FullDecoder* decoder, const Value& object, Value* result) {
1251     result->node = builder_->RefAsFunc(object.node, object.type.is_nullable(),
1252                                        decoder->position());
1253   }
1254 
  // br_on_func: branch if {object} is a function (dummy rtt; see BrOnData).
  void BrOnFunc(FullDecoder* decoder, const Value& object,
                Value* value_on_branch, uint32_t br_depth) {
    BrOnCastAbs<&compiler::WasmGraphBuilder::BrOnFunc>(
        decoder, object, Value{nullptr, kWasmBottom}, value_on_branch, br_depth,
        /*branch_on_match=*/true);
  }
1261 
  // br_on_non_func: branch if {object} is NOT a function.
  void BrOnNonFunc(FullDecoder* decoder, const Value& object,
                   Value* value_on_fallthrough, uint32_t br_depth) {
    BrOnCastAbs<&compiler::WasmGraphBuilder::BrOnFunc>(
        decoder, object, Value{nullptr, kWasmBottom}, value_on_fallthrough,
        br_depth, /*branch_on_match=*/false);
  }
1268 
RefIsArray(FullDecoder * decoder,const Value & object,Value * result)1269   void RefIsArray(FullDecoder* decoder, const Value& object, Value* result) {
1270     result->node = builder_->RefIsArray(object.node, object.type.is_nullable());
1271   }
1272 
RefAsArray(FullDecoder * decoder,const Value & object,Value * result)1273   void RefAsArray(FullDecoder* decoder, const Value& object, Value* result) {
1274     result->node = builder_->RefAsArray(object.node, object.type.is_nullable(),
1275                                         decoder->position());
1276   }
1277 
  // br_on_array: branch if {object} is an array (dummy rtt; see BrOnData).
  void BrOnArray(FullDecoder* decoder, const Value& object,
                 Value* value_on_branch, uint32_t br_depth) {
    BrOnCastAbs<&compiler::WasmGraphBuilder::BrOnArray>(
        decoder, object, Value{nullptr, kWasmBottom}, value_on_branch, br_depth,
        /*branch_on_match=*/true);
  }
1284 
  // br_on_non_array: branch if {object} is NOT an array.
  void BrOnNonArray(FullDecoder* decoder, const Value& object,
                    Value* value_on_fallthrough, uint32_t br_depth) {
    BrOnCastAbs<&compiler::WasmGraphBuilder::BrOnArray>(
        decoder, object, Value{nullptr, kWasmBottom}, value_on_fallthrough,
        br_depth, /*branch_on_match=*/false);
  }
1291 
RefIsI31(FullDecoder * decoder,const Value & object,Value * result)1292   void RefIsI31(FullDecoder* decoder, const Value& object, Value* result) {
1293     result->node = builder_->RefIsI31(object.node);
1294   }
1295 
RefAsI31(FullDecoder * decoder,const Value & object,Value * result)1296   void RefAsI31(FullDecoder* decoder, const Value& object, Value* result) {
1297     result->node = builder_->RefAsI31(object.node, decoder->position());
1298   }
1299 
  // br_on_i31: branch if {object} is an i31 (dummy rtt; see BrOnData).
  void BrOnI31(FullDecoder* decoder, const Value& object,
               Value* value_on_branch, uint32_t br_depth) {
    BrOnCastAbs<&compiler::WasmGraphBuilder::BrOnI31>(
        decoder, object, Value{nullptr, kWasmBottom}, value_on_branch, br_depth,
        /*branch_on_match=*/true);
  }
1306 
  // br_on_non_i31: branch if {object} is NOT an i31.
  void BrOnNonI31(FullDecoder* decoder, const Value& object,
                  Value* value_on_fallthrough, uint32_t br_depth) {
    BrOnCastAbs<&compiler::WasmGraphBuilder::BrOnI31>(
        decoder, object, Value{nullptr, kWasmBottom}, value_on_fallthrough,
        br_depth, /*branch_on_match=*/false);
  }
1313 
  // Copies the underlying TF node from {from} to {to}.
  void Forward(FullDecoder* decoder, const Value& from, Value* to) {
    to->node = from.node;
  }
1317 
  // Returns (a copy of) the per-loop data collected during decoding.
  std::vector<compiler::WasmLoopInfo> loop_infos() { return loop_infos_; }
1319 
 private:
  // The current SSA environment (local renaming + effect/control).
  SsaEnv* ssa_env_ = nullptr;
  // The TurboFan graph builder through which all nodes are emitted.
  compiler::WasmGraphBuilder* builder_;
  // Index of the function being compiled within its module.
  int func_index_;
  // Optional branch hints for this function; may remain nullptr.
  const BranchHintMap* branch_hints_ = nullptr;
  // Tracks loop data for loop unrolling.
  std::vector<compiler::WasmLoopInfo> loop_infos_;
  // Whether this function is compiled standalone or inlined into a caller.
  InlinedStatus inlined_status_;
  // The entries in {type_feedback_} are indexed by the position of feedback-
  // consuming instructions (currently only call_ref).
  int feedback_instruction_index_ = 0;
  std::vector<CallSiteFeedback> type_feedback_;
1332 
  // Shorthand for the builder's current effect dependency.
  TFNode* effect() { return builder_->effect(); }
1334 
  // Shorthand for the builder's current control dependency.
  TFNode* control() { return builder_->control(); }
1336 
  // Returns the {TryInfo} of the try block whose catch handles exceptions
  // thrown at the current position.
  TryInfo* current_try_info(FullDecoder* decoder) {
    DCHECK_LT(decoder->current_catch(), decoder->control_depth());
    return decoder->control_at(decoder->control_depth_of_current_catch())
        ->try_info;
  }
1342 
  // If {emit_loop_exits()} returns true, we need to emit LoopExit,
  // LoopExitEffect, and LoopExitValue nodes whenever a control resp. effect
  // resp. value escapes a loop. We emit loop exits in the following cases:
  // - When popping the control of a loop.
  // - At some nodes which connect to the graph's end. We do not always need to
  //   emit loop exits for such nodes, since the wasm loop analysis algorithm
  //   can handle a loop body which connects directly to the graph's end.
  //   However, we need to emit them anyway for nodes that may be rewired to
  //   different nodes during inlining. These are Return and TailCall nodes.
  // - After IfFailure nodes.
  // - When exiting a loop through Delegate.
  bool emit_loop_exits() {
    return FLAG_wasm_loop_unrolling || FLAG_wasm_loop_peeling;
  }
1357 
GetNodes(TFNode ** nodes,Value * values,size_t count)1358   void GetNodes(TFNode** nodes, Value* values, size_t count) {
1359     for (size_t i = 0; i < count; ++i) {
1360       nodes[i] = values[i].node;
1361     }
1362   }
1363 
  // Convenience overload: extracts the TF nodes of all {values}.
  void GetNodes(TFNode** nodes, base::Vector<Value> values) {
    GetNodes(nodes, values.begin(), values.size());
  }
1367 
SetEnv(SsaEnv * env)1368   void SetEnv(SsaEnv* env) {
1369     if (FLAG_trace_wasm_decoder) {
1370       char state = 'X';
1371       if (env) {
1372         switch (env->state) {
1373           case SsaEnv::kReached:
1374             state = 'R';
1375             break;
1376           case SsaEnv::kUnreachable:
1377             state = 'U';
1378             break;
1379           case SsaEnv::kMerged:
1380             state = 'M';
1381             break;
1382         }
1383       }
1384       PrintF("{set_env = %p, state = %c", env, state);
1385       if (env && env->control) {
1386         PrintF(", control = ");
1387         compiler::WasmGraphBuilder::PrintDebugName(env->control);
1388       }
1389       PrintF("}\n");
1390     }
1391     if (ssa_env_) {
1392       ssa_env_->control = control();
1393       ssa_env_->effect = effect();
1394     }
1395     ssa_env_ = env;
1396     builder_->SetEffectControl(env->effect, env->control);
1397     builder_->set_instance_cache(&env->instance_cache);
1398   }
1399 
  // If an exception handler is in scope (an enclosing try, or — when this
  // function is being inlined — the caller's handler), emits
  // IfSuccess/IfException projections for the possibly-throwing {node} and
  // wires up the exception path. Returns {node} unchanged.
  TFNode* CheckForException(FullDecoder* decoder, TFNode* node) {
    DCHECK_NOT_NULL(node);

    // We need to emit IfSuccess/IfException nodes if this node throws and has
    // an exception handler. An exception handler can either be a try-scope
    // around this node, or if this function is being inlined, the IfException
    // output of the inlined Call node.
    const bool inside_try_scope = decoder->current_catch() != -1;
    if (inlined_status_ != kInlinedHandledCall && !inside_try_scope) {
      return node;
    }

    TFNode* if_success = nullptr;
    TFNode* if_exception = nullptr;
    if (!builder_->ThrowsException(node, &if_success, &if_exception)) {
      return node;
    }

    // Continue the current environment on the success path; split off a new
    // environment rooted at the IfException projection for the throw path.
    SsaEnv* success_env = Steal(decoder->zone(), ssa_env_);
    success_env->control = if_success;

    SsaEnv* exception_env = Split(decoder->zone(), success_env);
    exception_env->control = if_exception;
    exception_env->effect = if_exception;
    SetEnv(exception_env);

    if (emit_loop_exits()) {
      ValueVector values;
      BuildNestedLoopExits(decoder,
                           inside_try_scope
                               ? decoder->control_depth_of_current_catch()
                               : decoder->control_depth() - 1,
                           true, values, &if_exception);
    }
    if (inside_try_scope) {
      TryInfo* try_info = current_try_info(decoder);
      Goto(decoder, try_info->catch_env);
      if (try_info->exception == nullptr) {
        DCHECK_EQ(SsaEnv::kReached, try_info->catch_env->state);
        try_info->exception = if_exception;
      } else {
        DCHECK_EQ(SsaEnv::kMerged, try_info->catch_env->state);
        // Merge this exception value with the ones from other throw points.
        try_info->exception = builder_->CreateOrMergeIntoPhi(
            MachineRepresentation::kTaggedPointer, try_info->catch_env->control,
            try_info->exception, if_exception);
      }
    } else {
      DCHECK_EQ(inlined_status_, kInlinedHandledCall);
      // Leave the IfException/LoopExit node dangling. We will connect it during
      // inlining to the handler of the inlined call.
      // Note: We have to generate the handler now since we have no way of
      // generating a LoopExit if needed in the inlining code.
    }

    SetEnv(success_env);
    return node;
  }
1457 
DefaultValue(ValueType type)1458   TFNode* DefaultValue(ValueType type) {
1459     DCHECK(type.is_defaultable());
1460     switch (type.kind()) {
1461       case kI8:
1462       case kI16:
1463       case kI32:
1464         return builder_->Int32Constant(0);
1465       case kI64:
1466         return builder_->Int64Constant(0);
1467       case kF32:
1468         return builder_->Float32Constant(0);
1469       case kF64:
1470         return builder_->Float64Constant(0);
1471       case kS128:
1472         return builder_->S128Zero();
1473       case kOptRef:
1474         return builder_->RefNull();
1475       case kRtt:
1476       case kVoid:
1477       case kBottom:
1478       case kRef:
1479         UNREACHABLE();
1480     }
1481   }
1482 
  // Transfers control to {c}'s merge environment and merges the given
  // {values} into {merge}, creating phis where incoming nodes differ.
  void MergeValuesInto(FullDecoder* decoder, Control* c, Merge<Value>* merge,
                       Value* values) {
    DCHECK(merge == &c->start_merge || merge == &c->end_merge);

    SsaEnv* target = c->merge_env;
    // This has to be computed before calling Goto().
    const bool first = target->state == SsaEnv::kUnreachable;

    Goto(decoder, target);

    if (merge->arity == 0) return;

    for (uint32_t i = 0; i < merge->arity; ++i) {
      Value& val = values[i];
      Value& old = (*merge)[i];
      DCHECK_NOT_NULL(val.node);
      DCHECK(val.type == kWasmBottom || val.type.machine_representation() ==
                                            old.type.machine_representation());
      // On the first incoming edge, just take the value; afterwards merge it
      // into a phi at the target's merge control.
      old.node = first ? val.node
                       : builder_->CreateOrMergeIntoPhi(
                             old.type.machine_representation(), target->control,
                             old.node, val.node);
    }
  }
1507 
  // Overload that takes the merged values from the decoder's value stack,
  // skipping the topmost {drop_values} entries.
  void MergeValuesInto(FullDecoder* decoder, Control* c, Merge<Value>* merge,
                       uint32_t drop_values = 0) {
#ifdef DEBUG
    // Check that the stack holds enough values for this merge.
    uint32_t avail = decoder->stack_size() -
                     decoder->control_at(0)->stack_depth - drop_values;
    DCHECK_GE(avail, merge->arity);
#endif
    Value* stack_values = merge->arity > 0
                              ? decoder->stack_value(merge->arity + drop_values)
                              : nullptr;
    MergeValuesInto(decoder, c, merge, stack_values);
  }
1520 
  // Merges the current SSA environment (control, effect, locals, instance
  // cache) into {to}:
  // - kUnreachable target: simply overwrite {to} with the current state.
  // - kReached target: create two-input Merge/EffectPhi/Phi nodes.
  // - kMerged target: append another input to the existing merge and phis.
  void Goto(FullDecoder* decoder, SsaEnv* to) {
    DCHECK_NOT_NULL(to);
    switch (to->state) {
      case SsaEnv::kUnreachable: {  // Overwrite destination.
        to->state = SsaEnv::kReached;
        // There might be an offset in the locals due to a 'let'.
        DCHECK_EQ(ssa_env_->locals.size(), decoder->num_locals());
        DCHECK_GE(ssa_env_->locals.size(), to->locals.size());
        uint32_t local_count_diff =
            static_cast<uint32_t>(ssa_env_->locals.size() - to->locals.size());
        to->locals = ssa_env_->locals;
        to->locals.erase(to->locals.begin(),
                         to->locals.begin() + local_count_diff);
        to->control = control();
        to->effect = effect();
        to->instance_cache = ssa_env_->instance_cache;
        break;
      }
      case SsaEnv::kReached: {  // Create a new merge.
        to->state = SsaEnv::kMerged;
        // Merge control.
        TFNode* controls[] = {to->control, control()};
        TFNode* merge = builder_->Merge(2, controls);
        to->control = merge;
        // Merge effects.
        TFNode* old_effect = effect();
        if (old_effect != to->effect) {
          TFNode* inputs[] = {to->effect, old_effect, merge};
          to->effect = builder_->EffectPhi(2, inputs);
        }
        // Merge locals.
        // There might be an offset in the locals due to a 'let'.
        DCHECK_EQ(ssa_env_->locals.size(), decoder->num_locals());
        DCHECK_GE(ssa_env_->locals.size(), to->locals.size());
        uint32_t local_count_diff =
            static_cast<uint32_t>(ssa_env_->locals.size() - to->locals.size());
        for (uint32_t i = 0; i < to->locals.size(); i++) {
          TFNode* a = to->locals[i];
          TFNode* b = ssa_env_->locals[i + local_count_diff];
          // Only materialize a phi when the incoming nodes actually differ.
          if (a != b) {
            TFNode* inputs[] = {a, b, merge};
            to->locals[i] = builder_->Phi(
                decoder->local_type(i + local_count_diff), 2, inputs);
          }
        }
        // Start a new merge from the instance cache.
        builder_->NewInstanceCacheMerge(&to->instance_cache,
                                        &ssa_env_->instance_cache, merge);
        break;
      }
      case SsaEnv::kMerged: {
        TFNode* merge = to->control;
        // Extend the existing merge control node.
        builder_->AppendToMerge(merge, control());
        // Merge effects.
        to->effect =
            builder_->CreateOrMergeIntoEffectPhi(merge, to->effect, effect());
        // Merge locals.
        // There might be an offset in the locals due to a 'let'.
        DCHECK_EQ(ssa_env_->locals.size(), decoder->num_locals());
        DCHECK_GE(ssa_env_->locals.size(), to->locals.size());
        uint32_t local_count_diff =
            static_cast<uint32_t>(ssa_env_->locals.size() - to->locals.size());
        for (uint32_t i = 0; i < to->locals.size(); i++) {
          to->locals[i] = builder_->CreateOrMergeIntoPhi(
              decoder->local_type(i + local_count_diff)
                  .machine_representation(),
              merge, to->locals[i], ssa_env_->locals[i + local_count_diff]);
        }
        // Merge the instance caches.
        builder_->MergeInstanceCacheInto(&to->instance_cache,
                                         &ssa_env_->instance_cache, merge);
        break;
      }
      default:
        UNREACHABLE();
    }
  }
1599 
  // Create a complete copy of {from}. If {from} is the current environment,
  // the builder's current control/effect is flushed into it first so the copy
  // is up to date. The copy starts in state kReached.
  SsaEnv* Split(Zone* zone, SsaEnv* from) {
    DCHECK_NOT_NULL(from);
    if (from == ssa_env_) {
      ssa_env_->control = control();
      ssa_env_->effect = effect();
    }
    SsaEnv* result = zone->New<SsaEnv>(*from);
    result->state = SsaEnv::kReached;
    return result;
  }
1611 
  // Create a copy of {from} that steals its state and leaves {from}
  // unreachable. Cheaper than {Split} since the locals vector is moved rather
  // than copied.
  SsaEnv* Steal(Zone* zone, SsaEnv* from) {
    DCHECK_NOT_NULL(from);
    if (from == ssa_env_) {
      ssa_env_->control = control();
      ssa_env_->effect = effect();
    }
    SsaEnv* result = zone->New<SsaEnv>(std::move(*from));
    // Restore the length of {from->locals} after applying move-constructor.
    from->locals.resize(result->locals.size());
    result->state = SsaEnv::kReached;
    return result;
  }
1626 
1627   class CallInfo {
1628    public:
1629     enum CallMode { kCallDirect, kCallIndirect, kCallRef };
1630 
CallDirect(uint32_t callee_index)1631     static CallInfo CallDirect(uint32_t callee_index) {
1632       return {kCallDirect, callee_index, nullptr, 0,
1633               CheckForNull::kWithoutNullCheck};
1634     }
1635 
CallIndirect(const Value & index_value,uint32_t table_index,uint32_t sig_index)1636     static CallInfo CallIndirect(const Value& index_value, uint32_t table_index,
1637                                  uint32_t sig_index) {
1638       return {kCallIndirect, sig_index, &index_value, table_index,
1639               CheckForNull::kWithoutNullCheck};
1640     }
1641 
CallRef(const Value & funcref_value,CheckForNull null_check)1642     static CallInfo CallRef(const Value& funcref_value,
1643                             CheckForNull null_check) {
1644       return {kCallRef, 0, &funcref_value, 0, null_check};
1645     }
1646 
call_mode()1647     CallMode call_mode() { return call_mode_; }
1648 
sig_index()1649     uint32_t sig_index() {
1650       DCHECK_EQ(call_mode_, kCallIndirect);
1651       return callee_or_sig_index_;
1652     }
1653 
callee_index()1654     uint32_t callee_index() {
1655       DCHECK_EQ(call_mode_, kCallDirect);
1656       return callee_or_sig_index_;
1657     }
1658 
null_check()1659     CheckForNull null_check() {
1660       DCHECK_EQ(call_mode_, kCallRef);
1661       return null_check_;
1662     }
1663 
index_or_callee_value()1664     const Value* index_or_callee_value() {
1665       DCHECK_NE(call_mode_, kCallDirect);
1666       return index_or_callee_value_;
1667     }
1668 
table_index()1669     uint32_t table_index() {
1670       DCHECK_EQ(call_mode_, kCallIndirect);
1671       return table_index_;
1672     }
1673 
1674    private:
CallInfo(CallMode call_mode,uint32_t callee_or_sig_index,const Value * index_or_callee_value,uint32_t table_index,CheckForNull null_check)1675     CallInfo(CallMode call_mode, uint32_t callee_or_sig_index,
1676              const Value* index_or_callee_value, uint32_t table_index,
1677              CheckForNull null_check)
1678         : call_mode_(call_mode),
1679           callee_or_sig_index_(callee_or_sig_index),
1680           index_or_callee_value_(index_or_callee_value),
1681           table_index_(table_index),
1682           null_check_(null_check) {}
1683     CallMode call_mode_;
1684     uint32_t callee_or_sig_index_;
1685     const Value* index_or_callee_value_;
1686     uint32_t table_index_;
1687     CheckForNull null_check_;
1688   };
1689 
  // Shared implementation of call_direct/call_indirect/call_ref: builds the
  // real signature from the actual argument types, emits the call (with an
  // exception check), and writes the result nodes into {returns}.
  void DoCall(FullDecoder* decoder, CallInfo call_info, const FunctionSig* sig,
              const Value args[], Value returns[]) {
    size_t param_count = sig->parameter_count();
    size_t return_count = sig->return_count();

    // Construct a function signature based on the real function parameters.
    FunctionSig::Builder real_sig_builder(builder_->graph_zone(), return_count,
                                          param_count);
    for (size_t i = 0; i < param_count; i++) {
      real_sig_builder.AddParam(args[i].type);
    }
    for (size_t i = 0; i < return_count; i++) {
      real_sig_builder.AddReturn(sig->GetReturn(i));
    }
    FunctionSig* real_sig = real_sig_builder.Build();

    // Slot 0 carries the call target (table index value or funcref); it stays
    // null for direct calls.
    NodeVector arg_nodes(param_count + 1);
    base::SmallVector<TFNode*, 1> return_nodes(return_count);
    arg_nodes[0] = (call_info.call_mode() == CallInfo::kCallDirect)
                       ? nullptr
                       : call_info.index_or_callee_value()->node;

    for (size_t i = 0; i < param_count; ++i) {
      arg_nodes[i + 1] = args[i].node;
    }
    switch (call_info.call_mode()) {
      case CallInfo::kCallIndirect:
        CheckForException(
            decoder, builder_->CallIndirect(
                         call_info.table_index(), call_info.sig_index(),
                         real_sig, base::VectorOf(arg_nodes),
                         base::VectorOf(return_nodes), decoder->position()));
        break;
      case CallInfo::kCallDirect:
        CheckForException(
            decoder, builder_->CallDirect(call_info.callee_index(), real_sig,
                                          base::VectorOf(arg_nodes),
                                          base::VectorOf(return_nodes),
                                          decoder->position()));
        break;
      case CallInfo::kCallRef:
        CheckForException(
            decoder,
            builder_->CallRef(real_sig, base::VectorOf(arg_nodes),
                              base::VectorOf(return_nodes),
                              call_info.null_check(), decoder->position()));
        break;
    }
    for (size_t i = 0; i < return_count; ++i) {
      returns[i].node = return_nodes[i];
    }
    if (decoder->module_->initial_pages != decoder->module_->maximum_pages) {
      // The invoked function could have used grow_memory, so we need to
      // reload mem_size and mem_start.
      LoadContextIntoSsa(ssa_env_);
    }
  }
1747 
  // Shared implementation of return_call/return_call_indirect/return_call_ref:
  // builds the real signature, emits loop exits if required, and issues the
  // tail call. Tail calls produce no return values here.
  void DoReturnCall(FullDecoder* decoder, CallInfo call_info,
                    const FunctionSig* sig, const Value args[]) {
    size_t arg_count = sig->parameter_count();

    // Construct a function signature based on the real function parameters.
    FunctionSig::Builder real_sig_builder(builder_->graph_zone(),
                                          sig->return_count(), arg_count);
    for (size_t i = 0; i < arg_count; i++) {
      real_sig_builder.AddParam(args[i].type);
    }
    for (size_t i = 0; i < sig->return_count(); i++) {
      real_sig_builder.AddReturn(sig->GetReturn(i));
    }
    FunctionSig* real_sig = real_sig_builder.Build();

    // Slot 0 carries the call target; it stays null for direct calls.
    ValueVector arg_values(arg_count + 1);
    if (call_info.call_mode() == CallInfo::kCallDirect) {
      arg_values[0].node = nullptr;
    } else {
      arg_values[0] = *call_info.index_or_callee_value();
      // This is not done by copy assignment.
      arg_values[0].node = call_info.index_or_callee_value()->node;
    }
    if (arg_count > 0) {
      std::memcpy(arg_values.data() + 1, args, arg_count * sizeof(Value));
    }

    // Tail calls connect to the graph's end, so loop exits must be emitted
    // for all argument values (see {emit_loop_exits}).
    if (emit_loop_exits()) {
      BuildNestedLoopExits(decoder, decoder->control_depth(), false,
                           arg_values);
    }

    NodeVector arg_nodes(arg_count + 1);
    GetNodes(arg_nodes.data(), base::VectorOf(arg_values));

    switch (call_info.call_mode()) {
      case CallInfo::kCallIndirect:
        builder_->ReturnCallIndirect(
            call_info.table_index(), call_info.sig_index(), real_sig,
            base::VectorOf(arg_nodes), decoder->position());
        break;
      case CallInfo::kCallDirect:
        builder_->ReturnCall(call_info.callee_index(), real_sig,
                             base::VectorOf(arg_nodes), decoder->position());
        break;
      case CallInfo::kCallRef:
        builder_->ReturnCallRef(real_sig, base::VectorOf(arg_nodes),
                                call_info.null_check(), decoder->position());
        break;
    }
  }
1799 
  // Emits a LoopExit node for {loop} and re-syncs the SSA environment's
  // control/effect with the builder's new state.
  void BuildLoopExits(FullDecoder* decoder, Control* loop) {
    builder_->LoopExit(loop->loop_node);
    ssa_env_->control = control();
    ssa_env_->effect = effect();
  }
1805 
  // Wraps every local assigned inside {loop} (and, where needed, the cached
  // memory start/size) in a LoopExitValue node, so values escaping the loop
  // are properly renamed.
  void WrapLocalsAtLoopExit(FullDecoder* decoder, Control* loop) {
    for (uint32_t index = 0; index < decoder->num_locals(); index++) {
      if (loop->loop_assignments->Contains(static_cast<int>(index))) {
        ssa_env_->locals[index] = builder_->LoopExitValue(
            ssa_env_->locals[index],
            decoder->local_type(index).machine_representation());
      }
    }
    // Index {num_locals} in the assignment bit vector represents the
    // instance cache (memory start/size).
    if (loop->loop_assignments->Contains(decoder->num_locals())) {
#define WRAP_CACHE_FIELD(field)                                                \
  if (ssa_env_->instance_cache.field != nullptr) {                             \
    ssa_env_->instance_cache.field = builder_->LoopExitValue(                  \
        ssa_env_->instance_cache.field, MachineType::PointerRepresentation()); \
  }

      WRAP_CACHE_FIELD(mem_start);
      WRAP_CACHE_FIELD(mem_size);
#undef WRAP_CACHE_FIELD
    }
  }
1826 
  // Emits loop exits for the innermost loop found within the first
  // {depth_limit} control levels (if any), wrapping {stack_values}, the
  // optional {exception_value}, and — if {wrap_exit_values} — the locals
  // assigned in that loop.
  void BuildNestedLoopExits(FullDecoder* decoder, uint32_t depth_limit,
                            bool wrap_exit_values, ValueVector& stack_values,
                            TFNode** exception_value = nullptr) {
    DCHECK(emit_loop_exits());
    Control* control = nullptr;
    // We are only interested in exits from the innermost loop.
    for (uint32_t i = 0; i < depth_limit; i++) {
      Control* c = decoder->control_at(i);
      if (c->is_loop()) {
        control = c;
        break;
      }
    }
    if (control != nullptr) {
      BuildLoopExits(decoder, control);
      for (Value& value : stack_values) {
        if (value.node != nullptr) {
          value.node = builder_->LoopExitValue(
              value.node, value.type.machine_representation());
        }
      }
      if (exception_value != nullptr) {
        *exception_value = builder_->LoopExitValue(
            *exception_value, MachineRepresentation::kWord32);
      }
      if (wrap_exit_values) {
        WrapLocalsAtLoopExit(decoder, control);
      }
    }
  }
1857 
NullCheckFor(ValueType type)1858   CheckForNull NullCheckFor(ValueType type) {
1859     DCHECK(type.is_object_reference());
1860     return (!FLAG_experimental_wasm_skip_null_checks && type.is_nullable())
1861                ? CheckForNull::kWithNullCheck
1862                : CheckForNull::kWithoutNullCheck;
1863   }
1864 };
1865 
1866 }  // namespace
1867 
// Decodes and fully validates {body}, building the corresponding TurboFan
// graph through {builder}. Collected loop data is returned via {loop_infos};
// {node_origins}, if non-null, records bytecode positions for the emitted
// nodes. Returns the decode result.
DecodeResult BuildTFGraph(AccountingAllocator* allocator,
                          const WasmFeatures& enabled, const WasmModule* module,
                          compiler::WasmGraphBuilder* builder,
                          WasmFeatures* detected, const FunctionBody& body,
                          std::vector<compiler::WasmLoopInfo>* loop_infos,
                          compiler::NodeOriginTable* node_origins,
                          int func_index, InlinedStatus inlined_status) {
  Zone zone(allocator, ZONE_NAME);
  WasmFullDecoder<Decoder::kFullValidation, WasmGraphBuildingInterface> decoder(
      &zone, module, enabled, detected, body, builder, func_index,
      inlined_status);
  if (node_origins) {
    builder->AddBytecodePositionDecorator(node_origins, &decoder);
  }
  decoder.Decode();
  if (node_origins) {
    builder->RemoveBytecodePositionDecorator();
  }
  *loop_infos = decoder.interface().loop_infos();

  return decoder.toResult(nullptr);
}
1890 
1891 }  // namespace wasm
1892 }  // namespace internal
1893 }  // namespace v8
1894