1 // Copyright 2015 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4 
5 #include "src/compiler/wasm-compiler.h"
6 
7 #include <memory>
8 
9 #include "src/base/optional.h"
10 #include "src/base/platform/elapsed-timer.h"
11 #include "src/base/platform/platform.h"
12 #include "src/base/small-vector.h"
13 #include "src/base/v8-fallthrough.h"
14 #include "src/codegen/assembler-inl.h"
15 #include "src/codegen/assembler.h"
16 #include "src/codegen/code-factory.h"
17 #include "src/codegen/compiler.h"
18 #include "src/codegen/interface-descriptors.h"
19 #include "src/codegen/machine-type.h"
20 #include "src/codegen/optimized-compilation-info.h"
21 #include "src/compiler/backend/code-generator.h"
22 #include "src/compiler/backend/instruction-selector.h"
23 #include "src/compiler/common-operator.h"
24 #include "src/compiler/compiler-source-position-table.h"
25 #include "src/compiler/diamond.h"
26 #include "src/compiler/graph-assembler.h"
27 #include "src/compiler/graph-visualizer.h"
28 #include "src/compiler/graph.h"
29 #include "src/compiler/int64-lowering.h"
30 #include "src/compiler/linkage.h"
31 #include "src/compiler/machine-operator.h"
32 #include "src/compiler/node-matchers.h"
33 #include "src/compiler/node-origin-table.h"
34 #include "src/compiler/node-properties.h"
35 #include "src/compiler/pipeline.h"
36 #include "src/compiler/simd-scalar-lowering.h"
37 #include "src/compiler/zone-stats.h"
38 #include "src/execution/isolate-inl.h"
39 #include "src/heap/factory.h"
40 #include "src/logging/counters.h"
41 #include "src/logging/log.h"
42 #include "src/objects/heap-number.h"
43 #include "src/roots/roots.h"
44 #include "src/tracing/trace-event.h"
45 #include "src/trap-handler/trap-handler.h"
46 #include "src/utils/vector.h"
47 #include "src/wasm/function-body-decoder-impl.h"
48 #include "src/wasm/function-compiler.h"
49 #include "src/wasm/graph-builder-interface.h"
50 #include "src/wasm/jump-table-assembler.h"
51 #include "src/wasm/memory-tracing.h"
52 #include "src/wasm/object-access.h"
53 #include "src/wasm/wasm-code-manager.h"
54 #include "src/wasm/wasm-constants.h"
55 #include "src/wasm/wasm-limits.h"
56 #include "src/wasm/wasm-linkage.h"
57 #include "src/wasm/wasm-module.h"
58 #include "src/wasm/wasm-objects-inl.h"
59 #include "src/wasm/wasm-opcodes-inl.h"
60 
61 namespace v8 {
62 namespace internal {
63 namespace compiler {
64 
65 namespace {
66 
67 #define FATAL_UNSUPPORTED_OPCODE(opcode)        \
68   FATAL("Unsupported opcode 0x%x:%s", (opcode), \
69         wasm::WasmOpcodes::OpcodeName(opcode));
70 
71 MachineType assert_size(int expected_size, MachineType type) {
72   DCHECK_EQ(expected_size, ElementSizeInBytes(type.representation()));
73   return type;
74 }
75 
76 #define WASM_INSTANCE_OBJECT_SIZE(name)     \
77   (WasmInstanceObject::k##name##OffsetEnd - \
78    WasmInstanceObject::k##name##Offset + 1)  // NOLINT(whitespace/indent)
79 
80 #define WASM_INSTANCE_OBJECT_OFFSET(name) \
81   wasm::ObjectAccess::ToTagged(WasmInstanceObject::k##name##Offset)
82 
83 // We would like to use gasm_->Call() to implement this macro,
84 // but this doesn't work currently when we try to call it from functions
85 // which set IfSuccess/IfFailure control paths (e.g. within Throw()).
86 // TODO(manoskouk): Maybe clean this up at some point?
87 #define CALL_BUILTIN(name, ...)                                             \
88   SetEffect(graph()->NewNode(                                               \
89       mcgraph()->common()->Call(GetBuiltinCallDescriptor<name##Descriptor>( \
90           this, StubCallMode::kCallBuiltinPointer)),                        \
91       GetBuiltinPointerTarget(Builtins::k##name), ##__VA_ARGS__, effect(),  \
92       control()))
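// Illustrative use (hypothetical builtin name): CALL_BUILTIN(MyBuiltin, arg)
// emits a Call node targeting Builtins::kMyBuiltin with {arg} plus the
// current effect and control, and threads the new node into the effect chain.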
93 
94 #define LOAD_INSTANCE_FIELD(name, type)                           \
95   gasm_->Load(assert_size(WASM_INSTANCE_OBJECT_SIZE(name), type), \
96               instance_node_.get(), WASM_INSTANCE_OBJECT_OFFSET(name))
97 
98 #define LOAD_FULL_POINTER(base_pointer, byte_offset) \
99   gasm_->Load(MachineType::Pointer(), base_pointer, byte_offset)
100 
101 #define LOAD_TAGGED_POINTER(base_pointer, byte_offset) \
102   gasm_->Load(MachineType::TaggedPointer(), base_pointer, byte_offset)
103 
104 #define LOAD_TAGGED_ANY(base_pointer, byte_offset) \
105   gasm_->Load(MachineType::AnyTagged(), base_pointer, byte_offset)
106 
107 #define LOAD_FIXED_ARRAY_SLOT(array_node, index, type) \
108   gasm_->Load(type, array_node,                        \
109               wasm::ObjectAccess::ElementOffsetInTaggedFixedArray(index))
110 
111 #define LOAD_FIXED_ARRAY_SLOT_SMI(array_node, index) \
112   LOAD_FIXED_ARRAY_SLOT(array_node, index, MachineType::TaggedSigned())
113 
114 #define LOAD_FIXED_ARRAY_SLOT_PTR(array_node, index) \
115   LOAD_FIXED_ARRAY_SLOT(array_node, index, MachineType::TaggedPointer())
116 
117 #define LOAD_FIXED_ARRAY_SLOT_ANY(array_node, index) \
118   LOAD_FIXED_ARRAY_SLOT(array_node, index, MachineType::AnyTagged())
119 
120 #define STORE_RAW(base, offset, val, rep, barrier) \
121   STORE_RAW_NODE_OFFSET(base, gasm_->Int32Constant(offset), val, rep, barrier)
122 
123 #define STORE_RAW_NODE_OFFSET(base, node_offset, val, rep, barrier) \
124   gasm_->Store(StoreRepresentation(rep, barrier), base, node_offset, val)
125 
126 // This can be used to store tagged Smi values only.
127 #define STORE_FIXED_ARRAY_SLOT_SMI(array_node, index, value)                   \
128   STORE_RAW(array_node,                                                        \
129             wasm::ObjectAccess::ElementOffsetInTaggedFixedArray(index), value, \
130             MachineRepresentation::kTaggedSigned, kNoWriteBarrier)
131 
132 // This can be used to store any tagged (Smi and HeapObject) value.
133 #define STORE_FIXED_ARRAY_SLOT_ANY(array_node, index, value)                   \
134   STORE_RAW(array_node,                                                        \
135             wasm::ObjectAccess::ElementOffsetInTaggedFixedArray(index), value, \
136             MachineRepresentation::kTagged, kFullWriteBarrier)
137 
138 void EnsureEnd(MachineGraph* mcgraph) {
139   Graph* g = mcgraph->graph();
140   if (g->end() == nullptr) {
141     g->SetEnd(g->NewNode(mcgraph->common()->End(0)));
142   }
143 }
144 
145 void MergeControlToEnd(MachineGraph* mcgraph, Node* node) {
146   EnsureEnd(mcgraph);
147   NodeProperties::MergeControlToEnd(mcgraph->graph(), mcgraph->common(), node);
148 }
149 
150 bool ContainsSimd(const wasm::FunctionSig* sig) {
151   for (auto type : sig->all()) {
152     if (type == wasm::kWasmS128) return true;
153   }
154   return false;
155 }
156 
157 bool ContainsInt64(const wasm::FunctionSig* sig) {
158   for (auto type : sig->all()) {
159     if (type == wasm::kWasmI64) return true;
160   }
161   return false;
162 }
163 
164 template <typename BuiltinDescriptor>
165 CallDescriptor* GetBuiltinCallDescriptor(WasmGraphBuilder* builder,
166                                          StubCallMode stub_mode) {
167   BuiltinDescriptor interface_descriptor;
168   return Linkage::GetStubCallDescriptor(
169       builder->mcgraph()->zone(),                     // zone
170       interface_descriptor,                           // descriptor
171       interface_descriptor.GetStackParameterCount(),  // stack parameter count
172       CallDescriptor::kNoFlags,                       // flags
173       Operator::kNoProperties,                        // properties
174       stub_mode);                                     // stub call mode
175 }
176 }  // namespace
177 
178 class WasmGraphAssembler : public GraphAssembler {
179  public:
180   WasmGraphAssembler(MachineGraph* mcgraph, Zone* zone)
181       : GraphAssembler(mcgraph, zone) {}
182 };
183 
184 WasmGraphBuilder::WasmGraphBuilder(
185     wasm::CompilationEnv* env, Zone* zone, MachineGraph* mcgraph,
186     const wasm::FunctionSig* sig,
187     compiler::SourcePositionTable* source_position_table)
188     : gasm_(std::make_unique<WasmGraphAssembler>(mcgraph, zone)),
189       zone_(zone),
190       mcgraph_(mcgraph),
191       env_(env),
192       has_simd_(ContainsSimd(sig)),
193       untrusted_code_mitigations_(FLAG_untrusted_code_mitigations),
194       sig_(sig),
195       source_position_table_(source_position_table) {
196   DCHECK_IMPLIES(use_trap_handler(), trap_handler::IsTrapHandlerEnabled());
197   DCHECK_NOT_NULL(mcgraph_);
198 }
199 
200 // Destructor defined here, where the definition of {WasmGraphAssembler} is
201 // available.
202 WasmGraphBuilder::~WasmGraphBuilder() = default;
203 
204 Node* WasmGraphBuilder::Error() { return mcgraph()->Dead(); }
205 
206 Node* WasmGraphBuilder::Start(unsigned params) {
207   Node* start = graph()->NewNode(mcgraph()->common()->Start(params));
208   graph()->SetStart(start);
209   return start;
210 }
211 
212 Node* WasmGraphBuilder::Param(unsigned index) {
213   return graph()->NewNode(mcgraph()->common()->Parameter(index),
214                           graph()->start());
215 }
216 
217 Node* WasmGraphBuilder::Loop(Node* entry) {
218   return graph()->NewNode(mcgraph()->common()->Loop(1), entry);
219 }
220 
221 Node* WasmGraphBuilder::TerminateLoop(Node* effect, Node* control) {
222   Node* terminate =
223       graph()->NewNode(mcgraph()->common()->Terminate(), effect, control);
224   MergeControlToEnd(mcgraph(), terminate);
225   return terminate;
226 }
227 
228 Node* WasmGraphBuilder::TerminateThrow(Node* effect, Node* control) {
229   Node* terminate =
230       graph()->NewNode(mcgraph()->common()->Throw(), effect, control);
231   MergeControlToEnd(mcgraph(), terminate);
232   return terminate;
233 }
234 
235 bool WasmGraphBuilder::IsPhiWithMerge(Node* phi, Node* merge) {
236   return phi && IrOpcode::IsPhiOpcode(phi->opcode()) &&
237          NodeProperties::GetControlInput(phi) == merge;
238 }
239 
240 bool WasmGraphBuilder::ThrowsException(Node* node, Node** if_success,
241                                        Node** if_exception) {
242   if (node->op()->HasProperty(compiler::Operator::kNoThrow)) {
243     return false;
244   }
245 
246   *if_success = graph()->NewNode(mcgraph()->common()->IfSuccess(), node);
247   *if_exception =
248       graph()->NewNode(mcgraph()->common()->IfException(), node, node);
249 
250   return true;
251 }
252 
253 void WasmGraphBuilder::AppendToMerge(Node* merge, Node* from) {
254   DCHECK(IrOpcode::IsMergeOpcode(merge->opcode()));
255   merge->AppendInput(mcgraph()->zone(), from);
256   int new_size = merge->InputCount();
257   NodeProperties::ChangeOp(
258       merge, mcgraph()->common()->ResizeMergeOrPhi(merge->op(), new_size));
259 }
260 
261 void WasmGraphBuilder::AppendToPhi(Node* phi, Node* from) {
262   DCHECK(IrOpcode::IsPhiOpcode(phi->opcode()));
263   int new_size = phi->InputCount();
264   phi->InsertInput(mcgraph()->zone(), phi->InputCount() - 1, from);
265   NodeProperties::ChangeOp(
266       phi, mcgraph()->common()->ResizeMergeOrPhi(phi->op(), new_size));
267 }
268 
269 Node* WasmGraphBuilder::Merge(unsigned count, Node** controls) {
270   return graph()->NewNode(mcgraph()->common()->Merge(count), count, controls);
271 }
272 
273 Node* WasmGraphBuilder::Phi(wasm::ValueType type, unsigned count,
274                             Node** vals_and_control) {
275   DCHECK(IrOpcode::IsMergeOpcode(vals_and_control[count]->opcode()));
276   return graph()->NewNode(
277       mcgraph()->common()->Phi(type.machine_representation(), count), count + 1,
278       vals_and_control);
279 }
280 
281 Node* WasmGraphBuilder::EffectPhi(unsigned count, Node** effects_and_control) {
282   DCHECK(IrOpcode::IsMergeOpcode(effects_and_control[count]->opcode()));
283   return graph()->NewNode(mcgraph()->common()->EffectPhi(count), count + 1,
284                           effects_and_control);
285 }
286 
287 Node* WasmGraphBuilder::RefNull() {
288   return LOAD_FULL_POINTER(
289       BuildLoadIsolateRoot(),
290       IsolateData::root_slot_offset(RootIndex::kNullValue));
291 }
292 
293 Node* WasmGraphBuilder::RefFunc(uint32_t function_index) {
294   auto call_descriptor = GetBuiltinCallDescriptor<WasmRefFuncDescriptor>(
295       this, StubCallMode::kCallWasmRuntimeStub);
296   // A direct call to a wasm runtime stub defined in this module.
297   // Just encode the stub index. This will be patched at relocation.
298   Node* call_target = mcgraph()->RelocatableIntPtrConstant(
299       wasm::WasmCode::kWasmRefFunc, RelocInfo::WASM_STUB_CALL);
300 
301   return SetEffectControl(graph()->NewNode(
302       mcgraph()->common()->Call(call_descriptor), call_target,
303       mcgraph()->Uint32Constant(function_index), effect(), control()));
304 }
305 
306 Node* WasmGraphBuilder::RefAsNonNull(Node* arg,
307                                      wasm::WasmCodePosition position) {
308   TrapIfTrue(wasm::kTrapIllegalCast, gasm_->WordEqual(arg, RefNull()),
309              position);
310   return arg;
311 }
312 
313 Node* WasmGraphBuilder::NoContextConstant() {
314   return mcgraph()->IntPtrConstant(0);
315 }
316 
317 Node* WasmGraphBuilder::BuildLoadIsolateRoot() {
318   // The IsolateRoot is loaded from the instance node so that the generated
319   // code is Isolate independent. This can be overridden by setting a specific
320   // node in {isolate_root_node_} beforehand.
321   if (isolate_root_node_.is_set()) return isolate_root_node_.get();
322   return LOAD_INSTANCE_FIELD(IsolateRoot, MachineType::Pointer());
323 }
324 
325 Node* WasmGraphBuilder::Int32Constant(int32_t value) {
326   return mcgraph()->Int32Constant(value);
327 }
328 
329 Node* WasmGraphBuilder::Int64Constant(int64_t value) {
330   return mcgraph()->Int64Constant(value);
331 }
332 
333 void WasmGraphBuilder::StackCheck(wasm::WasmCodePosition position) {
334   DCHECK_NOT_NULL(env_);  // Wrappers don't get stack checks.
335   if (!FLAG_wasm_stack_checks || !env_->runtime_exception_support) {
336     return;
337   }
338 
339   Node* limit_address = graph()->NewNode(
340       mcgraph()->machine()->Load(MachineType::Pointer()), instance_node_.get(),
341       mcgraph()->Int32Constant(WASM_INSTANCE_OBJECT_OFFSET(StackLimitAddress)),
342       effect(), control());
343   Node* limit = SetEffect(graph()->NewNode(
344       mcgraph()->machine()->Load(MachineType::Pointer()), limit_address,
345       mcgraph()->IntPtrConstant(0), limit_address, control()));
346 
347   Node* check = SetEffect(graph()->NewNode(
348       mcgraph()->machine()->StackPointerGreaterThan(StackCheckKind::kWasm),
349       limit, effect()));
350 
351   Diamond stack_check(graph(), mcgraph()->common(), check, BranchHint::kTrue);
352   stack_check.Chain(control());
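  // The check is expected to succeed (BranchHint::kTrue); only the unlikely
  // false branch below calls the WasmStackGuard runtime stub.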
353 
354   if (stack_check_call_operator_ == nullptr) {
355     // Build and cache the stack check call operator and the constant
356     // representing the stack check code.
357     auto call_descriptor = Linkage::GetStubCallDescriptor(
358         mcgraph()->zone(),                    // zone
359         NoContextDescriptor{},                // descriptor
360         0,                                    // stack parameter count
361         CallDescriptor::kNoFlags,             // flags
362         Operator::kNoProperties,              // properties
363         StubCallMode::kCallWasmRuntimeStub);  // stub call mode
364     // A direct call to a wasm runtime stub defined in this module.
365     // Just encode the stub index. This will be patched at relocation.
366     stack_check_code_node_.set(mcgraph()->RelocatableIntPtrConstant(
367         wasm::WasmCode::kWasmStackGuard, RelocInfo::WASM_STUB_CALL));
368     stack_check_call_operator_ = mcgraph()->common()->Call(call_descriptor);
369   }
370 
371   Node* call = graph()->NewNode(stack_check_call_operator_.get(),
372                                 stack_check_code_node_.get(), effect(),
373                                 stack_check.if_false);
374 
375   SetSourcePosition(call, position);
376 
377   Node* ephi = stack_check.EffectPhi(effect(), call);
378 
379   SetEffectControl(ephi, stack_check.merge);
380 }
381 
382 void WasmGraphBuilder::PatchInStackCheckIfNeeded() {
383   if (!needs_stack_check_) return;
384 
385   Node* start = graph()->start();
386   // Place a stack check which uses a dummy node as control and effect.
387   Node* dummy = graph()->NewNode(mcgraph()->common()->Dead());
388   SetEffectControl(dummy);
389   // The function-prologue stack check is associated with position 0, which
390   // is never a position of any instruction in the function.
391   StackCheck(0);
392 
393   // In testing, no stack checks were emitted. Nothing to rewire then.
394   if (effect() == dummy) return;
395 
396   // Now patch all control uses of {start} to use {control} and all effect uses
397   // to use {effect} instead. Then rewire the dummy node to use start instead.
398   NodeProperties::ReplaceUses(start, start, effect(), control());
399   NodeProperties::ReplaceUses(dummy, nullptr, start, start);
400 }
401 
402 Node* WasmGraphBuilder::Binop(wasm::WasmOpcode opcode, Node* left, Node* right,
403                               wasm::WasmCodePosition position) {
404   const Operator* op;
405   MachineOperatorBuilder* m = mcgraph()->machine();
406   switch (opcode) {
407     case wasm::kExprI32Add:
408       op = m->Int32Add();
409       break;
410     case wasm::kExprI32Sub:
411       op = m->Int32Sub();
412       break;
413     case wasm::kExprI32Mul:
414       op = m->Int32Mul();
415       break;
416     case wasm::kExprI32DivS:
417       return BuildI32DivS(left, right, position);
418     case wasm::kExprI32DivU:
419       return BuildI32DivU(left, right, position);
420     case wasm::kExprI32RemS:
421       return BuildI32RemS(left, right, position);
422     case wasm::kExprI32RemU:
423       return BuildI32RemU(left, right, position);
424     case wasm::kExprI32And:
425       op = m->Word32And();
426       break;
427     case wasm::kExprI32Ior:
428       op = m->Word32Or();
429       break;
430     case wasm::kExprI32Xor:
431       op = m->Word32Xor();
432       break;
433     case wasm::kExprI32Shl:
434       op = m->Word32Shl();
435       right = MaskShiftCount32(right);
436       break;
437     case wasm::kExprI32ShrU:
438       op = m->Word32Shr();
439       right = MaskShiftCount32(right);
440       break;
441     case wasm::kExprI32ShrS:
442       op = m->Word32Sar();
443       right = MaskShiftCount32(right);
444       break;
445     case wasm::kExprI32Ror:
446       op = m->Word32Ror();
447       right = MaskShiftCount32(right);
448       break;
449     case wasm::kExprI32Rol:
450       if (m->Word32Rol().IsSupported()) {
451         op = m->Word32Rol().op();
452         right = MaskShiftCount32(right);
453         break;
454       }
455       return BuildI32Rol(left, right);
456     case wasm::kExprI32Eq:
457       op = m->Word32Equal();
458       break;
459     case wasm::kExprI32Ne:
460       return Invert(Binop(wasm::kExprI32Eq, left, right));
461     case wasm::kExprI32LtS:
462       op = m->Int32LessThan();
463       break;
464     case wasm::kExprI32LeS:
465       op = m->Int32LessThanOrEqual();
466       break;
467     case wasm::kExprI32LtU:
468       op = m->Uint32LessThan();
469       break;
470     case wasm::kExprI32LeU:
471       op = m->Uint32LessThanOrEqual();
472       break;
473     case wasm::kExprI32GtS:
474       op = m->Int32LessThan();
475       std::swap(left, right);
476       break;
477     case wasm::kExprI32GeS:
478       op = m->Int32LessThanOrEqual();
479       std::swap(left, right);
480       break;
481     case wasm::kExprI32GtU:
482       op = m->Uint32LessThan();
483       std::swap(left, right);
484       break;
485     case wasm::kExprI32GeU:
486       op = m->Uint32LessThanOrEqual();
487       std::swap(left, right);
488       break;
489     case wasm::kExprI64And:
490       op = m->Word64And();
491       break;
492     case wasm::kExprI64Add:
493       op = m->Int64Add();
494       break;
495     case wasm::kExprI64Sub:
496       op = m->Int64Sub();
497       break;
498     case wasm::kExprI64Mul:
499       op = m->Int64Mul();
500       break;
501     case wasm::kExprI64DivS:
502       return BuildI64DivS(left, right, position);
503     case wasm::kExprI64DivU:
504       return BuildI64DivU(left, right, position);
505     case wasm::kExprI64RemS:
506       return BuildI64RemS(left, right, position);
507     case wasm::kExprI64RemU:
508       return BuildI64RemU(left, right, position);
509     case wasm::kExprI64Ior:
510       op = m->Word64Or();
511       break;
512     case wasm::kExprI64Xor:
513       op = m->Word64Xor();
514       break;
515     case wasm::kExprI64Shl:
516       op = m->Word64Shl();
517       right = MaskShiftCount64(right);
518       break;
519     case wasm::kExprI64ShrU:
520       op = m->Word64Shr();
521       right = MaskShiftCount64(right);
522       break;
523     case wasm::kExprI64ShrS:
524       op = m->Word64Sar();
525       right = MaskShiftCount64(right);
526       break;
527     case wasm::kExprI64Eq:
528       op = m->Word64Equal();
529       break;
530     case wasm::kExprI64Ne:
531       return Invert(Binop(wasm::kExprI64Eq, left, right));
532     case wasm::kExprI64LtS:
533       op = m->Int64LessThan();
534       break;
535     case wasm::kExprI64LeS:
536       op = m->Int64LessThanOrEqual();
537       break;
538     case wasm::kExprI64LtU:
539       op = m->Uint64LessThan();
540       break;
541     case wasm::kExprI64LeU:
542       op = m->Uint64LessThanOrEqual();
543       break;
544     case wasm::kExprI64GtS:
545       op = m->Int64LessThan();
546       std::swap(left, right);
547       break;
548     case wasm::kExprI64GeS:
549       op = m->Int64LessThanOrEqual();
550       std::swap(left, right);
551       break;
552     case wasm::kExprI64GtU:
553       op = m->Uint64LessThan();
554       std::swap(left, right);
555       break;
556     case wasm::kExprI64GeU:
557       op = m->Uint64LessThanOrEqual();
558       std::swap(left, right);
559       break;
560     case wasm::kExprI64Ror:
561       op = m->Word64Ror();
562       right = MaskShiftCount64(right);
563       break;
564     case wasm::kExprI64Rol:
565       if (m->Word64Rol().IsSupported()) {
566         op = m->Word64Rol().op();
567         right = MaskShiftCount64(right);
568         break;
569       } else if (m->Word32Rol().IsSupported()) {
570         op = m->Word64Rol().placeholder();
571         break;
572       }
573       return BuildI64Rol(left, right);
574     case wasm::kExprF32CopySign:
575       return BuildF32CopySign(left, right);
576     case wasm::kExprF64CopySign:
577       return BuildF64CopySign(left, right);
578     case wasm::kExprF32Add:
579       op = m->Float32Add();
580       break;
581     case wasm::kExprF32Sub:
582       op = m->Float32Sub();
583       break;
584     case wasm::kExprF32Mul:
585       op = m->Float32Mul();
586       break;
587     case wasm::kExprF32Div:
588       op = m->Float32Div();
589       break;
590     case wasm::kExprF32Eq:
591       op = m->Float32Equal();
592       break;
593     case wasm::kExprF32Ne:
594       return Invert(Binop(wasm::kExprF32Eq, left, right));
595     case wasm::kExprF32Lt:
596       op = m->Float32LessThan();
597       break;
598     case wasm::kExprF32Ge:
599       op = m->Float32LessThanOrEqual();
600       std::swap(left, right);
601       break;
602     case wasm::kExprF32Gt:
603       op = m->Float32LessThan();
604       std::swap(left, right);
605       break;
606     case wasm::kExprF32Le:
607       op = m->Float32LessThanOrEqual();
608       break;
609     case wasm::kExprF64Add:
610       op = m->Float64Add();
611       break;
612     case wasm::kExprF64Sub:
613       op = m->Float64Sub();
614       break;
615     case wasm::kExprF64Mul:
616       op = m->Float64Mul();
617       break;
618     case wasm::kExprF64Div:
619       op = m->Float64Div();
620       break;
621     case wasm::kExprF64Eq:
622       op = m->Float64Equal();
623       break;
624     case wasm::kExprF64Ne:
625       return Invert(Binop(wasm::kExprF64Eq, left, right));
626     case wasm::kExprF64Lt:
627       op = m->Float64LessThan();
628       break;
629     case wasm::kExprF64Le:
630       op = m->Float64LessThanOrEqual();
631       break;
632     case wasm::kExprF64Gt:
633       op = m->Float64LessThan();
634       std::swap(left, right);
635       break;
636     case wasm::kExprF64Ge:
637       op = m->Float64LessThanOrEqual();
638       std::swap(left, right);
639       break;
640     case wasm::kExprF32Min:
641       op = m->Float32Min();
642       break;
643     case wasm::kExprF64Min:
644       op = m->Float64Min();
645       break;
646     case wasm::kExprF32Max:
647       op = m->Float32Max();
648       break;
649     case wasm::kExprF64Max:
650       op = m->Float64Max();
651       break;
652     case wasm::kExprF64Pow:
653       return BuildF64Pow(left, right);
654     case wasm::kExprF64Atan2:
655       op = m->Float64Atan2();
656       break;
657     case wasm::kExprF64Mod:
658       return BuildF64Mod(left, right);
659     case wasm::kExprRefEq:
660       return gasm_->TaggedEqual(left, right);
661     case wasm::kExprI32AsmjsDivS:
662       return BuildI32AsmjsDivS(left, right);
663     case wasm::kExprI32AsmjsDivU:
664       return BuildI32AsmjsDivU(left, right);
665     case wasm::kExprI32AsmjsRemS:
666       return BuildI32AsmjsRemS(left, right);
667     case wasm::kExprI32AsmjsRemU:
668       return BuildI32AsmjsRemU(left, right);
669     case wasm::kExprI32AsmjsStoreMem8:
670       return BuildAsmjsStoreMem(MachineType::Int8(), left, right);
671     case wasm::kExprI32AsmjsStoreMem16:
672       return BuildAsmjsStoreMem(MachineType::Int16(), left, right);
673     case wasm::kExprI32AsmjsStoreMem:
674       return BuildAsmjsStoreMem(MachineType::Int32(), left, right);
675     case wasm::kExprF32AsmjsStoreMem:
676       return BuildAsmjsStoreMem(MachineType::Float32(), left, right);
677     case wasm::kExprF64AsmjsStoreMem:
678       return BuildAsmjsStoreMem(MachineType::Float64(), left, right);
679     default:
680       FATAL_UNSUPPORTED_OPCODE(opcode);
681   }
682   return graph()->NewNode(op, left, right);
683 }
684 
685 Node* WasmGraphBuilder::Unop(wasm::WasmOpcode opcode, Node* input,
686                              wasm::WasmCodePosition position) {
687   const Operator* op;
688   MachineOperatorBuilder* m = mcgraph()->machine();
689   switch (opcode) {
690     case wasm::kExprI32Eqz:
691       op = m->Word32Equal();
692       return graph()->NewNode(op, input, mcgraph()->Int32Constant(0));
693     case wasm::kExprF32Abs:
694       op = m->Float32Abs();
695       break;
696     case wasm::kExprF32Neg: {
697       op = m->Float32Neg();
698       break;
699     }
700     case wasm::kExprF32Sqrt:
701       op = m->Float32Sqrt();
702       break;
703     case wasm::kExprF64Abs:
704       op = m->Float64Abs();
705       break;
706     case wasm::kExprF64Neg: {
707       op = m->Float64Neg();
708       break;
709     }
710     case wasm::kExprF64Sqrt:
711       op = m->Float64Sqrt();
712       break;
713     case wasm::kExprI32SConvertF32:
714     case wasm::kExprI32UConvertF32:
715     case wasm::kExprI32SConvertF64:
716     case wasm::kExprI32UConvertF64:
717     case wasm::kExprI32SConvertSatF64:
718     case wasm::kExprI32UConvertSatF64:
719     case wasm::kExprI32SConvertSatF32:
720     case wasm::kExprI32UConvertSatF32:
721       return BuildIntConvertFloat(input, position, opcode);
722     case wasm::kExprI32AsmjsSConvertF64:
723       return BuildI32AsmjsSConvertF64(input);
724     case wasm::kExprI32AsmjsUConvertF64:
725       return BuildI32AsmjsUConvertF64(input);
726     case wasm::kExprF32ConvertF64:
727       op = m->TruncateFloat64ToFloat32();
728       break;
729     case wasm::kExprF64SConvertI32:
730       op = m->ChangeInt32ToFloat64();
731       break;
732     case wasm::kExprF64UConvertI32:
733       op = m->ChangeUint32ToFloat64();
734       break;
735     case wasm::kExprF32SConvertI32:
736       op = m->RoundInt32ToFloat32();
737       break;
738     case wasm::kExprF32UConvertI32:
739       op = m->RoundUint32ToFloat32();
740       break;
741     case wasm::kExprI32AsmjsSConvertF32:
742       return BuildI32AsmjsSConvertF32(input);
743     case wasm::kExprI32AsmjsUConvertF32:
744       return BuildI32AsmjsUConvertF32(input);
745     case wasm::kExprF64ConvertF32:
746       op = m->ChangeFloat32ToFloat64();
747       break;
748     case wasm::kExprF32ReinterpretI32:
749       op = m->BitcastInt32ToFloat32();
750       break;
751     case wasm::kExprI32ReinterpretF32:
752       op = m->BitcastFloat32ToInt32();
753       break;
754     case wasm::kExprI32Clz:
755       op = m->Word32Clz();
756       break;
757     case wasm::kExprI32Ctz: {
758       if (m->Word32Ctz().IsSupported()) {
759         op = m->Word32Ctz().op();
760         break;
761       } else if (m->Word32ReverseBits().IsSupported()) {
762         Node* reversed = graph()->NewNode(m->Word32ReverseBits().op(), input);
763         Node* result = graph()->NewNode(m->Word32Clz(), reversed);
764         return result;
765       } else {
766         return BuildI32Ctz(input);
767       }
768     }
769     case wasm::kExprI32Popcnt: {
770       if (m->Word32Popcnt().IsSupported()) {
771         op = m->Word32Popcnt().op();
772         break;
773       } else {
774         return BuildI32Popcnt(input);
775       }
776     }
777     case wasm::kExprF32Floor: {
778       if (!m->Float32RoundDown().IsSupported()) return BuildF32Floor(input);
779       op = m->Float32RoundDown().op();
780       break;
781     }
782     case wasm::kExprF32Ceil: {
783       if (!m->Float32RoundUp().IsSupported()) return BuildF32Ceil(input);
784       op = m->Float32RoundUp().op();
785       break;
786     }
787     case wasm::kExprF32Trunc: {
788       if (!m->Float32RoundTruncate().IsSupported()) return BuildF32Trunc(input);
789       op = m->Float32RoundTruncate().op();
790       break;
791     }
792     case wasm::kExprF32NearestInt: {
793       if (!m->Float32RoundTiesEven().IsSupported())
794         return BuildF32NearestInt(input);
795       op = m->Float32RoundTiesEven().op();
796       break;
797     }
798     case wasm::kExprF64Floor: {
799       if (!m->Float64RoundDown().IsSupported()) return BuildF64Floor(input);
800       op = m->Float64RoundDown().op();
801       break;
802     }
803     case wasm::kExprF64Ceil: {
804       if (!m->Float64RoundUp().IsSupported()) return BuildF64Ceil(input);
805       op = m->Float64RoundUp().op();
806       break;
807     }
808     case wasm::kExprF64Trunc: {
809       if (!m->Float64RoundTruncate().IsSupported()) return BuildF64Trunc(input);
810       op = m->Float64RoundTruncate().op();
811       break;
812     }
813     case wasm::kExprF64NearestInt: {
814       if (!m->Float64RoundTiesEven().IsSupported())
815         return BuildF64NearestInt(input);
816       op = m->Float64RoundTiesEven().op();
817       break;
818     }
819     case wasm::kExprF64Acos: {
820       return BuildF64Acos(input);
821     }
822     case wasm::kExprF64Asin: {
823       return BuildF64Asin(input);
824     }
825     case wasm::kExprF64Atan:
826       op = m->Float64Atan();
827       break;
828     case wasm::kExprF64Cos: {
829       op = m->Float64Cos();
830       break;
831     }
832     case wasm::kExprF64Sin: {
833       op = m->Float64Sin();
834       break;
835     }
836     case wasm::kExprF64Tan: {
837       op = m->Float64Tan();
838       break;
839     }
840     case wasm::kExprF64Exp: {
841       op = m->Float64Exp();
842       break;
843     }
844     case wasm::kExprF64Log:
845       op = m->Float64Log();
846       break;
847     case wasm::kExprI32ConvertI64:
848       op = m->TruncateInt64ToInt32();
849       break;
850     case wasm::kExprI64SConvertI32:
851       op = m->ChangeInt32ToInt64();
852       break;
853     case wasm::kExprI64UConvertI32:
854       op = m->ChangeUint32ToUint64();
855       break;
856     case wasm::kExprF64ReinterpretI64:
857       op = m->BitcastInt64ToFloat64();
858       break;
859     case wasm::kExprI64ReinterpretF64:
860       op = m->BitcastFloat64ToInt64();
861       break;
862     case wasm::kExprI64Clz:
863       op = m->Word64Clz();
864       break;
865     case wasm::kExprI64Ctz: {
866       OptionalOperator ctz64 = m->Word64Ctz();
867       if (ctz64.IsSupported()) {
868         op = ctz64.op();
869         break;
870       } else if (m->Is32() && m->Word32Ctz().IsSupported()) {
871         op = ctz64.placeholder();
872         break;
873       } else if (m->Word64ReverseBits().IsSupported()) {
874         Node* reversed = graph()->NewNode(m->Word64ReverseBits().op(), input);
875         Node* result = graph()->NewNode(m->Word64Clz(), reversed);
876         return result;
877       } else {
878         return BuildI64Ctz(input);
879       }
880     }
881     case wasm::kExprI64Popcnt: {
882       OptionalOperator popcnt64 = m->Word64Popcnt();
883       if (popcnt64.IsSupported()) {
884         op = popcnt64.op();
885       } else if (m->Is32() && m->Word32Popcnt().IsSupported()) {
886         op = popcnt64.placeholder();
887       } else {
888         return BuildI64Popcnt(input);
889       }
890       break;
891     }
892     case wasm::kExprI64Eqz:
893       op = m->Word64Equal();
894       return graph()->NewNode(op, input, mcgraph()->Int64Constant(0));
895     case wasm::kExprF32SConvertI64:
896       if (m->Is32()) {
897         return BuildF32SConvertI64(input);
898       }
899       op = m->RoundInt64ToFloat32();
900       break;
901     case wasm::kExprF32UConvertI64:
902       if (m->Is32()) {
903         return BuildF32UConvertI64(input);
904       }
905       op = m->RoundUint64ToFloat32();
906       break;
907     case wasm::kExprF64SConvertI64:
908       if (m->Is32()) {
909         return BuildF64SConvertI64(input);
910       }
911       op = m->RoundInt64ToFloat64();
912       break;
913     case wasm::kExprF64UConvertI64:
914       if (m->Is32()) {
915         return BuildF64UConvertI64(input);
916       }
917       op = m->RoundUint64ToFloat64();
918       break;
919     case wasm::kExprI32SExtendI8:
920       op = m->SignExtendWord8ToInt32();
921       break;
922     case wasm::kExprI32SExtendI16:
923       op = m->SignExtendWord16ToInt32();
924       break;
925     case wasm::kExprI64SExtendI8:
926       op = m->SignExtendWord8ToInt64();
927       break;
928     case wasm::kExprI64SExtendI16:
929       op = m->SignExtendWord16ToInt64();
930       break;
931     case wasm::kExprI64SExtendI32:
932       op = m->SignExtendWord32ToInt64();
933       break;
934     case wasm::kExprI64SConvertF32:
935     case wasm::kExprI64UConvertF32:
936     case wasm::kExprI64SConvertF64:
937     case wasm::kExprI64UConvertF64:
938     case wasm::kExprI64SConvertSatF32:
939     case wasm::kExprI64UConvertSatF32:
940     case wasm::kExprI64SConvertSatF64:
941     case wasm::kExprI64UConvertSatF64:
942       return mcgraph()->machine()->Is32()
943                  ? BuildCcallConvertFloat(input, position, opcode)
944                  : BuildIntConvertFloat(input, position, opcode);
945     case wasm::kExprRefIsNull:
946       return graph()->NewNode(m->WordEqual(), input, RefNull());
947     case wasm::kExprI32AsmjsLoadMem8S:
948       return BuildAsmjsLoadMem(MachineType::Int8(), input);
949     case wasm::kExprI32AsmjsLoadMem8U:
950       return BuildAsmjsLoadMem(MachineType::Uint8(), input);
951     case wasm::kExprI32AsmjsLoadMem16S:
952       return BuildAsmjsLoadMem(MachineType::Int16(), input);
953     case wasm::kExprI32AsmjsLoadMem16U:
954       return BuildAsmjsLoadMem(MachineType::Uint16(), input);
955     case wasm::kExprI32AsmjsLoadMem:
956       return BuildAsmjsLoadMem(MachineType::Int32(), input);
957     case wasm::kExprF32AsmjsLoadMem:
958       return BuildAsmjsLoadMem(MachineType::Float32(), input);
959     case wasm::kExprF64AsmjsLoadMem:
960       return BuildAsmjsLoadMem(MachineType::Float64(), input);
961     default:
962       FATAL_UNSUPPORTED_OPCODE(opcode);
963   }
964   return graph()->NewNode(op, input);
965 }
966 
967 Node* WasmGraphBuilder::Float32Constant(float value) {
968   return mcgraph()->Float32Constant(value);
969 }
970 
971 Node* WasmGraphBuilder::Float64Constant(double value) {
972   return mcgraph()->Float64Constant(value);
973 }
974 
975 Node* WasmGraphBuilder::Simd128Constant(const uint8_t value[16]) {
976   has_simd_ = true;
977   return graph()->NewNode(mcgraph()->machine()->S128Const(value));
978 }
979 
980 namespace {
981 Node* Branch(MachineGraph* mcgraph, Node* cond, Node** true_node,
982              Node** false_node, Node* control, BranchHint hint) {
983   DCHECK_NOT_NULL(cond);
984   DCHECK_NOT_NULL(control);
985   Node* branch =
986       mcgraph->graph()->NewNode(mcgraph->common()->Branch(hint), cond, control);
987   *true_node = mcgraph->graph()->NewNode(mcgraph->common()->IfTrue(), branch);
988   *false_node = mcgraph->graph()->NewNode(mcgraph->common()->IfFalse(), branch);
989   return branch;
990 }
991 }  // namespace
992 
993 Node* WasmGraphBuilder::BranchNoHint(Node* cond, Node** true_node,
994                                      Node** false_node) {
995   return Branch(mcgraph(), cond, true_node, false_node, control(),
996                 BranchHint::kNone);
997 }
998 
999 Node* WasmGraphBuilder::BranchExpectTrue(Node* cond, Node** true_node,
1000                                          Node** false_node) {
1001   return Branch(mcgraph(), cond, true_node, false_node, control(),
1002                 BranchHint::kTrue);
1003 }
1004 
1005 Node* WasmGraphBuilder::BranchExpectFalse(Node* cond, Node** true_node,
1006                                           Node** false_node) {
1007   return Branch(mcgraph(), cond, true_node, false_node, control(),
1008                 BranchHint::kFalse);
1009 }
1010 
1011 TrapId WasmGraphBuilder::GetTrapIdForTrap(wasm::TrapReason reason) {
1012   // TODO(wasm): "!env_" should not happen when compiling an actual wasm
1013   // function.
1014   if (!env_ || !env_->runtime_exception_support) {
1015     // We use TrapId::kInvalid as a marker to tell the code generator
1016     // to generate a call to a testing c-function instead of a runtime
1017     // stub. This code should only be called from a cctest.
1018     return TrapId::kInvalid;
1019   }
1020 
1021   switch (reason) {
1022 #define TRAPREASON_TO_TRAPID(name)                                             \
1023   case wasm::k##name:                                                          \
1024     static_assert(                                                             \
1025         static_cast<int>(TrapId::k##name) == wasm::WasmCode::kThrowWasm##name, \
1026         "trap id mismatch");                                                   \
1027     return TrapId::k##name;
1028     FOREACH_WASM_TRAPREASON(TRAPREASON_TO_TRAPID)
1029 #undef TRAPREASON_TO_TRAPID
1030     default:
1031       UNREACHABLE();
1032   }
1033 }
1034 
1035 Node* WasmGraphBuilder::TrapIfTrue(wasm::TrapReason reason, Node* cond,
1036                                    wasm::WasmCodePosition position) {
1037   TrapId trap_id = GetTrapIdForTrap(reason);
1038   Node* node = SetControl(graph()->NewNode(mcgraph()->common()->TrapIf(trap_id),
1039                                            cond, effect(), control()));
1040   SetSourcePosition(node, position);
1041   return node;
1042 }
1043 
1044 Node* WasmGraphBuilder::TrapIfFalse(wasm::TrapReason reason, Node* cond,
1045                                     wasm::WasmCodePosition position) {
1046   TrapId trap_id = GetTrapIdForTrap(reason);
1047   Node* node = SetControl(graph()->NewNode(
1048       mcgraph()->common()->TrapUnless(trap_id), cond, effect(), control()));
1049   SetSourcePosition(node, position);
1050   return node;
1051 }
1052 
1053 // Add a check that traps if {node} is equal to {val}.
1054 Node* WasmGraphBuilder::TrapIfEq32(wasm::TrapReason reason, Node* node,
1055                                    int32_t val,
1056                                    wasm::WasmCodePosition position) {
1057   Int32Matcher m(node);
1058   if (m.HasResolvedValue() && !m.Is(val)) return graph()->start();
1059   if (val == 0) {
1060     return TrapIfFalse(reason, node, position);
1061   } else {
1062     return TrapIfTrue(reason,
1063                       graph()->NewNode(mcgraph()->machine()->Word32Equal(),
1064                                        node, mcgraph()->Int32Constant(val)),
1065                       position);
1066   }
1067 }
1068 
1069 // Add a check that traps if {node} is zero.
1070 Node* WasmGraphBuilder::ZeroCheck32(wasm::TrapReason reason, Node* node,
1071                                     wasm::WasmCodePosition position) {
1072   return TrapIfEq32(reason, node, 0, position);
1073 }
1074 
1075 // Add a check that traps if {node} is equal to {val}.
1076 Node* WasmGraphBuilder::TrapIfEq64(wasm::TrapReason reason, Node* node,
1077                                    int64_t val,
1078                                    wasm::WasmCodePosition position) {
1079   Int64Matcher m(node);
1080   if (m.HasResolvedValue() && !m.Is(val)) return graph()->start();
1081   return TrapIfTrue(reason,
1082                     graph()->NewNode(mcgraph()->machine()->Word64Equal(), node,
1083                                      mcgraph()->Int64Constant(val)),
1084                     position);
1085 }
1086 
1087 // Add a check that traps if {node} is zero.
1088 Node* WasmGraphBuilder::ZeroCheck64(wasm::TrapReason reason, Node* node,
1089                                     wasm::WasmCodePosition position) {
1090   return TrapIfEq64(reason, node, 0, position);
1091 }
1092 
1093 Node* WasmGraphBuilder::Switch(unsigned count, Node* key) {
1094   // The instruction selector will use {kArchTableSwitch} for large switches,
1095   // which has limited input count, see {InstructionSelector::EmitTableSwitch}.
1096   DCHECK_LE(count, Instruction::kMaxInputCount - 2);          // value_range + 2
1097   DCHECK_LE(count, wasm::kV8MaxWasmFunctionBrTableSize + 1);  // plus IfDefault
1098   return graph()->NewNode(mcgraph()->common()->Switch(count), key, control());
1099 }
1100 
1101 Node* WasmGraphBuilder::IfValue(int32_t value, Node* sw) {
1102   DCHECK_EQ(IrOpcode::kSwitch, sw->opcode());
1103   return graph()->NewNode(mcgraph()->common()->IfValue(value), sw);
1104 }
1105 
1106 Node* WasmGraphBuilder::IfDefault(Node* sw) {
1107   DCHECK_EQ(IrOpcode::kSwitch, sw->opcode());
1108   return graph()->NewNode(mcgraph()->common()->IfDefault(), sw);
1109 }
1110 
1111 Node* WasmGraphBuilder::Return(Vector<Node*> vals) {
1112   unsigned count = static_cast<unsigned>(vals.size());
1113   base::SmallVector<Node*, 8> buf(count + 3);
1114 
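  // Layout of the Return node's inputs: a leading Int32 constant (0 here),
  // the {count} return values, then effect and control, for count + 3 total.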
1115   buf[0] = mcgraph()->Int32Constant(0);
1116   if (count > 0) {
1117     memcpy(buf.data() + 1, vals.begin(), sizeof(void*) * count);
1118   }
1119   buf[count + 1] = effect();
1120   buf[count + 2] = control();
1121   Node* ret = graph()->NewNode(mcgraph()->common()->Return(count), count + 3,
1122                                buf.data());
1123 
1124   MergeControlToEnd(mcgraph(), ret);
1125   return ret;
1126 }
1127 
1128 Node* WasmGraphBuilder::Trap(wasm::TrapReason reason,
1129                              wasm::WasmCodePosition position) {
1130   TrapIfFalse(reason, Int32Constant(0), position);
1131   Return(Vector<Node*>{});
1132   return nullptr;
1133 }
1134 
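// Wasm takes shift counts modulo the operand width. On targets where the
// machine shift instruction does not already mask the count
// (!Word32ShiftIsSafe()), mask it explicitly, e.g. a count of 33 becomes
// 33 & 0x1F == 1.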
1135 Node* WasmGraphBuilder::MaskShiftCount32(Node* node) {
1136   static const int32_t kMask32 = 0x1F;
1137   if (!mcgraph()->machine()->Word32ShiftIsSafe()) {
1138     // Shifts by constants are so common we pattern-match them here.
1139     Int32Matcher match(node);
1140     if (match.HasResolvedValue()) {
1141       int32_t masked = (match.ResolvedValue() & kMask32);
1142       if (match.ResolvedValue() != masked)
1143         node = mcgraph()->Int32Constant(masked);
1144     } else {
1145       node = graph()->NewNode(mcgraph()->machine()->Word32And(), node,
1146                               mcgraph()->Int32Constant(kMask32));
1147     }
1148   }
1149   return node;
1150 }
1151 
1152 Node* WasmGraphBuilder::MaskShiftCount64(Node* node) {
1153   static const int64_t kMask64 = 0x3F;
1154   if (!mcgraph()->machine()->Word32ShiftIsSafe()) {
1155     // Shifts by constants are so common we pattern-match them here.
1156     Int64Matcher match(node);
1157     if (match.HasResolvedValue()) {
1158       int64_t masked = (match.ResolvedValue() & kMask64);
1159       if (match.ResolvedValue() != masked)
1160         node = mcgraph()->Int64Constant(masked);
1161     } else {
1162       node = graph()->NewNode(mcgraph()->machine()->Word64And(), node,
1163                               mcgraph()->Int64Constant(kMask64));
1164     }
1165   }
1166   return node;
1167 }
1168 
1169 namespace {
1170 
1171 bool ReverseBytesSupported(MachineOperatorBuilder* m, size_t size_in_bytes) {
1172   switch (size_in_bytes) {
1173     case 4:
1174     case 16:
1175       return true;
1176     case 8:
1177       return m->Is64();
1178     default:
1179       break;
1180   }
1181   return false;
1182 }
1183 
1184 }  // namespace
1185 
1186 Node* WasmGraphBuilder::BuildChangeEndiannessStore(
1187     Node* node, MachineRepresentation mem_rep, wasm::ValueType wasmtype) {
1188   Node* result;
1189   Node* value = node;
1190   MachineOperatorBuilder* m = mcgraph()->machine();
1191   int valueSizeInBytes = wasmtype.element_size_bytes();
1192   int valueSizeInBits = 8 * valueSizeInBytes;
1193   bool isFloat = false;
1194 
1195   switch (wasmtype.kind()) {
1196     case wasm::ValueType::kF64:
1197       value = graph()->NewNode(m->BitcastFloat64ToInt64(), node);
1198       isFloat = true;
1199       V8_FALLTHROUGH;
1200     case wasm::ValueType::kI64:
1201       result = mcgraph()->Int64Constant(0);
1202       break;
1203     case wasm::ValueType::kF32:
1204       value = graph()->NewNode(m->BitcastFloat32ToInt32(), node);
1205       isFloat = true;
1206       V8_FALLTHROUGH;
1207     case wasm::ValueType::kI32:
1208       result = mcgraph()->Int32Constant(0);
1209       break;
1210     case wasm::ValueType::kS128:
1211       DCHECK(ReverseBytesSupported(m, valueSizeInBytes));
1212       break;
1213     default:
1214       UNREACHABLE();
1215   }
1216 
1217   if (mem_rep == MachineRepresentation::kWord8) {
1218     // No need to change endianness for byte size, return original node
1219     return node;
1220   }
1221   if (wasmtype == wasm::kWasmI64 && mem_rep < MachineRepresentation::kWord64) {
1222     // If we store only the lower part of a WasmI64 expression, we can
1223     // truncate the upper 32 bits.
1224     value = graph()->NewNode(m->TruncateInt64ToInt32(), value);
1225     valueSizeInBytes = wasm::kWasmI32.element_size_bytes();
1226     valueSizeInBits = 8 * valueSizeInBytes;
1227     if (mem_rep == MachineRepresentation::kWord16) {
1228       value =
1229           graph()->NewNode(m->Word32Shl(), value, mcgraph()->Int32Constant(16));
1230     }
1231   } else if (wasmtype == wasm::kWasmI32 &&
1232              mem_rep == MachineRepresentation::kWord16) {
1233     value =
1234         graph()->NewNode(m->Word32Shl(), value, mcgraph()->Int32Constant(16));
1235   }
1236 
1237   int i;
1238   uint32_t shiftCount;
1239 
1240   if (ReverseBytesSupported(m, valueSizeInBytes)) {
1241     switch (valueSizeInBytes) {
1242       case 4:
1243         result = graph()->NewNode(m->Word32ReverseBytes(), value);
1244         break;
1245       case 8:
1246         result = graph()->NewNode(m->Word64ReverseBytes(), value);
1247         break;
1248       case 16:
1249         result = graph()->NewNode(m->Simd128ReverseBytes(), value);
1250         break;
1251       default:
1252         UNREACHABLE();
1253         break;
1254     }
1255   } else {
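    // No single byte-reverse instruction is available, so swap bytes
    // pairwise: iteration i extracts byte i from the bottom and byte i from
    // the top with shifts and masks, and ORs both into {result}.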
1256     for (i = 0, shiftCount = valueSizeInBits - 8; i < valueSizeInBits / 2;
1257          i += 8, shiftCount -= 16) {
1258       Node* shiftLower;
1259       Node* shiftHigher;
1260       Node* lowerByte;
1261       Node* higherByte;
1262 
1263       DCHECK_LT(0, shiftCount);
1264       DCHECK_EQ(0, (shiftCount + 8) % 16);
1265 
1266       if (valueSizeInBits > 32) {
1267         shiftLower = graph()->NewNode(m->Word64Shl(), value,
1268                                       mcgraph()->Int64Constant(shiftCount));
1269         shiftHigher = graph()->NewNode(m->Word64Shr(), value,
1270                                        mcgraph()->Int64Constant(shiftCount));
1271         lowerByte = graph()->NewNode(
1272             m->Word64And(), shiftLower,
1273             mcgraph()->Int64Constant(static_cast<uint64_t>(0xFF)
1274                                      << (valueSizeInBits - 8 - i)));
1275         higherByte = graph()->NewNode(
1276             m->Word64And(), shiftHigher,
1277             mcgraph()->Int64Constant(static_cast<uint64_t>(0xFF) << i));
1278         result = graph()->NewNode(m->Word64Or(), result, lowerByte);
1279         result = graph()->NewNode(m->Word64Or(), result, higherByte);
1280       } else {
1281         shiftLower = graph()->NewNode(m->Word32Shl(), value,
1282                                       mcgraph()->Int32Constant(shiftCount));
1283         shiftHigher = graph()->NewNode(m->Word32Shr(), value,
1284                                        mcgraph()->Int32Constant(shiftCount));
1285         lowerByte = graph()->NewNode(
1286             m->Word32And(), shiftLower,
1287             mcgraph()->Int32Constant(static_cast<uint32_t>(0xFF)
1288                                      << (valueSizeInBits - 8 - i)));
1289         higherByte = graph()->NewNode(
1290             m->Word32And(), shiftHigher,
1291             mcgraph()->Int32Constant(static_cast<uint32_t>(0xFF) << i));
1292         result = graph()->NewNode(m->Word32Or(), result, lowerByte);
1293         result = graph()->NewNode(m->Word32Or(), result, higherByte);
1294       }
1295     }
1296   }
1297 
1298   if (isFloat) {
1299     switch (wasmtype.kind()) {
1300       case wasm::ValueType::kF64:
1301         result = graph()->NewNode(m->BitcastInt64ToFloat64(), result);
1302         break;
1303       case wasm::ValueType::kF32:
1304         result = graph()->NewNode(m->BitcastInt32ToFloat32(), result);
1305         break;
1306       default:
1307         UNREACHABLE();
1308         break;
1309     }
1310   }
1311 
1312   return result;
1313 }
1314 
1315 Node* WasmGraphBuilder::BuildChangeEndiannessLoad(Node* node,
1316                                                   MachineType memtype,
1317                                                   wasm::ValueType wasmtype) {
1318   Node* result;
1319   Node* value = node;
1320   MachineOperatorBuilder* m = mcgraph()->machine();
1321   int valueSizeInBytes = ElementSizeInBytes(memtype.representation());
1322   int valueSizeInBits = 8 * valueSizeInBytes;
1323   bool isFloat = false;
1324 
1325   switch (memtype.representation()) {
1326     case MachineRepresentation::kFloat64:
1327       value = graph()->NewNode(m->BitcastFloat64ToInt64(), node);
1328       isFloat = true;
1329       V8_FALLTHROUGH;
1330     case MachineRepresentation::kWord64:
1331       result = mcgraph()->Int64Constant(0);
1332       break;
1333     case MachineRepresentation::kFloat32:
1334       value = graph()->NewNode(m->BitcastFloat32ToInt32(), node);
1335       isFloat = true;
1336       V8_FALLTHROUGH;
1337     case MachineRepresentation::kWord32:
1338     case MachineRepresentation::kWord16:
1339       result = mcgraph()->Int32Constant(0);
1340       break;
1341     case MachineRepresentation::kWord8:
1342       // No need to change endianness for byte size, return original node
1343       return node;
1344       break;
1345     case MachineRepresentation::kSimd128:
1346       DCHECK(ReverseBytesSupported(m, valueSizeInBytes));
1347       break;
1348     default:
1349       UNREACHABLE();
1350   }
1351 
1352   int i;
1353   uint32_t shiftCount;
1354 
1355   if (ReverseBytesSupported(m, valueSizeInBytes < 4 ? 4 : valueSizeInBytes)) {
1356     switch (valueSizeInBytes) {
1357       case 2:
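        // A 16-bit value sits in the low half of the register; shift it into
        // the high half first so the 32-bit byte reversal leaves the swapped
        // halfword in the low 16 bits.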
1358         result =
1359             graph()->NewNode(m->Word32ReverseBytes(),
1360                              graph()->NewNode(m->Word32Shl(), value,
1361                                               mcgraph()->Int32Constant(16)));
1362         break;
1363       case 4:
1364         result = graph()->NewNode(m->Word32ReverseBytes(), value);
1365         break;
1366       case 8:
1367         result = graph()->NewNode(m->Word64ReverseBytes(), value);
1368         break;
1369       case 16:
1370         result = graph()->NewNode(m->Simd128ReverseBytes(), value);
1371         break;
1372       default:
1373         UNREACHABLE();
1374     }
1375   } else {
1376     for (i = 0, shiftCount = valueSizeInBits - 8; i < valueSizeInBits / 2;
1377          i += 8, shiftCount -= 16) {
1378       Node* shiftLower;
1379       Node* shiftHigher;
1380       Node* lowerByte;
1381       Node* higherByte;
1382 
1383       DCHECK_LT(0, shiftCount);
1384       DCHECK_EQ(0, (shiftCount + 8) % 16);
1385 
1386       if (valueSizeInBits > 32) {
1387         shiftLower = graph()->NewNode(m->Word64Shl(), value,
1388                                       mcgraph()->Int64Constant(shiftCount));
1389         shiftHigher = graph()->NewNode(m->Word64Shr(), value,
1390                                        mcgraph()->Int64Constant(shiftCount));
1391         lowerByte = graph()->NewNode(
1392             m->Word64And(), shiftLower,
1393             mcgraph()->Int64Constant(static_cast<uint64_t>(0xFF)
1394                                      << (valueSizeInBits - 8 - i)));
1395         higherByte = graph()->NewNode(
1396             m->Word64And(), shiftHigher,
1397             mcgraph()->Int64Constant(static_cast<uint64_t>(0xFF) << i));
1398         result = graph()->NewNode(m->Word64Or(), result, lowerByte);
1399         result = graph()->NewNode(m->Word64Or(), result, higherByte);
1400       } else {
1401         shiftLower = graph()->NewNode(m->Word32Shl(), value,
1402                                       mcgraph()->Int32Constant(shiftCount));
1403         shiftHigher = graph()->NewNode(m->Word32Shr(), value,
1404                                        mcgraph()->Int32Constant(shiftCount));
1405         lowerByte = graph()->NewNode(
1406             m->Word32And(), shiftLower,
1407             mcgraph()->Int32Constant(static_cast<uint32_t>(0xFF)
1408                                      << (valueSizeInBits - 8 - i)));
1409         higherByte = graph()->NewNode(
1410             m->Word32And(), shiftHigher,
1411             mcgraph()->Int32Constant(static_cast<uint32_t>(0xFF) << i));
1412         result = graph()->NewNode(m->Word32Or(), result, lowerByte);
1413         result = graph()->NewNode(m->Word32Or(), result, higherByte);
1414       }
1415     }
1416   }
1417 
1418   if (isFloat) {
1419     switch (memtype.representation()) {
1420       case MachineRepresentation::kFloat64:
1421         result = graph()->NewNode(m->BitcastInt64ToFloat64(), result);
1422         break;
1423       case MachineRepresentation::kFloat32:
1424         result = graph()->NewNode(m->BitcastInt32ToFloat32(), result);
1425         break;
1426       default:
1427         UNREACHABLE();
1428         break;
1429     }
1430   }
1431 
1432   // We need to sign extend the value
1433   if (memtype.IsSigned()) {
1434     DCHECK(!isFloat);
1435     if (valueSizeInBits < 32) {
1436       Node* shiftBitCount;
1437       // Perform sign extension using the following trick:
1438       // result = (x << machine_width - type_width) >> (machine_width -
1439       // type_width)
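      // For example, an 8-bit load of 0x80 into an i32 becomes
      // (0x80 << 24) >> 24 with an arithmetic shift, i.e. 0xFFFFFF80 (-128).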
1440       if (wasmtype == wasm::kWasmI64) {
1441         shiftBitCount = mcgraph()->Int32Constant(64 - valueSizeInBits);
1442         result = graph()->NewNode(
1443             m->Word64Sar(),
1444             graph()->NewNode(m->Word64Shl(),
1445                              graph()->NewNode(m->ChangeInt32ToInt64(), result),
1446                              shiftBitCount),
1447             shiftBitCount);
1448       } else if (wasmtype == wasm::kWasmI32) {
1449         shiftBitCount = mcgraph()->Int32Constant(32 - valueSizeInBits);
1450         result = graph()->NewNode(
1451             m->Word32Sar(),
1452             graph()->NewNode(m->Word32Shl(), result, shiftBitCount),
1453             shiftBitCount);
1454       }
1455     }
1456   }
1457 
1458   return result;
1459 }
1460 
1461 Node* WasmGraphBuilder::BuildF32CopySign(Node* left, Node* right) {
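  // Combine the magnitude bits of the left operand (mask 0x7FFFFFFF) with the
  // sign bit of the right operand (mask 0x80000000) on the i32 bit patterns.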
1462   Node* result = Unop(
1463       wasm::kExprF32ReinterpretI32,
1464       Binop(wasm::kExprI32Ior,
1465             Binop(wasm::kExprI32And, Unop(wasm::kExprI32ReinterpretF32, left),
1466                   mcgraph()->Int32Constant(0x7FFFFFFF)),
1467             Binop(wasm::kExprI32And, Unop(wasm::kExprI32ReinterpretF32, right),
1468                   mcgraph()->Int32Constant(0x80000000))));
1469 
1470   return result;
1471 }
1472 
1473 Node* WasmGraphBuilder::BuildF64CopySign(Node* left, Node* right) {
1474   if (mcgraph()->machine()->Is64()) {
1475     return gasm_->BitcastInt64ToFloat64(gasm_->Word64Or(
1476         gasm_->Word64And(gasm_->BitcastFloat64ToInt64(left),
1477                          gasm_->Int64Constant(0x7FFFFFFFFFFFFFFF)),
1478         gasm_->Word64And(gasm_->BitcastFloat64ToInt64(right),
1479                          gasm_->Int64Constant(0x8000000000000000))));
1480   }
1481 
1482   DCHECK(mcgraph()->machine()->Is32());
1483 
1484   Node* high_word_left = gasm_->Float64ExtractHighWord32(left);
1485   Node* high_word_right = gasm_->Float64ExtractHighWord32(right);
1486 
1487   Node* new_high_word = gasm_->Word32Or(
1488       gasm_->Word32And(high_word_left, gasm_->Int32Constant(0x7FFFFFFF)),
1489       gasm_->Word32And(high_word_right, gasm_->Int32Constant(0x80000000)));
1490 
1491   return gasm_->Float64InsertHighWord32(left, new_high_word);
1492 }
1493 
1494 namespace {
1495 
1496 MachineType IntConvertType(wasm::WasmOpcode opcode) {
1497   switch (opcode) {
1498     case wasm::kExprI32SConvertF32:
1499     case wasm::kExprI32SConvertF64:
1500     case wasm::kExprI32SConvertSatF32:
1501     case wasm::kExprI32SConvertSatF64:
1502       return MachineType::Int32();
1503     case wasm::kExprI32UConvertF32:
1504     case wasm::kExprI32UConvertF64:
1505     case wasm::kExprI32UConvertSatF32:
1506     case wasm::kExprI32UConvertSatF64:
1507       return MachineType::Uint32();
1508     case wasm::kExprI64SConvertF32:
1509     case wasm::kExprI64SConvertF64:
1510     case wasm::kExprI64SConvertSatF32:
1511     case wasm::kExprI64SConvertSatF64:
1512       return MachineType::Int64();
1513     case wasm::kExprI64UConvertF32:
1514     case wasm::kExprI64UConvertF64:
1515     case wasm::kExprI64UConvertSatF32:
1516     case wasm::kExprI64UConvertSatF64:
1517       return MachineType::Uint64();
1518     default:
1519       UNREACHABLE();
1520   }
1521 }
1522 
1523 MachineType FloatConvertType(wasm::WasmOpcode opcode) {
1524   switch (opcode) {
1525     case wasm::kExprI32SConvertF32:
1526     case wasm::kExprI32UConvertF32:
1527     case wasm::kExprI32SConvertSatF32:
1528     case wasm::kExprI64SConvertF32:
1529     case wasm::kExprI64UConvertF32:
1530     case wasm::kExprI32UConvertSatF32:
1531     case wasm::kExprI64SConvertSatF32:
1532     case wasm::kExprI64UConvertSatF32:
1533       return MachineType::Float32();
1534     case wasm::kExprI32SConvertF64:
1535     case wasm::kExprI32UConvertF64:
1536     case wasm::kExprI64SConvertF64:
1537     case wasm::kExprI64UConvertF64:
1538     case wasm::kExprI32SConvertSatF64:
1539     case wasm::kExprI32UConvertSatF64:
1540     case wasm::kExprI64SConvertSatF64:
1541     case wasm::kExprI64UConvertSatF64:
1542       return MachineType::Float64();
1543     default:
1544       UNREACHABLE();
1545   }
1546 }
1547 
1548 const Operator* ConvertOp(WasmGraphBuilder* builder, wasm::WasmOpcode opcode) {
1549   switch (opcode) {
1550     case wasm::kExprI32SConvertF32:
1551       return builder->mcgraph()->machine()->TruncateFloat32ToInt32(
1552           TruncateKind::kSetOverflowToMin);
1553     case wasm::kExprI32SConvertSatF32:
1554       return builder->mcgraph()->machine()->TruncateFloat32ToInt32(
1555           TruncateKind::kArchitectureDefault);
1556     case wasm::kExprI32UConvertF32:
1557       return builder->mcgraph()->machine()->TruncateFloat32ToUint32(
1558           TruncateKind::kSetOverflowToMin);
1559     case wasm::kExprI32UConvertSatF32:
1560       return builder->mcgraph()->machine()->TruncateFloat32ToUint32(
1561           TruncateKind::kArchitectureDefault);
1562     case wasm::kExprI32SConvertF64:
1563     case wasm::kExprI32SConvertSatF64:
1564       return builder->mcgraph()->machine()->ChangeFloat64ToInt32();
1565     case wasm::kExprI32UConvertF64:
1566     case wasm::kExprI32UConvertSatF64:
1567       return builder->mcgraph()->machine()->TruncateFloat64ToUint32();
1568     case wasm::kExprI64SConvertF32:
1569     case wasm::kExprI64SConvertSatF32:
1570       return builder->mcgraph()->machine()->TryTruncateFloat32ToInt64();
1571     case wasm::kExprI64UConvertF32:
1572     case wasm::kExprI64UConvertSatF32:
1573       return builder->mcgraph()->machine()->TryTruncateFloat32ToUint64();
1574     case wasm::kExprI64SConvertF64:
1575     case wasm::kExprI64SConvertSatF64:
1576       return builder->mcgraph()->machine()->TryTruncateFloat64ToInt64();
1577     case wasm::kExprI64UConvertF64:
1578     case wasm::kExprI64UConvertSatF64:
1579       return builder->mcgraph()->machine()->TryTruncateFloat64ToUint64();
1580     default:
1581       UNREACHABLE();
1582   }
1583 }
1584 
1585 wasm::WasmOpcode ConvertBackOp(wasm::WasmOpcode opcode) {
1586   switch (opcode) {
1587     case wasm::kExprI32SConvertF32:
1588     case wasm::kExprI32SConvertSatF32:
1589       return wasm::kExprF32SConvertI32;
1590     case wasm::kExprI32UConvertF32:
1591     case wasm::kExprI32UConvertSatF32:
1592       return wasm::kExprF32UConvertI32;
1593     case wasm::kExprI32SConvertF64:
1594     case wasm::kExprI32SConvertSatF64:
1595       return wasm::kExprF64SConvertI32;
1596     case wasm::kExprI32UConvertF64:
1597     case wasm::kExprI32UConvertSatF64:
1598       return wasm::kExprF64UConvertI32;
1599     default:
1600       UNREACHABLE();
1601   }
1602 }
1603 
1604 bool IsTrappingConvertOp(wasm::WasmOpcode opcode) {
1605   switch (opcode) {
1606     case wasm::kExprI32SConvertF32:
1607     case wasm::kExprI32UConvertF32:
1608     case wasm::kExprI32SConvertF64:
1609     case wasm::kExprI32UConvertF64:
1610     case wasm::kExprI64SConvertF32:
1611     case wasm::kExprI64UConvertF32:
1612     case wasm::kExprI64SConvertF64:
1613     case wasm::kExprI64UConvertF64:
1614       return true;
1615     case wasm::kExprI32SConvertSatF64:
1616     case wasm::kExprI32UConvertSatF64:
1617     case wasm::kExprI32SConvertSatF32:
1618     case wasm::kExprI32UConvertSatF32:
1619     case wasm::kExprI64SConvertSatF32:
1620     case wasm::kExprI64UConvertSatF32:
1621     case wasm::kExprI64SConvertSatF64:
1622     case wasm::kExprI64UConvertSatF64:
1623       return false;
1624     default:
1625       UNREACHABLE();
1626   }
1627 }
1628 
1629 Node* Zero(WasmGraphBuilder* builder, const MachineType& ty) {
1630   switch (ty.representation()) {
1631     case MachineRepresentation::kWord32:
1632       return builder->Int32Constant(0);
1633     case MachineRepresentation::kWord64:
1634       return builder->Int64Constant(0);
1635     case MachineRepresentation::kFloat32:
1636       return builder->Float32Constant(0.0);
1637     case MachineRepresentation::kFloat64:
1638       return builder->Float64Constant(0.0);
1639     default:
1640       UNREACHABLE();
1641   }
1642 }
1643 
1644 Node* Min(WasmGraphBuilder* builder, const MachineType& ty) {
1645   switch (ty.semantic()) {
1646     case MachineSemantic::kInt32:
1647       return builder->Int32Constant(std::numeric_limits<int32_t>::min());
1648     case MachineSemantic::kUint32:
1649       return builder->Int32Constant(std::numeric_limits<uint32_t>::min());
1650     case MachineSemantic::kInt64:
1651       return builder->Int64Constant(std::numeric_limits<int64_t>::min());
1652     case MachineSemantic::kUint64:
1653       return builder->Int64Constant(std::numeric_limits<uint64_t>::min());
1654     default:
1655       UNREACHABLE();
1656   }
1657 }
1658 
1659 Node* Max(WasmGraphBuilder* builder, const MachineType& ty) {
1660   switch (ty.semantic()) {
1661     case MachineSemantic::kInt32:
1662       return builder->Int32Constant(std::numeric_limits<int32_t>::max());
1663     case MachineSemantic::kUint32:
1664       return builder->Int32Constant(std::numeric_limits<uint32_t>::max());
1665     case MachineSemantic::kInt64:
1666       return builder->Int64Constant(std::numeric_limits<int64_t>::max());
1667     case MachineSemantic::kUint64:
1668       return builder->Int64Constant(std::numeric_limits<uint64_t>::max());
1669     default:
1670       UNREACHABLE();
1671   }
1672 }
1673 
1674 wasm::WasmOpcode TruncOp(const MachineType& ty) {
1675   switch (ty.representation()) {
1676     case MachineRepresentation::kFloat32:
1677       return wasm::kExprF32Trunc;
1678     case MachineRepresentation::kFloat64:
1679       return wasm::kExprF64Trunc;
1680     default:
1681       UNREACHABLE();
1682   }
1683 }
1684 
1685 wasm::WasmOpcode NeOp(const MachineType& ty) {
1686   switch (ty.representation()) {
1687     case MachineRepresentation::kFloat32:
1688       return wasm::kExprF32Ne;
1689     case MachineRepresentation::kFloat64:
1690       return wasm::kExprF64Ne;
1691     default:
1692       UNREACHABLE();
1693   }
1694 }
1695 
1696 wasm::WasmOpcode LtOp(const MachineType& ty) {
1697   switch (ty.representation()) {
1698     case MachineRepresentation::kFloat32:
1699       return wasm::kExprF32Lt;
1700     case MachineRepresentation::kFloat64:
1701       return wasm::kExprF64Lt;
1702     default:
1703       UNREACHABLE();
1704   }
1705 }
1706 
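// Builds the check used to detect a failed float-to-int conversion. For
// 32-bit results the converted value is converted back to the float type and
// compared against the truncated input; a mismatch means the input was out of
// range or NaN. For 64-bit results the TryTruncate* operators expose a
// success flag as projection 1, which the callers check (trapping or
// saturating when it is zero).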
1707 Node* ConvertTrapTest(WasmGraphBuilder* builder, wasm::WasmOpcode opcode,
1708                       const MachineType& int_ty, const MachineType& float_ty,
1709                       Node* trunc, Node* converted_value) {
1710   if (int_ty.representation() == MachineRepresentation::kWord32) {
1711     Node* check = builder->Unop(ConvertBackOp(opcode), converted_value);
1712     return builder->Binop(NeOp(float_ty), trunc, check);
1713   }
1714   return builder->graph()->NewNode(builder->mcgraph()->common()->Projection(1),
1715                                    trunc, builder->graph()->start());
1716 }
1717 
1718 Node* ConvertSaturateTest(WasmGraphBuilder* builder, wasm::WasmOpcode opcode,
1719                           const MachineType& int_ty,
1720                           const MachineType& float_ty, Node* trunc,
1721                           Node* converted_value) {
1722   Node* test = ConvertTrapTest(builder, opcode, int_ty, float_ty, trunc,
1723                                converted_value);
1724   if (int_ty.representation() == MachineRepresentation::kWord64) {
1725     test = builder->Binop(wasm::kExprI64Eq, test, builder->Int64Constant(0));
1726   }
1727   return test;
1728 }
1729 
1730 }  // namespace
1731 
1732 Node* WasmGraphBuilder::BuildIntConvertFloat(Node* input,
1733                                              wasm::WasmCodePosition position,
1734                                              wasm::WasmOpcode opcode) {
1735   const MachineType int_ty = IntConvertType(opcode);
1736   const MachineType float_ty = FloatConvertType(opcode);
1737   const Operator* conv_op = ConvertOp(this, opcode);
1738   Node* trunc = nullptr;
1739   Node* converted_value = nullptr;
1740   const bool is_int32 =
1741       int_ty.representation() == MachineRepresentation::kWord32;
1742   if (is_int32) {
1743     trunc = Unop(TruncOp(float_ty), input);
1744     converted_value = graph()->NewNode(conv_op, trunc);
1745   } else {
1746     trunc = graph()->NewNode(conv_op, input);
1747     converted_value = graph()->NewNode(mcgraph()->common()->Projection(0),
1748                                        trunc, graph()->start());
1749   }
1750   if (IsTrappingConvertOp(opcode)) {
1751     Node* test =
1752         ConvertTrapTest(this, opcode, int_ty, float_ty, trunc, converted_value);
1753     if (is_int32) {
1754       TrapIfTrue(wasm::kTrapFloatUnrepresentable, test, position);
1755     } else {
1756       ZeroCheck64(wasm::kTrapFloatUnrepresentable, test, position);
1757     }
1758     return converted_value;
1759   }
1760   if (mcgraph()->machine()->SatConversionIsSafe()) {
1761     return converted_value;
1762   }
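  // The machine-level truncation does not already saturate, so emit explicit
  // checks: if the value does not convert cleanly, return 0 for NaN inputs,
  // the type minimum for negative overflow, and the type maximum for positive
  // overflow.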
1763   Node* test = ConvertSaturateTest(this, opcode, int_ty, float_ty, trunc,
1764                                    converted_value);
1765   Diamond tl_d(graph(), mcgraph()->common(), test, BranchHint::kFalse);
1766   tl_d.Chain(control());
1767   Node* nan_test = Binop(NeOp(float_ty), input, input);
1768   Diamond nan_d(graph(), mcgraph()->common(), nan_test, BranchHint::kFalse);
1769   nan_d.Nest(tl_d, true);
1770   Node* neg_test = Binop(LtOp(float_ty), input, Zero(this, float_ty));
1771   Diamond sat_d(graph(), mcgraph()->common(), neg_test, BranchHint::kNone);
1772   sat_d.Nest(nan_d, false);
1773   Node* sat_val =
1774       sat_d.Phi(int_ty.representation(), Min(this, int_ty), Max(this, int_ty));
1775   Node* nan_val =
1776       nan_d.Phi(int_ty.representation(), Zero(this, int_ty), sat_val);
1777   return tl_d.Phi(int_ty.representation(), nan_val, converted_value);
1778 }
1779 
1780 Node* WasmGraphBuilder::BuildI32AsmjsSConvertF32(Node* input) {
1781   MachineOperatorBuilder* m = mcgraph()->machine();
1782   // asm.js must use the wacky JS semantics.
1783   input = graph()->NewNode(m->ChangeFloat32ToFloat64(), input);
1784   return graph()->NewNode(m->TruncateFloat64ToWord32(), input);
1785 }
1786 
1787 Node* WasmGraphBuilder::BuildI32AsmjsSConvertF64(Node* input) {
1788   MachineOperatorBuilder* m = mcgraph()->machine();
1789   // asm.js must use the wacky JS semantics.
1790   return graph()->NewNode(m->TruncateFloat64ToWord32(), input);
1791 }
1792 
1793 Node* WasmGraphBuilder::BuildI32AsmjsUConvertF32(Node* input) {
1794   MachineOperatorBuilder* m = mcgraph()->machine();
1795   // asm.js must use the wacky JS semantics.
1796   input = graph()->NewNode(m->ChangeFloat32ToFloat64(), input);
1797   return graph()->NewNode(m->TruncateFloat64ToWord32(), input);
1798 }
1799 
1800 Node* WasmGraphBuilder::BuildI32AsmjsUConvertF64(Node* input) {
1801   MachineOperatorBuilder* m = mcgraph()->machine();
1802   // asm.js must use the wacky JS semantics.
1803   return graph()->NewNode(m->TruncateFloat64ToWord32(), input);
1804 }
1805 
1806 Node* WasmGraphBuilder::BuildBitCountingCall(Node* input, ExternalReference ref,
1807                                              MachineRepresentation input_type) {
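  // Spill the input into a stack slot and call the C helper with a pointer to
  // it; the helper returns the bit count as an int32.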
1808   Node* stack_slot_param = StoreArgsInStackSlot({{input_type, input}});
1809 
1810   MachineType sig_types[] = {MachineType::Int32(), MachineType::Pointer()};
1811   MachineSignature sig(1, 1, sig_types);
1812 
1813   Node* function = graph()->NewNode(mcgraph()->common()->ExternalConstant(ref));
1814 
1815   return BuildCCall(&sig, function, stack_slot_param);
1816 }
1817 
1818 Node* WasmGraphBuilder::BuildI32Ctz(Node* input) {
1819   return BuildBitCountingCall(input, ExternalReference::wasm_word32_ctz(),
1820                               MachineRepresentation::kWord32);
1821 }
1822 
1823 Node* WasmGraphBuilder::BuildI64Ctz(Node* input) {
1824   return Unop(wasm::kExprI64UConvertI32,
1825               BuildBitCountingCall(input, ExternalReference::wasm_word64_ctz(),
1826                                    MachineRepresentation::kWord64));
1827 }
1828 
1829 Node* WasmGraphBuilder::BuildI32Popcnt(Node* input) {
1830   return BuildBitCountingCall(input, ExternalReference::wasm_word32_popcnt(),
1831                               MachineRepresentation::kWord32);
1832 }
1833 
1834 Node* WasmGraphBuilder::BuildI64Popcnt(Node* input) {
1835   return Unop(
1836       wasm::kExprI64UConvertI32,
1837       BuildBitCountingCall(input, ExternalReference::wasm_word64_popcnt(),
1838                            MachineRepresentation::kWord64));
1839 }
1840 
1841 Node* WasmGraphBuilder::BuildF32Trunc(Node* input) {
1842   MachineType type = MachineType::Float32();
1843   ExternalReference ref = ExternalReference::wasm_f32_trunc();
1844 
1845   return BuildCFuncInstruction(ref, type, input);
1846 }
1847 
1848 Node* WasmGraphBuilder::BuildF32Floor(Node* input) {
1849   MachineType type = MachineType::Float32();
1850   ExternalReference ref = ExternalReference::wasm_f32_floor();
1851   return BuildCFuncInstruction(ref, type, input);
1852 }
1853 
1854 Node* WasmGraphBuilder::BuildF32Ceil(Node* input) {
1855   MachineType type = MachineType::Float32();
1856   ExternalReference ref = ExternalReference::wasm_f32_ceil();
1857   return BuildCFuncInstruction(ref, type, input);
1858 }
1859 
1860 Node* WasmGraphBuilder::BuildF32NearestInt(Node* input) {
1861   MachineType type = MachineType::Float32();
1862   ExternalReference ref = ExternalReference::wasm_f32_nearest_int();
1863   return BuildCFuncInstruction(ref, type, input);
1864 }
1865 
1866 Node* WasmGraphBuilder::BuildF64Trunc(Node* input) {
1867   MachineType type = MachineType::Float64();
1868   ExternalReference ref = ExternalReference::wasm_f64_trunc();
1869   return BuildCFuncInstruction(ref, type, input);
1870 }
1871 
1872 Node* WasmGraphBuilder::BuildF64Floor(Node* input) {
1873   MachineType type = MachineType::Float64();
1874   ExternalReference ref = ExternalReference::wasm_f64_floor();
1875   return BuildCFuncInstruction(ref, type, input);
1876 }
1877 
1878 Node* WasmGraphBuilder::BuildF64Ceil(Node* input) {
1879   MachineType type = MachineType::Float64();
1880   ExternalReference ref = ExternalReference::wasm_f64_ceil();
1881   return BuildCFuncInstruction(ref, type, input);
1882 }
1883 
1884 Node* WasmGraphBuilder::BuildF64NearestInt(Node* input) {
1885   MachineType type = MachineType::Float64();
1886   ExternalReference ref = ExternalReference::wasm_f64_nearest_int();
1887   return BuildCFuncInstruction(ref, type, input);
1888 }
1889 
1890 Node* WasmGraphBuilder::BuildF64Acos(Node* input) {
1891   MachineType type = MachineType::Float64();
1892   ExternalReference ref = ExternalReference::f64_acos_wrapper_function();
1893   return BuildCFuncInstruction(ref, type, input);
1894 }
1895 
1896 Node* WasmGraphBuilder::BuildF64Asin(Node* input) {
1897   MachineType type = MachineType::Float64();
1898   ExternalReference ref = ExternalReference::f64_asin_wrapper_function();
1899   return BuildCFuncInstruction(ref, type, input);
1900 }
1901 
1902 Node* WasmGraphBuilder::BuildF64Pow(Node* left, Node* right) {
1903   MachineType type = MachineType::Float64();
1904   ExternalReference ref = ExternalReference::wasm_float64_pow();
1905   return BuildCFuncInstruction(ref, type, left, right);
1906 }
1907 
1908 Node* WasmGraphBuilder::BuildF64Mod(Node* left, Node* right) {
1909   MachineType type = MachineType::Float64();
1910   ExternalReference ref = ExternalReference::f64_mod_wrapper_function();
1911   return BuildCFuncInstruction(ref, type, left, right);
1912 }
1913 
1914 Node* WasmGraphBuilder::BuildCFuncInstruction(ExternalReference ref,
1915                                               MachineType type, Node* input0,
1916                                               Node* input1) {
1917   // We do truncation by calling a C function which calculates the result.
1918   // The input is passed to the C function as a byte buffer holding the two
1919   // input doubles. We reserve this byte buffer as a stack slot, store the
1920   // parameters in this buffer slots, pass a pointer to the buffer to the C
1921   // parameters in the buffer, pass a pointer to the buffer to the C
1922   // the buffer.
1923   Node* stack_slot;
1924   if (input1) {
1925     stack_slot = StoreArgsInStackSlot(
1926         {{type.representation(), input0}, {type.representation(), input1}});
1927   } else {
1928     stack_slot = StoreArgsInStackSlot({{type.representation(), input0}});
1929   }
1930 
1931   MachineType sig_types[] = {MachineType::Pointer()};
1932   MachineSignature sig(0, 1, sig_types);
1933   Node* function = graph()->NewNode(mcgraph()->common()->ExternalConstant(ref));
1934   BuildCCall(&sig, function, stack_slot);
1935 
1936   return SetEffect(graph()->NewNode(mcgraph()->machine()->Load(type),
1937                                     stack_slot, mcgraph()->Int32Constant(0),
1938                                     effect(), control()));
1939 }
1940 
1941 Node* WasmGraphBuilder::BuildF32SConvertI64(Node* input) {
1942   // TODO(titzer/bradnelson): Check handling of asm.js case.
1943   return BuildIntToFloatConversionInstruction(
1944       input, ExternalReference::wasm_int64_to_float32(),
1945       MachineRepresentation::kWord64, MachineType::Float32());
1946 }
1947 Node* WasmGraphBuilder::BuildF32UConvertI64(Node* input) {
1948   // TODO(titzer/bradnelson): Check handling of asm.js case.
1949   return BuildIntToFloatConversionInstruction(
1950       input, ExternalReference::wasm_uint64_to_float32(),
1951       MachineRepresentation::kWord64, MachineType::Float32());
1952 }
1953 Node* WasmGraphBuilder::BuildF64SConvertI64(Node* input) {
1954   return BuildIntToFloatConversionInstruction(
1955       input, ExternalReference::wasm_int64_to_float64(),
1956       MachineRepresentation::kWord64, MachineType::Float64());
1957 }
1958 Node* WasmGraphBuilder::BuildF64UConvertI64(Node* input) {
1959   return BuildIntToFloatConversionInstruction(
1960       input, ExternalReference::wasm_uint64_to_float64(),
1961       MachineRepresentation::kWord64, MachineType::Float64());
1962 }
1963 
1964 Node* WasmGraphBuilder::BuildIntToFloatConversionInstruction(
1965     Node* input, ExternalReference ref,
1966     MachineRepresentation parameter_representation,
1967     const MachineType result_type) {
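  // The conversion is done by a C helper: the integer input is stored into a
  // stack slot sized for the larger of input and result, the helper
  // overwrites the slot with the floating-point result, and the result is
  // loaded back from the same slot.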
1968   int stack_slot_size =
1969       std::max(ElementSizeInBytes(parameter_representation),
1970                ElementSizeInBytes(result_type.representation()));
1971   Node* stack_slot =
1972       graph()->NewNode(mcgraph()->machine()->StackSlot(stack_slot_size));
1973   const Operator* store_op = mcgraph()->machine()->Store(
1974       StoreRepresentation(parameter_representation, kNoWriteBarrier));
1975   SetEffect(graph()->NewNode(store_op, stack_slot, mcgraph()->Int32Constant(0),
1976                              input, effect(), control()));
1977   MachineType sig_types[] = {MachineType::Pointer()};
1978   MachineSignature sig(0, 1, sig_types);
1979   Node* function = graph()->NewNode(mcgraph()->common()->ExternalConstant(ref));
1980   BuildCCall(&sig, function, stack_slot);
1981   return SetEffect(graph()->NewNode(mcgraph()->machine()->Load(result_type),
1982                                     stack_slot, mcgraph()->Int32Constant(0),
1983                                     effect(), control()));
1984 }
1985 
1986 namespace {
1987 
1988 ExternalReference convert_ccall_ref(WasmGraphBuilder* builder,
1989                                     wasm::WasmOpcode opcode) {
1990   switch (opcode) {
1991     case wasm::kExprI64SConvertF32:
1992     case wasm::kExprI64SConvertSatF32:
1993       return ExternalReference::wasm_float32_to_int64();
1994     case wasm::kExprI64UConvertF32:
1995     case wasm::kExprI64UConvertSatF32:
1996       return ExternalReference::wasm_float32_to_uint64();
1997     case wasm::kExprI64SConvertF64:
1998     case wasm::kExprI64SConvertSatF64:
1999       return ExternalReference::wasm_float64_to_int64();
2000     case wasm::kExprI64UConvertF64:
2001     case wasm::kExprI64UConvertSatF64:
2002       return ExternalReference::wasm_float64_to_uint64();
2003     default:
2004       UNREACHABLE();
2005   }
2006 }
2007 
2008 }  // namespace
2009 
2010 Node* WasmGraphBuilder::BuildCcallConvertFloat(Node* input,
2011                                                wasm::WasmCodePosition position,
2012                                                wasm::WasmOpcode opcode) {
2013   const MachineType int_ty = IntConvertType(opcode);
2014   const MachineType float_ty = FloatConvertType(opcode);
2015   ExternalReference call_ref = convert_ccall_ref(this, opcode);
2016   int stack_slot_size = std::max(ElementSizeInBytes(int_ty.representation()),
2017                                  ElementSizeInBytes(float_ty.representation()));
2018   Node* stack_slot =
2019       graph()->NewNode(mcgraph()->machine()->StackSlot(stack_slot_size));
2020   const Operator* store_op = mcgraph()->machine()->Store(
2021       StoreRepresentation(float_ty.representation(), kNoWriteBarrier));
2022   SetEffect(graph()->NewNode(store_op, stack_slot, Int32Constant(0), input,
2023                              effect(), control()));
2024   MachineType sig_types[] = {MachineType::Int32(), MachineType::Pointer()};
2025   MachineSignature sig(1, 1, sig_types);
2026   Node* function =
2027       graph()->NewNode(mcgraph()->common()->ExternalConstant(call_ref));
2028   Node* overflow = BuildCCall(&sig, function, stack_slot);
2029   if (IsTrappingConvertOp(opcode)) {
2030     ZeroCheck32(wasm::kTrapFloatUnrepresentable, overflow, position);
2031     return SetEffect(graph()->NewNode(mcgraph()->machine()->Load(int_ty),
2032                                       stack_slot, Int32Constant(0), effect(),
2033                                       control()));
2034   }
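  // Saturating opcodes: a zero return value from the C call indicates the
  // input could not be converted. In that case produce 0 for NaN inputs, the
  // type minimum for negative overflow, and the type maximum for positive
  // overflow, mirroring BuildIntConvertFloat.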
2035   Node* test = Binop(wasm::kExprI32Eq, overflow, Int32Constant(0), position);
2036   Diamond tl_d(graph(), mcgraph()->common(), test, BranchHint::kFalse);
2037   tl_d.Chain(control());
2038   Node* nan_test = Binop(NeOp(float_ty), input, input);
2039   Diamond nan_d(graph(), mcgraph()->common(), nan_test, BranchHint::kFalse);
2040   nan_d.Nest(tl_d, true);
2041   Node* neg_test = Binop(LtOp(float_ty), input, Zero(this, float_ty));
2042   Diamond sat_d(graph(), mcgraph()->common(), neg_test, BranchHint::kNone);
2043   sat_d.Nest(nan_d, false);
2044   Node* sat_val =
2045       sat_d.Phi(int_ty.representation(), Min(this, int_ty), Max(this, int_ty));
2046   Node* load =
2047       SetEffect(graph()->NewNode(mcgraph()->machine()->Load(int_ty), stack_slot,
2048                                  Int32Constant(0), effect(), control()));
2049   Node* nan_val =
2050       nan_d.Phi(int_ty.representation(), Zero(this, int_ty), sat_val);
2051   return tl_d.Phi(int_ty.representation(), nan_val, load);
2052 }
2053 
2054 Node* WasmGraphBuilder::MemoryGrow(Node* input) {
2055   needs_stack_check_ = true;
2056 
2057   WasmMemoryGrowDescriptor interface_descriptor;
2058   auto call_descriptor = Linkage::GetStubCallDescriptor(
2059       mcgraph()->zone(),                              // zone
2060       interface_descriptor,                           // descriptor
2061       interface_descriptor.GetStackParameterCount(),  // stack parameter count
2062       CallDescriptor::kNoFlags,                       // flags
2063       Operator::kNoProperties,                        // properties
2064       StubCallMode::kCallWasmRuntimeStub);            // stub call mode
2065   // A direct call to a wasm runtime stub defined in this module.
2066   // Just encode the stub index. This will be patched at relocation.
2067   Node* call_target = mcgraph()->RelocatableIntPtrConstant(
2068       wasm::WasmCode::kWasmMemoryGrow, RelocInfo::WASM_STUB_CALL);
2069   return SetEffectControl(
2070       graph()->NewNode(mcgraph()->common()->Call(call_descriptor), call_target,
2071                        input, effect(), control()));
2072 }
2073 
2074 Node* WasmGraphBuilder::Throw(uint32_t exception_index,
2075                               const wasm::WasmException* exception,
2076                               const Vector<Node*> values,
2077                               wasm::WasmCodePosition position) {
2078   needs_stack_check_ = true;
2079   uint32_t encoded_size = WasmExceptionPackage::GetEncodedSize(exception);
2080   Node* create_parameters[] = {
2081       LoadExceptionTagFromTable(exception_index),
2082       BuildChangeUint31ToSmi(mcgraph()->Uint32Constant(encoded_size))};
2083   Node* except_obj =
2084       BuildCallToRuntime(Runtime::kWasmThrowCreate, create_parameters,
2085                          arraysize(create_parameters));
2086   SetSourcePosition(except_obj, position);
2087   Node* values_array = CALL_BUILTIN(
2088       WasmGetOwnProperty, except_obj,
2089       LOAD_FULL_POINTER(BuildLoadIsolateRoot(),
2090                         IsolateData::root_slot_offset(
2091                             RootIndex::kwasm_exception_values_symbol)),
2092       LOAD_INSTANCE_FIELD(NativeContext, MachineType::TaggedPointer()));
2093   uint32_t index = 0;
2094   const wasm::WasmExceptionSig* sig = exception->sig;
2095   MachineOperatorBuilder* m = mcgraph()->machine();
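  // Encode each exception value into the values array: i32 and f32 as two
  // Smi-tagged 16-bit halfwords, i64 and f64 as four, s128 as four i32 lanes
  // (eight halfwords), and reference values as a single tagged slot.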
2096   for (size_t i = 0; i < sig->parameter_count(); ++i) {
2097     Node* value = values[i];
2098     switch (sig->GetParam(i).kind()) {
2099       case wasm::ValueType::kF32:
2100         value = graph()->NewNode(m->BitcastFloat32ToInt32(), value);
2101         V8_FALLTHROUGH;
2102       case wasm::ValueType::kI32:
2103         BuildEncodeException32BitValue(values_array, &index, value);
2104         break;
2105       case wasm::ValueType::kF64:
2106         value = graph()->NewNode(m->BitcastFloat64ToInt64(), value);
2107         V8_FALLTHROUGH;
2108       case wasm::ValueType::kI64: {
2109         Node* upper32 = graph()->NewNode(
2110             m->TruncateInt64ToInt32(),
2111             Binop(wasm::kExprI64ShrU, value, Int64Constant(32)));
2112         BuildEncodeException32BitValue(values_array, &index, upper32);
2113         Node* lower32 = graph()->NewNode(m->TruncateInt64ToInt32(), value);
2114         BuildEncodeException32BitValue(values_array, &index, lower32);
2115         break;
2116       }
2117       case wasm::ValueType::kS128:
2118         BuildEncodeException32BitValue(
2119             values_array, &index,
2120             graph()->NewNode(m->I32x4ExtractLane(0), value));
2121         BuildEncodeException32BitValue(
2122             values_array, &index,
2123             graph()->NewNode(m->I32x4ExtractLane(1), value));
2124         BuildEncodeException32BitValue(
2125             values_array, &index,
2126             graph()->NewNode(m->I32x4ExtractLane(2), value));
2127         BuildEncodeException32BitValue(
2128             values_array, &index,
2129             graph()->NewNode(m->I32x4ExtractLane(3), value));
2130         break;
2131       case wasm::ValueType::kRef:
2132       case wasm::ValueType::kOptRef:
2133         STORE_FIXED_ARRAY_SLOT_ANY(values_array, index, value);
2134         ++index;
2135         break;
2136       case wasm::ValueType::kRtt:  // TODO(7748): Implement.
2137       case wasm::ValueType::kI8:
2138       case wasm::ValueType::kI16:
2139       case wasm::ValueType::kStmt:
2140       case wasm::ValueType::kBottom:
2141         UNREACHABLE();
2142     }
2143   }
2144   DCHECK_EQ(encoded_size, index);
2145   WasmThrowDescriptor interface_descriptor;
2146   auto call_descriptor = Linkage::GetStubCallDescriptor(
2147       mcgraph()->zone(), interface_descriptor,
2148       interface_descriptor.GetStackParameterCount(), CallDescriptor::kNoFlags,
2149       Operator::kNoProperties, StubCallMode::kCallWasmRuntimeStub);
2150   Node* call_target = mcgraph()->RelocatableIntPtrConstant(
2151       wasm::WasmCode::kWasmThrow, RelocInfo::WASM_STUB_CALL);
2152   Node* call = SetEffectControl(
2153       graph()->NewNode(mcgraph()->common()->Call(call_descriptor), call_target,
2154                        except_obj, effect(), control()));
2155   SetSourcePosition(call, position);
2156   return call;
2157 }
2158 
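// Stores a 32-bit value into the exception values array as two Smi-tagged
// 16-bit halfwords (upper halfword first); each halfword fits in a Smi even
// on 32-bit platforms. E.g. 0xDEADBEEF is stored as 0xDEAD followed by 0xBEEF.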
2159 void WasmGraphBuilder::BuildEncodeException32BitValue(Node* values_array,
2160                                                       uint32_t* index,
2161                                                       Node* value) {
2162   MachineOperatorBuilder* machine = mcgraph()->machine();
2163   Node* upper_halfword_as_smi = BuildChangeUint31ToSmi(
2164       graph()->NewNode(machine->Word32Shr(), value, Int32Constant(16)));
2165   STORE_FIXED_ARRAY_SLOT_SMI(values_array, *index, upper_halfword_as_smi);
2166   ++(*index);
2167   Node* lower_halfword_as_smi = BuildChangeUint31ToSmi(
2168       graph()->NewNode(machine->Word32And(), value, Int32Constant(0xFFFFu)));
2169   STORE_FIXED_ARRAY_SLOT_SMI(values_array, *index, lower_halfword_as_smi);
2170   ++(*index);
2171 }
2172 
2173 Node* WasmGraphBuilder::BuildDecodeException32BitValue(Node* values_array,
2174                                                        uint32_t* index) {
2175   MachineOperatorBuilder* machine = mcgraph()->machine();
2176   Node* upper =
2177       BuildChangeSmiToInt32(LOAD_FIXED_ARRAY_SLOT_SMI(values_array, *index));
2178   (*index)++;
2179   upper = graph()->NewNode(machine->Word32Shl(), upper, Int32Constant(16));
2180   Node* lower =
2181       BuildChangeSmiToInt32(LOAD_FIXED_ARRAY_SLOT_SMI(values_array, *index));
2182   (*index)++;
2183   Node* value = graph()->NewNode(machine->Word32Or(), upper, lower);
2184   return value;
2185 }
2186 
2187 Node* WasmGraphBuilder::BuildDecodeException64BitValue(Node* values_array,
2188                                                        uint32_t* index) {
2189   Node* upper = Binop(wasm::kExprI64Shl,
2190                       Unop(wasm::kExprI64UConvertI32,
2191                            BuildDecodeException32BitValue(values_array, index)),
2192                       Int64Constant(32));
2193   Node* lower = Unop(wasm::kExprI64UConvertI32,
2194                      BuildDecodeException32BitValue(values_array, index));
2195   return Binop(wasm::kExprI64Ior, upper, lower);
2196 }
2197 
2198 Node* WasmGraphBuilder::Rethrow(Node* except_obj) {
2199   // TODO(v8:8091): Currently the message of the original exception is not being
2200   // preserved when rethrown to the console. The pending message will need to be
2201   // saved when caught and restored here while being rethrown.
2202   WasmThrowDescriptor interface_descriptor;
2203   auto call_descriptor = Linkage::GetStubCallDescriptor(
2204       mcgraph()->zone(), interface_descriptor,
2205       interface_descriptor.GetStackParameterCount(), CallDescriptor::kNoFlags,
2206       Operator::kNoProperties, StubCallMode::kCallWasmRuntimeStub);
2207   Node* call_target = mcgraph()->RelocatableIntPtrConstant(
2208       wasm::WasmCode::kWasmRethrow, RelocInfo::WASM_STUB_CALL);
2209   return gasm_->Call(call_descriptor, call_target, except_obj);
2210 }
2211 
2212 Node* WasmGraphBuilder::ExceptionTagEqual(Node* caught_tag,
2213                                           Node* expected_tag) {
2214   MachineOperatorBuilder* machine = mcgraph()->machine();
2215   return graph()->NewNode(machine->WordEqual(), caught_tag, expected_tag);
2216 }
2217 
2218 Node* WasmGraphBuilder::LoadExceptionTagFromTable(uint32_t exception_index) {
2219   Node* exceptions_table =
2220       LOAD_INSTANCE_FIELD(ExceptionsTable, MachineType::TaggedPointer());
2221   Node* tag = LOAD_FIXED_ARRAY_SLOT_PTR(exceptions_table, exception_index);
2222   return tag;
2223 }
2224 
2225 Node* WasmGraphBuilder::GetExceptionTag(Node* except_obj,
2226                                         wasm::WasmCodePosition position) {
2227   TrapIfTrue(wasm::kTrapBrOnExnNull, gasm_->WordEqual(RefNull(), except_obj),
2228              position);
2229   return CALL_BUILTIN(
2230       WasmGetOwnProperty, except_obj,
2231       LOAD_FULL_POINTER(
2232           BuildLoadIsolateRoot(),
2233           IsolateData::root_slot_offset(RootIndex::kwasm_exception_tag_symbol)),
2234       LOAD_INSTANCE_FIELD(NativeContext, MachineType::TaggedPointer()));
2235 }
2236 
2237 Node* WasmGraphBuilder::GetExceptionValues(Node* except_obj,
2238                                            const wasm::WasmException* exception,
2239                                            Vector<Node*> values) {
2240   Node* values_array = CALL_BUILTIN(
2241       WasmGetOwnProperty, except_obj,
2242       LOAD_FULL_POINTER(BuildLoadIsolateRoot(),
2243                         IsolateData::root_slot_offset(
2244                             RootIndex::kwasm_exception_values_symbol)),
2245       LOAD_INSTANCE_FIELD(NativeContext, MachineType::TaggedPointer()));
2246   uint32_t index = 0;
2247   const wasm::WasmExceptionSig* sig = exception->sig;
2248   DCHECK_EQ(sig->parameter_count(), values.size());
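  // Decode the values using the same layout produced by Throw() above: two
  // Smi halfwords per 32-bit value, four per 64-bit value, eight for s128,
  // and one tagged slot per reference value.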
2249   for (size_t i = 0; i < sig->parameter_count(); ++i) {
2250     Node* value;
2251     switch (sig->GetParam(i).kind()) {
2252       case wasm::ValueType::kI32:
2253         value = BuildDecodeException32BitValue(values_array, &index);
2254         break;
2255       case wasm::ValueType::kI64:
2256         value = BuildDecodeException64BitValue(values_array, &index);
2257         break;
2258       case wasm::ValueType::kF32: {
2259         value = Unop(wasm::kExprF32ReinterpretI32,
2260                      BuildDecodeException32BitValue(values_array, &index));
2261         break;
2262       }
2263       case wasm::ValueType::kF64: {
2264         value = Unop(wasm::kExprF64ReinterpretI64,
2265                      BuildDecodeException64BitValue(values_array, &index));
2266         break;
2267       }
2268       case wasm::ValueType::kS128:
2269         value = graph()->NewNode(
2270             mcgraph()->machine()->I32x4Splat(),
2271             BuildDecodeException32BitValue(values_array, &index));
2272         value = graph()->NewNode(
2273             mcgraph()->machine()->I32x4ReplaceLane(1), value,
2274             BuildDecodeException32BitValue(values_array, &index));
2275         value = graph()->NewNode(
2276             mcgraph()->machine()->I32x4ReplaceLane(2), value,
2277             BuildDecodeException32BitValue(values_array, &index));
2278         value = graph()->NewNode(
2279             mcgraph()->machine()->I32x4ReplaceLane(3), value,
2280             BuildDecodeException32BitValue(values_array, &index));
2281         break;
2282       case wasm::ValueType::kRef:
2283       case wasm::ValueType::kOptRef:
2284         value = LOAD_FIXED_ARRAY_SLOT_ANY(values_array, index);
2285         ++index;
2286         break;
2287       case wasm::ValueType::kRtt:  // TODO(7748): Implement.
2288       case wasm::ValueType::kI8:
2289       case wasm::ValueType::kI16:
2290       case wasm::ValueType::kStmt:
2291       case wasm::ValueType::kBottom:
2292         UNREACHABLE();
2293     }
2294     values[i] = value;
2295   }
2296   DCHECK_EQ(index, WasmExceptionPackage::GetEncodedSize(exception));
2297   return values_array;
2298 }
2299 
2300 Node* WasmGraphBuilder::BuildI32DivS(Node* left, Node* right,
2301                                      wasm::WasmCodePosition position) {
2302   MachineOperatorBuilder* m = mcgraph()->machine();
2303   ZeroCheck32(wasm::kTrapDivByZero, right, position);
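  // A divisor of -1 needs a separate check: kMinInt / -1 is not representable
  // in int32 and must trap; any other dividend falls through to the regular
  // division.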
2304   Node* before = control();
2305   Node* denom_is_m1;
2306   Node* denom_is_not_m1;
2307   BranchExpectFalse(
2308       graph()->NewNode(m->Word32Equal(), right, mcgraph()->Int32Constant(-1)),
2309       &denom_is_m1, &denom_is_not_m1);
2310   SetControl(denom_is_m1);
2311   TrapIfEq32(wasm::kTrapDivUnrepresentable, left, kMinInt, position);
2312   if (control() != denom_is_m1) {
2313     SetControl(graph()->NewNode(mcgraph()->common()->Merge(2), denom_is_not_m1,
2314                                 control()));
2315   } else {
2316     SetControl(before);
2317   }
2318   return graph()->NewNode(m->Int32Div(), left, right, control());
2319 }
2320 
2321 Node* WasmGraphBuilder::BuildI32RemS(Node* left, Node* right,
2322                                      wasm::WasmCodePosition position) {
2323   MachineOperatorBuilder* m = mcgraph()->machine();
2324 
2325   ZeroCheck32(wasm::kTrapRemByZero, right, position);
2326 
2327   Diamond d(
2328       graph(), mcgraph()->common(),
2329       graph()->NewNode(m->Word32Equal(), right, mcgraph()->Int32Constant(-1)),
2330       BranchHint::kFalse);
2331   d.Chain(control());
2332 
2333   return d.Phi(MachineRepresentation::kWord32, mcgraph()->Int32Constant(0),
2334                graph()->NewNode(m->Int32Mod(), left, right, d.if_false));
2335 }
2336 
2337 Node* WasmGraphBuilder::BuildI32DivU(Node* left, Node* right,
2338                                      wasm::WasmCodePosition position) {
2339   MachineOperatorBuilder* m = mcgraph()->machine();
2340   return graph()->NewNode(m->Uint32Div(), left, right,
2341                           ZeroCheck32(wasm::kTrapDivByZero, right, position));
2342 }
2343 
2344 Node* WasmGraphBuilder::BuildI32RemU(Node* left, Node* right,
2345                                      wasm::WasmCodePosition position) {
2346   MachineOperatorBuilder* m = mcgraph()->machine();
2347   return graph()->NewNode(m->Uint32Mod(), left, right,
2348                           ZeroCheck32(wasm::kTrapRemByZero, right, position));
2349 }
2350 
2351 Node* WasmGraphBuilder::BuildI32AsmjsDivS(Node* left, Node* right) {
2352   MachineOperatorBuilder* m = mcgraph()->machine();
2353 
2354   Int32Matcher mr(right);
2355   if (mr.HasResolvedValue()) {
2356     if (mr.ResolvedValue() == 0) {
2357       return mcgraph()->Int32Constant(0);
2358     } else if (mr.ResolvedValue() == -1) {
2359       // The result is the negation of the left input.
2360       return graph()->NewNode(m->Int32Sub(), mcgraph()->Int32Constant(0), left);
2361     }
2362     return graph()->NewNode(m->Int32Div(), left, right, control());
2363   }
2364 
2365   // asm.js semantics return 0 on divide or mod by zero.
2366   if (m->Int32DivIsSafe()) {
2367     // The hardware instruction does the right thing (e.g. arm).
2368     return graph()->NewNode(m->Int32Div(), left, right, graph()->start());
2369   }
2370 
2371   // Check denominator for zero.
2372   Diamond z(
2373       graph(), mcgraph()->common(),
2374       graph()->NewNode(m->Word32Equal(), right, mcgraph()->Int32Constant(0)),
2375       BranchHint::kFalse);
2376 
2377   // Check denominator for -1 (to avoid the kMinInt / -1 case).
2378   Diamond n(
2379       graph(), mcgraph()->common(),
2380       graph()->NewNode(m->Word32Equal(), right, mcgraph()->Int32Constant(-1)),
2381       BranchHint::kFalse);
2382 
2383   Node* div = graph()->NewNode(m->Int32Div(), left, right, z.if_false);
2384   Node* neg =
2385       graph()->NewNode(m->Int32Sub(), mcgraph()->Int32Constant(0), left);
2386 
2387   return n.Phi(
2388       MachineRepresentation::kWord32, neg,
2389       z.Phi(MachineRepresentation::kWord32, mcgraph()->Int32Constant(0), div));
2390 }
2391 
2392 Node* WasmGraphBuilder::BuildI32AsmjsRemS(Node* left, Node* right) {
2393   CommonOperatorBuilder* c = mcgraph()->common();
2394   MachineOperatorBuilder* m = mcgraph()->machine();
2395   Node* const zero = mcgraph()->Int32Constant(0);
2396 
2397   Int32Matcher mr(right);
2398   if (mr.HasResolvedValue()) {
2399     if (mr.ResolvedValue() == 0 || mr.ResolvedValue() == -1) {
2400       return zero;
2401     }
2402     return graph()->NewNode(m->Int32Mod(), left, right, control());
2403   }
2404 
2405   // General case for signed integer modulus, with optimization for (unknown)
2406   // power of 2 right hand side.
2407   //
2408   //   if 0 < right then
2409   //     msk = right - 1
2410   //     if right & msk != 0 then
2411   //       left % right
2412   //     else
2413   //       if left < 0 then
2414   //         -(-left & msk)
2415   //       else
2416   //         left & msk
2417   //   else
2418   //     if right < -1 then
2419   //       left % right
2420   //     else
2421   //       zero
2422   //
2423   // Note: We do not use the Diamond helper class here, because it really hurts
2424   // readability with nested diamonds.
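  // Example for the power-of-two path: left = -13, right = 8 gives msk = 7,
  // right & msk == 0 and left < 0, so the result is -((-left) & msk) =
  // -(13 & 7) = -5, matching -13 % 8 under JavaScript semantics.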
2425   Node* const minus_one = mcgraph()->Int32Constant(-1);
2426 
2427   const Operator* const merge_op = c->Merge(2);
2428   const Operator* const phi_op = c->Phi(MachineRepresentation::kWord32, 2);
2429 
2430   Node* check0 = graph()->NewNode(m->Int32LessThan(), zero, right);
2431   Node* branch0 =
2432       graph()->NewNode(c->Branch(BranchHint::kTrue), check0, graph()->start());
2433 
2434   Node* if_true0 = graph()->NewNode(c->IfTrue(), branch0);
2435   Node* true0;
2436   {
2437     Node* msk = graph()->NewNode(m->Int32Add(), right, minus_one);
2438 
2439     Node* check1 = graph()->NewNode(m->Word32And(), right, msk);
2440     Node* branch1 = graph()->NewNode(c->Branch(), check1, if_true0);
2441 
2442     Node* if_true1 = graph()->NewNode(c->IfTrue(), branch1);
2443     Node* true1 = graph()->NewNode(m->Int32Mod(), left, right, if_true1);
2444 
2445     Node* if_false1 = graph()->NewNode(c->IfFalse(), branch1);
2446     Node* false1;
2447     {
2448       Node* check2 = graph()->NewNode(m->Int32LessThan(), left, zero);
2449       Node* branch2 =
2450           graph()->NewNode(c->Branch(BranchHint::kFalse), check2, if_false1);
2451 
2452       Node* if_true2 = graph()->NewNode(c->IfTrue(), branch2);
2453       Node* true2 = graph()->NewNode(
2454           m->Int32Sub(), zero,
2455           graph()->NewNode(m->Word32And(),
2456                            graph()->NewNode(m->Int32Sub(), zero, left), msk));
2457 
2458       Node* if_false2 = graph()->NewNode(c->IfFalse(), branch2);
2459       Node* false2 = graph()->NewNode(m->Word32And(), left, msk);
2460 
2461       if_false1 = graph()->NewNode(merge_op, if_true2, if_false2);
2462       false1 = graph()->NewNode(phi_op, true2, false2, if_false1);
2463     }
2464 
2465     if_true0 = graph()->NewNode(merge_op, if_true1, if_false1);
2466     true0 = graph()->NewNode(phi_op, true1, false1, if_true0);
2467   }
2468 
2469   Node* if_false0 = graph()->NewNode(c->IfFalse(), branch0);
2470   Node* false0;
2471   {
2472     Node* check1 = graph()->NewNode(m->Int32LessThan(), right, minus_one);
2473     Node* branch1 =
2474         graph()->NewNode(c->Branch(BranchHint::kTrue), check1, if_false0);
2475 
2476     Node* if_true1 = graph()->NewNode(c->IfTrue(), branch1);
2477     Node* true1 = graph()->NewNode(m->Int32Mod(), left, right, if_true1);
2478 
2479     Node* if_false1 = graph()->NewNode(c->IfFalse(), branch1);
2480     Node* false1 = zero;
2481 
2482     if_false0 = graph()->NewNode(merge_op, if_true1, if_false1);
2483     false0 = graph()->NewNode(phi_op, true1, false1, if_false0);
2484   }
2485 
2486   Node* merge0 = graph()->NewNode(merge_op, if_true0, if_false0);
2487   return graph()->NewNode(phi_op, true0, false0, merge0);
2488 }
2489 
2490 Node* WasmGraphBuilder::BuildI32AsmjsDivU(Node* left, Node* right) {
2491   MachineOperatorBuilder* m = mcgraph()->machine();
2492   // asm.js semantics return 0 on divide or mod by zero.
2493   if (m->Uint32DivIsSafe()) {
2494     // The hardware instruction does the right thing (e.g. arm).
2495     return graph()->NewNode(m->Uint32Div(), left, right, graph()->start());
2496   }
2497 
2498   // Explicit check for x / 0.
2499   Diamond z(
2500       graph(), mcgraph()->common(),
2501       graph()->NewNode(m->Word32Equal(), right, mcgraph()->Int32Constant(0)),
2502       BranchHint::kFalse);
2503 
2504   return z.Phi(MachineRepresentation::kWord32, mcgraph()->Int32Constant(0),
2505                graph()->NewNode(mcgraph()->machine()->Uint32Div(), left, right,
2506                                 z.if_false));
2507 }
2508 
2509 Node* WasmGraphBuilder::BuildI32AsmjsRemU(Node* left, Node* right) {
2510   MachineOperatorBuilder* m = mcgraph()->machine();
2511   // asm.js semantics return 0 on divide or mod by zero.
2512   // Explicit check for x % 0.
2513   Diamond z(
2514       graph(), mcgraph()->common(),
2515       graph()->NewNode(m->Word32Equal(), right, mcgraph()->Int32Constant(0)),
2516       BranchHint::kFalse);
2517 
2518   Node* rem = graph()->NewNode(mcgraph()->machine()->Uint32Mod(), left, right,
2519                                z.if_false);
2520   return z.Phi(MachineRepresentation::kWord32, mcgraph()->Int32Constant(0),
2521                rem);
2522 }
2523 
2524 Node* WasmGraphBuilder::BuildI64DivS(Node* left, Node* right,
2525                                      wasm::WasmCodePosition position) {
2526   if (mcgraph()->machine()->Is32()) {
2527     return BuildDiv64Call(left, right, ExternalReference::wasm_int64_div(),
2528                           MachineType::Int64(), wasm::kTrapDivByZero, position);
2529   }
2530   ZeroCheck64(wasm::kTrapDivByZero, right, position);
2531   Node* before = control();
2532   Node* denom_is_m1;
2533   Node* denom_is_not_m1;
2534   BranchExpectFalse(graph()->NewNode(mcgraph()->machine()->Word64Equal(), right,
2535                                      mcgraph()->Int64Constant(-1)),
2536                     &denom_is_m1, &denom_is_not_m1);
2537   SetControl(denom_is_m1);
2538   TrapIfEq64(wasm::kTrapDivUnrepresentable, left,
2539              std::numeric_limits<int64_t>::min(), position);
2540   if (control() != denom_is_m1) {
2541     SetControl(graph()->NewNode(mcgraph()->common()->Merge(2), denom_is_not_m1,
2542                                 control()));
2543   } else {
2544     SetControl(before);
2545   }
2546   return graph()->NewNode(mcgraph()->machine()->Int64Div(), left, right,
2547                           control());
2548 }
2549 
2550 Node* WasmGraphBuilder::BuildI64RemS(Node* left, Node* right,
2551                                      wasm::WasmCodePosition position) {
2552   if (mcgraph()->machine()->Is32()) {
2553     return BuildDiv64Call(left, right, ExternalReference::wasm_int64_mod(),
2554                           MachineType::Int64(), wasm::kTrapRemByZero, position);
2555   }
2556   ZeroCheck64(wasm::kTrapRemByZero, right, position);
2557   Diamond d(mcgraph()->graph(), mcgraph()->common(),
2558             graph()->NewNode(mcgraph()->machine()->Word64Equal(), right,
2559                              mcgraph()->Int64Constant(-1)));
2560 
2561   d.Chain(control());
2562 
2563   Node* rem = graph()->NewNode(mcgraph()->machine()->Int64Mod(), left, right,
2564                                d.if_false);
2565 
2566   return d.Phi(MachineRepresentation::kWord64, mcgraph()->Int64Constant(0),
2567                rem);
2568 }
2569 
2570 Node* WasmGraphBuilder::BuildI64DivU(Node* left, Node* right,
2571                                      wasm::WasmCodePosition position) {
2572   if (mcgraph()->machine()->Is32()) {
2573     return BuildDiv64Call(left, right, ExternalReference::wasm_uint64_div(),
2574                           MachineType::Int64(), wasm::kTrapDivByZero, position);
2575   }
2576   return graph()->NewNode(mcgraph()->machine()->Uint64Div(), left, right,
2577                           ZeroCheck64(wasm::kTrapDivByZero, right, position));
2578 }
2579 Node* WasmGraphBuilder::BuildI64RemU(Node* left, Node* right,
2580                                      wasm::WasmCodePosition position) {
2581   if (mcgraph()->machine()->Is32()) {
2582     return BuildDiv64Call(left, right, ExternalReference::wasm_uint64_mod(),
2583                           MachineType::Int64(), wasm::kTrapRemByZero, position);
2584   }
2585   return graph()->NewNode(mcgraph()->machine()->Uint64Mod(), left, right,
2586                           ZeroCheck64(wasm::kTrapRemByZero, right, position));
2587 }
2588 
2589 Node* WasmGraphBuilder::GetBuiltinPointerTarget(int builtin_id) {
2590   static_assert(std::is_same<Smi, BuiltinPtr>(), "BuiltinPtr must be Smi");
2591   return graph()->NewNode(mcgraph()->common()->NumberConstant(builtin_id));
2592 }
2593 
2594 Node* WasmGraphBuilder::BuildDiv64Call(Node* left, Node* right,
2595                                        ExternalReference ref,
2596                                        MachineType result_type,
2597                                        wasm::TrapReason trap_zero,
2598                                        wasm::WasmCodePosition position) {
2599   Node* stack_slot =
2600       StoreArgsInStackSlot({{MachineRepresentation::kWord64, left},
2601                             {MachineRepresentation::kWord64, right}});
2602 
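  // Calling convention for the C helper: both i64 operands are spilled into
  // one stack slot, the helper writes the i64 result back into that slot, and
  // its i32 return value is a status code (0 indicates division by zero, -1
  // indicates an unrepresentable result), checked below before reloading the
  // slot.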
2603   MachineType sig_types[] = {MachineType::Int32(), MachineType::Pointer()};
2604   MachineSignature sig(1, 1, sig_types);
2605 
2606   Node* function = graph()->NewNode(mcgraph()->common()->ExternalConstant(ref));
2607   Node* call = BuildCCall(&sig, function, stack_slot);
2608 
2609   ZeroCheck32(trap_zero, call, position);
2610   TrapIfEq32(wasm::kTrapDivUnrepresentable, call, -1, position);
2611   return SetEffect(graph()->NewNode(mcgraph()->machine()->Load(result_type),
2612                                     stack_slot, mcgraph()->Int32Constant(0),
2613                                     effect(), control()));
2614 }
2615 
2616 template <typename... Args>
2617 Node* WasmGraphBuilder::BuildCCall(MachineSignature* sig, Node* function,
2618                                    Args... args) {
2619   DCHECK_LE(sig->return_count(), 1);
2620   DCHECK_EQ(sizeof...(args), sig->parameter_count());
2621   Node* const call_args[] = {function, args..., effect(), control()};
2622 
2623   auto call_descriptor =
2624       Linkage::GetSimplifiedCDescriptor(mcgraph()->zone(), sig);
2625 
2626   const Operator* op = mcgraph()->common()->Call(call_descriptor);
2627   return SetEffect(graph()->NewNode(op, arraysize(call_args), call_args));
2628 }
2629 
2630 Node* WasmGraphBuilder::BuildCallNode(const wasm::FunctionSig* sig,
2631                                       Vector<Node*> args,
2632                                       wasm::WasmCodePosition position,
2633                                       Node* instance_node, const Operator* op) {
2634   if (instance_node == nullptr) {
2635     DCHECK_NOT_NULL(instance_node_);
2636     instance_node = instance_node_.get();
2637   }
2638   needs_stack_check_ = true;
2639   const size_t params = sig->parameter_count();
2640   const size_t extra = 3;  // instance_node, effect, and control.
2641   const size_t count = 1 + params + extra;
2642 
2643   // Allocate a buffer with space for the extra inputs.
2644   base::SmallVector<Node*, 16 + extra> inputs(count);
2645   DCHECK_EQ(1 + params, args.size());
2646 
2647   // Make room for the instance_node parameter at index 1, just after code.
2648   inputs[0] = args[0];  // code
2649   inputs[1] = instance_node;
2650   if (params > 0) memcpy(&inputs[2], &args[1], params * sizeof(Node*));
2651 
2652   // Add effect and control inputs.
2653   inputs[params + 2] = effect();
2654   inputs[params + 3] = control();
2655 
2656   Node* call = graph()->NewNode(op, static_cast<int>(count), inputs.begin());
2657   // Return calls have no effect output. Other calls are the new effect node.
2658   if (op->EffectOutputCount() > 0) SetEffect(call);
2659   DCHECK(position == wasm::kNoCodePosition || position > 0);
2660   if (position > 0) SetSourcePosition(call, position);
2661 
2662   return call;
2663 }
2664 
2665 Node* WasmGraphBuilder::BuildWasmCall(const wasm::FunctionSig* sig,
2666                                       Vector<Node*> args, Vector<Node*> rets,
2667                                       wasm::WasmCodePosition position,
2668                                       Node* instance_node,
2669                                       UseRetpoline use_retpoline) {
2670   CallDescriptor* call_descriptor =
2671       GetWasmCallDescriptor(mcgraph()->zone(), sig, use_retpoline);
2672   const Operator* op = mcgraph()->common()->Call(call_descriptor);
2673   Node* call = BuildCallNode(sig, args, position, instance_node, op);
2674 
2675   size_t ret_count = sig->return_count();
2676   if (ret_count == 0) return call;  // No return value.
2677 
2678   DCHECK_EQ(ret_count, rets.size());
2679   if (ret_count == 1) {
2680     // Only a single return value.
2681     rets[0] = call;
2682   } else {
2683     // Create projections for all return values.
2684     for (size_t i = 0; i < ret_count; i++) {
2685       rets[i] = graph()->NewNode(mcgraph()->common()->Projection(i), call,
2686                                  graph()->start());
2687     }
2688   }
2689   return call;
2690 }
2691 
2692 Node* WasmGraphBuilder::BuildWasmReturnCall(const wasm::FunctionSig* sig,
2693                                             Vector<Node*> args,
2694                                             wasm::WasmCodePosition position,
2695                                             Node* instance_node,
2696                                             UseRetpoline use_retpoline) {
2697   CallDescriptor* call_descriptor =
2698       GetWasmCallDescriptor(mcgraph()->zone(), sig, use_retpoline);
2699   const Operator* op = mcgraph()->common()->TailCall(call_descriptor);
2700   Node* call = BuildCallNode(sig, args, position, instance_node, op);
2701 
2702   MergeControlToEnd(mcgraph(), call);
2703 
2704   return call;
2705 }
2706 
2707 Node* WasmGraphBuilder::BuildImportCall(const wasm::FunctionSig* sig,
2708                                         Vector<Node*> args, Vector<Node*> rets,
2709                                         wasm::WasmCodePosition position,
2710                                         int func_index,
2711                                         IsReturnCall continuation) {
2712   // Load the imported function refs array from the instance.
2713   Node* imported_function_refs =
2714       LOAD_INSTANCE_FIELD(ImportedFunctionRefs, MachineType::TaggedPointer());
2715   Node* ref_node =
2716       LOAD_FIXED_ARRAY_SLOT_PTR(imported_function_refs, func_index);
2717 
2718   // Load the target from the imported_targets array at a known offset.
2719   Node* imported_targets =
2720       LOAD_INSTANCE_FIELD(ImportedFunctionTargets, MachineType::Pointer());
2721   Node* target_node = SetEffect(graph()->NewNode(
2722       mcgraph()->machine()->Load(MachineType::Pointer()), imported_targets,
2723       mcgraph()->Int32Constant(func_index * kSystemPointerSize), effect(),
2724       control()));
2725   args[0] = target_node;
2726   const UseRetpoline use_retpoline =
2727       untrusted_code_mitigations_ ? kRetpoline : kNoRetpoline;
2728 
2729   switch (continuation) {
2730     case kCallContinues:
2731       return BuildWasmCall(sig, args, rets, position, ref_node, use_retpoline);
2732     case kReturnCall:
2733       DCHECK(rets.empty());
2734       return BuildWasmReturnCall(sig, args, position, ref_node, use_retpoline);
2735   }
2736 }
2737 
2738 Node* WasmGraphBuilder::BuildImportCall(const wasm::FunctionSig* sig,
2739                                         Vector<Node*> args, Vector<Node*> rets,
2740                                         wasm::WasmCodePosition position,
2741                                         Node* func_index,
2742                                         IsReturnCall continuation) {
2743   // Load the imported function refs array from the instance.
2744   Node* imported_function_refs =
2745       LOAD_INSTANCE_FIELD(ImportedFunctionRefs, MachineType::TaggedPointer());
2746   // Access fixed array at {header_size - tag + func_index * kTaggedSize}.
2747   Node* imported_instances_data = graph()->NewNode(
2748       mcgraph()->machine()->IntAdd(), imported_function_refs,
2749       mcgraph()->IntPtrConstant(
2750           wasm::ObjectAccess::ElementOffsetInTaggedFixedArray(0)));
2751   Node* func_index_times_tagged_size = graph()->NewNode(
2752       mcgraph()->machine()->IntMul(), Uint32ToUintptr(func_index),
2753       mcgraph()->Int32Constant(kTaggedSize));
2754   Node* ref_node =
2755       gasm_->Load(MachineType::TaggedPointer(), imported_instances_data,
2756                   func_index_times_tagged_size);
2757 
2758   // Load the target from the imported_targets array at the offset of
2759   // {func_index}.
2760   Node* func_index_times_pointersize;
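  // Scale the same index for the pointer-sized imported_targets array. When
  // pointers are twice the tagged size (pointer compression), reuse the
  // tagged-scaled value and double it instead of emitting another multiply.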
2761   if (kSystemPointerSize == kTaggedSize) {
2762     func_index_times_pointersize = func_index_times_tagged_size;
2763 
2764   } else {
2765     DCHECK_EQ(kSystemPointerSize, kTaggedSize + kTaggedSize);
2766     func_index_times_pointersize = graph()->NewNode(
2767         mcgraph()->machine()->Int32Add(), func_index_times_tagged_size,
2768         func_index_times_tagged_size);
2769   }
2770   Node* imported_targets =
2771       LOAD_INSTANCE_FIELD(ImportedFunctionTargets, MachineType::Pointer());
2772   Node* target_node = SetEffect(graph()->NewNode(
2773       mcgraph()->machine()->Load(MachineType::Pointer()), imported_targets,
2774       func_index_times_pointersize, effect(), control()));
2775   args[0] = target_node;
2776   const UseRetpoline use_retpoline =
2777       untrusted_code_mitigations_ ? kRetpoline : kNoRetpoline;
2778 
2779   switch (continuation) {
2780     case kCallContinues:
2781       return BuildWasmCall(sig, args, rets, position, ref_node, use_retpoline);
2782     case kReturnCall:
2783       DCHECK(rets.empty());
2784       return BuildWasmReturnCall(sig, args, position, ref_node, use_retpoline);
2785   }
2786 }
2787 
2788 Node* WasmGraphBuilder::CallDirect(uint32_t index, Vector<Node*> args,
2789                                    Vector<Node*> rets,
2790                                    wasm::WasmCodePosition position) {
2791   DCHECK_NULL(args[0]);
2792   const wasm::FunctionSig* sig = env_->module->functions[index].sig;
2793 
2794   if (env_ && index < env_->module->num_imported_functions) {
2795     // Call to an imported function.
2796     return BuildImportCall(sig, args, rets, position, index, kCallContinues);
2797   }
2798 
2799   // A direct call to a wasm function defined in this module.
2800   // Just encode the function index. This will be patched at instantiation.
2801   Address code = static_cast<Address>(index);
2802   args[0] = mcgraph()->RelocatableIntPtrConstant(code, RelocInfo::WASM_CALL);
2803 
2804   return BuildWasmCall(sig, args, rets, position, nullptr, kNoRetpoline);
2805 }
2806 
2807 Node* WasmGraphBuilder::CallIndirect(uint32_t table_index, uint32_t sig_index,
2808                                      Vector<Node*> args, Vector<Node*> rets,
2809                                      wasm::WasmCodePosition position) {
2810   return BuildIndirectCall(table_index, sig_index, args, rets, position,
2811                            kCallContinues);
2812 }
2813 
2814 void WasmGraphBuilder::LoadIndirectFunctionTable(uint32_t table_index,
2815                                                  Node** ift_size,
2816                                                  Node** ift_sig_ids,
2817                                                  Node** ift_targets,
2818                                                  Node** ift_instances) {
2819   if (table_index == 0) {
2820     *ift_size =
2821         LOAD_INSTANCE_FIELD(IndirectFunctionTableSize, MachineType::Uint32());
2822     *ift_sig_ids = LOAD_INSTANCE_FIELD(IndirectFunctionTableSigIds,
2823                                        MachineType::Pointer());
2824     *ift_targets = LOAD_INSTANCE_FIELD(IndirectFunctionTableTargets,
2825                                        MachineType::Pointer());
2826     *ift_instances = LOAD_INSTANCE_FIELD(IndirectFunctionTableRefs,
2827                                          MachineType::TaggedPointer());
2828     return;
2829   }
2830 
2831   Node* ift_tables =
2832       LOAD_INSTANCE_FIELD(IndirectFunctionTables, MachineType::TaggedPointer());
2833   Node* ift_table = LOAD_FIXED_ARRAY_SLOT_ANY(ift_tables, table_index);
2834 
2835   *ift_size = gasm_->Load(
2836       MachineType::Int32(), ift_table,
2837       wasm::ObjectAccess::ToTagged(WasmIndirectFunctionTable::kSizeOffset));
2838 
2839   *ift_sig_ids = gasm_->Load(
2840       MachineType::Pointer(), ift_table,
2841       wasm::ObjectAccess::ToTagged(WasmIndirectFunctionTable::kSigIdsOffset));
2842 
2843   *ift_targets = gasm_->Load(
2844       MachineType::Pointer(), ift_table,
2845       wasm::ObjectAccess::ToTagged(WasmIndirectFunctionTable::kTargetsOffset));
2846 
2847   *ift_instances = gasm_->Load(
2848       MachineType::TaggedPointer(), ift_table,
2849       wasm::ObjectAccess::ToTagged(WasmIndirectFunctionTable::kRefsOffset));
2850 }
2851 
2852 Node* WasmGraphBuilder::BuildIndirectCall(uint32_t table_index,
2853                                           uint32_t sig_index,
2854                                           Vector<Node*> args,
2855                                           Vector<Node*> rets,
2856                                           wasm::WasmCodePosition position,
2857                                           IsReturnCall continuation) {
2858   DCHECK_NOT_NULL(args[0]);
2859   DCHECK_NOT_NULL(env_);
2860 
2861   // First we have to load the table.
2862   Node* ift_size;
2863   Node* ift_sig_ids;
2864   Node* ift_targets;
2865   Node* ift_instances;
2866   LoadIndirectFunctionTable(table_index, &ift_size, &ift_sig_ids, &ift_targets,
2867                             &ift_instances);
2868 
2869   const wasm::FunctionSig* sig = env_->module->signature(sig_index);
2870 
2871   MachineOperatorBuilder* machine = mcgraph()->machine();
2872   Node* key = args[0];
2873 
2874   // Bounds check against the table size.
2875   Node* in_bounds = graph()->NewNode(machine->Uint32LessThan(), key, ift_size);
2876   TrapIfFalse(wasm::kTrapTableOutOfBounds, in_bounds, position);
2877 
2878   // Mask the key to prevent SSCA (speculative side-channel attacks).
2879   if (untrusted_code_mitigations_) {
2880     // mask = ((key - size) & ~key) >> 31
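    // For an in-bounds key (key < size, both well below 2^31 for wasm
    // tables), (key - size) is negative and ~key still has its sign bit set,
    // so the arithmetic shift produces an all-ones mask and the key passes
    // through unchanged. On a mispredicted out-of-bounds path the mask is 0
    // and the key is clamped to 0.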
2881     Node* neg_key =
2882         graph()->NewNode(machine->Word32Xor(), key, Int32Constant(-1));
2883     Node* masked_diff = graph()->NewNode(
2884         machine->Word32And(),
2885         graph()->NewNode(machine->Int32Sub(), key, ift_size), neg_key);
2886     Node* mask =
2887         graph()->NewNode(machine->Word32Sar(), masked_diff, Int32Constant(31));
2888     key = graph()->NewNode(machine->Word32And(), key, mask);
2889   }
2890 
2891   Node* int32_scaled_key = Uint32ToUintptr(
2892       graph()->NewNode(machine->Word32Shl(), key, Int32Constant(2)));
2893 
2894   Node* loaded_sig = SetEffect(
2895       graph()->NewNode(machine->Load(MachineType::Int32()), ift_sig_ids,
2896                        int32_scaled_key, effect(), control()));
2897   // Check that the dynamic type of the function is a subtype of its static
2898   // (table) type. Currently, the only subtyping between function types is
2899   // $t <: funcref for all $t: function_type.
2900   // TODO(7748): Expand this with function subtyping.
2901   const bool needs_typechecking =
2902       env_->module->tables[table_index].type == wasm::kWasmFuncRef;
2903   if (needs_typechecking) {
2904     int32_t expected_sig_id = env_->module->canonicalized_type_ids[sig_index];
2905     Node* sig_match = graph()->NewNode(machine->Word32Equal(), loaded_sig,
2906                                        Int32Constant(expected_sig_id));
2907     TrapIfFalse(wasm::kTrapFuncSigMismatch, sig_match, position);
2908   } else {
2909     // We still have to check that the entry is initialized.
2910     // TODO(9495): Skip this check for non-nullable tables when they are
2911     // allowed.
2912     Node* function_is_null =
2913         graph()->NewNode(machine->Word32Equal(), loaded_sig, Int32Constant(-1));
2914     TrapIfTrue(wasm::kTrapNullDereference, function_is_null, position);
2915   }
2916 
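  // {int32_scaled_key} indexes the 4-byte sig-id array. The refs FixedArray
  // and the pointer-sized targets array need the key rescaled by kTaggedSize
  // and kSystemPointerSize respectively; both are derived below by doubling
  // rather than multiplying whenever the element size is twice as large.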
2917   Node* tagged_scaled_key;
2918   if (kTaggedSize == kInt32Size) {
2919     tagged_scaled_key = int32_scaled_key;
2920   } else {
2921     DCHECK_EQ(kTaggedSize, kInt32Size * 2);
2922     tagged_scaled_key = graph()->NewNode(machine->Int32Add(), int32_scaled_key,
2923                                          int32_scaled_key);
2924   }
2925 
2926   Node* target_instance = gasm_->Load(
2927       MachineType::TaggedPointer(),
2928       graph()->NewNode(machine->IntAdd(), ift_instances, tagged_scaled_key),
2929       wasm::ObjectAccess::ElementOffsetInTaggedFixedArray(0));
2930 
2931   Node* intptr_scaled_key;
2932   if (kSystemPointerSize == kTaggedSize) {
2933     intptr_scaled_key = tagged_scaled_key;
2934   } else {
2935     DCHECK_EQ(kSystemPointerSize, kTaggedSize + kTaggedSize);
2936     intptr_scaled_key = graph()->NewNode(machine->Int32Add(), tagged_scaled_key,
2937                                          tagged_scaled_key);
2938   }
2939 
2940   Node* target = SetEffect(
2941       graph()->NewNode(machine->Load(MachineType::Pointer()), ift_targets,
2942                        intptr_scaled_key, effect(), control()));
2943 
2944   args[0] = target;
2945   const UseRetpoline use_retpoline =
2946       untrusted_code_mitigations_ ? kRetpoline : kNoRetpoline;
2947 
2948   switch (continuation) {
2949     case kCallContinues:
2950       return BuildWasmCall(sig, args, rets, position, target_instance,
2951                            use_retpoline);
2952     case kReturnCall:
2953       return BuildWasmReturnCall(sig, args, position, target_instance,
2954                                  use_retpoline);
2955   }
2956 }
2957 
2958 Node* WasmGraphBuilder::BuildLoadFunctionDataFromJSFunction(Node* js_function) {
2959   Node* shared = gasm_->Load(
2960       MachineType::AnyTagged(), js_function,
2961       wasm::ObjectAccess::SharedFunctionInfoOffsetInTaggedJSFunction());
2962   return gasm_->Load(MachineType::AnyTagged(), shared,
2963                      SharedFunctionInfo::kFunctionDataOffset - kHeapObjectTag);
2964 }
2965 
2966 Node* WasmGraphBuilder::BuildLoadJumpTableOffsetFromExportedFunctionData(
2967     Node* function_data) {
2968   Node* jump_table_offset_smi = gasm_->Load(
2969       MachineType::TaggedSigned(), function_data,
2970       WasmExportedFunctionData::kJumpTableOffsetOffset - kHeapObjectTag);
2971   return BuildChangeSmiToIntPtr(jump_table_offset_smi);
2972 }
2973 
2974 Node* WasmGraphBuilder::BuildLoadFunctionIndexFromExportedFunctionData(
2975     Node* function_data) {
2976   Node* function_index_smi = gasm_->Load(
2977       MachineType::TaggedSigned(), function_data,
2978       WasmExportedFunctionData::kFunctionIndexOffset - kHeapObjectTag);
2979   Node* function_index = BuildChangeSmiToInt32(function_index_smi);
2980   return function_index;
2981 }
2982 
2983 Node* HasInstanceType(WasmGraphAssembler* gasm, Node* object,
2984                       InstanceType type) {
2985   Node* map = gasm->Load(MachineType::TaggedPointer(), object,
2986                          wasm::ObjectAccess::ToTagged(HeapObject::kMapOffset));
2987   Node* instance_type =
2988       gasm->Load(MachineType::Uint16(), map,
2989                  wasm::ObjectAccess::ToTagged(Map::kInstanceTypeOffset));
2990   return gasm->Word32Equal(instance_type, gasm->Int32Constant(type));
2991 }
2992 
2993 Node* WasmGraphBuilder::BuildCallRef(uint32_t sig_index, Vector<Node*> args,
2994                                      Vector<Node*> rets,
2995                                      CheckForNull null_check,
2996                                      IsReturnCall continuation,
2997                                      wasm::WasmCodePosition position) {
2998   if (null_check == kWithNullCheck) {
2999     TrapIfTrue(wasm::kTrapNullDereference, gasm_->WordEqual(args[0], RefNull()),
3000                position);
3001   }
3002 
3003   const wasm::FunctionSig* sig = env_->module->signature(sig_index);
3004 
3005   Node* function_data = BuildLoadFunctionDataFromJSFunction(args[0]);
3006 
3007   Node* is_js_function =
3008       HasInstanceType(gasm_.get(), function_data, WASM_JS_FUNCTION_DATA_TYPE);
3009 
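  // Dispatch on the type of {function_data}: WasmJSFunctionData means the
  // callee is a WasmJSFunction wrapping an arbitrary JS callable; any other
  // type here is a WasmExportedFunctionData for a function defined in wasm.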
3010   auto js_label = gasm_->MakeLabel();
3011   auto end_label = gasm_->MakeLabel(MachineRepresentation::kTaggedPointer,
3012                                     MachineRepresentation::kTaggedPointer);
3013 
3014   gasm_->GotoIf(is_js_function, &js_label);
3015 
3016   {
3017     // Call to a WasmExportedFunction.
3018     // Load instance object corresponding to module where callee is defined.
3019     Node* callee_instance =
3020         gasm_->Load(MachineType::TaggedPointer(), function_data,
3021                     wasm::ObjectAccess::ToTagged(
3022                         WasmExportedFunctionData::kInstanceOffset));
3023 
3024     Node* function_index =
3025         gasm_->Load(MachineType::TaggedPointer(), function_data,
3026                     wasm::ObjectAccess::ToTagged(
3027                         WasmExportedFunctionData::kFunctionIndexOffset));
3028 
3029     auto imported_label = gasm_->MakeLabel();
3030 
3031     // Check whether the callee is locally defined in or imported into its module.
3032     Node* imported_function_refs =
3033         gasm_->Load(MachineType::TaggedPointer(), callee_instance,
3034                     wasm::ObjectAccess::ToTagged(
3035                         WasmInstanceObject::kImportedFunctionRefsOffset));
3036     Node* imported_functions_num =
3037         gasm_->Load(MachineType::TaggedPointer(), imported_function_refs,
3038                     wasm::ObjectAccess::ToTagged(FixedArray::kLengthOffset));
3039     gasm_->GotoIf(gasm_->SmiLessThan(function_index, imported_functions_num),
3040                   &imported_label);
3041     {
3042       // Function locally defined in module.
3043       Node* jump_table_start =
3044           gasm_->Load(MachineType::Pointer(), callee_instance,
3045                       wasm::ObjectAccess::ToTagged(
3046                           WasmInstanceObject::kJumpTableStartOffset));
3047       Node* jump_table_offset =
3048           BuildLoadJumpTableOffsetFromExportedFunctionData(function_data);
3049       Node* jump_table_slot =
3050           gasm_->IntAdd(jump_table_start, jump_table_offset);
3051 
3052       gasm_->Goto(&end_label, jump_table_slot,
3053                   callee_instance /* Unused, dummy value */);
3054     }
3055 
3056     {
3057       // Function imported to module.
3058       gasm_->Bind(&imported_label);
3059 
3060       Node* imported_instance = gasm_->Load(
3061           MachineType::TaggedPointer(), imported_function_refs,
3062           gasm_->Int32Add(
3063               gasm_->Int32Mul(BuildChangeSmiToInt32(function_index),
3064                               gasm_->Int32Constant(kTaggedSize)),
3065               gasm_->Int32Constant(FixedArray::kHeaderSize - kHeapObjectTag)));
3066 
3067       Node* imported_function_targets =
3068           gasm_->Load(MachineType::Pointer(), callee_instance,
3069                       wasm::ObjectAccess::ToTagged(
3070                           WasmInstanceObject::kImportedFunctionTargetsOffset));
3071 
3072       Node* target_node =
3073           gasm_->Load(MachineType::Pointer(), imported_function_targets,
3074                       gasm_->IntMul(BuildChangeSmiToIntPtr(function_index),
3075                                     gasm_->IntPtrConstant(kSystemPointerSize)));
3076 
3077       gasm_->Goto(&end_label, target_node, imported_instance);
3078     }
3079   }
3080 
3081   {
3082     // Call to a WasmJSFunction. The call target is
3083     // function_data->wasm_to_js_wrapper_code()->instruction_start().
3084     // The instance_node is the pair
3085     // (current WasmInstanceObject, function_data->callable()).
3086     gasm_->Bind(&js_label);
3087 
3088     Node* wrapper_code =
3089         gasm_->Load(MachineType::TaggedPointer(), function_data,
3090                     wasm::ObjectAccess::ToTagged(
3091                         WasmJSFunctionData::kWasmToJsWrapperCodeOffset));
3092     Node* call_target = gasm_->IntAdd(
3093         wrapper_code,
3094         gasm_->IntPtrConstant(wasm::ObjectAccess::ToTagged(Code::kHeaderSize)));
3095 
3096     Node* callable = gasm_->Load(
3097         MachineType::TaggedPointer(), function_data,
3098         wasm::ObjectAccess::ToTagged(WasmJSFunctionData::kCallableOffset));
3099     // TODO(manoskouk): Find an elegant way to avoid allocating this pair for
3100     // every call.
3101     Node* function_instance_node = CALL_BUILTIN(
3102         WasmAllocatePair, instance_node_.get(), callable,
3103         LOAD_INSTANCE_FIELD(NativeContext, MachineType::TaggedPointer()));
3104 
3105     gasm_->Goto(&end_label, call_target, function_instance_node);
3106   }
3107 
3108   gasm_->Bind(&end_label);
3109 
3110   args[0] = end_label.PhiAt(0);
3111   Node* instance_node = end_label.PhiAt(1);
3112 
3113   const UseRetpoline use_retpoline =
3114       untrusted_code_mitigations_ ? kRetpoline : kNoRetpoline;
3115 
3116   Node* call = continuation == kCallContinues
3117                    ? BuildWasmCall(sig, args, rets, position, instance_node,
3118                                    use_retpoline)
3119                    : BuildWasmReturnCall(sig, args, position, instance_node,
3120                                          use_retpoline);
3121   return call;
3122 }
3123 
3124 Node* WasmGraphBuilder::CallRef(uint32_t sig_index, Vector<Node*> args,
3125                                 Vector<Node*> rets,
3126                                 WasmGraphBuilder::CheckForNull null_check,
3127                                 wasm::WasmCodePosition position) {
3128   return BuildCallRef(sig_index, args, rets, null_check,
3129                       IsReturnCall::kCallContinues, position);
3130 }
3131 
3132 Node* WasmGraphBuilder::ReturnCallRef(uint32_t sig_index, Vector<Node*> args,
3133                                       WasmGraphBuilder::CheckForNull null_check,
3134                                       wasm::WasmCodePosition position) {
3135   return BuildCallRef(sig_index, args, {}, null_check,
3136                       IsReturnCall::kReturnCall, position);
3137 }
3138 
3139 Node* WasmGraphBuilder::ReturnCall(uint32_t index, Vector<Node*> args,
3140                                    wasm::WasmCodePosition position) {
3141   DCHECK_NULL(args[0]);
3142   const wasm::FunctionSig* sig = env_->module->functions[index].sig;
3143 
3144   if (env_ && index < env_->module->num_imported_functions) {
3145     // Return Call to an imported function.
3146     return BuildImportCall(sig, args, {}, position, index, kReturnCall);
3147   }
3148 
3149   // A direct tail call to a wasm function defined in this module.
3150   // Just encode the function index. This will be patched during code
3151   // generation.
3152   Address code = static_cast<Address>(index);
3153   args[0] = mcgraph()->RelocatableIntPtrConstant(code, RelocInfo::WASM_CALL);
3154 
3155   return BuildWasmReturnCall(sig, args, position, nullptr, kNoRetpoline);
3156 }
3157 
3158 Node* WasmGraphBuilder::ReturnCallIndirect(uint32_t table_index,
3159                                            uint32_t sig_index,
3160                                            Vector<Node*> args,
3161                                            wasm::WasmCodePosition position) {
3162   return BuildIndirectCall(table_index, sig_index, args, {}, position,
3163                            kReturnCall);
3164 }
3165 
3166 Node* WasmGraphBuilder::BrOnNull(Node* ref_object, Node** null_node,
3167                                  Node** non_null_node) {
3168   BranchExpectFalse(gasm_->WordEqual(ref_object, RefNull()), null_node,
3169                     non_null_node);
3170   // Return value is not used, but we need it for compatibility
3171   // with graph-builder-interface.
3172   return nullptr;
3173 }
3174 
3175 Node* WasmGraphBuilder::BuildI32Rol(Node* left, Node* right) {
3176   // Implement Rol via Ror, since TurboFan does not have a Rol opcode.
3177   // TODO(weiliang): support Word32Rol opcode in TurboFan.
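  // rol(x, n) == ror(x, 32 - n) with shift counts taken mod 32, e.g.
  // rol(x, 5) == ror(x, 27), so a constant count folds into a single Ror.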
3178   Int32Matcher m(right);
3179   if (m.HasResolvedValue()) {
3180     return Binop(wasm::kExprI32Ror, left,
3181                  mcgraph()->Int32Constant(32 - (m.ResolvedValue() & 0x1F)));
3182   } else {
3183     return Binop(wasm::kExprI32Ror, left,
3184                  Binop(wasm::kExprI32Sub, mcgraph()->Int32Constant(32), right));
3185   }
3186 }
3187 
3188 Node* WasmGraphBuilder::BuildI64Rol(Node* left, Node* right) {
3189   // Implement Rol via Ror, since TurboFan does not have a Rol opcode.
3190   // TODO(weiliang): support Word64Rol opcode in TurboFan.
3191   Int64Matcher m(right);
3192   Node* inv_right =
3193       m.HasResolvedValue()
3194           ? mcgraph()->Int64Constant(64 - (m.ResolvedValue() & 0x3F))
3195           : Binop(wasm::kExprI64Sub, mcgraph()->Int64Constant(64), right);
3196   return Binop(wasm::kExprI64Ror, left, inv_right);
3197 }
3198 
3199 Node* WasmGraphBuilder::Invert(Node* node) {
3200   return Unop(wasm::kExprI32Eqz, node);
3201 }
3202 
3203 Node* WasmGraphBuilder::BuildTruncateIntPtrToInt32(Node* value) {
3204   return mcgraph()->machine()->Is64() ? gasm_->TruncateInt64ToInt32(value)
3205                                       : value;
3206 }
3207 
3208 Node* WasmGraphBuilder::BuildChangeInt32ToIntPtr(Node* value) {
3209   return mcgraph()->machine()->Is64() ? gasm_->ChangeInt32ToInt64(value)
3210                                       : value;
3211 }
3212 
3213 Node* WasmGraphBuilder::BuildChangeInt32ToSmi(Node* value) {
3214   // With pointer compression, only the lower 32 bits are used.
3215   return COMPRESS_POINTERS_BOOL
3216              ? gasm_->Word32Shl(value, BuildSmiShiftBitsConstant32())
3217              : gasm_->WordShl(BuildChangeInt32ToIntPtr(value),
3218                               BuildSmiShiftBitsConstant());
3219 }
3220 
3221 Node* WasmGraphBuilder::BuildChangeUint31ToSmi(Node* value) {
3222   return COMPRESS_POINTERS_BOOL
3223              ? gasm_->Word32Shl(value, BuildSmiShiftBitsConstant32())
3224              : graph()->NewNode(mcgraph()->machine()->WordShl(),
3225                                 Uint32ToUintptr(value),
3226                                 BuildSmiShiftBitsConstant());
3227 }
3228 
3229 Node* WasmGraphBuilder::BuildSmiShiftBitsConstant() {
3230   return gasm_->IntPtrConstant(kSmiShiftSize + kSmiTagSize);
3231 }
3232 
3233 Node* WasmGraphBuilder::BuildSmiShiftBitsConstant32() {
3234   return gasm_->Int32Constant(kSmiShiftSize + kSmiTagSize);
3235 }
3236 
3237 Node* WasmGraphBuilder::BuildChangeSmiToInt32(Node* value) {
3238   return COMPRESS_POINTERS_BOOL
3239              ? gasm_->Word32Sar(gasm_->TruncateInt64ToInt32(value),
3240                                 BuildSmiShiftBitsConstant32())
3241              : BuildTruncateIntPtrToInt32(BuildChangeSmiToIntPtr(value));
3242 }
3243 
3244 Node* WasmGraphBuilder::BuildChangeSmiToIntPtr(Node* value) {
3245   if (COMPRESS_POINTERS_BOOL) {
3246     value = BuildChangeSmiToInt32(value);
3247     return BuildChangeInt32ToIntPtr(value);
3248   }
3249   return graph()->NewNode(mcgraph()->machine()->WordSar(), value,
3250                           BuildSmiShiftBitsConstant());
3251 }
3252 
3253 Node* WasmGraphBuilder::BuildConvertUint32ToSmiWithSaturation(Node* value,
3254                                                               uint32_t maxval) {
3255   DCHECK(Smi::IsValid(maxval));
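  // Values above {maxval} saturate to {maxval}; the in-range case is the
  // expected (BranchHint::kTrue) path.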
3256   Node* max = mcgraph()->Uint32Constant(maxval);
3257   Node* check = graph()->NewNode(mcgraph()->machine()->Uint32LessThanOrEqual(),
3258                                  value, max);
3259   Node* valsmi = BuildChangeUint31ToSmi(value);
3260   Node* maxsmi = graph()->NewNode(mcgraph()->common()->NumberConstant(maxval));
3261   Diamond d(graph(), mcgraph()->common(), check, BranchHint::kTrue);
3262   d.Chain(control());
3263   return d.Phi(MachineRepresentation::kTagged, valsmi, maxsmi);
3264 }
3265 
3266 void WasmGraphBuilder::InitInstanceCache(
3267     WasmInstanceCacheNodes* instance_cache) {
3268   DCHECK_NOT_NULL(instance_node_);
3269 
3270   // Load the memory start.
3271   instance_cache->mem_start =
3272       LOAD_INSTANCE_FIELD(MemoryStart, MachineType::UintPtr());
3273 
3274   // Load the memory size.
3275   instance_cache->mem_size =
3276       LOAD_INSTANCE_FIELD(MemorySize, MachineType::UintPtr());
3277 
3278   if (untrusted_code_mitigations_) {
3279     // Load the memory mask.
3280     instance_cache->mem_mask =
3281         LOAD_INSTANCE_FIELD(MemoryMask, MachineType::UintPtr());
3282   } else {
3283     // Explicitly set to nullptr to ensure a SEGV when we try to use it.
3284     instance_cache->mem_mask = nullptr;
3285   }
3286 }
3287 
3288 void WasmGraphBuilder::PrepareInstanceCacheForLoop(
3289     WasmInstanceCacheNodes* instance_cache, Node* control) {
3290 #define INTRODUCE_PHI(field, rep)                                            \
3291   instance_cache->field = graph()->NewNode(mcgraph()->common()->Phi(rep, 1), \
3292                                            instance_cache->field, control);
3293 
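  // Each cached field becomes a one-input phi on the loop header; further
  // inputs are appended later as loop back-edges are merged in (see
  // {MergeInstanceCacheInto} and {CreateOrMergeIntoPhi}).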
3294   INTRODUCE_PHI(mem_start, MachineType::PointerRepresentation());
3295   INTRODUCE_PHI(mem_size, MachineType::PointerRepresentation());
3296   if (untrusted_code_mitigations_) {
3297     INTRODUCE_PHI(mem_mask, MachineType::PointerRepresentation());
3298   }
3299 
3300 #undef INTRODUCE_PHI
3301 }
3302 
3303 void WasmGraphBuilder::NewInstanceCacheMerge(WasmInstanceCacheNodes* to,
3304                                              WasmInstanceCacheNodes* from,
3305                                              Node* merge) {
3306 #define INTRODUCE_PHI(field, rep)                                            \
3307   if (to->field != from->field) {                                            \
3308     Node* vals[] = {to->field, from->field, merge};                          \
3309     to->field = graph()->NewNode(mcgraph()->common()->Phi(rep, 2), 3, vals); \
3310   }
3311 
3312   INTRODUCE_PHI(mem_start, MachineType::PointerRepresentation());
3313   INTRODUCE_PHI(mem_size, MachineRepresentation::kWord32);
3314   if (untrusted_code_mitigations_) {
3315     INTRODUCE_PHI(mem_mask, MachineRepresentation::kWord32);
3316   }
3317 
3318 #undef INTRODUCE_PHI
3319 }
3320 
3321 void WasmGraphBuilder::MergeInstanceCacheInto(WasmInstanceCacheNodes* to,
3322                                               WasmInstanceCacheNodes* from,
3323                                               Node* merge) {
3324   to->mem_size = CreateOrMergeIntoPhi(MachineType::PointerRepresentation(),
3325                                       merge, to->mem_size, from->mem_size);
3326   to->mem_start = CreateOrMergeIntoPhi(MachineType::PointerRepresentation(),
3327                                        merge, to->mem_start, from->mem_start);
3328   if (untrusted_code_mitigations_) {
3329     to->mem_mask = CreateOrMergeIntoPhi(MachineType::PointerRepresentation(),
3330                                         merge, to->mem_mask, from->mem_mask);
3331   }
3332 }
3333 
3334 Node* WasmGraphBuilder::CreateOrMergeIntoPhi(MachineRepresentation rep,
3335                                              Node* merge, Node* tnode,
3336                                              Node* fnode) {
3337   if (IsPhiWithMerge(tnode, merge)) {
3338     AppendToPhi(tnode, fnode);
3339   } else if (tnode != fnode) {
3340     // Note that it is not safe to use {Buffer} here since this method is used
3341     // via {CheckForException} while the {Buffer} is in use by another method.
3342     uint32_t count = merge->InputCount();
3343     // + 1 for the merge node.
3344     base::SmallVector<Node*, 9> inputs(count + 1);
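    // Build a fresh phi: every existing predecessor of {merge} sees {tnode},
    // the newly merged predecessor sees {fnode}, and the final input is the
    // merge node itself.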
3345     for (uint32_t j = 0; j < count - 1; j++) inputs[j] = tnode;
3346     inputs[count - 1] = fnode;
3347     inputs[count] = merge;
3348     tnode = graph()->NewNode(mcgraph()->common()->Phi(rep, count), count + 1,
3349                              inputs.begin());
3350   }
3351   return tnode;
3352 }
3353 
3354 Node* WasmGraphBuilder::CreateOrMergeIntoEffectPhi(Node* merge, Node* tnode,
3355                                                    Node* fnode) {
3356   if (IsPhiWithMerge(tnode, merge)) {
3357     AppendToPhi(tnode, fnode);
3358   } else if (tnode != fnode) {
3359     // Note that it is not safe to use {Buffer} here since this method is used
3360     // via {CheckForException} while the {Buffer} is in use by another method.
3361     uint32_t count = merge->InputCount();
3362     // + 1 for the merge node.
3363     base::SmallVector<Node*, 9> inputs(count + 1);
3364     for (uint32_t j = 0; j < count - 1; j++) {
3365       inputs[j] = tnode;
3366     }
3367     inputs[count - 1] = fnode;
3368     inputs[count] = merge;
3369     tnode = graph()->NewNode(mcgraph()->common()->EffectPhi(count), count + 1,
3370                              inputs.begin());
3371   }
3372   return tnode;
3373 }
3374 
3375 Node* WasmGraphBuilder::effect() { return gasm_->effect(); }
3376 
3377 Node* WasmGraphBuilder::control() { return gasm_->control(); }
3378 
3379 Node* WasmGraphBuilder::SetEffect(Node* node) {
3380   SetEffectControl(node, control());
3381   return node;
3382 }
3383 
3384 Node* WasmGraphBuilder::SetControl(Node* node) {
3385   SetEffectControl(effect(), node);
3386   return node;
3387 }
3388 
3389 void WasmGraphBuilder::SetEffectControl(Node* effect, Node* control) {
3390   gasm_->InitializeEffectControl(effect, control);
3391 }
3392 
3393 Node* WasmGraphBuilder::GetImportedMutableGlobals() {
3394   if (imported_mutable_globals_ == nullptr) {
3395     // Load imported_mutable_globals_ from the instance object at runtime.
3396     imported_mutable_globals_ = graph()->NewNode(
3397         mcgraph()->machine()->Load(MachineType::UintPtr()),
3398         instance_node_.get(),
3399         mcgraph()->Int32Constant(
3400             WASM_INSTANCE_OBJECT_OFFSET(ImportedMutableGlobals)),
3401         graph()->start(), graph()->start());
3402   }
3403   return imported_mutable_globals_.get();
3404 }
3405 
3406 void WasmGraphBuilder::GetGlobalBaseAndOffset(MachineType mem_type,
3407                                               const wasm::WasmGlobal& global,
3408                                               Node** base_node,
3409                                               Node** offset_node) {
3410   DCHECK_NOT_NULL(instance_node_);
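  // For imported mutable globals the instance only stores the address of the
  // global (in the ImportedMutableGlobals array, indexed by global.index), so
  // load that address as the base and use offset 0. All other globals live in
  // the instance's globals area at a static offset from {globals_start}.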
3411   if (global.mutability && global.imported) {
3412     *base_node = SetEffect(graph()->NewNode(
3413         mcgraph()->machine()->Load(MachineType::UintPtr()),
3414         GetImportedMutableGlobals(),
3415         mcgraph()->Int32Constant(global.index * sizeof(Address)), effect(),
3416         control()));
3417     *offset_node = mcgraph()->Int32Constant(0);
3418   } else {
3419     if (globals_start_ == nullptr) {
3420       // Load globals_start from the instance object at runtime.
3421       // TODO(wasm): we currently generate only one load of {globals_start}
3422       // per graph, which means it can be placed anywhere by the
3423       // scheduler. This is legal because the globals_start should never change.
3424       // However, in some cases (e.g. if the instance object is already in a
3425       // register), it is slightly more efficient to reload this value from the
3426       // instance object. Since this depends on register allocation, it is not
3427       // possible to express in the graph, and would essentially constitute a
3428       // "mem2reg" optimization in TurboFan.
3429       globals_start_ = graph()->NewNode(
3430           mcgraph()->machine()->Load(MachineType::UintPtr()),
3431           instance_node_.get(),
3432           mcgraph()->Int32Constant(WASM_INSTANCE_OBJECT_OFFSET(GlobalsStart)),
3433           graph()->start(), graph()->start());
3434     }
3435     *base_node = globals_start_.get();
3436     *offset_node = mcgraph()->Int32Constant(global.offset);
3437 
3438     if (mem_type == MachineType::Simd128() && global.offset != 0) {
3439       // TODO(titzer,bbudge): code generation for SIMD memory offsets is broken.
3440       *base_node = graph()->NewNode(mcgraph()->machine()->IntAdd(), *base_node,
3441                                     *offset_node);
3442       *offset_node = mcgraph()->Int32Constant(0);
3443     }
3444   }
3445 }
3446 
3447 void WasmGraphBuilder::GetBaseAndOffsetForImportedMutableExternRefGlobal(
3448     const wasm::WasmGlobal& global, Node** base, Node** offset) {
3449   // Load the base from the ImportedMutableGlobalsBuffer of the instance.
3450   Node* buffers = LOAD_INSTANCE_FIELD(ImportedMutableGlobalsBuffers,
3451                                       MachineType::TaggedPointer());
3452   *base = LOAD_FIXED_ARRAY_SLOT_ANY(buffers, global.index);
3453 
3454   // For the offset we need the index of the global in the buffer, and then
3455   // calculate the actual offset from the index. Load the index from the
3456   // ImportedMutableGlobals array of the instance.
3457   Node* index = SetEffect(
3458       graph()->NewNode(mcgraph()->machine()->Load(MachineType::UintPtr()),
3459                        GetImportedMutableGlobals(),
3460                        mcgraph()->Int32Constant(global.index * sizeof(Address)),
3461                        effect(), control()));
3462 
3463   // From the index, calculate the actual offset in the FixedArray. This
3464   // is kHeaderSize + (index * kTaggedSize). kHeaderSize can be acquired with
3465   // wasm::ObjectAccess::ElementOffsetInTaggedFixedArray(0).
3466   Node* index_times_tagged_size =
3467       graph()->NewNode(mcgraph()->machine()->IntMul(), Uint32ToUintptr(index),
3468                        mcgraph()->Int32Constant(kTaggedSize));
3469   *offset = graph()->NewNode(
3470       mcgraph()->machine()->IntAdd(), index_times_tagged_size,
3471       mcgraph()->IntPtrConstant(
3472           wasm::ObjectAccess::ElementOffsetInTaggedFixedArray(0)));
3473 }
3474 
3475 Node* WasmGraphBuilder::MemBuffer(uintptr_t offset) {
3476   DCHECK_NOT_NULL(instance_cache_);
3477   Node* mem_start = instance_cache_->mem_start;
3478   DCHECK_NOT_NULL(mem_start);
3479   if (offset == 0) return mem_start;
3480   return gasm_->IntAdd(mem_start, gasm_->UintPtrConstant(offset));
3481 }
3482 
3483 Node* WasmGraphBuilder::CurrentMemoryPages() {
3484   // CurrentMemoryPages cannot be called from asm.js.
3485   DCHECK_EQ(wasm::kWasmOrigin, env_->module->origin);
3486   DCHECK_NOT_NULL(instance_cache_);
3487   Node* mem_size = instance_cache_->mem_size;
3488   DCHECK_NOT_NULL(mem_size);
3489   Node* result =
3490       graph()->NewNode(mcgraph()->machine()->WordShr(), mem_size,
3491                        mcgraph()->Int32Constant(wasm::kWasmPageSizeLog2));
3492   result = BuildTruncateIntPtrToInt32(result);
3493   return result;
3494 }
3495 
3496 // Only call this function for code which is not reused across instantiations,
3497 // as we do not patch the embedded js_context.
3498 Node* WasmGraphBuilder::BuildCallToRuntimeWithContext(Runtime::FunctionId f,
3499                                                       Node* js_context,
3500                                                       Node** parameters,
3501                                                       int parameter_count) {
3502   const Runtime::Function* fun = Runtime::FunctionForId(f);
3503   auto call_descriptor = Linkage::GetRuntimeCallDescriptor(
3504       mcgraph()->zone(), f, fun->nargs, Operator::kNoProperties,
3505       CallDescriptor::kNoFlags);
3506   // The CEntryStub is loaded from the IsolateRoot so that generated code is
3507   // Isolate independent. At the moment this is only done for CEntryStub(1).
3508   Node* isolate_root = BuildLoadIsolateRoot();
3509   DCHECK_EQ(1, fun->result_size);
3510   auto centry_id =
3511       Builtins::kCEntry_Return1_DontSaveFPRegs_ArgvOnStack_NoBuiltinExit;
3512   Node* centry_stub = LOAD_FULL_POINTER(
3513       isolate_root, IsolateData::builtin_slot_offset(centry_id));
3514   // TODO(titzer): allow arbitrary number of runtime arguments
3515   // At the moment we only allow 5 parameters. If more parameters are needed,
3516   // increase this constant accordingly.
3517   static const int kMaxParams = 5;
3518   DCHECK_GE(kMaxParams, parameter_count);
3519   Node* inputs[kMaxParams + 6];
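  // Input layout: centry_stub, up to kMaxParams parameters, then the external
  // reference, arity, context, effect, and control (hence the "+ 6").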
3520   int count = 0;
3521   inputs[count++] = centry_stub;
3522   for (int i = 0; i < parameter_count; i++) {
3523     inputs[count++] = parameters[i];
3524   }
3525   inputs[count++] =
3526       mcgraph()->ExternalConstant(ExternalReference::Create(f));  // ref
3527   inputs[count++] = mcgraph()->Int32Constant(fun->nargs);         // arity
3528   inputs[count++] = js_context;                                   // js_context
3529   inputs[count++] = effect();
3530   inputs[count++] = control();
3531 
3532   Node* call = mcgraph()->graph()->NewNode(
3533       mcgraph()->common()->Call(call_descriptor), count, inputs);
3534   SetEffect(call);
3535   return call;
3536 }
3537 
3538 Node* WasmGraphBuilder::BuildCallToRuntime(Runtime::FunctionId f,
3539                                            Node** parameters,
3540                                            int parameter_count) {
3541   return BuildCallToRuntimeWithContext(f, NoContextConstant(), parameters,
3542                                        parameter_count);
3543 }
3544 
3545 Node* WasmGraphBuilder::GlobalGet(uint32_t index) {
3546   const wasm::WasmGlobal& global = env_->module->globals[index];
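  // Reference-type globals are kept in tagged buffers (either the imported
  // mutable globals buffer or the instance's TaggedGlobalsBuffer FixedArray);
  // numeric globals are read from untagged memory via base + offset.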
3547   if (global.type.is_reference_type()) {
3548     if (global.mutability && global.imported) {
3549       Node* base = nullptr;
3550       Node* offset = nullptr;
3551       GetBaseAndOffsetForImportedMutableExternRefGlobal(global, &base, &offset);
3552       return gasm_->Load(MachineType::AnyTagged(), base, offset);
3553     }
3554     Node* globals_buffer =
3555         LOAD_INSTANCE_FIELD(TaggedGlobalsBuffer, MachineType::TaggedPointer());
3556     return LOAD_FIXED_ARRAY_SLOT_ANY(globals_buffer, global.offset);
3557   }
3558 
3559   MachineType mem_type = global.type.machine_type();
3560   if (mem_type.representation() == MachineRepresentation::kSimd128) {
3561     has_simd_ = true;
3562   }
3563   Node* base = nullptr;
3564   Node* offset = nullptr;
3565   GetGlobalBaseAndOffset(mem_type, global, &base, &offset);
3566   Node* result = SetEffect(graph()->NewNode(
3567       mcgraph()->machine()->Load(mem_type), base, offset, effect(), control()));
3568 #if defined(V8_TARGET_BIG_ENDIAN)
3569   result = BuildChangeEndiannessLoad(result, mem_type, global.type);
3570 #endif
3571   return result;
3572 }
3573 
3574 Node* WasmGraphBuilder::GlobalSet(uint32_t index, Node* val) {
3575   const wasm::WasmGlobal& global = env_->module->globals[index];
3576   if (global.type.is_reference_type()) {
3577     if (global.mutability && global.imported) {
3578       Node* base = nullptr;
3579       Node* offset = nullptr;
3580       GetBaseAndOffsetForImportedMutableExternRefGlobal(global, &base, &offset);
3581 
3582       return STORE_RAW_NODE_OFFSET(
3583           base, offset, val, MachineRepresentation::kTagged, kFullWriteBarrier);
3584     }
3585     Node* globals_buffer =
3586         LOAD_INSTANCE_FIELD(TaggedGlobalsBuffer, MachineType::TaggedPointer());
3587     return STORE_FIXED_ARRAY_SLOT_ANY(globals_buffer, global.offset, val);
3588   }
3589 
3590   MachineType mem_type = global.type.machine_type();
3591   if (mem_type.representation() == MachineRepresentation::kSimd128) {
3592     has_simd_ = true;
3593   }
3594   Node* base = nullptr;
3595   Node* offset = nullptr;
3596   GetGlobalBaseAndOffset(mem_type, global, &base, &offset);
3597   const Operator* op = mcgraph()->machine()->Store(
3598       StoreRepresentation(mem_type.representation(), kNoWriteBarrier));
3599 #if defined(V8_TARGET_BIG_ENDIAN)
3600   val = BuildChangeEndiannessStore(val, mem_type.representation(), global.type);
3601 #endif
3602   return SetEffect(
3603       graph()->NewNode(op, base, offset, val, effect(), control()));
3604 }
3605 
3606 Node* WasmGraphBuilder::TableGet(uint32_t table_index, Node* index,
3607                                  wasm::WasmCodePosition position) {
3608   auto call_descriptor = GetBuiltinCallDescriptor<WasmTableGetDescriptor>(
3609       this, StubCallMode::kCallWasmRuntimeStub);
3610   // A direct call to a wasm runtime stub defined in this module.
3611   // Just encode the stub index. This will be patched at relocation.
3612   Node* call_target = mcgraph()->RelocatableIntPtrConstant(
3613       wasm::WasmCode::kWasmTableGet, RelocInfo::WASM_STUB_CALL);
3614 
3615   return SetEffectControl(graph()->NewNode(
3616       mcgraph()->common()->Call(call_descriptor), call_target,
3617       mcgraph()->IntPtrConstant(table_index), index, effect(), control()));
3618 }
3619 
3620 Node* WasmGraphBuilder::TableSet(uint32_t table_index, Node* index, Node* val,
3621                                  wasm::WasmCodePosition position) {
3622   auto call_descriptor = GetBuiltinCallDescriptor<WasmTableSetDescriptor>(
3623       this, StubCallMode::kCallWasmRuntimeStub);
3624   // A direct call to a wasm runtime stub defined in this module.
3625   // Just encode the stub index. This will be patched at relocation.
3626   Node* call_target = mcgraph()->RelocatableIntPtrConstant(
3627       wasm::WasmCode::kWasmTableSet, RelocInfo::WASM_STUB_CALL);
3628 
3629   return gasm_->Call(call_descriptor, call_target,
3630                      gasm_->IntPtrConstant(table_index), index, val);
3631 }
3632 
3633 Node* WasmGraphBuilder::CheckBoundsAndAlignment(
3634     int8_t access_size, Node* index, uint64_t offset,
3635     wasm::WasmCodePosition position) {
3636   // Atomic operations need bounds checks until the backend can emit protected
3637   // loads.
3638   index =
3639       BoundsCheckMem(access_size, index, offset, position, kNeedsBoundsCheck);
3640 
3641   const uintptr_t align_mask = access_size - 1;
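  // Atomic access sizes are powers of two, so {align_mask} has exactly the
  // low bits that must be clear in the effective address; e.g. an 8-byte
  // access gives align_mask == 7.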
3642 
3643   // {offset} is validated to be within uintptr_t range in {BoundsCheckMem}.
3644   uintptr_t capped_offset = static_cast<uintptr_t>(offset);
3645   // Don't emit an alignment check if the index is a constant.
3646   // TODO(wasm): a constant match is also done above in {BoundsCheckMem}.
3647   UintPtrMatcher match(index);
3648   if (match.HasResolvedValue()) {
3649     uintptr_t effective_offset = match.ResolvedValue() + capped_offset;
3650     if ((effective_offset & align_mask) != 0) {
3651       // Statically known to be unaligned; trap.
3652       TrapIfEq32(wasm::kTrapUnalignedAccess, Int32Constant(0), 0, position);
3653     }
3654     return index;
3655   }
3656 
3657   // Unlike regular memory accesses, atomic memory accesses should trap if
3658   // the effective offset is misaligned.
3659   // TODO(wasm): this addition is redundant with one inserted by {MemBuffer}.
3660   Node* effective_offset = gasm_->IntAdd(MemBuffer(capped_offset), index);
3661 
3662   Node* cond =
3663       gasm_->WordAnd(effective_offset, gasm_->IntPtrConstant(align_mask));
3664   TrapIfFalse(wasm::kTrapUnalignedAccess,
3665               gasm_->Word32Equal(cond, gasm_->Int32Constant(0)), position);
3666   return index;
3667 }
3668 
3669 // Insert code to bounds check a memory access if necessary. Return the
3670 // bounds-checked index, which is guaranteed to have (the equivalent of)
3671 // {uintptr_t} representation.
3672 Node* WasmGraphBuilder::BoundsCheckMem(uint8_t access_size, Node* index,
3673                                        uint64_t offset,
3674                                        wasm::WasmCodePosition position,
3675                                        EnforceBoundsCheck enforce_check) {
3676   DCHECK_LE(1, access_size);
3677   index = Uint32ToUintptr(index);
3678   if (!FLAG_wasm_bounds_checks) return index;
3679 
3680   if (use_trap_handler() && enforce_check == kCanOmitBoundsCheck) {
3681     return index;
3682   }
3683 
3684   // If the offset does not fit in a uintptr_t, this can never succeed on this
3685   // machine.
3686   if (offset > std::numeric_limits<uintptr_t>::max() ||
3687       !base::IsInBounds<uintptr_t>(offset, access_size,
3688                                    env_->max_memory_size)) {
3689     // The access will be out of bounds, even for the largest memory.
3690     TrapIfEq32(wasm::kTrapMemOutOfBounds, Int32Constant(0), 0, position);
3691     return gasm_->UintPtrConstant(0);
3692   }
3693   uintptr_t end_offset = offset + access_size - 1u;
3694   Node* end_offset_node = mcgraph_->UintPtrConstant(end_offset);
3695 
3696   // The accessed memory is [index + offset, index + end_offset].
3697   // Check that the last read byte (at {index + end_offset}) is in bounds.
3698   // 1) Check that {end_offset < mem_size}. This also ensures that we can safely
3699   //    compute {effective_size} as {mem_size - end_offset}.
3700   //    {effective_size} is >= 1 if condition 1) holds.
3701   // 2) Check that {index + end_offset < mem_size} by
3702   //    - computing {effective_size} as {mem_size - end_offset} and
3703   //    - checking that {index < effective_size}.
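  // Worked example (illustrative only): with {offset} == 8 and
  // {access_size} == 4, {end_offset} == 11. For a 64 KiB memory
  // ({mem_size} == 0x10000), check 1) passes, {effective_size} ==
  // 0x10000 - 11 == 0xFFF5, and the access is in bounds iff {index} < 0xFFF5.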
3704 
3705   Node* mem_size = instance_cache_->mem_size;
3706   if (end_offset >= env_->min_memory_size) {
3707     // The end offset is larger than the smallest memory.
3708     // Dynamically check the end offset against the dynamic memory size.
3709     Node* cond = gasm_->UintLessThan(end_offset_node, mem_size);
3710     TrapIfFalse(wasm::kTrapMemOutOfBounds, cond, position);
3711   } else {
3712     // The end offset is smaller than the smallest memory, so only one check is
3713     // required. Check to see if the index is also a constant.
3714     UintPtrMatcher match(index);
3715     if (match.HasResolvedValue()) {
3716       uintptr_t index_val = match.ResolvedValue();
3717       if (index_val < env_->min_memory_size - end_offset) {
3718         // The input index is a constant and everything is statically within
3719         // bounds of the smallest possible memory.
3720         return index;
3721       }
3722     }
3723   }
3724 
3725   // This produces a positive number, since {end_offset < min_size <= mem_size}.
3726   Node* effective_size = gasm_->IntSub(mem_size, end_offset_node);
3727 
3728   // Introduce the actual bounds check.
3729   Node* cond = gasm_->UintLessThan(index, effective_size);
3730   TrapIfFalse(wasm::kTrapMemOutOfBounds, cond, position);
3731 
3732   if (untrusted_code_mitigations_) {
3733     // In the fallthrough case, condition the index with the memory mask.
3734     Node* mem_mask = instance_cache_->mem_mask;
3735     DCHECK_NOT_NULL(mem_mask);
3736     index = gasm_->WordAnd(index, mem_mask);
3737   }
3738   return index;
3739 }
3740 
3741 Node* WasmGraphBuilder::BoundsCheckRange(Node* start, Node** size, Node* max,
3742                                          wasm::WasmCodePosition position) {
3743   auto m = mcgraph()->machine();
3744   // The region we are trying to access is [start, start+size). If
3745   // {start} > {max}, none of this region is valid, so we trap. Otherwise,
3746   // there may be a subset of the region that is valid. {max - start} is the
3747   // maximum valid size, so if {max - start < size}, then the region is
3748   // partially out-of-bounds.
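  // Illustrative example (not generated code): with {max} == 100,
  // {start} == 40 and {*size} == 80, no trap is taken (100 >= 40),
  // {sub} == 60 < 80, so {*size} is clamped to 60 and the returned {fail}
  // condition evaluates to true at runtime.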
3749   TrapIfTrue(wasm::kTrapMemOutOfBounds,
3750              graph()->NewNode(m->Uint32LessThan(), max, start), position);
3751   Node* sub = graph()->NewNode(m->Int32Sub(), max, start);
3752   Node* fail = graph()->NewNode(m->Uint32LessThan(), sub, *size);
3753   Diamond d(graph(), mcgraph()->common(), fail, BranchHint::kFalse);
3754   d.Chain(control());
3755   *size = d.Phi(MachineRepresentation::kWord32, sub, *size);
3756   return fail;
3757 }
3758 
3759 Node* WasmGraphBuilder::BoundsCheckMemRange(Node** start, Node** size,
3760                                             wasm::WasmCodePosition position) {
3761   // TODO(binji): Support trap handler and no bounds check mode.
3762   Node* fail =
3763       BoundsCheckRange(*start, size, instance_cache_->mem_size, position);
3764   *start = graph()->NewNode(mcgraph()->machine()->IntAdd(), MemBuffer(0),
3765                             Uint32ToUintptr(*start));
3766   return fail;
3767 }
3768 
3769 const Operator* WasmGraphBuilder::GetSafeLoadOperator(int offset,
3770                                                       wasm::ValueType type) {
3771   int alignment = offset % type.element_size_bytes();
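  // For example (illustrative only): an i64 field at offset 12 yields
  // {alignment} == 12 % 8 == 4, so on architectures without unaligned 64-bit
  // loads the UnalignedLoad operator is selected below.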
3772   MachineType mach_type = type.machine_type();
3773   if (COMPRESS_POINTERS_BOOL && mach_type.IsTagged()) {
3774   // We are loading a tagged value from an off-heap location, so we need to
3775   // load it as a full word; otherwise we will not be able to decompress it.
3776     mach_type = MachineType::Pointer();
3777   }
3778   if (alignment == 0 || mcgraph()->machine()->UnalignedLoadSupported(
3779                             type.machine_representation())) {
3780     return mcgraph()->machine()->Load(mach_type);
3781   }
3782   return mcgraph()->machine()->UnalignedLoad(mach_type);
3783 }
3784 
3785 const Operator* WasmGraphBuilder::GetSafeStoreOperator(int offset,
3786                                                        wasm::ValueType type) {
3787   int alignment = offset % type.element_size_bytes();
3788   MachineRepresentation rep = type.machine_representation();
3789   if (COMPRESS_POINTERS_BOOL && IsAnyTagged(rep)) {
3790   // We are storing a tagged value to an off-heap location, so we need to
3791   // store it as a full word; otherwise we will not be able to decompress it.
3792     rep = MachineType::PointerRepresentation();
3793   }
3794   if (alignment == 0 || mcgraph()->machine()->UnalignedStoreSupported(rep)) {
3795     StoreRepresentation store_rep(rep, WriteBarrierKind::kNoWriteBarrier);
3796     return mcgraph()->machine()->Store(store_rep);
3797   }
3798   UnalignedStoreRepresentation store_rep(rep);
3799   return mcgraph()->machine()->UnalignedStore(store_rep);
3800 }
3801 
3802 Node* WasmGraphBuilder::TraceFunctionEntry(wasm::WasmCodePosition position) {
3803   Node* call = BuildCallToRuntime(Runtime::kWasmTraceEnter, nullptr, 0);
3804   SetSourcePosition(call, position);
3805   return call;
3806 }
3807 
3808 Node* WasmGraphBuilder::TraceFunctionExit(Vector<Node*> vals,
3809                                           wasm::WasmCodePosition position) {
3810   Node* info = gasm_->IntPtrConstant(0);
3811   size_t num_returns = vals.size();
3812   if (num_returns == 1) {
3813     wasm::ValueType return_type = sig_->GetReturn(0);
3814     MachineRepresentation rep = return_type.machine_representation();
3815     int size = ElementSizeInBytes(rep);
3816     info = gasm_->StackSlot(size, size);
3817 
3818     gasm_->Store(StoreRepresentation(rep, kNoWriteBarrier), info,
3819                  gasm_->Int32Constant(0), vals[0]);
3820   }
3821 
3822   Node* call = BuildCallToRuntime(Runtime::kWasmTraceExit, &info, 1);
3823   SetSourcePosition(call, position);
3824   return call;
3825 }
3826 
3827 Node* WasmGraphBuilder::TraceMemoryOperation(bool is_store,
3828                                              MachineRepresentation rep,
3829                                              Node* index, uintptr_t offset,
3830                                              wasm::WasmCodePosition position) {
3831   int kAlign = 4;  // Ensure that the LSB is 0, such that this looks like a Smi.
3832   TNode<RawPtrT> info =
3833       gasm_->StackSlot(sizeof(wasm::MemoryTracingInfo), kAlign);
3834 
3835   Node* effective_offset = gasm_->IntAdd(gasm_->UintPtrConstant(offset), index);
3836   auto store = [&](int field_offset, MachineRepresentation rep, Node* data) {
3837     gasm_->Store(StoreRepresentation(rep, kNoWriteBarrier), info,
3838                  gasm_->Int32Constant(field_offset), data);
3839   };
3840   // Store effective_offset, is_store, and mem_rep.
3841   store(offsetof(wasm::MemoryTracingInfo, offset),
3842         MachineType::PointerRepresentation(), effective_offset);
3843   store(offsetof(wasm::MemoryTracingInfo, is_store),
3844         MachineRepresentation::kWord8,
3845         mcgraph()->Int32Constant(is_store ? 1 : 0));
3846   store(offsetof(wasm::MemoryTracingInfo, mem_rep),
3847         MachineRepresentation::kWord8,
3848         mcgraph()->Int32Constant(static_cast<int>(rep)));
3849 
3850   Node* args[] = {info};
3851   Node* call =
3852       BuildCallToRuntime(Runtime::kWasmTraceMemory, args, arraysize(args));
3853   SetSourcePosition(call, position);
3854   return call;
3855 }
3856 
3857 namespace {
3858 LoadTransformation GetLoadTransformation(
3859     MachineType memtype, wasm::LoadTransformationKind transform) {
3860   switch (transform) {
3861     case wasm::LoadTransformationKind::kSplat: {
3862       if (memtype == MachineType::Int8()) {
3863         return LoadTransformation::kS128Load8Splat;
3864       } else if (memtype == MachineType::Int16()) {
3865         return LoadTransformation::kS128Load16Splat;
3866       } else if (memtype == MachineType::Int32()) {
3867         return LoadTransformation::kS128Load32Splat;
3868       } else if (memtype == MachineType::Int64()) {
3869         return LoadTransformation::kS128Load64Splat;
3870       }
3871       break;
3872     }
3873     case wasm::LoadTransformationKind::kExtend: {
3874       if (memtype == MachineType::Int8()) {
3875         return LoadTransformation::kS128Load8x8S;
3876       } else if (memtype == MachineType::Uint8()) {
3877         return LoadTransformation::kS128Load8x8U;
3878       } else if (memtype == MachineType::Int16()) {
3879         return LoadTransformation::kS128Load16x4S;
3880       } else if (memtype == MachineType::Uint16()) {
3881         return LoadTransformation::kS128Load16x4U;
3882       } else if (memtype == MachineType::Int32()) {
3883         return LoadTransformation::kS128Load32x2S;
3884       } else if (memtype == MachineType::Uint32()) {
3885         return LoadTransformation::kS128Load32x2U;
3886       }
3887       break;
3888     }
3889     case wasm::LoadTransformationKind::kZeroExtend: {
3890       if (memtype == MachineType::Int32()) {
3891         return LoadTransformation::kS128Load32Zero;
3892       } else if (memtype == MachineType::Int64()) {
3893         return LoadTransformation::kS128Load64Zero;
3894       }
3895       break;
3896     }
3897   }
3898   UNREACHABLE();
3899 }
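// For example, a {kExtend} load of MachineType::Int16() maps to
// kS128Load16x4S, which loads four 16-bit values and sign-extends them into
// the 32-bit lanes of the result.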
3900 
3901 MemoryAccessKind GetMemoryAccessKind(MachineGraph* mcgraph, MachineType memtype,
3902                                      bool use_trap_handler) {
3903   if (memtype.representation() == MachineRepresentation::kWord8 ||
3904       mcgraph->machine()->UnalignedLoadSupported(memtype.representation())) {
3905     if (use_trap_handler) {
3906       return MemoryAccessKind::kProtected;
3907     }
3908     return MemoryAccessKind::kNormal;
3909   }
3910   // TODO(eholk): Support unaligned loads with trap handlers.
3911   DCHECK(!use_trap_handler);
3912   return MemoryAccessKind::kUnaligned;
3913 }
3914 }  // namespace
3915 
3916 // The S390 simulator does not execute BE code, hence we also need to check
3917 // whether we are running on the LE simulator.
3918 // TODO(miladfar): Remove SIM once V8_TARGET_BIG_ENDIAN includes the Sim.
3919 #if defined(V8_TARGET_BIG_ENDIAN) || defined(V8_TARGET_ARCH_S390_LE_SIM)
3920 Node* WasmGraphBuilder::LoadTransformBigEndian(
3921     wasm::ValueType type, MachineType memtype,
3922     wasm::LoadTransformationKind transform, Node* index, uint64_t offset,
3923     uint32_t alignment, wasm::WasmCodePosition position) {
3924 #define LOAD_EXTEND(num_lanes, bytes_per_load, replace_lane)                   \
3925   result = graph()->NewNode(mcgraph()->machine()->S128Zero());                 \
3926   Node* values[num_lanes];                                                     \
3927   for (int i = 0; i < num_lanes; i++) {                                        \
3928     values[i] = LoadMem(type, memtype, index, offset + i * bytes_per_load,     \
3929                         alignment, position);                                  \
3930     if (memtype.IsSigned()) {                                                  \
3931       /* sign extend */                                                        \
3932       values[i] = graph()->NewNode(mcgraph()->machine()->ChangeInt32ToInt64(), \
3933                                    values[i]);                                 \
3934     } else {                                                                   \
3935       /* zero extend */                                                        \
3936       values[i] = graph()->NewNode(                                            \
3937           mcgraph()->machine()->ChangeUint32ToUint64(), values[i]);            \
3938     }                                                                          \
3939   }                                                                            \
3940   for (int lane = 0; lane < num_lanes; lane++) {                               \
3941     result = graph()->NewNode(mcgraph()->machine()->replace_lane(lane),        \
3942                               result, values[lane]);                           \
3943   }
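  // The LOAD_EXTEND macro above loads {num_lanes} scalars of {bytes_per_load}
  // bytes each, sign- or zero-extends them according to {memtype}, and inserts
  // them lane by lane into an initially zeroed S128 value via {replace_lane}.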
3944   Node* result;
3945   LoadTransformation transformation = GetLoadTransformation(memtype, transform);
3946 
3947   switch (transformation) {
3948     case LoadTransformation::kS128Load8Splat: {
3949       result = LoadMem(type, memtype, index, offset, alignment, position);
3950       result = graph()->NewNode(mcgraph()->machine()->I8x16Splat(), result);
3951       break;
3952     }
3953     case LoadTransformation::kS128Load8x8S:
3954     case LoadTransformation::kS128Load8x8U: {
3955       LOAD_EXTEND(8, 1, I16x8ReplaceLane)
3956       break;
3957     }
3958     case LoadTransformation::kS128Load16Splat: {
3959       result = LoadMem(type, memtype, index, offset, alignment, position);
3960       result = graph()->NewNode(mcgraph()->machine()->I16x8Splat(), result);
3961       break;
3962     }
3963     case LoadTransformation::kS128Load16x4S:
3964     case LoadTransformation::kS128Load16x4U: {
3965       LOAD_EXTEND(4, 2, I32x4ReplaceLane)
3966       break;
3967     }
3968     case LoadTransformation::kS128Load32Splat: {
3969       result = LoadMem(type, memtype, index, offset, alignment, position);
3970       result = graph()->NewNode(mcgraph()->machine()->I32x4Splat(), result);
3971       break;
3972     }
3973     case LoadTransformation::kS128Load32x2S:
3974     case LoadTransformation::kS128Load32x2U: {
3975       LOAD_EXTEND(2, 4, I64x2ReplaceLane)
3976       break;
3977     }
3978     case LoadTransformation::kS128Load64Splat: {
3979       result = LoadMem(type, memtype, index, offset, alignment, position);
3980       result = graph()->NewNode(mcgraph()->machine()->I64x2Splat(), result);
3981       break;
3982     }
3983     case LoadTransformation::kS128Load32Zero: {
3984       result = graph()->NewNode(mcgraph()->machine()->S128Zero());
3985       result = graph()->NewNode(
3986           mcgraph()->machine()->I32x4ReplaceLane(0), result,
3987           LoadMem(type, memtype, index, offset, alignment, position));
3988       break;
3989     }
3990     case LoadTransformation::kS128Load64Zero: {
3991       result = graph()->NewNode(mcgraph()->machine()->S128Zero());
3992       result = graph()->NewNode(
3993           mcgraph()->machine()->I64x2ReplaceLane(0), result,
3994           LoadMem(type, memtype, index, offset, alignment, position));
3995       break;
3996     }
3997     default:
3998       UNREACHABLE();
3999   }
4000 
4001   return result;
4002 #undef LOAD_EXTEND
4003 }
4004 #endif
4005 
4006 Node* WasmGraphBuilder::LoadLane(MachineType memtype, Node* value, Node* index,
4007                                  uint32_t offset, uint8_t laneidx,
4008                                  wasm::WasmCodePosition position) {
4009   has_simd_ = true;
4010   Node* load;
4011   uint8_t access_size = memtype.MemSize();
4012   index =
4013       BoundsCheckMem(access_size, index, offset, position, kCanOmitBoundsCheck);
4014 
4015   MemoryAccessKind load_kind =
4016       GetMemoryAccessKind(mcgraph(), memtype, use_trap_handler());
4017 
4018   load = SetEffect(graph()->NewNode(
4019       mcgraph()->machine()->LoadLane(load_kind, memtype, laneidx),
4020       MemBuffer(offset), index, value, effect(), control()));
4021 
4022   if (load_kind == MemoryAccessKind::kProtected) {
4023     SetSourcePosition(load, position);
4024   }
4025 
4026   if (FLAG_trace_wasm_memory) {
4027     TraceMemoryOperation(false, memtype.representation(), index, offset,
4028                          position);
4029   }
4030 
4031   return load;
4032 }
4033 
4034 Node* WasmGraphBuilder::LoadTransform(wasm::ValueType type, MachineType memtype,
4035                                       wasm::LoadTransformationKind transform,
4036                                       Node* index, uint64_t offset,
4037                                       uint32_t alignment,
4038                                       wasm::WasmCodePosition position) {
4039   has_simd_ = true;
4040 
4041   Node* load;
4042   // {offset} is validated to be within uintptr_t range in {BoundsCheckMem}.
4043   uintptr_t capped_offset = static_cast<uintptr_t>(offset);
4044 
4045 #if defined(V8_TARGET_BIG_ENDIAN) || defined(V8_TARGET_ARCH_S390_LE_SIM)
4046   // LoadTransform cannot be executed efficiently on BE machines as a single
4047   // operation, since the loaded bytes need to be reversed first; therefore we
4048   // split it into separate "load" and "operation" nodes.
4049   load = LoadTransformBigEndian(type, memtype, transform, index, offset,
4050                                 alignment, position);
4051   USE(GetMemoryAccessKind);
4052 #else
4053   // Wasm semantics throw on OOB. Introduce explicit bounds check and
4054   // conditioning when not using the trap handler.
4055 
4056   // Load extends always load 8 bytes.
4057   uint8_t access_size = transform == wasm::LoadTransformationKind::kExtend
4058                             ? 8
4059                             : memtype.MemSize();
4060   index =
4061       BoundsCheckMem(access_size, index, offset, position, kCanOmitBoundsCheck);
4062 
4063   LoadTransformation transformation = GetLoadTransformation(memtype, transform);
4064   MemoryAccessKind load_kind =
4065       GetMemoryAccessKind(mcgraph(), memtype, use_trap_handler());
4066 
4067   load = SetEffect(graph()->NewNode(
4068       mcgraph()->machine()->LoadTransform(load_kind, transformation),
4069       MemBuffer(capped_offset), index, effect(), control()));
4070 
4071   if (load_kind == MemoryAccessKind::kProtected) {
4072     SetSourcePosition(load, position);
4073   }
4074 #endif
4075 
4076   if (FLAG_trace_wasm_memory) {
4077     TraceMemoryOperation(false, memtype.representation(), index, capped_offset,
4078                          position);
4079   }
4080   return load;
4081 }
4082 
4083 Node* WasmGraphBuilder::LoadMem(wasm::ValueType type, MachineType memtype,
4084                                 Node* index, uint64_t offset,
4085                                 uint32_t alignment,
4086                                 wasm::WasmCodePosition position) {
4087   Node* load;
4088 
4089   if (memtype.representation() == MachineRepresentation::kSimd128) {
4090     has_simd_ = true;
4091   }
4092 
4093   // Wasm semantics throw on OOB. Introduce explicit bounds check and
4094   // conditioning when not using the trap handler.
4095   index = BoundsCheckMem(memtype.MemSize(), index, offset, position,
4096                          kCanOmitBoundsCheck);
4097 
4098   // {offset} is validated to be within uintptr_t range in {BoundsCheckMem}.
4099   uintptr_t capped_offset = static_cast<uintptr_t>(offset);
4100   if (memtype.representation() == MachineRepresentation::kWord8 ||
4101       mcgraph()->machine()->UnalignedLoadSupported(memtype.representation())) {
4102     if (use_trap_handler()) {
4103       load = gasm_->ProtectedLoad(memtype, MemBuffer(capped_offset), index);
4104       SetSourcePosition(load, position);
4105     } else {
4106       load = gasm_->Load(memtype, MemBuffer(capped_offset), index);
4107     }
4108   } else {
4109     // TODO(eholk): Support unaligned loads with trap handlers.
4110     DCHECK(!use_trap_handler());
4111     load = gasm_->LoadUnaligned(memtype, MemBuffer(capped_offset), index);
4112   }
4113 
4114 #if defined(V8_TARGET_BIG_ENDIAN)
4115   load = BuildChangeEndiannessLoad(load, memtype, type);
4116 #endif
4117 
4118   if (type == wasm::kWasmI64 &&
4119       ElementSizeInBytes(memtype.representation()) < 8) {
4120     // TODO(titzer): TF zeroes the upper bits of 64-bit loads for subword sizes.
4121     load = memtype.IsSigned()
4122                ? gasm_->ChangeInt32ToInt64(load)     // sign extend
4123                : gasm_->ChangeUint32ToUint64(load);  // zero extend
4124   }
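  // E.g. (illustrative only): an i64.load8_s of the byte 0xFF produces the
  // 32-bit value -1, which is then sign-extended to the i64 value -1, whereas
  // i64.load8_u zero-extends it to 255.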
4125 
4126   if (FLAG_trace_wasm_memory) {
4127     TraceMemoryOperation(false, memtype.representation(), index, capped_offset,
4128                          position);
4129   }
4130 
4131   return load;
4132 }
4133 
4134 Node* WasmGraphBuilder::StoreLane(MachineRepresentation mem_rep, Node* index,
4135                                   uint32_t offset, uint32_t alignment,
4136                                   Node* val, uint8_t laneidx,
4137                                   wasm::WasmCodePosition position,
4138                                   wasm::ValueType type) {
4139   Node* store;
4140   has_simd_ = true;
4141   index = BoundsCheckMem(i::ElementSizeInBytes(mem_rep), index, offset,
4142                          position, kCanOmitBoundsCheck);
4143 
4144   MachineType memtype = MachineType(mem_rep, MachineSemantic::kNone);
4145   MemoryAccessKind load_kind =
4146       GetMemoryAccessKind(mcgraph(), memtype, use_trap_handler());
4147 
4148   // {offset} is validated to be within uintptr_t range in {BoundsCheckMem}.
4149   uintptr_t capped_offset = static_cast<uintptr_t>(offset);
4150 
4151   store = SetEffect(graph()->NewNode(
4152       mcgraph()->machine()->StoreLane(load_kind, mem_rep, laneidx),
4153       MemBuffer(capped_offset), index, val, effect(), control()));
4154 
4155   if (load_kind == MemoryAccessKind::kProtected) {
4156     SetSourcePosition(store, position);
4157   }
4158 
4159   if (FLAG_trace_wasm_memory) {
4160     TraceMemoryOperation(true, mem_rep, index, capped_offset, position);
4161   }
4162 
4163   return store;
4164 }
4165 
4166 Node* WasmGraphBuilder::StoreMem(MachineRepresentation mem_rep, Node* index,
4167                                  uint64_t offset, uint32_t alignment, Node* val,
4168                                  wasm::WasmCodePosition position,
4169                                  wasm::ValueType type) {
4170   Node* store;
4171 
4172   if (mem_rep == MachineRepresentation::kSimd128) {
4173     has_simd_ = true;
4174   }
4175 
4176   index = BoundsCheckMem(i::ElementSizeInBytes(mem_rep), index, offset,
4177                          position, kCanOmitBoundsCheck);
4178 
4179 #if defined(V8_TARGET_BIG_ENDIAN)
4180   val = BuildChangeEndiannessStore(val, mem_rep, type);
4181 #endif
4182 
4183   // {offset} is validated to be within uintptr_t range in {BoundsCheckMem}.
4184   uintptr_t capped_offset = static_cast<uintptr_t>(offset);
4185   if (mem_rep == MachineRepresentation::kWord8 ||
4186       mcgraph()->machine()->UnalignedStoreSupported(mem_rep)) {
4187     if (use_trap_handler()) {
4188       store =
4189           gasm_->ProtectedStore(mem_rep, MemBuffer(capped_offset), index, val);
4190       SetSourcePosition(store, position);
4191     } else {
4192       store = gasm_->Store(StoreRepresentation{mem_rep, kNoWriteBarrier},
4193                            MemBuffer(capped_offset), index, val);
4194     }
4195   } else {
4196     // TODO(eholk): Support unaligned stores with trap handlers.
4197     DCHECK(!use_trap_handler());
4198     UnalignedStoreRepresentation rep(mem_rep);
4199     store = gasm_->StoreUnaligned(rep, MemBuffer(capped_offset), index, val);
4200   }
4201 
4202   if (FLAG_trace_wasm_memory) {
4203     TraceMemoryOperation(true, mem_rep, index, capped_offset, position);
4204   }
4205 
4206   return store;
4207 }
4208 
4209 namespace {
4210 Node* GetAsmJsOOBValue(MachineRepresentation rep, MachineGraph* mcgraph) {
4211   switch (rep) {
4212     case MachineRepresentation::kWord8:
4213     case MachineRepresentation::kWord16:
4214     case MachineRepresentation::kWord32:
4215       return mcgraph->Int32Constant(0);
4216     case MachineRepresentation::kWord64:
4217       return mcgraph->Int64Constant(0);
4218     case MachineRepresentation::kFloat32:
4219       return mcgraph->Float32Constant(std::numeric_limits<float>::quiet_NaN());
4220     case MachineRepresentation::kFloat64:
4221       return mcgraph->Float64Constant(std::numeric_limits<double>::quiet_NaN());
4222     default:
4223       UNREACHABLE();
4224   }
4225 }
4226 }  // namespace
4227 
4228 Node* WasmGraphBuilder::BuildAsmjsLoadMem(MachineType type, Node* index) {
4229   DCHECK_NOT_NULL(instance_cache_);
4230   Node* mem_start = instance_cache_->mem_start;
4231   Node* mem_size = instance_cache_->mem_size;
4232   DCHECK_NOT_NULL(mem_start);
4233   DCHECK_NOT_NULL(mem_size);
4234 
4235   // Asm.js semantics are defined in terms of typed arrays, hence OOB
4236   // reads return {undefined} coerced to the result type (0 for integers, NaN
4237   // for float and double).
4238   // Note that we check against the memory size ignoring the size of the
4239   // accessed value, which is conservative if misaligned. Technically, asm.js
4240   // should never have misaligned accesses.
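  // E.g. an out-of-bounds HEAP32[i] read in asm.js yields 0, and an
  // out-of-bounds HEAPF64[i] read yields NaN, matching {GetAsmJsOOBValue}
  // above.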
4241   index = Uint32ToUintptr(index);
4242   Diamond bounds_check(
4243       graph(), mcgraph()->common(),
4244       graph()->NewNode(mcgraph()->machine()->UintLessThan(), index, mem_size),
4245       BranchHint::kTrue);
4246   bounds_check.Chain(control());
4247 
4248   if (untrusted_code_mitigations_) {
4249     // Condition the index with the memory mask.
4250     Node* mem_mask = instance_cache_->mem_mask;
4251     DCHECK_NOT_NULL(mem_mask);
4252     index = graph()->NewNode(mcgraph()->machine()->WordAnd(), index, mem_mask);
4253   }
4254 
4255   Node* load = graph()->NewNode(mcgraph()->machine()->Load(type), mem_start,
4256                                 index, effect(), bounds_check.if_true);
4257   SetEffectControl(bounds_check.EffectPhi(load, effect()), bounds_check.merge);
4258   return bounds_check.Phi(type.representation(), load,
4259                           GetAsmJsOOBValue(type.representation(), mcgraph()));
4260 }
4261 
4262 Node* WasmGraphBuilder::Uint32ToUintptr(Node* node) {
4263   if (mcgraph()->machine()->Is32()) return node;
4264   // Fold instances of ChangeUint32ToUint64(IntConstant) directly.
4265   Uint32Matcher matcher(node);
4266   if (matcher.HasResolvedValue()) {
4267     uintptr_t value = matcher.ResolvedValue();
4268     return mcgraph()->IntPtrConstant(bit_cast<intptr_t>(value));
4269   }
4270   return graph()->NewNode(mcgraph()->machine()->ChangeUint32ToUint64(), node);
4271 }
4272 
4273 Node* WasmGraphBuilder::BuildAsmjsStoreMem(MachineType type, Node* index,
4274                                            Node* val) {
4275   DCHECK_NOT_NULL(instance_cache_);
4276   Node* mem_start = instance_cache_->mem_start;
4277   Node* mem_size = instance_cache_->mem_size;
4278   DCHECK_NOT_NULL(mem_start);
4279   DCHECK_NOT_NULL(mem_size);
4280 
4281   // Asm.js semantics are to ignore OOB writes.
4282   // Note that we check against the memory size ignoring the size of the
4283   // stored value, which is conservative if misaligned. Technically, asm.js
4284   // should never have misaligned accesses.
4285   Diamond bounds_check(
4286       graph(), mcgraph()->common(),
4287       graph()->NewNode(mcgraph()->machine()->Uint32LessThan(), index, mem_size),
4288       BranchHint::kTrue);
4289   bounds_check.Chain(control());
4290 
4291   if (untrusted_code_mitigations_) {
4292     // Condition the index with the memory mask.
4293     Node* mem_mask = instance_cache_->mem_mask;
4294     DCHECK_NOT_NULL(mem_mask);
4295     index =
4296         graph()->NewNode(mcgraph()->machine()->Word32And(), index, mem_mask);
4297   }
4298 
4299   index = Uint32ToUintptr(index);
4300   const Operator* store_op = mcgraph()->machine()->Store(StoreRepresentation(
4301       type.representation(), WriteBarrierKind::kNoWriteBarrier));
4302   Node* store = graph()->NewNode(store_op, mem_start, index, val, effect(),
4303                                  bounds_check.if_true);
4304   SetEffectControl(bounds_check.EffectPhi(store, effect()), bounds_check.merge);
4305   return val;
4306 }
4307 
4308 Node* WasmGraphBuilder::BuildF64x2Ceil(Node* input) {
4309   MachineType type = MachineType::Simd128();
4310   ExternalReference ref = ExternalReference::wasm_f64x2_ceil();
4311   return BuildCFuncInstruction(ref, type, input);
4312 }
4313 
4314 Node* WasmGraphBuilder::BuildF64x2Floor(Node* input) {
4315   MachineType type = MachineType::Simd128();
4316   ExternalReference ref = ExternalReference::wasm_f64x2_floor();
4317   return BuildCFuncInstruction(ref, type, input);
4318 }
4319 
4320 Node* WasmGraphBuilder::BuildF64x2Trunc(Node* input) {
4321   MachineType type = MachineType::Simd128();
4322   ExternalReference ref = ExternalReference::wasm_f64x2_trunc();
4323   return BuildCFuncInstruction(ref, type, input);
4324 }
4325 
4326 Node* WasmGraphBuilder::BuildF64x2NearestInt(Node* input) {
4327   MachineType type = MachineType::Simd128();
4328   ExternalReference ref = ExternalReference::wasm_f64x2_nearest_int();
4329   return BuildCFuncInstruction(ref, type, input);
4330 }
4331 
4332 Node* WasmGraphBuilder::BuildF32x4Ceil(Node* input) {
4333   MachineType type = MachineType::Simd128();
4334   ExternalReference ref = ExternalReference::wasm_f32x4_ceil();
4335   return BuildCFuncInstruction(ref, type, input);
4336 }
4337 
4338 Node* WasmGraphBuilder::BuildF32x4Floor(Node* input) {
4339   MachineType type = MachineType::Simd128();
4340   ExternalReference ref = ExternalReference::wasm_f32x4_floor();
4341   return BuildCFuncInstruction(ref, type, input);
4342 }
4343 
4344 Node* WasmGraphBuilder::BuildF32x4Trunc(Node* input) {
4345   MachineType type = MachineType::Simd128();
4346   ExternalReference ref = ExternalReference::wasm_f32x4_trunc();
4347   return BuildCFuncInstruction(ref, type, input);
4348 }
4349 
4350 Node* WasmGraphBuilder::BuildF32x4NearestInt(Node* input) {
4351   MachineType type = MachineType::Simd128();
4352   ExternalReference ref = ExternalReference::wasm_f32x4_nearest_int();
4353   return BuildCFuncInstruction(ref, type, input);
4354 }
4355 
4356 void WasmGraphBuilder::PrintDebugName(Node* node) {
4357   PrintF("#%d:%s", node->id(), node->op()->mnemonic());
4358 }
4359 
4360 Graph* WasmGraphBuilder::graph() { return mcgraph()->graph(); }
4361 
4362 namespace {
4363 Signature<MachineRepresentation>* CreateMachineSignature(
4364     Zone* zone, const wasm::FunctionSig* sig,
4365     WasmGraphBuilder::CallOrigin origin) {
4366   Signature<MachineRepresentation>::Builder builder(zone, sig->return_count(),
4367                                                     sig->parameter_count());
4368   for (auto ret : sig->returns()) {
4369     if (origin == WasmGraphBuilder::kCalledFromJS) {
4370       builder.AddReturn(MachineRepresentation::kTagged);
4371     } else {
4372       builder.AddReturn(ret.machine_representation());
4373     }
4374   }
4375 
4376   for (auto param : sig->parameters()) {
4377     if (origin == WasmGraphBuilder::kCalledFromJS) {
4378       // Parameters coming from JavaScript are always tagged values. In
4379       // particular, when the signature specifies an I64 value, JavaScript
4380       // provides a BigInt object, not two 32-bit parameters.
4381       builder.AddParam(MachineRepresentation::kTagged);
4382     } else {
4383       builder.AddParam(param.machine_representation());
4384     }
4385   }
4386   return builder.Build();
4387 }
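// Illustrative example: for a wasm signature (i64) -> i32 built with
// {kCalledFromJS}, both the parameter and the return become kTagged; built
// with {kCalledFromWasm}, they stay kWord64 and kWord32 respectively.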
4388 
4389 }  // namespace
4390 
4391 void WasmGraphBuilder::AddInt64LoweringReplacement(
4392     CallDescriptor* original, CallDescriptor* replacement) {
4393   if (!lowering_special_case_) {
4394     lowering_special_case_ = std::make_unique<Int64LoweringSpecialCase>();
4395   }
4396   lowering_special_case_->replacements.insert({original, replacement});
4397 }
4398 
4399 CallDescriptor* WasmGraphBuilder::GetI32AtomicWaitCallDescriptor() {
4400   if (i32_atomic_wait_descriptor_) return i32_atomic_wait_descriptor_;
4401 
4402   i32_atomic_wait_descriptor_ =
4403       GetBuiltinCallDescriptor<WasmI32AtomicWait64Descriptor>(
4404           this, StubCallMode::kCallWasmRuntimeStub);
4405 
4406   AddInt64LoweringReplacement(
4407       i32_atomic_wait_descriptor_,
4408       GetBuiltinCallDescriptor<WasmI32AtomicWait32Descriptor>(
4409           this, StubCallMode::kCallWasmRuntimeStub));
4410 
4411   return i32_atomic_wait_descriptor_;
4412 }
4413 
4414 CallDescriptor* WasmGraphBuilder::GetI64AtomicWaitCallDescriptor() {
4415   if (i64_atomic_wait_descriptor_) return i64_atomic_wait_descriptor_;
4416 
4417   i64_atomic_wait_descriptor_ =
4418       GetBuiltinCallDescriptor<WasmI64AtomicWait64Descriptor>(
4419           this, StubCallMode::kCallWasmRuntimeStub);
4420 
4421   AddInt64LoweringReplacement(
4422       i64_atomic_wait_descriptor_,
4423       GetBuiltinCallDescriptor<WasmI64AtomicWait32Descriptor>(
4424           this, StubCallMode::kCallWasmRuntimeStub));
4425 
4426   return i64_atomic_wait_descriptor_;
4427 }
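// Note: on 32-bit targets the Int64Lowering pass (see LowerInt64 below) is
// expected to replace these 64-bit descriptors with the 32-bit variants
// registered via AddInt64LoweringReplacement above.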
4428 
4429 void WasmGraphBuilder::LowerInt64(Signature<MachineRepresentation>* sig) {
4430   if (mcgraph()->machine()->Is64()) return;
4431   Int64Lowering r(mcgraph()->graph(), mcgraph()->machine(), mcgraph()->common(),
4432                   mcgraph()->zone(), sig, std::move(lowering_special_case_));
4433   r.LowerGraph();
4434 }
4435 
4436 void WasmGraphBuilder::LowerInt64(CallOrigin origin) {
4437   LowerInt64(CreateMachineSignature(mcgraph()->zone(), sig_, origin));
4438 }
4439 
4440 void WasmGraphBuilder::SimdScalarLoweringForTesting() {
4441   SimdScalarLowering(mcgraph(), CreateMachineSignature(mcgraph()->zone(), sig_,
4442                                                        kCalledFromWasm))
4443       .LowerGraph();
4444 }
4445 
4446 void WasmGraphBuilder::SetSourcePosition(Node* node,
4447                                          wasm::WasmCodePosition position) {
4448   DCHECK_NE(position, wasm::kNoCodePosition);
4449   if (source_position_table_) {
4450     source_position_table_->SetSourcePosition(node, SourcePosition(position));
4451   }
4452 }
4453 
4454 Node* WasmGraphBuilder::S128Zero() {
4455   has_simd_ = true;
4456   return graph()->NewNode(mcgraph()->machine()->S128Zero());
4457 }
4458 
4459 Node* WasmGraphBuilder::SimdOp(wasm::WasmOpcode opcode, Node* const* inputs) {
4460   has_simd_ = true;
4461   switch (opcode) {
4462     case wasm::kExprF64x2Splat:
4463       return graph()->NewNode(mcgraph()->machine()->F64x2Splat(), inputs[0]);
4464     case wasm::kExprF64x2Abs:
4465       return graph()->NewNode(mcgraph()->machine()->F64x2Abs(), inputs[0]);
4466     case wasm::kExprF64x2Neg:
4467       return graph()->NewNode(mcgraph()->machine()->F64x2Neg(), inputs[0]);
4468     case wasm::kExprF64x2Sqrt:
4469       return graph()->NewNode(mcgraph()->machine()->F64x2Sqrt(), inputs[0]);
4470     case wasm::kExprF64x2Add:
4471       return graph()->NewNode(mcgraph()->machine()->F64x2Add(), inputs[0],
4472                               inputs[1]);
4473     case wasm::kExprF64x2Sub:
4474       return graph()->NewNode(mcgraph()->machine()->F64x2Sub(), inputs[0],
4475                               inputs[1]);
4476     case wasm::kExprF64x2Mul:
4477       return graph()->NewNode(mcgraph()->machine()->F64x2Mul(), inputs[0],
4478                               inputs[1]);
4479     case wasm::kExprF64x2Div:
4480       return graph()->NewNode(mcgraph()->machine()->F64x2Div(), inputs[0],
4481                               inputs[1]);
4482     case wasm::kExprF64x2Min:
4483       return graph()->NewNode(mcgraph()->machine()->F64x2Min(), inputs[0],
4484                               inputs[1]);
4485     case wasm::kExprF64x2Max:
4486       return graph()->NewNode(mcgraph()->machine()->F64x2Max(), inputs[0],
4487                               inputs[1]);
4488     case wasm::kExprF64x2Eq:
4489       return graph()->NewNode(mcgraph()->machine()->F64x2Eq(), inputs[0],
4490                               inputs[1]);
4491     case wasm::kExprF64x2Ne:
4492       return graph()->NewNode(mcgraph()->machine()->F64x2Ne(), inputs[0],
4493                               inputs[1]);
4494     case wasm::kExprF64x2Lt:
4495       return graph()->NewNode(mcgraph()->machine()->F64x2Lt(), inputs[0],
4496                               inputs[1]);
4497     case wasm::kExprF64x2Le:
4498       return graph()->NewNode(mcgraph()->machine()->F64x2Le(), inputs[0],
4499                               inputs[1]);
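    // {Gt} and {Ge} are lowered by swapping the operands of {Lt} and {Le}
    // (a > b iff b < a, a >= b iff b <= a); the same trick is used for the
    // other comparison cases below.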
4500     case wasm::kExprF64x2Gt:
4501       return graph()->NewNode(mcgraph()->machine()->F64x2Lt(), inputs[1],
4502                               inputs[0]);
4503     case wasm::kExprF64x2Ge:
4504       return graph()->NewNode(mcgraph()->machine()->F64x2Le(), inputs[1],
4505                               inputs[0]);
4506     case wasm::kExprF64x2Qfma:
4507       return graph()->NewNode(mcgraph()->machine()->F64x2Qfma(), inputs[0],
4508                               inputs[1], inputs[2]);
4509     case wasm::kExprF64x2Qfms:
4510       return graph()->NewNode(mcgraph()->machine()->F64x2Qfms(), inputs[0],
4511                               inputs[1], inputs[2]);
4512     case wasm::kExprF64x2Pmin:
4513       return graph()->NewNode(mcgraph()->machine()->F64x2Pmin(), inputs[0],
4514                               inputs[1]);
4515     case wasm::kExprF64x2Pmax:
4516       return graph()->NewNode(mcgraph()->machine()->F64x2Pmax(), inputs[0],
4517                               inputs[1]);
4518     case wasm::kExprF64x2Ceil:
4519       // Architecture support for F64x2Ceil and Float64RoundUp is the same.
4520       if (!mcgraph()->machine()->Float64RoundUp().IsSupported())
4521         return BuildF64x2Ceil(inputs[0]);
4522       return graph()->NewNode(mcgraph()->machine()->F64x2Ceil(), inputs[0]);
4523     case wasm::kExprF64x2Floor:
4524       // Architecture support for F64x2Floor and Float64RoundDown is the same.
4525       if (!mcgraph()->machine()->Float64RoundDown().IsSupported())
4526         return BuildF64x2Floor(inputs[0]);
4527       return graph()->NewNode(mcgraph()->machine()->F64x2Floor(), inputs[0]);
4528     case wasm::kExprF64x2Trunc:
4529       // Architecture support for F64x2Trunc and Float64RoundTruncate is the
4530       // same.
4531       if (!mcgraph()->machine()->Float64RoundTruncate().IsSupported())
4532         return BuildF64x2Trunc(inputs[0]);
4533       return graph()->NewNode(mcgraph()->machine()->F64x2Trunc(), inputs[0]);
4534     case wasm::kExprF64x2NearestInt:
4535       // Architecture support for F64x2NearestInt and Float64RoundTiesEven is
4536       // the same.
4537       if (!mcgraph()->machine()->Float64RoundTiesEven().IsSupported())
4538         return BuildF64x2NearestInt(inputs[0]);
4539       return graph()->NewNode(mcgraph()->machine()->F64x2NearestInt(),
4540                               inputs[0]);
4541     case wasm::kExprF32x4Splat:
4542       return graph()->NewNode(mcgraph()->machine()->F32x4Splat(), inputs[0]);
4543     case wasm::kExprF32x4SConvertI32x4:
4544       return graph()->NewNode(mcgraph()->machine()->F32x4SConvertI32x4(),
4545                               inputs[0]);
4546     case wasm::kExprF32x4UConvertI32x4:
4547       return graph()->NewNode(mcgraph()->machine()->F32x4UConvertI32x4(),
4548                               inputs[0]);
4549     case wasm::kExprF32x4Abs:
4550       return graph()->NewNode(mcgraph()->machine()->F32x4Abs(), inputs[0]);
4551     case wasm::kExprF32x4Neg:
4552       return graph()->NewNode(mcgraph()->machine()->F32x4Neg(), inputs[0]);
4553     case wasm::kExprF32x4Sqrt:
4554       return graph()->NewNode(mcgraph()->machine()->F32x4Sqrt(), inputs[0]);
4555     case wasm::kExprF32x4RecipApprox:
4556       return graph()->NewNode(mcgraph()->machine()->F32x4RecipApprox(),
4557                               inputs[0]);
4558     case wasm::kExprF32x4RecipSqrtApprox:
4559       return graph()->NewNode(mcgraph()->machine()->F32x4RecipSqrtApprox(),
4560                               inputs[0]);
4561     case wasm::kExprF32x4Add:
4562       return graph()->NewNode(mcgraph()->machine()->F32x4Add(), inputs[0],
4563                               inputs[1]);
4564     case wasm::kExprF32x4AddHoriz:
4565       return graph()->NewNode(mcgraph()->machine()->F32x4AddHoriz(), inputs[0],
4566                               inputs[1]);
4567     case wasm::kExprF32x4Sub:
4568       return graph()->NewNode(mcgraph()->machine()->F32x4Sub(), inputs[0],
4569                               inputs[1]);
4570     case wasm::kExprF32x4Mul:
4571       return graph()->NewNode(mcgraph()->machine()->F32x4Mul(), inputs[0],
4572                               inputs[1]);
4573     case wasm::kExprF32x4Div:
4574       return graph()->NewNode(mcgraph()->machine()->F32x4Div(), inputs[0],
4575                               inputs[1]);
4576     case wasm::kExprF32x4Min:
4577       return graph()->NewNode(mcgraph()->machine()->F32x4Min(), inputs[0],
4578                               inputs[1]);
4579     case wasm::kExprF32x4Max:
4580       return graph()->NewNode(mcgraph()->machine()->F32x4Max(), inputs[0],
4581                               inputs[1]);
4582     case wasm::kExprF32x4Eq:
4583       return graph()->NewNode(mcgraph()->machine()->F32x4Eq(), inputs[0],
4584                               inputs[1]);
4585     case wasm::kExprF32x4Ne:
4586       return graph()->NewNode(mcgraph()->machine()->F32x4Ne(), inputs[0],
4587                               inputs[1]);
4588     case wasm::kExprF32x4Lt:
4589       return graph()->NewNode(mcgraph()->machine()->F32x4Lt(), inputs[0],
4590                               inputs[1]);
4591     case wasm::kExprF32x4Le:
4592       return graph()->NewNode(mcgraph()->machine()->F32x4Le(), inputs[0],
4593                               inputs[1]);
4594     case wasm::kExprF32x4Gt:
4595       return graph()->NewNode(mcgraph()->machine()->F32x4Lt(), inputs[1],
4596                               inputs[0]);
4597     case wasm::kExprF32x4Ge:
4598       return graph()->NewNode(mcgraph()->machine()->F32x4Le(), inputs[1],
4599                               inputs[0]);
4600     case wasm::kExprF32x4Qfma:
4601       return graph()->NewNode(mcgraph()->machine()->F32x4Qfma(), inputs[0],
4602                               inputs[1], inputs[2]);
4603     case wasm::kExprF32x4Qfms:
4604       return graph()->NewNode(mcgraph()->machine()->F32x4Qfms(), inputs[0],
4605                               inputs[1], inputs[2]);
4606     case wasm::kExprF32x4Pmin:
4607       return graph()->NewNode(mcgraph()->machine()->F32x4Pmin(), inputs[0],
4608                               inputs[1]);
4609     case wasm::kExprF32x4Pmax:
4610       return graph()->NewNode(mcgraph()->machine()->F32x4Pmax(), inputs[0],
4611                               inputs[1]);
4612     case wasm::kExprF32x4Ceil:
4613       // Architecture support for F32x4Ceil and Float32RoundUp is the same.
4614       if (!mcgraph()->machine()->Float32RoundUp().IsSupported())
4615         return BuildF32x4Ceil(inputs[0]);
4616       return graph()->NewNode(mcgraph()->machine()->F32x4Ceil(), inputs[0]);
4617     case wasm::kExprF32x4Floor:
4618       // Architecture support for F32x4Floor and Float32RoundDown is the same.
4619       if (!mcgraph()->machine()->Float32RoundDown().IsSupported())
4620         return BuildF32x4Floor(inputs[0]);
4621       return graph()->NewNode(mcgraph()->machine()->F32x4Floor(), inputs[0]);
4622     case wasm::kExprF32x4Trunc:
4623       // Architecture support for F32x4Trunc and Float32RoundTruncate is the
4624       // same.
4625       if (!mcgraph()->machine()->Float32RoundTruncate().IsSupported())
4626         return BuildF32x4Trunc(inputs[0]);
4627       return graph()->NewNode(mcgraph()->machine()->F32x4Trunc(), inputs[0]);
4628     case wasm::kExprF32x4NearestInt:
4629       // Architecture support for F32x4NearestInt and Float32RoundTiesEven is
4630       // the same.
4631       if (!mcgraph()->machine()->Float32RoundTiesEven().IsSupported())
4632         return BuildF32x4NearestInt(inputs[0]);
4633       return graph()->NewNode(mcgraph()->machine()->F32x4NearestInt(),
4634                               inputs[0]);
4635     case wasm::kExprI64x2Splat:
4636       return graph()->NewNode(mcgraph()->machine()->I64x2Splat(), inputs[0]);
4637     case wasm::kExprI64x2Neg:
4638       return graph()->NewNode(mcgraph()->machine()->I64x2Neg(), inputs[0]);
4639     case wasm::kExprI64x2SConvertI32x4Low:
4640       return graph()->NewNode(mcgraph()->machine()->I64x2SConvertI32x4Low(),
4641                               inputs[0]);
4642     case wasm::kExprI64x2SConvertI32x4High:
4643       return graph()->NewNode(mcgraph()->machine()->I64x2SConvertI32x4High(),
4644                               inputs[0]);
4645     case wasm::kExprI64x2UConvertI32x4Low:
4646       return graph()->NewNode(mcgraph()->machine()->I64x2UConvertI32x4Low(),
4647                               inputs[0]);
4648     case wasm::kExprI64x2UConvertI32x4High:
4649       return graph()->NewNode(mcgraph()->machine()->I64x2UConvertI32x4High(),
4650                               inputs[0]);
4651     case wasm::kExprI64x2BitMask:
4652       return graph()->NewNode(mcgraph()->machine()->I64x2BitMask(), inputs[0]);
4653     case wasm::kExprI64x2Shl:
4654       return graph()->NewNode(mcgraph()->machine()->I64x2Shl(), inputs[0],
4655                               inputs[1]);
4656     case wasm::kExprI64x2ShrS:
4657       return graph()->NewNode(mcgraph()->machine()->I64x2ShrS(), inputs[0],
4658                               inputs[1]);
4659     case wasm::kExprI64x2Add:
4660       return graph()->NewNode(mcgraph()->machine()->I64x2Add(), inputs[0],
4661                               inputs[1]);
4662     case wasm::kExprI64x2Sub:
4663       return graph()->NewNode(mcgraph()->machine()->I64x2Sub(), inputs[0],
4664                               inputs[1]);
4665     case wasm::kExprI64x2Mul:
4666       return graph()->NewNode(mcgraph()->machine()->I64x2Mul(), inputs[0],
4667                               inputs[1]);
4668     case wasm::kExprI64x2Eq:
4669       return graph()->NewNode(mcgraph()->machine()->I64x2Eq(), inputs[0],
4670                               inputs[1]);
4671     case wasm::kExprI64x2ShrU:
4672       return graph()->NewNode(mcgraph()->machine()->I64x2ShrU(), inputs[0],
4673                               inputs[1]);
4674     case wasm::kExprI64x2ExtMulLowI32x4S:
4675       return graph()->NewNode(mcgraph()->machine()->I64x2ExtMulLowI32x4S(),
4676                               inputs[0], inputs[1]);
4677     case wasm::kExprI64x2ExtMulHighI32x4S:
4678       return graph()->NewNode(mcgraph()->machine()->I64x2ExtMulHighI32x4S(),
4679                               inputs[0], inputs[1]);
4680     case wasm::kExprI64x2ExtMulLowI32x4U:
4681       return graph()->NewNode(mcgraph()->machine()->I64x2ExtMulLowI32x4U(),
4682                               inputs[0], inputs[1]);
4683     case wasm::kExprI64x2ExtMulHighI32x4U:
4684       return graph()->NewNode(mcgraph()->machine()->I64x2ExtMulHighI32x4U(),
4685                               inputs[0], inputs[1]);
4686     case wasm::kExprI64x2SignSelect:
4687       return graph()->NewNode(mcgraph()->machine()->I64x2SignSelect(),
4688                               inputs[0], inputs[1], inputs[2]);
4689     case wasm::kExprI32x4Splat:
4690       return graph()->NewNode(mcgraph()->machine()->I32x4Splat(), inputs[0]);
4691     case wasm::kExprI32x4SConvertF32x4:
4692       return graph()->NewNode(mcgraph()->machine()->I32x4SConvertF32x4(),
4693                               inputs[0]);
4694     case wasm::kExprI32x4UConvertF32x4:
4695       return graph()->NewNode(mcgraph()->machine()->I32x4UConvertF32x4(),
4696                               inputs[0]);
4697     case wasm::kExprI32x4SConvertI16x8Low:
4698       return graph()->NewNode(mcgraph()->machine()->I32x4SConvertI16x8Low(),
4699                               inputs[0]);
4700     case wasm::kExprI32x4SConvertI16x8High:
4701       return graph()->NewNode(mcgraph()->machine()->I32x4SConvertI16x8High(),
4702                               inputs[0]);
4703     case wasm::kExprI32x4Neg:
4704       return graph()->NewNode(mcgraph()->machine()->I32x4Neg(), inputs[0]);
4705     case wasm::kExprI32x4Shl:
4706       return graph()->NewNode(mcgraph()->machine()->I32x4Shl(), inputs[0],
4707                               inputs[1]);
4708     case wasm::kExprI32x4ShrS:
4709       return graph()->NewNode(mcgraph()->machine()->I32x4ShrS(), inputs[0],
4710                               inputs[1]);
4711     case wasm::kExprI32x4Add:
4712       return graph()->NewNode(mcgraph()->machine()->I32x4Add(), inputs[0],
4713                               inputs[1]);
4714     case wasm::kExprI32x4AddHoriz:
4715       return graph()->NewNode(mcgraph()->machine()->I32x4AddHoriz(), inputs[0],
4716                               inputs[1]);
4717     case wasm::kExprI32x4Sub:
4718       return graph()->NewNode(mcgraph()->machine()->I32x4Sub(), inputs[0],
4719                               inputs[1]);
4720     case wasm::kExprI32x4Mul:
4721       return graph()->NewNode(mcgraph()->machine()->I32x4Mul(), inputs[0],
4722                               inputs[1]);
4723     case wasm::kExprI32x4MinS:
4724       return graph()->NewNode(mcgraph()->machine()->I32x4MinS(), inputs[0],
4725                               inputs[1]);
4726     case wasm::kExprI32x4MaxS:
4727       return graph()->NewNode(mcgraph()->machine()->I32x4MaxS(), inputs[0],
4728                               inputs[1]);
4729     case wasm::kExprI32x4Eq:
4730       return graph()->NewNode(mcgraph()->machine()->I32x4Eq(), inputs[0],
4731                               inputs[1]);
4732     case wasm::kExprI32x4Ne:
4733       return graph()->NewNode(mcgraph()->machine()->I32x4Ne(), inputs[0],
4734                               inputs[1]);
4735     case wasm::kExprI32x4LtS:
4736       return graph()->NewNode(mcgraph()->machine()->I32x4GtS(), inputs[1],
4737                               inputs[0]);
4738     case wasm::kExprI32x4LeS:
4739       return graph()->NewNode(mcgraph()->machine()->I32x4GeS(), inputs[1],
4740                               inputs[0]);
4741     case wasm::kExprI32x4GtS:
4742       return graph()->NewNode(mcgraph()->machine()->I32x4GtS(), inputs[0],
4743                               inputs[1]);
4744     case wasm::kExprI32x4GeS:
4745       return graph()->NewNode(mcgraph()->machine()->I32x4GeS(), inputs[0],
4746                               inputs[1]);
4747     case wasm::kExprI32x4UConvertI16x8Low:
4748       return graph()->NewNode(mcgraph()->machine()->I32x4UConvertI16x8Low(),
4749                               inputs[0]);
4750     case wasm::kExprI32x4UConvertI16x8High:
4751       return graph()->NewNode(mcgraph()->machine()->I32x4UConvertI16x8High(),
4752                               inputs[0]);
4753     case wasm::kExprI32x4ShrU:
4754       return graph()->NewNode(mcgraph()->machine()->I32x4ShrU(), inputs[0],
4755                               inputs[1]);
4756     case wasm::kExprI32x4MinU:
4757       return graph()->NewNode(mcgraph()->machine()->I32x4MinU(), inputs[0],
4758                               inputs[1]);
4759     case wasm::kExprI32x4MaxU:
4760       return graph()->NewNode(mcgraph()->machine()->I32x4MaxU(), inputs[0],
4761                               inputs[1]);
4762     case wasm::kExprI32x4LtU:
4763       return graph()->NewNode(mcgraph()->machine()->I32x4GtU(), inputs[1],
4764                               inputs[0]);
4765     case wasm::kExprI32x4LeU:
4766       return graph()->NewNode(mcgraph()->machine()->I32x4GeU(), inputs[1],
4767                               inputs[0]);
4768     case wasm::kExprI32x4GtU:
4769       return graph()->NewNode(mcgraph()->machine()->I32x4GtU(), inputs[0],
4770                               inputs[1]);
4771     case wasm::kExprI32x4GeU:
4772       return graph()->NewNode(mcgraph()->machine()->I32x4GeU(), inputs[0],
4773                               inputs[1]);
4774     case wasm::kExprI32x4Abs:
4775       return graph()->NewNode(mcgraph()->machine()->I32x4Abs(), inputs[0]);
4776     case wasm::kExprI32x4BitMask:
4777       return graph()->NewNode(mcgraph()->machine()->I32x4BitMask(), inputs[0]);
4778     case wasm::kExprI32x4DotI16x8S:
4779       return graph()->NewNode(mcgraph()->machine()->I32x4DotI16x8S(), inputs[0],
4780                               inputs[1]);
4781     case wasm::kExprI32x4ExtMulLowI16x8S:
4782       return graph()->NewNode(mcgraph()->machine()->I32x4ExtMulLowI16x8S(),
4783                               inputs[0], inputs[1]);
4784     case wasm::kExprI32x4ExtMulHighI16x8S:
4785       return graph()->NewNode(mcgraph()->machine()->I32x4ExtMulHighI16x8S(),
4786                               inputs[0], inputs[1]);
4787     case wasm::kExprI32x4ExtMulLowI16x8U:
4788       return graph()->NewNode(mcgraph()->machine()->I32x4ExtMulLowI16x8U(),
4789                               inputs[0], inputs[1]);
4790     case wasm::kExprI32x4ExtMulHighI16x8U:
4791       return graph()->NewNode(mcgraph()->machine()->I32x4ExtMulHighI16x8U(),
4792                               inputs[0], inputs[1]);
4793     case wasm::kExprI32x4SignSelect:
4794       return graph()->NewNode(mcgraph()->machine()->I32x4SignSelect(),
4795                               inputs[0], inputs[1], inputs[2]);
4796     case wasm::kExprI32x4ExtAddPairwiseI16x8S:
4797       return graph()->NewNode(mcgraph()->machine()->I32x4ExtAddPairwiseI16x8S(),
4798                               inputs[0]);
4799     case wasm::kExprI32x4ExtAddPairwiseI16x8U:
4800       return graph()->NewNode(mcgraph()->machine()->I32x4ExtAddPairwiseI16x8U(),
4801                               inputs[0]);
4802     case wasm::kExprI16x8Splat:
4803       return graph()->NewNode(mcgraph()->machine()->I16x8Splat(), inputs[0]);
4804     case wasm::kExprI16x8SConvertI8x16Low:
4805       return graph()->NewNode(mcgraph()->machine()->I16x8SConvertI8x16Low(),
4806                               inputs[0]);
4807     case wasm::kExprI16x8SConvertI8x16High:
4808       return graph()->NewNode(mcgraph()->machine()->I16x8SConvertI8x16High(),
4809                               inputs[0]);
4810     case wasm::kExprI16x8Shl:
4811       return graph()->NewNode(mcgraph()->machine()->I16x8Shl(), inputs[0],
4812                               inputs[1]);
4813     case wasm::kExprI16x8ShrS:
4814       return graph()->NewNode(mcgraph()->machine()->I16x8ShrS(), inputs[0],
4815                               inputs[1]);
4816     case wasm::kExprI16x8Neg:
4817       return graph()->NewNode(mcgraph()->machine()->I16x8Neg(), inputs[0]);
4818     case wasm::kExprI16x8SConvertI32x4:
4819       return graph()->NewNode(mcgraph()->machine()->I16x8SConvertI32x4(),
4820                               inputs[0], inputs[1]);
4821     case wasm::kExprI16x8Add:
4822       return graph()->NewNode(mcgraph()->machine()->I16x8Add(), inputs[0],
4823                               inputs[1]);
4824     case wasm::kExprI16x8AddSatS:
4825       return graph()->NewNode(mcgraph()->machine()->I16x8AddSatS(), inputs[0],
4826                               inputs[1]);
4827     case wasm::kExprI16x8AddHoriz:
4828       return graph()->NewNode(mcgraph()->machine()->I16x8AddHoriz(), inputs[0],
4829                               inputs[1]);
4830     case wasm::kExprI16x8Sub:
4831       return graph()->NewNode(mcgraph()->machine()->I16x8Sub(), inputs[0],
4832                               inputs[1]);
4833     case wasm::kExprI16x8SubSatS:
4834       return graph()->NewNode(mcgraph()->machine()->I16x8SubSatS(), inputs[0],
4835                               inputs[1]);
4836     case wasm::kExprI16x8Mul:
4837       return graph()->NewNode(mcgraph()->machine()->I16x8Mul(), inputs[0],
4838                               inputs[1]);
4839     case wasm::kExprI16x8MinS:
4840       return graph()->NewNode(mcgraph()->machine()->I16x8MinS(), inputs[0],
4841                               inputs[1]);
4842     case wasm::kExprI16x8MaxS:
4843       return graph()->NewNode(mcgraph()->machine()->I16x8MaxS(), inputs[0],
4844                               inputs[1]);
4845     case wasm::kExprI16x8Eq:
4846       return graph()->NewNode(mcgraph()->machine()->I16x8Eq(), inputs[0],
4847                               inputs[1]);
4848     case wasm::kExprI16x8Ne:
4849       return graph()->NewNode(mcgraph()->machine()->I16x8Ne(), inputs[0],
4850                               inputs[1]);
4851     case wasm::kExprI16x8LtS:
4852       return graph()->NewNode(mcgraph()->machine()->I16x8GtS(), inputs[1],
4853                               inputs[0]);
4854     case wasm::kExprI16x8LeS:
4855       return graph()->NewNode(mcgraph()->machine()->I16x8GeS(), inputs[1],
4856                               inputs[0]);
4857     case wasm::kExprI16x8GtS:
4858       return graph()->NewNode(mcgraph()->machine()->I16x8GtS(), inputs[0],
4859                               inputs[1]);
4860     case wasm::kExprI16x8GeS:
4861       return graph()->NewNode(mcgraph()->machine()->I16x8GeS(), inputs[0],
4862                               inputs[1]);
4863     case wasm::kExprI16x8UConvertI8x16Low:
4864       return graph()->NewNode(mcgraph()->machine()->I16x8UConvertI8x16Low(),
4865                               inputs[0]);
4866     case wasm::kExprI16x8UConvertI8x16High:
4867       return graph()->NewNode(mcgraph()->machine()->I16x8UConvertI8x16High(),
4868                               inputs[0]);
4869     case wasm::kExprI16x8UConvertI32x4:
4870       return graph()->NewNode(mcgraph()->machine()->I16x8UConvertI32x4(),
4871                               inputs[0], inputs[1]);
4872     case wasm::kExprI16x8ShrU:
4873       return graph()->NewNode(mcgraph()->machine()->I16x8ShrU(), inputs[0],
4874                               inputs[1]);
4875     case wasm::kExprI16x8AddSatU:
4876       return graph()->NewNode(mcgraph()->machine()->I16x8AddSatU(), inputs[0],
4877                               inputs[1]);
4878     case wasm::kExprI16x8SubSatU:
4879       return graph()->NewNode(mcgraph()->machine()->I16x8SubSatU(), inputs[0],
4880                               inputs[1]);
4881     case wasm::kExprI16x8MinU:
4882       return graph()->NewNode(mcgraph()->machine()->I16x8MinU(), inputs[0],
4883                               inputs[1]);
4884     case wasm::kExprI16x8MaxU:
4885       return graph()->NewNode(mcgraph()->machine()->I16x8MaxU(), inputs[0],
4886                               inputs[1]);
4887     case wasm::kExprI16x8LtU:
4888       return graph()->NewNode(mcgraph()->machine()->I16x8GtU(), inputs[1],
4889                               inputs[0]);
4890     case wasm::kExprI16x8LeU:
4891       return graph()->NewNode(mcgraph()->machine()->I16x8GeU(), inputs[1],
4892                               inputs[0]);
4893     case wasm::kExprI16x8GtU:
4894       return graph()->NewNode(mcgraph()->machine()->I16x8GtU(), inputs[0],
4895                               inputs[1]);
4896     case wasm::kExprI16x8GeU:
4897       return graph()->NewNode(mcgraph()->machine()->I16x8GeU(), inputs[0],
4898                               inputs[1]);
4899     case wasm::kExprI16x8RoundingAverageU:
4900       return graph()->NewNode(mcgraph()->machine()->I16x8RoundingAverageU(),
4901                               inputs[0], inputs[1]);
4902     case wasm::kExprI16x8Q15MulRSatS:
4903       return graph()->NewNode(mcgraph()->machine()->I16x8Q15MulRSatS(),
4904                               inputs[0], inputs[1]);
4905     case wasm::kExprI16x8Abs:
4906       return graph()->NewNode(mcgraph()->machine()->I16x8Abs(), inputs[0]);
4907     case wasm::kExprI16x8BitMask:
4908       return graph()->NewNode(mcgraph()->machine()->I16x8BitMask(), inputs[0]);
4909     case wasm::kExprI16x8ExtMulLowI8x16S:
4910       return graph()->NewNode(mcgraph()->machine()->I16x8ExtMulLowI8x16S(),
4911                               inputs[0], inputs[1]);
4912     case wasm::kExprI16x8ExtMulHighI8x16S:
4913       return graph()->NewNode(mcgraph()->machine()->I16x8ExtMulHighI8x16S(),
4914                               inputs[0], inputs[1]);
4915     case wasm::kExprI16x8ExtMulLowI8x16U:
4916       return graph()->NewNode(mcgraph()->machine()->I16x8ExtMulLowI8x16U(),
4917                               inputs[0], inputs[1]);
4918     case wasm::kExprI16x8ExtMulHighI8x16U:
4919       return graph()->NewNode(mcgraph()->machine()->I16x8ExtMulHighI8x16U(),
4920                               inputs[0], inputs[1]);
4921     case wasm::kExprI16x8SignSelect:
4922       return graph()->NewNode(mcgraph()->machine()->I16x8SignSelect(),
4923                               inputs[0], inputs[1], inputs[2]);
4924     case wasm::kExprI16x8ExtAddPairwiseI8x16S:
4925       return graph()->NewNode(mcgraph()->machine()->I16x8ExtAddPairwiseI8x16S(),
4926                               inputs[0]);
4927     case wasm::kExprI16x8ExtAddPairwiseI8x16U:
4928       return graph()->NewNode(mcgraph()->machine()->I16x8ExtAddPairwiseI8x16U(),
4929                               inputs[0]);
4930     case wasm::kExprI8x16Splat:
4931       return graph()->NewNode(mcgraph()->machine()->I8x16Splat(), inputs[0]);
4932     case wasm::kExprI8x16Neg:
4933       return graph()->NewNode(mcgraph()->machine()->I8x16Neg(), inputs[0]);
4934     case wasm::kExprI8x16Shl:
4935       return graph()->NewNode(mcgraph()->machine()->I8x16Shl(), inputs[0],
4936                               inputs[1]);
4937     case wasm::kExprI8x16ShrS:
4938       return graph()->NewNode(mcgraph()->machine()->I8x16ShrS(), inputs[0],
4939                               inputs[1]);
4940     case wasm::kExprI8x16SConvertI16x8:
4941       return graph()->NewNode(mcgraph()->machine()->I8x16SConvertI16x8(),
4942                               inputs[0], inputs[1]);
4943     case wasm::kExprI8x16Add:
4944       return graph()->NewNode(mcgraph()->machine()->I8x16Add(), inputs[0],
4945                               inputs[1]);
4946     case wasm::kExprI8x16AddSatS:
4947       return graph()->NewNode(mcgraph()->machine()->I8x16AddSatS(), inputs[0],
4948                               inputs[1]);
4949     case wasm::kExprI8x16Sub:
4950       return graph()->NewNode(mcgraph()->machine()->I8x16Sub(), inputs[0],
4951                               inputs[1]);
4952     case wasm::kExprI8x16SubSatS:
4953       return graph()->NewNode(mcgraph()->machine()->I8x16SubSatS(), inputs[0],
4954                               inputs[1]);
4955     case wasm::kExprI8x16Mul:
4956       return graph()->NewNode(mcgraph()->machine()->I8x16Mul(), inputs[0],
4957                               inputs[1]);
4958     case wasm::kExprI8x16MinS:
4959       return graph()->NewNode(mcgraph()->machine()->I8x16MinS(), inputs[0],
4960                               inputs[1]);
4961     case wasm::kExprI8x16MaxS:
4962       return graph()->NewNode(mcgraph()->machine()->I8x16MaxS(), inputs[0],
4963                               inputs[1]);
4964     case wasm::kExprI8x16Eq:
4965       return graph()->NewNode(mcgraph()->machine()->I8x16Eq(), inputs[0],
4966                               inputs[1]);
4967     case wasm::kExprI8x16Ne:
4968       return graph()->NewNode(mcgraph()->machine()->I8x16Ne(), inputs[0],
4969                               inputs[1]);
4970     case wasm::kExprI8x16LtS:
4971       return graph()->NewNode(mcgraph()->machine()->I8x16GtS(), inputs[1],
4972                               inputs[0]);
4973     case wasm::kExprI8x16LeS:
4974       return graph()->NewNode(mcgraph()->machine()->I8x16GeS(), inputs[1],
4975                               inputs[0]);
4976     case wasm::kExprI8x16GtS:
4977       return graph()->NewNode(mcgraph()->machine()->I8x16GtS(), inputs[0],
4978                               inputs[1]);
4979     case wasm::kExprI8x16GeS:
4980       return graph()->NewNode(mcgraph()->machine()->I8x16GeS(), inputs[0],
4981                               inputs[1]);
4982     case wasm::kExprI8x16ShrU:
4983       return graph()->NewNode(mcgraph()->machine()->I8x16ShrU(), inputs[0],
4984                               inputs[1]);
4985     case wasm::kExprI8x16UConvertI16x8:
4986       return graph()->NewNode(mcgraph()->machine()->I8x16UConvertI16x8(),
4987                               inputs[0], inputs[1]);
4988     case wasm::kExprI8x16AddSatU:
4989       return graph()->NewNode(mcgraph()->machine()->I8x16AddSatU(), inputs[0],
4990                               inputs[1]);
4991     case wasm::kExprI8x16SubSatU:
4992       return graph()->NewNode(mcgraph()->machine()->I8x16SubSatU(), inputs[0],
4993                               inputs[1]);
4994     case wasm::kExprI8x16MinU:
4995       return graph()->NewNode(mcgraph()->machine()->I8x16MinU(), inputs[0],
4996                               inputs[1]);
4997     case wasm::kExprI8x16MaxU:
4998       return graph()->NewNode(mcgraph()->machine()->I8x16MaxU(), inputs[0],
4999                               inputs[1]);
5000     case wasm::kExprI8x16LtU:
5001       return graph()->NewNode(mcgraph()->machine()->I8x16GtU(), inputs[1],
5002                               inputs[0]);
5003     case wasm::kExprI8x16LeU:
5004       return graph()->NewNode(mcgraph()->machine()->I8x16GeU(), inputs[1],
5005                               inputs[0]);
5006     case wasm::kExprI8x16GtU:
5007       return graph()->NewNode(mcgraph()->machine()->I8x16GtU(), inputs[0],
5008                               inputs[1]);
5009     case wasm::kExprI8x16GeU:
5010       return graph()->NewNode(mcgraph()->machine()->I8x16GeU(), inputs[0],
5011                               inputs[1]);
5012     case wasm::kExprI8x16RoundingAverageU:
5013       return graph()->NewNode(mcgraph()->machine()->I8x16RoundingAverageU(),
5014                               inputs[0], inputs[1]);
5015     case wasm::kExprI8x16Popcnt:
5016       return graph()->NewNode(mcgraph()->machine()->I8x16Popcnt(), inputs[0]);
5017     case wasm::kExprI8x16Abs:
5018       return graph()->NewNode(mcgraph()->machine()->I8x16Abs(), inputs[0]);
5019     case wasm::kExprI8x16BitMask:
5020       return graph()->NewNode(mcgraph()->machine()->I8x16BitMask(), inputs[0]);
5021     case wasm::kExprI8x16SignSelect:
5022       return graph()->NewNode(mcgraph()->machine()->I8x16SignSelect(),
5023                               inputs[0], inputs[1], inputs[2]);
5024     case wasm::kExprS128And:
5025       return graph()->NewNode(mcgraph()->machine()->S128And(), inputs[0],
5026                               inputs[1]);
5027     case wasm::kExprS128Or:
5028       return graph()->NewNode(mcgraph()->machine()->S128Or(), inputs[0],
5029                               inputs[1]);
5030     case wasm::kExprS128Xor:
5031       return graph()->NewNode(mcgraph()->machine()->S128Xor(), inputs[0],
5032                               inputs[1]);
5033     case wasm::kExprS128Not:
5034       return graph()->NewNode(mcgraph()->machine()->S128Not(), inputs[0]);
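    // For S128Select, the bitselect mask (wasm's third operand) is passed as
    // the first input of the machine operator.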
5035     case wasm::kExprS128Select:
5036       return graph()->NewNode(mcgraph()->machine()->S128Select(), inputs[2],
5037                               inputs[0], inputs[1]);
5038     case wasm::kExprS128AndNot:
5039       return graph()->NewNode(mcgraph()->machine()->S128AndNot(), inputs[0],
5040                               inputs[1]);
5041     case wasm::kExprV32x4AnyTrue:
5042       return graph()->NewNode(mcgraph()->machine()->V32x4AnyTrue(), inputs[0]);
5043     case wasm::kExprV32x4AllTrue:
5044       return graph()->NewNode(mcgraph()->machine()->V32x4AllTrue(), inputs[0]);
5045     case wasm::kExprV16x8AnyTrue:
5046       return graph()->NewNode(mcgraph()->machine()->V16x8AnyTrue(), inputs[0]);
5047     case wasm::kExprV16x8AllTrue:
5048       return graph()->NewNode(mcgraph()->machine()->V16x8AllTrue(), inputs[0]);
5049     case wasm::kExprV8x16AnyTrue:
5050       return graph()->NewNode(mcgraph()->machine()->V8x16AnyTrue(), inputs[0]);
5051     case wasm::kExprV8x16AllTrue:
5052       return graph()->NewNode(mcgraph()->machine()->V8x16AllTrue(), inputs[0]);
5053     case wasm::kExprI8x16Swizzle:
5054       return graph()->NewNode(mcgraph()->machine()->I8x16Swizzle(), inputs[0],
5055                               inputs[1]);
5056     default:
5057       FATAL_UNSUPPORTED_OPCODE(opcode);
5058   }
5059 }
5060 
5061 Node* WasmGraphBuilder::SimdLaneOp(wasm::WasmOpcode opcode, uint8_t lane,
5062                                    Node* const* inputs) {
5063   has_simd_ = true;
5064   switch (opcode) {
5065     case wasm::kExprF64x2ExtractLane:
5066       return graph()->NewNode(mcgraph()->machine()->F64x2ExtractLane(lane),
5067                               inputs[0]);
5068     case wasm::kExprF64x2ReplaceLane:
5069       return graph()->NewNode(mcgraph()->machine()->F64x2ReplaceLane(lane),
5070                               inputs[0], inputs[1]);
5071     case wasm::kExprF32x4ExtractLane:
5072       return graph()->NewNode(mcgraph()->machine()->F32x4ExtractLane(lane),
5073                               inputs[0]);
5074     case wasm::kExprF32x4ReplaceLane:
5075       return graph()->NewNode(mcgraph()->machine()->F32x4ReplaceLane(lane),
5076                               inputs[0], inputs[1]);
5077     case wasm::kExprI64x2ExtractLane:
5078       return graph()->NewNode(mcgraph()->machine()->I64x2ExtractLane(lane),
5079                               inputs[0]);
5080     case wasm::kExprI64x2ReplaceLane:
5081       return graph()->NewNode(mcgraph()->machine()->I64x2ReplaceLane(lane),
5082                               inputs[0], inputs[1]);
5083     case wasm::kExprI32x4ExtractLane:
5084       return graph()->NewNode(mcgraph()->machine()->I32x4ExtractLane(lane),
5085                               inputs[0]);
5086     case wasm::kExprI32x4ReplaceLane:
5087       return graph()->NewNode(mcgraph()->machine()->I32x4ReplaceLane(lane),
5088                               inputs[0], inputs[1]);
5089     case wasm::kExprI16x8ExtractLaneS:
5090       return graph()->NewNode(mcgraph()->machine()->I16x8ExtractLaneS(lane),
5091                               inputs[0]);
5092     case wasm::kExprI16x8ExtractLaneU:
5093       return graph()->NewNode(mcgraph()->machine()->I16x8ExtractLaneU(lane),
5094                               inputs[0]);
5095     case wasm::kExprI16x8ReplaceLane:
5096       return graph()->NewNode(mcgraph()->machine()->I16x8ReplaceLane(lane),
5097                               inputs[0], inputs[1]);
5098     case wasm::kExprI8x16ExtractLaneS:
5099       return graph()->NewNode(mcgraph()->machine()->I8x16ExtractLaneS(lane),
5100                               inputs[0]);
5101     case wasm::kExprI8x16ExtractLaneU:
5102       return graph()->NewNode(mcgraph()->machine()->I8x16ExtractLaneU(lane),
5103                               inputs[0]);
5104     case wasm::kExprI8x16ReplaceLane:
5105       return graph()->NewNode(mcgraph()->machine()->I8x16ReplaceLane(lane),
5106                               inputs[0], inputs[1]);
5107     default:
5108       FATAL_UNSUPPORTED_OPCODE(opcode);
5109   }
5110 }
5111 
5112 Node* WasmGraphBuilder::Simd8x16ShuffleOp(const uint8_t shuffle[16],
5113                                           Node* const* inputs) {
5114   has_simd_ = true;
5115   return graph()->NewNode(mcgraph()->machine()->I8x16Shuffle(shuffle),
5116                           inputs[0], inputs[1]);
5117 }
5118 
5119 Node* WasmGraphBuilder::AtomicOp(wasm::WasmOpcode opcode, Node* const* inputs,
5120                                  uint32_t alignment, uint64_t offset,
5121                                  wasm::WasmCodePosition position) {
5122   struct AtomicOpInfo {
5123     enum Type : int8_t {
5124       kNoInput = 0,
5125       kOneInput = 1,
5126       kTwoInputs = 2,
5127       kSpecial
5128     };
5129 
5130     using OperatorByType =
5131         const Operator* (MachineOperatorBuilder::*)(MachineType);
5132     using OperatorByRep =
5133         const Operator* (MachineOperatorBuilder::*)(MachineRepresentation);
5134 
5135     const Type type;
5136     const MachineType machine_type;
5137     const OperatorByType operator_by_type = nullptr;
5138     const OperatorByRep operator_by_rep = nullptr;
5139 
5140     constexpr AtomicOpInfo(Type t, MachineType m, OperatorByType o)
5141         : type(t), machine_type(m), operator_by_type(o) {}
5142     constexpr AtomicOpInfo(Type t, MachineType m, OperatorByRep o)
5143         : type(t), machine_type(m), operator_by_rep(o) {}
5144 
5145     // Constexpr, hence just a table lookup in most compilers.
5146     static constexpr AtomicOpInfo Get(wasm::WasmOpcode opcode) {
5147       switch (opcode) {
5148 #define CASE(Name, Type, MachType, Op) \
5149   case wasm::kExpr##Name:              \
5150     return {Type, MachineType::MachType(), &MachineOperatorBuilder::Op};
5151 
5152         // Binops.
5153         CASE(I32AtomicAdd, kOneInput, Uint32, Word32AtomicAdd)
5154         CASE(I64AtomicAdd, kOneInput, Uint64, Word64AtomicAdd)
5155         CASE(I32AtomicAdd8U, kOneInput, Uint8, Word32AtomicAdd)
5156         CASE(I32AtomicAdd16U, kOneInput, Uint16, Word32AtomicAdd)
5157         CASE(I64AtomicAdd8U, kOneInput, Uint8, Word64AtomicAdd)
5158         CASE(I64AtomicAdd16U, kOneInput, Uint16, Word64AtomicAdd)
5159         CASE(I64AtomicAdd32U, kOneInput, Uint32, Word64AtomicAdd)
5160         CASE(I32AtomicSub, kOneInput, Uint32, Word32AtomicSub)
5161         CASE(I64AtomicSub, kOneInput, Uint64, Word64AtomicSub)
5162         CASE(I32AtomicSub8U, kOneInput, Uint8, Word32AtomicSub)
5163         CASE(I32AtomicSub16U, kOneInput, Uint16, Word32AtomicSub)
5164         CASE(I64AtomicSub8U, kOneInput, Uint8, Word64AtomicSub)
5165         CASE(I64AtomicSub16U, kOneInput, Uint16, Word64AtomicSub)
5166         CASE(I64AtomicSub32U, kOneInput, Uint32, Word64AtomicSub)
5167         CASE(I32AtomicAnd, kOneInput, Uint32, Word32AtomicAnd)
5168         CASE(I64AtomicAnd, kOneInput, Uint64, Word64AtomicAnd)
5169         CASE(I32AtomicAnd8U, kOneInput, Uint8, Word32AtomicAnd)
5170         CASE(I32AtomicAnd16U, kOneInput, Uint16, Word32AtomicAnd)
5171         CASE(I64AtomicAnd8U, kOneInput, Uint8, Word64AtomicAnd)
5172         CASE(I64AtomicAnd16U, kOneInput, Uint16, Word64AtomicAnd)
5173         CASE(I64AtomicAnd32U, kOneInput, Uint32, Word64AtomicAnd)
5174         CASE(I32AtomicOr, kOneInput, Uint32, Word32AtomicOr)
5175         CASE(I64AtomicOr, kOneInput, Uint64, Word64AtomicOr)
5176         CASE(I32AtomicOr8U, kOneInput, Uint8, Word32AtomicOr)
5177         CASE(I32AtomicOr16U, kOneInput, Uint16, Word32AtomicOr)
5178         CASE(I64AtomicOr8U, kOneInput, Uint8, Word64AtomicOr)
5179         CASE(I64AtomicOr16U, kOneInput, Uint16, Word64AtomicOr)
5180         CASE(I64AtomicOr32U, kOneInput, Uint32, Word64AtomicOr)
5181         CASE(I32AtomicXor, kOneInput, Uint32, Word32AtomicXor)
5182         CASE(I64AtomicXor, kOneInput, Uint64, Word64AtomicXor)
5183         CASE(I32AtomicXor8U, kOneInput, Uint8, Word32AtomicXor)
5184         CASE(I32AtomicXor16U, kOneInput, Uint16, Word32AtomicXor)
5185         CASE(I64AtomicXor8U, kOneInput, Uint8, Word64AtomicXor)
5186         CASE(I64AtomicXor16U, kOneInput, Uint16, Word64AtomicXor)
5187         CASE(I64AtomicXor32U, kOneInput, Uint32, Word64AtomicXor)
5188         CASE(I32AtomicExchange, kOneInput, Uint32, Word32AtomicExchange)
5189         CASE(I64AtomicExchange, kOneInput, Uint64, Word64AtomicExchange)
5190         CASE(I32AtomicExchange8U, kOneInput, Uint8, Word32AtomicExchange)
5191         CASE(I32AtomicExchange16U, kOneInput, Uint16, Word32AtomicExchange)
5192         CASE(I64AtomicExchange8U, kOneInput, Uint8, Word64AtomicExchange)
5193         CASE(I64AtomicExchange16U, kOneInput, Uint16, Word64AtomicExchange)
5194         CASE(I64AtomicExchange32U, kOneInput, Uint32, Word64AtomicExchange)
5195 
5196         // Compare-exchange.
5197         CASE(I32AtomicCompareExchange, kTwoInputs, Uint32,
5198              Word32AtomicCompareExchange)
5199         CASE(I64AtomicCompareExchange, kTwoInputs, Uint64,
5200              Word64AtomicCompareExchange)
5201         CASE(I32AtomicCompareExchange8U, kTwoInputs, Uint8,
5202              Word32AtomicCompareExchange)
5203         CASE(I32AtomicCompareExchange16U, kTwoInputs, Uint16,
5204              Word32AtomicCompareExchange)
5205         CASE(I64AtomicCompareExchange8U, kTwoInputs, Uint8,
5206              Word64AtomicCompareExchange)
5207         CASE(I64AtomicCompareExchange16U, kTwoInputs, Uint16,
5208              Word64AtomicCompareExchange)
5209         CASE(I64AtomicCompareExchange32U, kTwoInputs, Uint32,
5210              Word64AtomicCompareExchange)
5211 
5212         // Load.
5213         CASE(I32AtomicLoad, kNoInput, Uint32, Word32AtomicLoad)
5214         CASE(I64AtomicLoad, kNoInput, Uint64, Word64AtomicLoad)
5215         CASE(I32AtomicLoad8U, kNoInput, Uint8, Word32AtomicLoad)
5216         CASE(I32AtomicLoad16U, kNoInput, Uint16, Word32AtomicLoad)
5217         CASE(I64AtomicLoad8U, kNoInput, Uint8, Word64AtomicLoad)
5218         CASE(I64AtomicLoad16U, kNoInput, Uint16, Word64AtomicLoad)
5219         CASE(I64AtomicLoad32U, kNoInput, Uint32, Word64AtomicLoad)
5220 
5221         // Store.
5222         CASE(I32AtomicStore, kOneInput, Uint32, Word32AtomicStore)
5223         CASE(I64AtomicStore, kOneInput, Uint64, Word64AtomicStore)
5224         CASE(I32AtomicStore8U, kOneInput, Uint8, Word32AtomicStore)
5225         CASE(I32AtomicStore16U, kOneInput, Uint16, Word32AtomicStore)
5226         CASE(I64AtomicStore8U, kOneInput, Uint8, Word64AtomicStore)
5227         CASE(I64AtomicStore16U, kOneInput, Uint16, Word64AtomicStore)
5228         CASE(I64AtomicStore32U, kOneInput, Uint32, Word64AtomicStore)
5229 
5230 #undef CASE
5231 
5232         case wasm::kExprAtomicNotify:
5233           return {kSpecial, MachineType::Int32(), OperatorByType{nullptr}};
5234         case wasm::kExprI32AtomicWait:
5235           return {kSpecial, MachineType::Int32(), OperatorByType{nullptr}};
5236         case wasm::kExprI64AtomicWait:
5237           return {kSpecial, MachineType::Int64(), OperatorByType{nullptr}};
5238         default:
5239 #if V8_HAS_CXX14_CONSTEXPR
5240           UNREACHABLE();
5241 #else
5242           // Return something for older GCC.
5243           return {kSpecial, MachineType::Int64(), OperatorByType{nullptr}};
5244 #endif
5245       }
5246     }
5247   };
5248 
5249   AtomicOpInfo info = AtomicOpInfo::Get(opcode);
5250 
5251   Node* index = CheckBoundsAndAlignment(info.machine_type.MemSize(), inputs[0],
5252                                         offset, position);
5253 
5254   // {offset} is validated to be within uintptr_t range in {BoundsCheckMem}.
5255   uintptr_t capped_offset = static_cast<uintptr_t>(offset);
5256   if (info.type != AtomicOpInfo::kSpecial) {
5257     const Operator* op =
5258         info.operator_by_type
5259             ? (mcgraph()->machine()->*info.operator_by_type)(info.machine_type)
5260             : (mcgraph()->machine()->*info.operator_by_rep)(
5261                   info.machine_type.representation());
5262 
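    // The atomic machine operator's inputs are: base (from {MemBuffer}), the
    // bounds-checked index, the operation's value input(s), then effect and
    // control.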
5263     Node* input_nodes[6] = {MemBuffer(capped_offset), index};
5264     int num_actual_inputs = info.type;
5265     std::copy_n(inputs + 1, num_actual_inputs, input_nodes + 2);
5266     input_nodes[num_actual_inputs + 2] = effect();
5267     input_nodes[num_actual_inputs + 3] = control();
5268     return gasm_->AddNode(
5269         graph()->NewNode(op, num_actual_inputs + 4, input_nodes));
5270   }
5271 
5272   // After we've bounds-checked, compute the effective offset.
5273   Node* effective_offset =
5274       gasm_->IntAdd(gasm_->UintPtrConstant(capped_offset), index);
5275 
5276   switch (opcode) {
5277     case wasm::kExprAtomicNotify: {
5278       auto* call_descriptor =
5279           GetBuiltinCallDescriptor<WasmAtomicNotifyDescriptor>(
5280               this, StubCallMode::kCallWasmRuntimeStub);
5281       Node* call_target = mcgraph()->RelocatableIntPtrConstant(
5282           wasm::WasmCode::kWasmAtomicNotify, RelocInfo::WASM_STUB_CALL);
5283       return gasm_->Call(call_descriptor, call_target, effective_offset,
5284                          inputs[1]);
5285     }
5286 
5287     case wasm::kExprI32AtomicWait: {
5288       auto* call_descriptor = GetI32AtomicWaitCallDescriptor();
5289 
5290       intptr_t target = mcgraph()->machine()->Is64()
5291                             ? wasm::WasmCode::kWasmI32AtomicWait64
5292                             : wasm::WasmCode::kWasmI32AtomicWait32;
5293       Node* call_target = mcgraph()->RelocatableIntPtrConstant(
5294           target, RelocInfo::WASM_STUB_CALL);
5295 
5296       return gasm_->Call(call_descriptor, call_target, effective_offset,
5297                          inputs[1], inputs[2]);
5298     }
5299 
5300     case wasm::kExprI64AtomicWait: {
5301       auto* call_descriptor = GetI64AtomicWaitCallDescriptor();
5302 
5303       intptr_t target = mcgraph()->machine()->Is64()
5304                             ? wasm::WasmCode::kWasmI64AtomicWait64
5305                             : wasm::WasmCode::kWasmI64AtomicWait32;
5306       Node* call_target = mcgraph()->RelocatableIntPtrConstant(
5307           target, RelocInfo::WASM_STUB_CALL);
5308 
5309       return gasm_->Call(call_descriptor, call_target, effective_offset,
5310                          inputs[1], inputs[2]);
5311     }
5312 
5313     default:
5314       FATAL_UNSUPPORTED_OPCODE(opcode);
5315   }
5316 }
5317 
5318 Node* WasmGraphBuilder::AtomicFence() {
5319   return SetEffect(graph()->NewNode(mcgraph()->machine()->MemBarrier(),
5320                                     effect(), control()));
5321 }
5322 
5323 Node* WasmGraphBuilder::MemoryInit(uint32_t data_segment_index, Node* dst,
5324                                    Node* src, Node* size,
5325                                    wasm::WasmCodePosition position) {
5326   // The data segment index must be in bounds since it is required by
5327   // validation.
5328   DCHECK_LT(data_segment_index, env_->module->num_declared_data_segments);
5329 
5330   Node* function = graph()->NewNode(mcgraph()->common()->ExternalConstant(
5331       ExternalReference::wasm_memory_init()));
5332 
5333   Node* stack_slot = StoreArgsInStackSlot(
5334       {{MachineType::PointerRepresentation(), instance_node_.get()},
5335        {MachineRepresentation::kWord32, dst},
5336        {MachineRepresentation::kWord32, src},
5337        {MachineRepresentation::kWord32,
5338         gasm_->Uint32Constant(data_segment_index)},
5339        {MachineRepresentation::kWord32, size}});
5340 
5341   MachineType sig_types[] = {MachineType::Int32(), MachineType::Pointer()};
5342   MachineSignature sig(1, 1, sig_types);
5343   Node* call = SetEffect(BuildCCall(&sig, function, stack_slot));
5344   return TrapIfFalse(wasm::kTrapMemOutOfBounds, call, position);
5345 }
5346 
5347 Node* WasmGraphBuilder::DataDrop(uint32_t data_segment_index,
5348                                  wasm::WasmCodePosition position) {
5349   DCHECK_LT(data_segment_index, env_->module->num_declared_data_segments);
5350 
5351   Node* seg_size_array =
5352       LOAD_INSTANCE_FIELD(DataSegmentSizes, MachineType::Pointer());
5353   STATIC_ASSERT(wasm::kV8MaxWasmDataSegments <= kMaxUInt32 >> 2);
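  // Drop the segment by setting its size entry to zero; entries are 32-bit,
  // hence the index is scaled by 4 (<< 2).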
5354   const Operator* store_op = mcgraph()->machine()->Store(
5355       StoreRepresentation(MachineRepresentation::kWord32, kNoWriteBarrier));
5356   return SetEffect(
5357       graph()->NewNode(store_op, seg_size_array,
5358                        mcgraph()->IntPtrConstant(data_segment_index << 2),
5359                        mcgraph()->Int32Constant(0), effect(), control()));
5360 }
5361 
5362 Node* WasmGraphBuilder::StoreArgsInStackSlot(
5363     std::initializer_list<std::pair<MachineRepresentation, Node*>> args) {
5364   int slot_size = 0;
5365   for (auto arg : args) {
5366     slot_size += ElementSizeInBytes(arg.first);
5367   }
5368   DCHECK_LT(0, slot_size);
5369   Node* stack_slot =
5370       graph()->NewNode(mcgraph()->machine()->StackSlot(slot_size));
5371 
5372   int offset = 0;
5373   for (auto arg : args) {
5374     MachineRepresentation type = arg.first;
5375     Node* value = arg.second;
5376     gasm_->Store(StoreRepresentation(type, kNoWriteBarrier), stack_slot,
5377                  mcgraph()->Int32Constant(offset), value);
5378     offset += ElementSizeInBytes(type);
5379   }
5380   return stack_slot;
5381 }
5382 
5383 Node* WasmGraphBuilder::MemoryCopy(Node* dst, Node* src, Node* size,
5384                                    wasm::WasmCodePosition position) {
5385   Node* function = graph()->NewNode(mcgraph()->common()->ExternalConstant(
5386       ExternalReference::wasm_memory_copy()));
5387 
5388   Node* stack_slot = StoreArgsInStackSlot(
5389       {{MachineType::PointerRepresentation(), instance_node_.get()},
5390        {MachineRepresentation::kWord32, dst},
5391        {MachineRepresentation::kWord32, src},
5392        {MachineRepresentation::kWord32, size}});
5393 
5394   MachineType sig_types[] = {MachineType::Int32(), MachineType::Pointer()};
5395   MachineSignature sig(1, 1, sig_types);
5396   Node* call = SetEffect(BuildCCall(&sig, function, stack_slot));
5397   return TrapIfFalse(wasm::kTrapMemOutOfBounds, call, position);
5398 }
5399 
5400 Node* WasmGraphBuilder::MemoryFill(Node* dst, Node* value, Node* size,
5401                                    wasm::WasmCodePosition position) {
5402   Node* function = graph()->NewNode(mcgraph()->common()->ExternalConstant(
5403       ExternalReference::wasm_memory_fill()));
5404 
5405   Node* stack_slot = StoreArgsInStackSlot(
5406       {{MachineType::PointerRepresentation(), instance_node_.get()},
5407        {MachineRepresentation::kWord32, dst},
5408        {MachineRepresentation::kWord32, value},
5409        {MachineRepresentation::kWord32, size}});
5410 
5411   MachineType sig_types[] = {MachineType::Int32(), MachineType::Pointer()};
5412   MachineSignature sig(1, 1, sig_types);
5413   Node* call = SetEffect(BuildCCall(&sig, function, stack_slot));
5414   return TrapIfFalse(wasm::kTrapMemOutOfBounds, call, position);
5415 }
5416 
5417 Node* WasmGraphBuilder::TableInit(uint32_t table_index,
5418                                   uint32_t elem_segment_index, Node* dst,
5419                                   Node* src, Node* size,
5420                                   wasm::WasmCodePosition position) {
5421   auto call_descriptor = GetBuiltinCallDescriptor<WasmTableInitDescriptor>(
5422       this, StubCallMode::kCallWasmRuntimeStub);
5423 
5424   intptr_t target = wasm::WasmCode::kWasmTableInit;
5425   Node* call_target =
5426       mcgraph()->RelocatableIntPtrConstant(target, RelocInfo::WASM_STUB_CALL);
5427 
5428   return gasm_->Call(
5429       call_descriptor, call_target, dst, src, size,
5430       graph()->NewNode(mcgraph()->common()->NumberConstant(table_index)),
5431       graph()->NewNode(
5432           mcgraph()->common()->NumberConstant(elem_segment_index)));
5433 }
5434 
5435 Node* WasmGraphBuilder::ElemDrop(uint32_t elem_segment_index,
5436                                  wasm::WasmCodePosition position) {
5437   // The elem segment index must be in bounds since it is required by
5438   // validation.
5439   DCHECK_LT(elem_segment_index, env_->module->elem_segments.size());
5440 
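  // Mark the segment as dropped by storing 1 into its byte in the dropped
  // elem segments array.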
5441   Node* dropped_elem_segments =
5442       LOAD_INSTANCE_FIELD(DroppedElemSegments, MachineType::Pointer());
5443   const Operator* store_op = mcgraph()->machine()->Store(
5444       StoreRepresentation(MachineRepresentation::kWord8, kNoWriteBarrier));
5445   return SetEffect(
5446       graph()->NewNode(store_op, dropped_elem_segments,
5447                        mcgraph()->IntPtrConstant(elem_segment_index),
5448                        mcgraph()->Int32Constant(1), effect(), control()));
5449 }
5450 
5451 Node* WasmGraphBuilder::TableCopy(uint32_t table_dst_index,
5452                                   uint32_t table_src_index, Node* dst,
5453                                   Node* src, Node* size,
5454                                   wasm::WasmCodePosition position) {
5455   auto call_descriptor = GetBuiltinCallDescriptor<WasmTableCopyDescriptor>(
5456       this, StubCallMode::kCallWasmRuntimeStub);
5457 
5458   intptr_t target = wasm::WasmCode::kWasmTableCopy;
5459   Node* call_target =
5460       mcgraph()->RelocatableIntPtrConstant(target, RelocInfo::WASM_STUB_CALL);
5461 
5462   return gasm_->Call(
5463       call_descriptor, call_target, dst, src, size,
5464       graph()->NewNode(mcgraph()->common()->NumberConstant(table_dst_index)),
5465       graph()->NewNode(mcgraph()->common()->NumberConstant(table_src_index)));
5466 }
5467 
5468 Node* WasmGraphBuilder::TableGrow(uint32_t table_index, Node* value,
5469                                   Node* delta) {
5470   Node* args[] = {
5471       graph()->NewNode(mcgraph()->common()->NumberConstant(table_index)), value,
5472       BuildConvertUint32ToSmiWithSaturation(delta, FLAG_wasm_max_table_size)};
5473   Node* result =
5474       BuildCallToRuntime(Runtime::kWasmTableGrow, args, arraysize(args));
5475   return BuildChangeSmiToInt32(result);
5476 }
5477 
5478 Node* WasmGraphBuilder::TableSize(uint32_t table_index) {
5479   Node* tables = LOAD_INSTANCE_FIELD(Tables, MachineType::TaggedPointer());
5480   Node* table = LOAD_FIXED_ARRAY_SLOT_ANY(tables, table_index);
5481 
5482   int length_field_size = WasmTableObject::kCurrentLengthOffsetEnd -
5483                           WasmTableObject::kCurrentLengthOffset + 1;
5484   Node* length_smi = gasm_->Load(
5485       assert_size(length_field_size, MachineType::TaggedSigned()), table,
5486       wasm::ObjectAccess::ToTagged(WasmTableObject::kCurrentLengthOffset));
5487 
5488   return BuildChangeSmiToInt32(length_smi);
5489 }
5490 
5491 Node* WasmGraphBuilder::TableFill(uint32_t table_index, Node* start,
5492                                   Node* value, Node* count) {
5493   Node* args[] = {
5494       graph()->NewNode(mcgraph()->common()->NumberConstant(table_index)),
5495       BuildConvertUint32ToSmiWithSaturation(start, FLAG_wasm_max_table_size),
5496       value,
5497       BuildConvertUint32ToSmiWithSaturation(count, FLAG_wasm_max_table_size)};
5498 
5499   return BuildCallToRuntime(Runtime::kWasmTableFill, args, arraysize(args));
5500 }
5501 
5502 namespace {
5503 
5504 MachineType FieldType(const wasm::StructType* type, uint32_t field_index,
5505                       bool is_signed) {
5506   return MachineType::TypeForRepresentation(
5507       type->field(field_index).machine_representation(), is_signed);
5508 }
5509 
5510 Node* FieldOffset(MachineGraph* graph, const wasm::StructType* type,
5511                   uint32_t field_index) {
5512   int offset = WasmStruct::kHeaderSize + type->field_offset(field_index) -
5513                kHeapObjectTag;
5514   return graph->IntPtrConstant(offset);
5515 }
5516 
5517 // It's guaranteed that struct/array fields are aligned to min(field_size,
5518 // kTaggedSize), with the latter being 4 or 8 depending on platform and
5519 // pointer compression. So on our most common configurations, 8-byte types
5520 // must use unaligned loads/stores.
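// For example, with pointer compression (kTaggedSize == 4) an 8-byte field
// such as i64 or f64 is only guaranteed 4-byte alignment, hence the unaligned
// accesses below.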
5521 Node* LoadWithTaggedAlignment(WasmGraphAssembler* gasm, MachineType type,
5522                               Node* base, Node* offset) {
5523   if (ElementSizeInBytes(type.representation()) > kTaggedSize) {
5524     return gasm->LoadUnaligned(type, base, offset);
5525   } else {
5526     return gasm->Load(type, base, offset);
5527   }
5528 }
5529 
5530 // Same alignment considerations as above.
5531 Node* StoreWithTaggedAlignment(WasmGraphAssembler* gasm, Node* base,
5532                                Node* offset, Node* value,
5533                                wasm::ValueType type) {
5534   MachineRepresentation rep = type.machine_representation();
5535   if (ElementSizeInBytes(rep) > kTaggedSize) {
5536     return gasm->StoreUnaligned(rep, base, offset, value);
5537   } else {
5538     WriteBarrierKind write_barrier =
5539         type.is_reference_type() ? kPointerWriteBarrier : kNoWriteBarrier;
5540     StoreRepresentation store_rep(rep, write_barrier);
5541     return gasm->Store(store_rep, base, offset, value);
5542   }
5543 }
5544 
5545 // Set a field of a struct, without checking if the struct is null.
5546 // Helper method for StructNewWithRtt and StructSet.
5547 Node* StoreStructFieldUnchecked(MachineGraph* graph, WasmGraphAssembler* gasm,
5548                                 Node* struct_object,
5549                                 const wasm::StructType* type,
5550                                 uint32_t field_index, Node* value) {
5551   return StoreWithTaggedAlignment(gasm, struct_object,
5552                                   FieldOffset(graph, type, field_index), value,
5553                                   type->field(field_index));
5554 }
5555 
5556 Node* ArrayElementOffset(GraphAssembler* gasm, Node* index,
5557                          wasm::ValueType element_type) {
5558   return gasm->Int32Add(
5559       gasm->Int32Constant(WasmArray::kHeaderSize - kHeapObjectTag),
5560       gasm->Int32Mul(index,
5561                      gasm->Int32Constant(element_type.element_size_bytes())));
5562 }
5563 
5564 Node* ArrayLength(GraphAssembler* gasm, Node* array) {
5565   return gasm->Load(
5566       MachineType::Uint32(), array,
5567       gasm->Int32Constant(WasmArray::kLengthOffset - kHeapObjectTag));
5568 }
5569 
5570 }  // namespace
5571 
5572 Node* WasmGraphBuilder::StructNewWithRtt(uint32_t struct_index,
5573                                          const wasm::StructType* type,
5574                                          Node* rtt, Vector<Node*> fields) {
5575   Node* s = CALL_BUILTIN(
5576       WasmAllocateStructWithRtt, rtt,
5577       LOAD_INSTANCE_FIELD(NativeContext, MachineType::TaggedPointer()));
5578   for (uint32_t i = 0; i < type->field_count(); i++) {
5579     StoreStructFieldUnchecked(mcgraph(), gasm_.get(), s, type, i, fields[i]);
5580   }
5581   return s;
5582 }
5583 
5584 Node* WasmGraphBuilder::ArrayNewWithRtt(uint32_t array_index,
5585                                         const wasm::ArrayType* type,
5586                                         Node* length, Node* initial_value,
5587                                         Node* rtt) {
5588   wasm::ValueType element_type = type->element_type();
5589   Node* a = CALL_BUILTIN(
5590       WasmAllocateArrayWithRtt, rtt, BuildChangeUint31ToSmi(length),
5591       graph()->NewNode(mcgraph()->common()->NumberConstant(
5592           element_type.element_size_bytes())),
5593       LOAD_INSTANCE_FIELD(NativeContext, MachineType::TaggedPointer()));
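  // Initialize the array: loop over the element offsets, storing
  // {initial_value} at each one.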
5594   auto loop = gasm_->MakeLoopLabel(MachineRepresentation::kWord32);
5595   auto done = gasm_->MakeLabel();
5596   Node* start_offset =
5597       gasm_->Int32Constant(WasmArray::kHeaderSize - kHeapObjectTag);
5598   Node* element_size = gasm_->Int32Constant(element_type.element_size_bytes());
5599   Node* end_offset =
5600       gasm_->Int32Add(start_offset, gasm_->Int32Mul(element_size, length));
5601   // "Goto" requires the graph's end to have been set up.
5602   // TODO(jkummerow): Figure out if there's a more elegant solution.
5603   Graph* g = mcgraph()->graph();
5604   if (!g->end()) {
5605     g->SetEnd(g->NewNode(mcgraph()->common()->End(0)));
5606   }
5607   gasm_->Goto(&loop, start_offset);
5608   gasm_->Bind(&loop);
5609   {
5610     Node* offset = loop.PhiAt(0);
5611     Node* check = gasm_->Uint32LessThan(offset, end_offset);
5612     gasm_->GotoIfNot(check, &done);
5613     StoreWithTaggedAlignment(gasm_.get(), a, offset, initial_value,
5614                              type->element_type());
5615     offset = gasm_->Int32Add(offset, element_size);
5616     gasm_->Goto(&loop, offset);
5617   }
5618   gasm_->Bind(&done);
5619   return a;
5620 }
5621 
5622 Node* WasmGraphBuilder::RttCanon(wasm::HeapType type) {
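  // Generic heap types use canonical maps from the isolate roots; user-defined
  // types load their map from the instance's managed object maps list.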
5623   if (type.is_generic()) {
5624     switch (type.representation()) {
5625       case wasm::HeapType::kEq:
5626         return LOAD_FULL_POINTER(
5627             BuildLoadIsolateRoot(),
5628             IsolateData::root_slot_offset(RootIndex::kWasmRttEqrefMap));
5629       case wasm::HeapType::kExtern:
5630         return LOAD_FULL_POINTER(
5631             BuildLoadIsolateRoot(),
5632             IsolateData::root_slot_offset(RootIndex::kWasmRttExternrefMap));
5633       case wasm::HeapType::kFunc:
5634         return LOAD_FULL_POINTER(
5635             BuildLoadIsolateRoot(),
5636             IsolateData::root_slot_offset(RootIndex::kWasmRttFuncrefMap));
5637       case wasm::HeapType::kI31:
5638         return LOAD_FULL_POINTER(
5639             BuildLoadIsolateRoot(),
5640             IsolateData::root_slot_offset(RootIndex::kWasmRttI31refMap));
5641       default:
5642         UNREACHABLE();
5643     }
5644   }
5645   Node* maps_list =
5646       LOAD_INSTANCE_FIELD(ManagedObjectMaps, MachineType::TaggedPointer());
5647   return LOAD_FIXED_ARRAY_SLOT_PTR(maps_list, type.ref_index());
5648 }
5649 
5650 Node* WasmGraphBuilder::RttSub(wasm::HeapType type, Node* parent_rtt) {
5651   return CALL_BUILTIN(
5652       WasmAllocateRtt,
5653       graph()->NewNode(
5654           mcgraph()->common()->NumberConstant(type.representation())),
5655       parent_rtt,
5656       LOAD_INSTANCE_FIELD(NativeContext, MachineType::TaggedPointer()));
5657 }
5658 
5659 Node* IsI31(GraphAssembler* gasm, Node* object) {
5660   if (COMPRESS_POINTERS_BOOL) {
5661     return gasm->Word32Equal(
5662         gasm->Word32And(object, gasm->Int32Constant(kSmiTagMask)),
5663         gasm->Int32Constant(kSmiTag));
5664   } else {
5665     return gasm->WordEqual(
5666         gasm->WordAnd(object, gasm->IntPtrConstant(kSmiTagMask)),
5667         gasm->IntPtrConstant(kSmiTag));
5668   }
5669 }
5670 
5671 void AssertFalse(MachineGraph* mcgraph, GraphAssembler* gasm, Node* condition) {
5672 #if DEBUG
5673   if (FLAG_debug_code) {
5674     auto ok = gasm->MakeLabel();
5675     gasm->GotoIfNot(condition, &ok);
5676     EnsureEnd(mcgraph);
5677     gasm->Unreachable();
5678     gasm->Bind(&ok);
5679   }
5680 #endif
5681 }
5682 
5683 Node* WasmGraphBuilder::RefTest(Node* object, Node* rtt,
5684                                 CheckForNull null_check, CheckForI31 i31_check,
5685                                 RttIsI31 rtt_is_i31) {
5686   auto done = gasm_->MakeLabel(MachineRepresentation::kWord32);
5687   bool need_done_label = false;
5688   if (i31_check == kWithI31Check) {
5689     if (rtt_is_i31 == kRttIsI31) {
5690       return IsI31(gasm_.get(), object);
5691     }
5692     gasm_->GotoIf(IsI31(gasm_.get(), object), &done, gasm_->Int32Constant(0));
5693     need_done_label = true;
5694   } else {
5695     AssertFalse(mcgraph(), gasm_.get(), IsI31(gasm_.get(), object));
5696   }
5697   if (null_check == kWithNullCheck) {
5698     gasm_->GotoIf(gasm_->WordEqual(object, RefNull()), &done,
5699                   gasm_->Int32Constant(0));
5700     need_done_label = true;
5701   }
5702 
5703   Node* map = gasm_->Load(MachineType::TaggedPointer(), object,
5704                           HeapObject::kMapOffset - kHeapObjectTag);
5705   // TODO(7748): Add a fast path for map == rtt.
5706   Node* subtype_check = BuildChangeSmiToInt32(CALL_BUILTIN(
5707       WasmIsRttSubtype, map, rtt,
5708       LOAD_INSTANCE_FIELD(NativeContext, MachineType::TaggedPointer())));
5709 
5710   if (need_done_label) {
5711     gasm_->Goto(&done, subtype_check);
5712     gasm_->Bind(&done);
5713     subtype_check = done.PhiAt(0);
5714   }
5715   return subtype_check;
5716 }
5717 
5718 Node* WasmGraphBuilder::RefCast(Node* object, Node* rtt,
5719                                 CheckForNull null_check, CheckForI31 i31_check,
5720                                 RttIsI31 rtt_is_i31,
5721                                 wasm::WasmCodePosition position) {
5722   if (i31_check == kWithI31Check) {
5723     if (rtt_is_i31 == kRttIsI31) {
5724       TrapIfFalse(wasm::kTrapIllegalCast, IsI31(gasm_.get(), object), position);
5725       return object;
5726     } else {
5727       TrapIfTrue(wasm::kTrapIllegalCast, IsI31(gasm_.get(), object), position);
5728     }
5729   } else {
5730     AssertFalse(mcgraph(), gasm_.get(), IsI31(gasm_.get(), object));
5731   }
5732   if (null_check == kWithNullCheck) {
5733     TrapIfTrue(wasm::kTrapIllegalCast, gasm_->WordEqual(object, RefNull()),
5734                position);
5735   }
5736   Node* map = gasm_->Load(MachineType::TaggedPointer(), object,
5737                           HeapObject::kMapOffset - kHeapObjectTag);
5738   // TODO(7748): Add a fast path for map == rtt.
5739   Node* check_result = BuildChangeSmiToInt32(CALL_BUILTIN(
5740       WasmIsRttSubtype, map, rtt,
5741       LOAD_INSTANCE_FIELD(NativeContext, MachineType::TaggedPointer())));
5742   TrapIfFalse(wasm::kTrapIllegalCast, check_result, position);
5743   return object;
5744 }
5745 
5746 Node* WasmGraphBuilder::BrOnCast(Node* object, Node* rtt,
5747                                  CheckForNull null_check, CheckForI31 i31_check,
5748                                  RttIsI31 rtt_is_i31, Node** match_control,
5749                                  Node** match_effect, Node** no_match_control,
5750                                  Node** no_match_effect) {
5751   // We have up to 3 control nodes to merge; the EffectPhi needs an additional
5752   // input.
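  // (The possible controls are the failed i31 check, the failed null check,
  // and the failed subtype check; the EffectPhi additionally takes the merged
  // control as input.)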
5753   base::SmallVector<Node*, 3> merge_controls;
5754   base::SmallVector<Node*, 4> merge_effects;
5755 
5756   Node* is_i31 = IsI31(gasm_.get(), object);
5757   if (i31_check == kWithI31Check) {
5758     if (rtt_is_i31 == kRttIsI31) {
5759       BranchExpectFalse(is_i31, match_control, no_match_control);
5760       return nullptr;
5761     } else {
5762       Node* i31_branch = graph()->NewNode(
5763           mcgraph()->common()->Branch(BranchHint::kFalse), is_i31, control());
5764       SetControl(graph()->NewNode(mcgraph()->common()->IfFalse(), i31_branch));
5765       merge_controls.emplace_back(
5766           graph()->NewNode(mcgraph()->common()->IfTrue(), i31_branch));
5767       merge_effects.emplace_back(effect());
5768     }
5769   } else {
5770     AssertFalse(mcgraph(), gasm_.get(), is_i31);
5771   }
5772 
5773   if (null_check == kWithNullCheck) {
5774     Node* null_branch =
5775         graph()->NewNode(mcgraph()->common()->Branch(BranchHint::kFalse),
5776                          gasm_->WordEqual(object, RefNull()), control());
5777     SetControl(graph()->NewNode(mcgraph()->common()->IfFalse(), null_branch));
5778     merge_controls.emplace_back(
5779         graph()->NewNode(mcgraph()->common()->IfTrue(), null_branch));
5780     merge_effects.emplace_back(effect());
5781   }
5782 
5783   // At this point, {object} is neither null nor an i31ref/Smi.
5784   Node* map = gasm_->Load(MachineType::TaggedPointer(), object,
5785                           HeapObject::kMapOffset - kHeapObjectTag);
5786   // TODO(7748): Add a fast path for map == rtt.
5787   Node* subtype_check = BuildChangeSmiToInt32(CALL_BUILTIN(
5788       WasmIsRttSubtype, map, rtt,
5789       LOAD_INSTANCE_FIELD(NativeContext, MachineType::TaggedPointer())));
5790   Node* cast_branch =
5791       graph()->NewNode(mcgraph()->common()->Branch(BranchHint::kFalse),
5792                        subtype_check, control());
5793   *match_control = graph()->NewNode(mcgraph()->common()->IfTrue(), cast_branch);
5794   *match_effect = effect();
5795   Node* not_subtype =
5796       graph()->NewNode(mcgraph()->common()->IfFalse(), cast_branch);
5797 
5798   // Wire up the "cast attempt was unsuccessful" control nodes: merge them if
5799   // there is more than one.
5800   if (merge_controls.size() > 0) {
5801     merge_controls.emplace_back(not_subtype);
5802     merge_effects.emplace_back(effect());
5803     // Range is 1..3, so casting to int is safe.
5804     DCHECK_EQ(merge_controls.size(), merge_effects.size());
5805     unsigned count = static_cast<unsigned>(merge_controls.size());
5806     *no_match_control = Merge(count, merge_controls.data());
5807     // EffectPhis need their control dependency as an additional input.
5808     merge_effects.emplace_back(*no_match_control);
5809     *no_match_effect = EffectPhi(count, merge_effects.data());
5810   } else {
5811     *no_match_control = not_subtype;
5812     *no_match_effect = effect();
5813   }
5814   // Return value is not used, but we need it for compatibility
5815   // with graph-builder-interface.
5816   return nullptr;
5817 }
5818 
5819 Node* WasmGraphBuilder::StructGet(Node* struct_object,
5820                                   const wasm::StructType* struct_type,
5821                                   uint32_t field_index, CheckForNull null_check,
5822                                   bool is_signed,
5823                                   wasm::WasmCodePosition position) {
5824   if (null_check == kWithNullCheck) {
5825     TrapIfTrue(wasm::kTrapNullDereference,
5826                gasm_->WordEqual(struct_object, RefNull()), position);
5827   }
5828   MachineType machine_type = FieldType(struct_type, field_index, is_signed);
5829   Node* offset = FieldOffset(mcgraph(), struct_type, field_index);
5830   return LoadWithTaggedAlignment(gasm_.get(), machine_type, struct_object,
5831                                  offset);
5832 }
5833 
5834 Node* WasmGraphBuilder::StructSet(Node* struct_object,
5835                                   const wasm::StructType* struct_type,
5836                                   uint32_t field_index, Node* field_value,
5837                                   CheckForNull null_check,
5838                                   wasm::WasmCodePosition position) {
5839   if (null_check == kWithNullCheck) {
5840     TrapIfTrue(wasm::kTrapNullDereference,
5841                gasm_->WordEqual(struct_object, RefNull()), position);
5842   }
5843   return StoreStructFieldUnchecked(mcgraph(), gasm_.get(), struct_object,
5844                                    struct_type, field_index, field_value);
5845 }
5846 
5847 void WasmGraphBuilder::BoundsCheck(Node* array, Node* index,
5848                                    wasm::WasmCodePosition position) {
5849   Node* length = ArrayLength(gasm_.get(), array);
5850   TrapIfFalse(wasm::kTrapArrayOutOfBounds, gasm_->Uint32LessThan(index, length),
5851               position);
5852 }
5853 
5854 Node* WasmGraphBuilder::ArrayGet(Node* array_object,
5855                                  const wasm::ArrayType* type, Node* index,
5856                                  CheckForNull null_check, bool is_signed,
5857                                  wasm::WasmCodePosition position) {
5858   if (null_check == kWithNullCheck) {
5859     TrapIfTrue(wasm::kTrapNullDereference,
5860                gasm_->WordEqual(array_object, RefNull()), position);
5861   }
5862   BoundsCheck(array_object, index, position);
5863   MachineType machine_type = MachineType::TypeForRepresentation(
5864       type->element_type().machine_representation(), is_signed);
5865   Node* offset = ArrayElementOffset(gasm_.get(), index, type->element_type());
5866   return LoadWithTaggedAlignment(gasm_.get(), machine_type, array_object,
5867                                  offset);
5868 }
5869 
5870 Node* WasmGraphBuilder::ArraySet(Node* array_object,
5871                                  const wasm::ArrayType* type, Node* index,
5872                                  Node* value, CheckForNull null_check,
5873                                  wasm::WasmCodePosition position) {
5874   if (null_check == kWithNullCheck) {
5875     TrapIfTrue(wasm::kTrapNullDereference,
5876                gasm_->WordEqual(array_object, RefNull()), position);
5877   }
5878   BoundsCheck(array_object, index, position);
5879   Node* offset = ArrayElementOffset(gasm_.get(), index, type->element_type());
5880   return StoreWithTaggedAlignment(gasm_.get(), array_object, offset, value,
5881                                   type->element_type());
5882 }
5883 
5884 Node* WasmGraphBuilder::ArrayLen(Node* array_object,
5885                                  wasm::WasmCodePosition position) {
5886   TrapIfTrue(wasm::kTrapNullDereference,
5887              gasm_->WordEqual(array_object, RefNull()), position);
5888   return ArrayLength(gasm_.get(), array_object);
5889 }
5890 
5891 // 1 bit V8 Smi tag, 31 bits V8 Smi shift, 1 bit i31ref high-bit truncation.
5892 constexpr int kI31To32BitSmiShift = 33;
5893 
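// i31ref values are represented like Smis: {I31New} tags a 32-bit value by
// shifting it into Smi position, and {I31GetS}/{I31GetU} untag it again with a
// sign-extending resp. zero-extending shift.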
5894 Node* WasmGraphBuilder::I31New(Node* input) {
5895   if (SmiValuesAre31Bits()) {
5896     return gasm_->Word32Shl(input, BuildSmiShiftBitsConstant32());
5897   }
5898   DCHECK(SmiValuesAre32Bits());
5899   input = BuildChangeInt32ToIntPtr(input);
5900   return gasm_->WordShl(input, gasm_->IntPtrConstant(kI31To32BitSmiShift));
5901 }
5902 
5903 Node* WasmGraphBuilder::I31GetS(Node* input) {
5904   if (SmiValuesAre31Bits()) {
5905     input = BuildTruncateIntPtrToInt32(input);
5906     return gasm_->Word32SarShiftOutZeros(input, BuildSmiShiftBitsConstant32());
5907   }
5908   DCHECK(SmiValuesAre32Bits());
5909   return BuildTruncateIntPtrToInt32(
5910       gasm_->WordSar(input, gasm_->IntPtrConstant(kI31To32BitSmiShift)));
5911 }
5912 
5913 Node* WasmGraphBuilder::I31GetU(Node* input) {
5914   if (SmiValuesAre31Bits()) {
5915     input = BuildTruncateIntPtrToInt32(input);
5916     return gasm_->Word32Shr(input, BuildSmiShiftBitsConstant32());
5917   }
5918   DCHECK(SmiValuesAre32Bits());
5919   return BuildTruncateIntPtrToInt32(
5920       gasm_->WordShr(input, gasm_->IntPtrConstant(kI31To32BitSmiShift)));
5921 }
5922 
5923 class WasmDecorator final : public GraphDecorator {
5924  public:
5925   explicit WasmDecorator(NodeOriginTable* origins, wasm::Decoder* decoder)
5926       : origins_(origins), decoder_(decoder) {}
5927 
5928   void Decorate(Node* node) final {
5929     origins_->SetNodeOrigin(
5930         node, NodeOrigin("wasm graph creation", "n/a",
5931                          NodeOrigin::kWasmBytecode, decoder_->position()));
5932   }
5933 
5934  private:
5935   compiler::NodeOriginTable* origins_;
5936   wasm::Decoder* decoder_;
5937 };
5938 
5939 void WasmGraphBuilder::AddBytecodePositionDecorator(
5940     NodeOriginTable* node_origins, wasm::Decoder* decoder) {
5941   DCHECK_NULL(decorator_);
5942   decorator_ = graph()->zone()->New<WasmDecorator>(node_origins, decoder);
5943   graph()->AddDecorator(decorator_);
5944 }
5945 
5946 void WasmGraphBuilder::RemoveBytecodePositionDecorator() {
5947   DCHECK_NOT_NULL(decorator_);
5948   graph()->RemoveDecorator(decorator_);
5949   decorator_ = nullptr;
5950 }
5951 
5952 namespace {
5953 
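// Builds the TurboFan graphs for the various wrappers around wasm calls:
// JS-to-wasm, wasm-to-JS (imports), wasm-to-C-API, JS-to-JS, and the
// C-to-wasm entry. The wrappers mainly convert between JS values and wasm
// values and manage the thread-in-wasm flag around the actual call.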
5954 class WasmWrapperGraphBuilder : public WasmGraphBuilder {
5955  public:
5956   WasmWrapperGraphBuilder(Zone* zone, MachineGraph* mcgraph,
5957                           const wasm::FunctionSig* sig,
5958                           const wasm::WasmModule* module,
5959                           compiler::SourcePositionTable* spt,
5960                           StubCallMode stub_mode, wasm::WasmFeatures features)
5961       : WasmGraphBuilder(nullptr, zone, mcgraph, sig, spt),
5962         module_(module),
5963         stub_mode_(stub_mode),
5964         enabled_features_(features) {}
5965 
5966   CallDescriptor* GetI64ToBigIntCallDescriptor() {
5967     if (i64_to_bigint_descriptor_) return i64_to_bigint_descriptor_;
5968 
5969     i64_to_bigint_descriptor_ =
5970         GetBuiltinCallDescriptor<I64ToBigIntDescriptor>(this, stub_mode_);
5971 
5972     AddInt64LoweringReplacement(
5973         i64_to_bigint_descriptor_,
5974         GetBuiltinCallDescriptor<I32PairToBigIntDescriptor>(this, stub_mode_));
5975     return i64_to_bigint_descriptor_;
5976   }
5977 
5978   CallDescriptor* GetBigIntToI64CallDescriptor() {
5979     if (bigint_to_i64_descriptor_) return bigint_to_i64_descriptor_;
5980 
5981     bigint_to_i64_descriptor_ =
5982         GetBuiltinCallDescriptor<BigIntToI64Descriptor>(this, stub_mode_);
5983 
5984     AddInt64LoweringReplacement(
5985         bigint_to_i64_descriptor_,
5986         GetBuiltinCallDescriptor<BigIntToI32PairDescriptor>(this, stub_mode_));
5987     return bigint_to_i64_descriptor_;
5988   }
5989 
5990   Node* GetTargetForBuiltinCall(wasm::WasmCode::RuntimeStubId wasm_stub,
5991                                 Builtins::Name builtin_id) {
5992     return (stub_mode_ == StubCallMode::kCallWasmRuntimeStub)
5993                ? mcgraph()->RelocatableIntPtrConstant(wasm_stub,
5994                                                       RelocInfo::WASM_STUB_CALL)
5995                : GetBuiltinPointerTarget(builtin_id);
5996   }
5997 
5998   Node* BuildLoadUndefinedValueFromInstance() {
5999     if (undefined_value_node_ == nullptr) {
6000       Node* isolate_root = graph()->NewNode(
6001           mcgraph()->machine()->Load(MachineType::Pointer()),
6002           instance_node_.get(),
6003           mcgraph()->Int32Constant(WASM_INSTANCE_OBJECT_OFFSET(IsolateRoot)),
6004           graph()->start(), graph()->start());
6005       undefined_value_node_ = graph()->NewNode(
6006           mcgraph()->machine()->Load(MachineType::Pointer()), isolate_root,
6007           mcgraph()->Int32Constant(
6008               IsolateData::root_slot_offset(RootIndex::kUndefinedValue)),
6009           isolate_root, graph()->start());
6010     }
6011     return undefined_value_node_.get();
6012   }
6013 
6014   Node* BuildChangeInt32ToNumber(Node* value) {
6015     // We expect most integers at runtime to be Smis, so it is important for
6016     // wrapper performance that Smi conversion be inlined.
6017     if (SmiValuesAre32Bits()) {
6018       return BuildChangeInt32ToSmi(value);
6019     }
6020     DCHECK(SmiValuesAre31Bits());
6021 
6022     auto builtin = gasm_->MakeDeferredLabel();
6023     auto done = gasm_->MakeLabel(MachineRepresentation::kTagged);
6024 
6025     // Double value to test if value can be a Smi, and if so, to convert it.
6026     Node* add = gasm_->Int32AddWithOverflow(value, value);
6027     Node* ovf = gasm_->Projection(1, add);
6028     gasm_->GotoIf(ovf, &builtin);
6029 
6030     // If it didn't overflow, the result is {2 * value} as a pointer-sized value.
6031     Node* smi_tagged = BuildChangeInt32ToIntPtr(gasm_->Projection(0, add));
6032     gasm_->Goto(&done, smi_tagged);
6033 
6034     // Otherwise, call the builtin to convert the value to a HeapNumber.
6035     gasm_->Bind(&builtin);
6036     CommonOperatorBuilder* common = mcgraph()->common();
6037     Node* target =
6038         GetTargetForBuiltinCall(wasm::WasmCode::kWasmInt32ToHeapNumber,
6039                                 Builtins::kWasmInt32ToHeapNumber);
6040     if (!int32_to_heapnumber_operator_.is_set()) {
6041       auto call_descriptor = Linkage::GetStubCallDescriptor(
6042           mcgraph()->zone(), WasmInt32ToHeapNumberDescriptor(), 0,
6043           CallDescriptor::kNoFlags, Operator::kNoProperties, stub_mode_);
6044       int32_to_heapnumber_operator_.set(common->Call(call_descriptor));
6045     }
6046     Node* call =
6047         gasm_->Call(int32_to_heapnumber_operator_.get(), target, value);
6048     gasm_->Goto(&done, call);
6049     gasm_->Bind(&done);
6050     return done.PhiAt(0);
6051   }
6052 
6053   Node* BuildChangeTaggedToInt32(Node* value, Node* context) {
6054     // We expect most integers at runtime to be Smis, so it is important for
6055     // wrapper performance that Smi conversion be inlined.
6056     auto builtin = gasm_->MakeDeferredLabel();
6057     auto done = gasm_->MakeLabel(MachineRepresentation::kWord32);
6058 
6059     // Test if value is a Smi.
6060     Node* is_smi =
6061         gasm_->Word32Equal(gasm_->Word32And(BuildTruncateIntPtrToInt32(value),
6062                                             gasm_->Int32Constant(kSmiTagMask)),
6063                            gasm_->Int32Constant(0));
6064     gasm_->GotoIfNot(is_smi, &builtin);
6065 
6066     // If Smi, convert to int32.
6067     Node* smi = BuildChangeSmiToInt32(value);
6068     gasm_->Goto(&done, smi);
6069 
6070     // Otherwise, call the builtin which converts a non-Smi to int32.
6071     gasm_->Bind(&builtin);
6072     CommonOperatorBuilder* common = mcgraph()->common();
6073     Node* target =
6074         GetTargetForBuiltinCall(wasm::WasmCode::kWasmTaggedNonSmiToInt32,
6075                                 Builtins::kWasmTaggedNonSmiToInt32);
6076     if (!tagged_non_smi_to_int32_operator_.is_set()) {
6077       auto call_descriptor = Linkage::GetStubCallDescriptor(
6078           mcgraph()->zone(), WasmTaggedNonSmiToInt32Descriptor(), 0,
6079           CallDescriptor::kNoFlags, Operator::kNoProperties, stub_mode_);
6080       tagged_non_smi_to_int32_operator_.set(common->Call(call_descriptor));
6081     }
6082     Node* call = gasm_->Call(tagged_non_smi_to_int32_operator_.get(), target,
6083                              value, context);
6084     SetSourcePosition(call, 1);
6085     gasm_->Goto(&done, call);
6086     gasm_->Bind(&done);
6087     return done.PhiAt(0);
6088   }
6089 
6090   Node* BuildChangeFloat32ToNumber(Node* value) {
6091     CommonOperatorBuilder* common = mcgraph()->common();
6092     Node* target = GetTargetForBuiltinCall(wasm::WasmCode::kWasmFloat32ToNumber,
6093                                            Builtins::kWasmFloat32ToNumber);
6094     if (!float32_to_number_operator_.is_set()) {
6095       auto call_descriptor = Linkage::GetStubCallDescriptor(
6096           mcgraph()->zone(), WasmFloat32ToNumberDescriptor(), 0,
6097           CallDescriptor::kNoFlags, Operator::kNoProperties, stub_mode_);
6098       float32_to_number_operator_.set(common->Call(call_descriptor));
6099     }
6100     return gasm_->Call(float32_to_number_operator_.get(), target, value);
6101   }
6102 
6103   Node* BuildChangeFloat64ToNumber(Node* value) {
6104     CommonOperatorBuilder* common = mcgraph()->common();
6105     Node* target = GetTargetForBuiltinCall(wasm::WasmCode::kWasmFloat64ToNumber,
6106                                            Builtins::kWasmFloat64ToNumber);
6107     if (!float64_to_number_operator_.is_set()) {
6108       auto call_descriptor = Linkage::GetStubCallDescriptor(
6109           mcgraph()->zone(), WasmFloat64ToNumberDescriptor(), 0,
6110           CallDescriptor::kNoFlags, Operator::kNoProperties, stub_mode_);
6111       float64_to_number_operator_.set(common->Call(call_descriptor));
6112     }
6113     return gasm_->Call(float64_to_number_operator_.get(), target, value);
6114   }
6115 
6116   Node* BuildChangeTaggedToFloat64(Node* value, Node* context) {
6117     CommonOperatorBuilder* common = mcgraph()->common();
6118     Node* target = GetTargetForBuiltinCall(wasm::WasmCode::kWasmTaggedToFloat64,
6119                                            Builtins::kWasmTaggedToFloat64);
6120     if (!tagged_to_float64_operator_.is_set()) {
6121       auto call_descriptor = Linkage::GetStubCallDescriptor(
6122           mcgraph()->zone(), WasmTaggedToFloat64Descriptor(), 0,
6123           CallDescriptor::kNoFlags, Operator::kNoProperties, stub_mode_);
6124       tagged_to_float64_operator_.set(common->Call(call_descriptor));
6125     }
6126     Node* call =
6127         gasm_->Call(tagged_to_float64_operator_.get(), target, value, context);
6128     SetSourcePosition(call, 1);
6129     return call;
6130   }
6131 
6132   int AddArgumentNodes(Vector<Node*> args, int pos, int param_count,
6133                        const wasm::FunctionSig* sig) {
6134     // Convert wasm numbers to JS values.
6135     for (int i = 0; i < param_count; ++i) {
6136       Node* param =
6137           Param(i + 1);  // Start from index 1 to drop the instance_node.
6138       args[pos++] = ToJS(param, sig->GetParam(i));
6139     }
6140     return pos;
6141   }
6142 
6143   Node* ToJS(Node* node, wasm::ValueType type) {
6144     switch (type.kind()) {
6145       case wasm::ValueType::kI32:
6146         return BuildChangeInt32ToNumber(node);
6147       case wasm::ValueType::kS128:
6148         UNREACHABLE();
6149       case wasm::ValueType::kI64: {
6150         DCHECK(enabled_features_.has_bigint());
6151         return BuildChangeInt64ToBigInt(node);
6152       }
6153       case wasm::ValueType::kF32:
6154         return BuildChangeFloat32ToNumber(node);
6155       case wasm::ValueType::kF64:
6156         return BuildChangeFloat64ToNumber(node);
6157       case wasm::ValueType::kRef:
6158       case wasm::ValueType::kOptRef: {
6159         uint32_t representation = type.heap_representation();
6160         if (representation == wasm::HeapType::kExtern ||
6161             representation == wasm::HeapType::kExn ||
6162             representation == wasm::HeapType::kFunc) {
6163           return node;
6164         }
6165         if (representation == wasm::HeapType::kEq) {
6166           return BuildAllocateObjectWrapper(node);
6167         }
6168         if (type.has_index() && module_->has_signature(type.ref_index())) {
6169           // Typed function
6170           return node;
6171         }
6172         // TODO(7748): Figure out a JS interop story for arrays and structs.
6173         // If this is reached, then IsJSCompatibleSignature() is too permissive.
6174         UNREACHABLE();
6175       }
6176       case wasm::ValueType::kRtt:
6177         // TODO(7748): Figure out what to do for RTTs.
6178         UNIMPLEMENTED();
6179       case wasm::ValueType::kI8:
6180       case wasm::ValueType::kI16:
6181       case wasm::ValueType::kStmt:
6182       case wasm::ValueType::kBottom:
6183         UNREACHABLE();
6184     }
6185   }
6186 
6187   // TODO(7748): Temporary solution to allow round-tripping of Wasm objects
6188   // through JavaScript, where they show up as opaque boxes. This will disappear
6189   // once we have a proper WasmGC <-> JS interaction story.
6190   Node* BuildAllocateObjectWrapper(Node* input) {
6191     return CALL_BUILTIN(
6192         WasmAllocateObjectWrapper, input,
6193         LOAD_INSTANCE_FIELD(NativeContext, MachineType::TaggedPointer()));
6194   }
6195 
6196   Node* BuildUnpackObjectWrapper(Node* input) {
6197     Node* obj = CALL_BUILTIN(
6198         WasmGetOwnProperty, input,
6199         LOAD_FULL_POINTER(BuildLoadIsolateRoot(),
6200                           IsolateData::root_slot_offset(
6201                               RootIndex::kwasm_wrapped_object_symbol)),
6202         LOAD_INSTANCE_FIELD(NativeContext, MachineType::TaggedPointer()));
6203     // Invalid object wrappers (i.e. any other JS object that doesn't have the
6204     // magic hidden property) will return {undefined}. Map that to {null}.
6205     Node* undefined = LOAD_FULL_POINTER(
6206         BuildLoadIsolateRoot(),
6207         IsolateData::root_slot_offset(RootIndex::kUndefinedValue));
6208     Node* is_undefined = gasm_->WordEqual(obj, undefined);
6209     Diamond check(graph(), mcgraph()->common(), is_undefined,
6210                   BranchHint::kFalse);
6211     check.Chain(control());
6212     return check.Phi(MachineRepresentation::kTagged, RefNull(), obj);
6213   }
6214 
6215   Node* BuildChangeInt64ToBigInt(Node* input) {
6216     const Operator* call =
6217         mcgraph()->common()->Call(GetI64ToBigIntCallDescriptor());
6218 
6219     Node* target;
6220     if (mcgraph()->machine()->Is64()) {
6221       target = GetTargetForBuiltinCall(wasm::WasmCode::kI64ToBigInt,
6222                                        Builtins::kI64ToBigInt);
6223     } else {
6224       DCHECK(mcgraph()->machine()->Is32());
6225       // On 32-bit platforms we already set the target to the
6226       // I32PairToBigInt builtin here, so that we don't have to replace the
6227       // target in the int64-lowering.
6228       target = GetTargetForBuiltinCall(wasm::WasmCode::kI32PairToBigInt,
6229                                        Builtins::kI32PairToBigInt);
6230     }
6231 
6232     return SetEffectControl(
6233         graph()->NewNode(call, target, input, effect(), control()));
6234   }
6235 
6236   Node* BuildChangeBigIntToInt64(Node* input, Node* context) {
6237     const Operator* call =
6238         mcgraph()->common()->Call(GetBigIntToI64CallDescriptor());
6239 
6240     Node* target;
6241     if (mcgraph()->machine()->Is64()) {
6242       target = GetTargetForBuiltinCall(wasm::WasmCode::kBigIntToI64,
6243                                        Builtins::kBigIntToI64);
6244     } else {
6245       DCHECK(mcgraph()->machine()->Is32());
6246       // On 32-bit platforms we already set the target to the
6247       // BigIntToI32Pair builtin here, so that we don't have to replace the
6248       // target in the int64-lowering.
6249       target = GetTargetForBuiltinCall(wasm::WasmCode::kBigIntToI32Pair,
6250                                        Builtins::kBigIntToI32Pair);
6251     }
6252 
6253     return SetEffectControl(
6254         graph()->NewNode(call, target, input, context, effect(), control()));
6255   }
6256 
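  // Calls the WasmIsValidRefValue runtime function to check that {input} is a
  // valid value of the reference type {type}, and throws a TypeError if not.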
6257   void BuildCheckValidRefValue(Node* input, Node* js_context,
6258                                wasm::ValueType type) {
6259     // Make sure ValueType fits in a Smi.
6260     STATIC_ASSERT(wasm::ValueType::kLastUsedBit + 1 <= kSmiValueSize);
6261     Node* inputs[] = {instance_node_.get(), input,
6262                       mcgraph()->IntPtrConstant(
6263                           IntToSmi(static_cast<int>(type.raw_bit_field())))};
6264 
6265     Node* check = BuildChangeSmiToInt32(SetEffect(BuildCallToRuntimeWithContext(
6266         Runtime::kWasmIsValidRefValue, js_context, inputs, 3)));
6267 
6268     Diamond type_check(graph(), mcgraph()->common(), check, BranchHint::kTrue);
6269     type_check.Chain(control());
6270     SetControl(type_check.if_false);
6271 
6272     Node* old_effect = effect();
6273     BuildCallToRuntimeWithContext(Runtime::kWasmThrowJSTypeError, js_context,
6274                                   nullptr, 0);
6275 
6276     SetEffectControl(type_check.EffectPhi(old_effect, effect()),
6277                      type_check.merge);
6278   }
6279 
6280   Node* FromJS(Node* input, Node* js_context, wasm::ValueType type) {
6281     switch (type.kind()) {
6282       case wasm::ValueType::kRef:
6283       case wasm::ValueType::kOptRef: {
6284         switch (type.heap_representation()) {
6285           case wasm::HeapType::kExtern:
6286           case wasm::HeapType::kExn:
6287             return input;
6288           case wasm::HeapType::kFunc:
6289             BuildCheckValidRefValue(input, js_context, type);
6290             return input;
6291           case wasm::HeapType::kEq:
6292             BuildCheckValidRefValue(input, js_context, type);
6293             return BuildUnpackObjectWrapper(input);
6294           case wasm::HeapType::kI31:
6295             // If this is reached, then IsJSCompatibleSignature() is too
6296             // permissive.
6297             UNREACHABLE();
6298           default:
6299             if (module_->has_signature(type.ref_index())) {
6300               BuildCheckValidRefValue(input, js_context, type);
6301               return input;
6302             }
6303             // If this is reached, then IsJSCompatibleSignature() is too
6304             // permissive.
6305             UNREACHABLE();
6306         }
6307       }
6308       case wasm::ValueType::kF32:
6309         return graph()->NewNode(
6310             mcgraph()->machine()->TruncateFloat64ToFloat32(),
6311             BuildChangeTaggedToFloat64(input, js_context));
6312 
6313       case wasm::ValueType::kF64:
6314         return BuildChangeTaggedToFloat64(input, js_context);
6315 
6316       case wasm::ValueType::kI32:
6317         return BuildChangeTaggedToInt32(input, js_context);
6318 
6319       case wasm::ValueType::kI64:
6320         // i64 values can only come from BigInt.
6321         DCHECK(enabled_features_.has_bigint());
6322         return BuildChangeBigIntToInt64(input, js_context);
6323 
6324       case wasm::ValueType::kRtt:  // TODO(7748): Implement.
6325       case wasm::ValueType::kS128:
6326       case wasm::ValueType::kI8:
6327       case wasm::ValueType::kI16:
6328       case wasm::ValueType::kBottom:
6329       case wasm::ValueType::kStmt:
6330         UNREACHABLE();
6331         break;
6332     }
6333   }
6334 
6335   Node* SmiToFloat32(Node* input) {
6336     return graph()->NewNode(mcgraph()->machine()->RoundInt32ToFloat32(),
6337                             BuildChangeSmiToInt32(input));
6338   }
6339 
6340   Node* SmiToFloat64(Node* input) {
6341     return graph()->NewNode(mcgraph()->machine()->ChangeInt32ToFloat64(),
6342                             BuildChangeSmiToInt32(input));
6343   }
6344 
6345   Node* HeapNumberToFloat64(Node* input) {
6346     return gasm_->Load(MachineType::Float64(), input,
6347                        wasm::ObjectAccess::ToTagged(HeapNumber::kValueOffset));
6348   }
6349 
6350   Node* FromJSFast(Node* input, wasm::ValueType type) {
6351     switch (type.kind()) {
6352       case wasm::ValueType::kI32:
6353         return BuildChangeSmiToInt32(input);
6354       case wasm::ValueType::kF32: {
6355         auto done = gasm_->MakeLabel(MachineRepresentation::kFloat32);
6356         auto heap_number = gasm_->MakeLabel();
6357         gasm_->GotoIfNot(IsSmi(input), &heap_number);
6358         gasm_->Goto(&done, SmiToFloat32(input));
6359         gasm_->Bind(&heap_number);
6360         Node* value =
6361             graph()->NewNode(mcgraph()->machine()->TruncateFloat64ToFloat32(),
6362                              HeapNumberToFloat64(input));
6363         gasm_->Goto(&done, value);
6364         gasm_->Bind(&done);
6365         return done.PhiAt(0);
6366       }
6367       case wasm::ValueType::kF64: {
6368         auto done = gasm_->MakeLabel(MachineRepresentation::kFloat64);
6369         auto heap_number = gasm_->MakeLabel();
6370         gasm_->GotoIfNot(IsSmi(input), &heap_number);
6371         gasm_->Goto(&done, SmiToFloat64(input));
6372         gasm_->Bind(&heap_number);
6373         gasm_->Goto(&done, HeapNumberToFloat64(input));
6374         gasm_->Bind(&done);
6375         return done.PhiAt(0);
6376       }
6377       case wasm::ValueType::kRef:
6378       case wasm::ValueType::kOptRef:
6379       case wasm::ValueType::kI64:
6380       case wasm::ValueType::kRtt:
6381       case wasm::ValueType::kS128:
6382       case wasm::ValueType::kI8:
6383       case wasm::ValueType::kI16:
6384       case wasm::ValueType::kBottom:
6385       case wasm::ValueType::kStmt:
6386         UNREACHABLE();
6387         break;
6388     }
6389   }
6390 
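  // Sets or clears the thread-in-wasm flag used by the trap handler; this is
  // a no-op if the trap handler is disabled. With --debug-code, it first
  // verifies that the flag currently has the opposite value.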
6391   void BuildModifyThreadInWasmFlag(bool new_value) {
6392     if (!trap_handler::IsTrapHandlerEnabled()) return;
6393     Node* isolate_root = BuildLoadIsolateRoot();
6394 
6395     Node* thread_in_wasm_flag_address =
6396         gasm_->Load(MachineType::Pointer(), isolate_root,
6397                     Isolate::thread_in_wasm_flag_address_offset());
6398 
6399     if (FLAG_debug_code) {
6400       Node* flag_value = SetEffect(
6401           graph()->NewNode(mcgraph()->machine()->Load(MachineType::Pointer()),
6402                            thread_in_wasm_flag_address,
6403                            mcgraph()->Int32Constant(0), effect(), control()));
6404       Node* check =
6405           graph()->NewNode(mcgraph()->machine()->Word32Equal(), flag_value,
6406                            mcgraph()->Int32Constant(new_value ? 0 : 1));
6407 
6408       Diamond flag_check(graph(), mcgraph()->common(), check,
6409                          BranchHint::kTrue);
6410       flag_check.Chain(control());
6411       SetControl(flag_check.if_false);
6412       Node* message_id = graph()->NewNode(
6413           mcgraph()->common()->NumberConstant(static_cast<int32_t>(
6414               new_value ? AbortReason::kUnexpectedThreadInWasmSet
6415                         : AbortReason::kUnexpectedThreadInWasmUnset)));
6416 
6417       Node* old_effect = effect();
6418       BuildCallToRuntimeWithContext(Runtime::kAbort, NoContextConstant(),
6419                                     &message_id, 1);
6420 
6421       SetEffectControl(flag_check.EffectPhi(old_effect, effect()),
6422                        flag_check.merge);
6423     }
6424 
6425     SetEffect(graph()->NewNode(
6426         mcgraph()->machine()->Store(StoreRepresentation(
6427             MachineRepresentation::kWord32, kNoWriteBarrier)),
6428         thread_in_wasm_flag_address, mcgraph()->Int32Constant(0),
6429         mcgraph()->Int32Constant(new_value ? 1 : 0), effect(), control()));
6430   }
6431 
6432   Node* BuildLoadInstanceFromExportedFunctionData(Node* function_data) {
6433     return gasm_->Load(
6434         MachineType::AnyTagged(), function_data,
6435         WasmExportedFunctionData::kInstanceOffset - kHeapObjectTag);
6436   }
6437 
6438   Node* BuildMultiReturnFixedArrayFromIterable(const wasm::FunctionSig* sig,
6439                                                Node* iterable, Node* context) {
6440     Node* length = BuildChangeUint31ToSmi(
6441         mcgraph()->Uint32Constant(static_cast<uint32_t>(sig->return_count())));
6442     return CALL_BUILTIN(IterableToFixedArrayForWasm, iterable, length, context);
6443   }
6444 
6445   // Extract the FixedArray implementing the backing storage of a
6446   // JavaScript array.
6447   Node* BuildLoadArrayBackingStorage(Node* js_array) {
6448     return gasm_->Load(MachineType::AnyTagged(), js_array,
6449                        JSObject::kElementsOffset - kHeapObjectTag);
6450   }
6451 
6452   // Generate a call to the AllocateJSArray builtin.
6453   Node* BuildCallAllocateJSArray(Node* array_length, Node* context) {
6454     // Since we don't check at runtime that the args will fit in an array,
6455     // we statically assert that this is true based on the known limits.
6456     STATIC_ASSERT(wasm::kV8MaxWasmFunctionMultiReturns <=
6457                   JSArray::kInitialMaxFastElementArray);
6458     return SetControl(CALL_BUILTIN(WasmAllocateJSArray, array_length, context));
6459   }
6460 
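  // Calls the wasm target (either an imported function or a function defined
  // in this module) with {args} and converts the result(s) to JS: undefined
  // for zero returns, a single JS value for one return, and a JS array for
  // multiple returns. Toggles the thread-in-wasm flag around the call.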
6461   Node* BuildCallAndReturn(bool is_import, Node* js_context,
6462                            Node* function_data,
6463                            base::SmallVector<Node*, 16> args) {
6464     // Set the ThreadInWasm flag before we do the actual call.
6465     BuildModifyThreadInWasmFlag(true);
6466 
6467     const int rets_count = static_cast<int>(sig_->return_count());
6468     base::SmallVector<Node*, 1> rets(rets_count);
6469 
6470     if (is_import) {
6471       // Call to an imported function.
6472       // Load function index from {WasmExportedFunctionData}.
6473       Node* function_index =
6474           BuildLoadFunctionIndexFromExportedFunctionData(function_data);
6475       BuildImportCall(sig_, VectorOf(args), VectorOf(rets),
6476                       wasm::kNoCodePosition, function_index, kCallContinues);
6477     } else {
6478       // Call to a wasm function defined in this module.
6479       // The call target is the jump table slot for that function.
6480       Node* jump_table_start =
6481           LOAD_INSTANCE_FIELD(JumpTableStart, MachineType::Pointer());
6482       Node* jump_table_offset =
6483           BuildLoadJumpTableOffsetFromExportedFunctionData(function_data);
6484       Node* jump_table_slot = graph()->NewNode(
6485           mcgraph()->machine()->IntAdd(), jump_table_start, jump_table_offset);
6486       args[0] = jump_table_slot;
6487 
6488       BuildWasmCall(sig_, VectorOf(args), VectorOf(rets), wasm::kNoCodePosition,
6489                     nullptr, kNoRetpoline);
6490     }
6491 
6492     // Clear the ThreadInWasm flag.
6493     BuildModifyThreadInWasmFlag(false);
6494 
6495     Node* jsval;
6496     if (sig_->return_count() == 0) {
6497       jsval = BuildLoadUndefinedValueFromInstance();
6498     } else if (sig_->return_count() == 1) {
6499       jsval = ToJS(rets[0], sig_->GetReturn());
6500     } else {
6501       int32_t return_count = static_cast<int32_t>(sig_->return_count());
6502       Node* size =
6503           graph()->NewNode(mcgraph()->common()->NumberConstant(return_count));
6504 
6505       jsval = BuildCallAllocateJSArray(size, js_context);
6506 
6507       Node* fixed_array = BuildLoadArrayBackingStorage(jsval);
6508 
6509       for (int i = 0; i < return_count; ++i) {
6510         Node* value = ToJS(rets[i], sig_->GetReturn(i));
6511         STORE_FIXED_ARRAY_SLOT_ANY(fixed_array, i, value);
6512       }
6513     }
6514     return jsval;
6515   }
6516 
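  // A signature qualifies for the fast JS-to-wasm transformation only if all
  // parameters are i32, f32, or f64, i.e. can arrive as Smis or HeapNumbers.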
6517   bool QualifiesForFastTransform(const wasm::FunctionSig*) {
6518     const int wasm_count = static_cast<int>(sig_->parameter_count());
6519     for (int i = 0; i < wasm_count; ++i) {
6520       wasm::ValueType type = sig_->GetParam(i);
6521       switch (type.kind()) {
6522         case wasm::ValueType::kRef:
6523         case wasm::ValueType::kOptRef:
6524         case wasm::ValueType::kI64:
6525         case wasm::ValueType::kRtt:
6526         case wasm::ValueType::kS128:
6527         case wasm::ValueType::kI8:
6528         case wasm::ValueType::kI16:
6529         case wasm::ValueType::kBottom:
6530         case wasm::ValueType::kStmt:
6531           return false;
6532         case wasm::ValueType::kI32:
6533         case wasm::ValueType::kF32:
6534         case wasm::ValueType::kF64:
6535           break;
6536       }
6537     }
6538     return true;
6539   }
6540 
6541   Node* IsSmi(Node* input) {
6542     return gasm_->Word32Equal(
6543         gasm_->Word32And(BuildTruncateIntPtrToInt32(input),
6544                          gasm_->Int32Constant(kSmiTagMask)),
6545         gasm_->Int32Constant(0));
6546   }
6547 
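  // Jumps to {slow_path} if {input} cannot be handled by the fast
  // transformation: for i32 it must be a Smi, for f32/f64 it must be a Smi or
  // a HeapNumber.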
6548   void CanTransformFast(
6549       Node* input, wasm::ValueType type,
6550       v8::internal::compiler::GraphAssemblerLabel<0>* slow_path) {
6551     switch (type.kind()) {
6552       case wasm::ValueType::kI32: {
6553         gasm_->GotoIfNot(IsSmi(input), slow_path);
6554         return;
6555       }
6556       case wasm::ValueType::kF32:
6557       case wasm::ValueType::kF64: {
6558         auto done = gasm_->MakeLabel();
6559         gasm_->GotoIf(IsSmi(input), &done);
6560         Node* map =
6561             gasm_->Load(MachineType::TaggedPointer(), input,
6562                         wasm::ObjectAccess::ToTagged(HeapObject::kMapOffset));
6563         Node* heap_number_map = LOAD_FULL_POINTER(
6564             BuildLoadIsolateRoot(),
6565             IsolateData::root_slot_offset(RootIndex::kHeapNumberMap));
6566         Node* is_heap_number = gasm_->WordEqual(heap_number_map, map);
6567         gasm_->GotoIf(is_heap_number, &done);
6568         gasm_->Goto(slow_path);
6569         gasm_->Bind(&done);
6570         return;
6571       }
6572       case wasm::ValueType::kRef:
6573       case wasm::ValueType::kOptRef:
6574       case wasm::ValueType::kI64:
6575       case wasm::ValueType::kRtt:
6576       case wasm::ValueType::kS128:
6577       case wasm::ValueType::kI8:
6578       case wasm::ValueType::kI16:
6579       case wasm::ValueType::kBottom:
6580       case wasm::ValueType::kStmt:
6581         UNREACHABLE();
6582         break;
6583     }
6584   }
6585 
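  // Builds the graph for a JS-to-wasm wrapper: converts the incoming JS
  // arguments to wasm values (with an optional fast path for Smi/HeapNumber
  // arguments), calls the wasm function, and converts the result(s) back to
  // JS values.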
6586   void BuildJSToWasmWrapper(bool is_import) {
6587     const int wasm_count = static_cast<int>(sig_->parameter_count());
6588 
6589     // Build the start and the JS parameter nodes.
6590     SetEffectControl(Start(wasm_count + 5));
6591 
6592     // Create the js_closure and js_context parameters.
6593     Node* js_closure =
6594         graph()->NewNode(mcgraph()->common()->Parameter(
6595                              Linkage::kJSCallClosureParamIndex, "%closure"),
6596                          graph()->start());
6597     Node* js_context = graph()->NewNode(
6598         mcgraph()->common()->Parameter(
6599             Linkage::GetJSCallContextParamIndex(wasm_count + 1), "%context"),
6600         graph()->start());
6601 
6602     // Create the instance_node to pass as a parameter. It is loaded, via the
6603     // {WasmExportedFunctionData} structure of the {WasmExportedFunction}
6604     // closure, from either an actual instance reference or a placeholder
6605     // reference.
6606     Node* function_data = BuildLoadFunctionDataFromJSFunction(js_closure);
6607     instance_node_.set(
6608         BuildLoadInstanceFromExportedFunctionData(function_data));
6609 
6610     if (!wasm::IsJSCompatibleSignature(sig_, module_, enabled_features_)) {
6611       // Throw a TypeError. Use the js_context of the calling JavaScript
6612       // function (passed as a parameter), so that the generated code is
6613       // js_context independent.
6614       BuildCallToRuntimeWithContext(Runtime::kWasmThrowJSTypeError, js_context,
6615                                     nullptr, 0);
6616       TerminateThrow(effect(), control());
6617       return;
6618     }
6619 
6620     const int args_count = wasm_count + 1;  // +1 for wasm_code.
6621 
6622     // Check whether the signature of the function allows for a fast
6623     // transformation (i.e. whether any params exist that need transformation).
6624     // Only create a fast transformation path if it does.
6625     bool include_fast_path = wasm_count && QualifiesForFastTransform(sig_);
6626 
6627     // Prepare Param() nodes. Param() nodes can only be created once,
6628     // so we need to use the same nodes along all possible transformation paths.
6629     base::SmallVector<Node*, 16> params(args_count);
6630     for (int i = 0; i < wasm_count; ++i) params[i + 1] = Param(i + 1);
6631 
6632     auto done = gasm_->MakeLabel(MachineRepresentation::kTagged);
6633     if (include_fast_path) {
6634       auto slow_path = gasm_->MakeDeferredLabel();
6635       // Check if the params received at runtime can actually be transformed
6636       // using the fast transformation. When a param that cannot be transformed
6637       // fast is encountered, skip checking the rest and fall back to the slow
6638       // path.
6639       for (int i = 0; i < wasm_count; ++i) {
6640         CanTransformFast(params[i + 1], sig_->GetParam(i), &slow_path);
6641       }
6642       // Convert JS parameters to wasm numbers using the fast transformation
6643       // and build the call.
6644       base::SmallVector<Node*, 16> args(args_count);
6645       for (int i = 0; i < wasm_count; ++i) {
6646         Node* wasm_param = FromJSFast(params[i + 1], sig_->GetParam(i));
6647         args[i + 1] = wasm_param;
6648       }
6649       Node* jsval =
6650           BuildCallAndReturn(is_import, js_context, function_data, args);
6651       gasm_->Goto(&done, jsval);
6652       gasm_->Bind(&slow_path);
6653     }
6654     // Convert JS parameters to wasm numbers using the default transformation
6655     // and build the call.
6656     base::SmallVector<Node*, 16> args(args_count);
6657     for (int i = 0; i < wasm_count; ++i) {
6658       Node* wasm_param = FromJS(params[i + 1], js_context, sig_->GetParam(i));
6659       args[i + 1] = wasm_param;
6660     }
6661     Node* jsval =
6662         BuildCallAndReturn(is_import, js_context, function_data, args);
6663     // If both the default and the fast transformation paths are present,
6664     // get the return value based on the path used.
6665     if (include_fast_path) {
6666       gasm_->Goto(&done, jsval);
6667       gasm_->Bind(&done);
6668       Return(done.PhiAt(0));
6669     } else {
6670       Return(jsval);
6671     }
6672     if (ContainsInt64(sig_)) LowerInt64(kCalledFromJS);
6673   }
6674 
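  // Computes the receiver for calling an imported JS function: undefined for
  // strict-mode or native functions, otherwise the global proxy of the native
  // context.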
6675   Node* BuildReceiverNode(Node* callable_node, Node* native_context,
6676                           Node* undefined_node) {
6677     // Check function strict bit.
6678     Node* shared_function_info = gasm_->Load(
6679         MachineType::TaggedPointer(), callable_node,
6680         wasm::ObjectAccess::SharedFunctionInfoOffsetInTaggedJSFunction());
6681     Node* flags =
6682         gasm_->Load(MachineType::Int32(), shared_function_info,
6683                     wasm::ObjectAccess::FlagsOffsetInSharedFunctionInfo());
6684     Node* strict_check =
6685         Binop(wasm::kExprI32And, flags,
6686               mcgraph()->Int32Constant(SharedFunctionInfo::IsNativeBit::kMask |
6687                                        SharedFunctionInfo::IsStrictBit::kMask));
6688 
6689     // Load global receiver if sloppy else use undefined.
6690     Diamond strict_d(graph(), mcgraph()->common(), strict_check,
6691                      BranchHint::kNone);
6692     Node* old_effect = effect();
6693     SetControl(strict_d.if_false);
6694     Node* global_proxy =
6695         LOAD_FIXED_ARRAY_SLOT_PTR(native_context, Context::GLOBAL_PROXY_INDEX);
6696     SetEffectControl(strict_d.EffectPhi(old_effect, global_proxy),
6697                      strict_d.merge);
6698     return strict_d.Phi(MachineRepresentation::kTagged, undefined_node,
6699                         global_proxy);
6700   }
6701 
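  // Builds the graph for a wasm-to-JS import wrapper, specialized on the
  // import call kind (matching arity, mismatching arity, or the generic Call
  // builtin). Returns false only for the kRuntimeTypeError kind, where the
  // wrapper just throws a TypeError.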
6702   bool BuildWasmToJSWrapper(WasmImportCallKind kind, int expected_arity) {
6703     int wasm_count = static_cast<int>(sig_->parameter_count());
6704 
6705     // Build the start and the parameter nodes.
6706     SetEffectControl(Start(wasm_count + 4));
6707 
6708     instance_node_.set(Param(wasm::kWasmInstanceParameterIndex));
6709 
6710     Node* native_context =
6711         LOAD_INSTANCE_FIELD(NativeContext, MachineType::TaggedPointer());
6712 
6713     if (kind == WasmImportCallKind::kRuntimeTypeError) {
6714       // =======================================================================
6715       // === Runtime TypeError =================================================
6716       // =======================================================================
6717       BuildCallToRuntimeWithContext(Runtime::kWasmThrowJSTypeError,
6718                                     native_context, nullptr, 0);
6719       TerminateThrow(effect(), control());
6720       return false;
6721     }
6722 
6723     // The callable is passed as the last parameter, after Wasm arguments.
6724     Node* callable_node = Param(wasm_count + 1);
6725 
6726     Node* undefined_node = BuildLoadUndefinedValueFromInstance();
6727 
6728     Node* call = nullptr;
6729 
6730     // Clear the ThreadInWasm flag.
6731     BuildModifyThreadInWasmFlag(false);
6732 
6733     switch (kind) {
6734       // =======================================================================
6735       // === JS Functions with matching arity ==================================
6736       // =======================================================================
6737       case WasmImportCallKind::kJSFunctionArityMatch: {
6738         base::SmallVector<Node*, 16> args(wasm_count + 7);
6739         int pos = 0;
6740         Node* function_context =
6741             gasm_->Load(MachineType::TaggedPointer(), callable_node,
6742                         wasm::ObjectAccess::ContextOffsetInTaggedJSFunction());
6743         args[pos++] = callable_node;  // target callable.
6744 
6745         // Determine receiver at runtime.
6746         args[pos++] =
6747             BuildReceiverNode(callable_node, native_context, undefined_node);
6748 
6749         auto call_descriptor = Linkage::GetJSCallDescriptor(
6750             graph()->zone(), false, wasm_count + 1, CallDescriptor::kNoFlags);
6751 
6752         // Convert wasm numbers to JS values.
6753         pos = AddArgumentNodes(VectorOf(args), pos, wasm_count, sig_);
6754 
6755         args[pos++] = undefined_node;                        // new target
6756         args[pos++] = mcgraph()->Int32Constant(wasm_count);  // argument count
6757         args[pos++] = function_context;
6758         args[pos++] = effect();
6759         args[pos++] = control();
6760 
6761         DCHECK_EQ(pos, args.size());
6762         call = graph()->NewNode(mcgraph()->common()->Call(call_descriptor), pos,
6763                                 args.begin());
6764         break;
6765       }
6766 #ifdef V8_NO_ARGUMENTS_ADAPTOR
6767       // =======================================================================
6768       // === JS Functions with mismatching arity ===============================
6769       // =======================================================================
6770       case WasmImportCallKind::kJSFunctionArityMismatch: {
6771         int pushed_count = std::max(expected_arity, wasm_count);
6772         base::SmallVector<Node*, 16> args(pushed_count + 7);
6773         int pos = 0;
6774 
6775         args[pos++] = callable_node;  // target callable.
6776         // Determine receiver at runtime.
6777         args[pos++] =
6778             BuildReceiverNode(callable_node, native_context, undefined_node);
6779 
6780         // Convert wasm numbers to JS values.
6781         pos = AddArgumentNodes(VectorOf(args), pos, wasm_count, sig_);
6782         for (int i = wasm_count; i < expected_arity; ++i) {
6783           args[pos++] = undefined_node;
6784         }
6785         args[pos++] = undefined_node;                        // new target
6786         args[pos++] = mcgraph()->Int32Constant(wasm_count);  // argument count
6787 
6788         Node* function_context =
6789             gasm_->Load(MachineType::TaggedPointer(), callable_node,
6790                         wasm::ObjectAccess::ContextOffsetInTaggedJSFunction());
6791         args[pos++] = function_context;
6792         args[pos++] = effect();
6793         args[pos++] = control();
6794         DCHECK_EQ(pos, args.size());
6795 
6796         auto call_descriptor = Linkage::GetJSCallDescriptor(
6797             graph()->zone(), false, pushed_count + 1, CallDescriptor::kNoFlags);
6798         call = graph()->NewNode(mcgraph()->common()->Call(call_descriptor), pos,
6799                                 args.begin());
6800         break;
6801       }
6802 #else
6803       // =======================================================================
6804       // === JS Functions with mismatching arity ===============================
6805       // =======================================================================
6806       case WasmImportCallKind::kJSFunctionArityMismatch: {
6807         base::SmallVector<Node*, 16> args(wasm_count + 9);
6808         int pos = 0;
6809         Node* function_context =
6810             gasm_->Load(MachineType::TaggedPointer(), callable_node,
6811                         wasm::ObjectAccess::ContextOffsetInTaggedJSFunction());
6812         args[pos++] =
6813             GetBuiltinPointerTarget(Builtins::kArgumentsAdaptorTrampoline);
6814         args[pos++] = callable_node;                         // target callable
6815         args[pos++] = undefined_node;                        // new target
6816         args[pos++] = mcgraph()->Int32Constant(wasm_count);  // argument count
6817 
6818         // Load shared function info, and then the formal parameter count.
6819         Node* shared_function_info = gasm_->Load(
6820             MachineType::TaggedPointer(), callable_node,
6821             wasm::ObjectAccess::SharedFunctionInfoOffsetInTaggedJSFunction());
6822         Node* formal_param_count = SetEffect(graph()->NewNode(
6823             mcgraph()->machine()->Load(MachineType::Uint16()),
6824             shared_function_info,
6825             mcgraph()->Int32Constant(
6826                 wasm::ObjectAccess::
6827                     FormalParameterCountOffsetInSharedFunctionInfo()),
6828             effect(), control()));
6829         args[pos++] = formal_param_count;
6830 
6831         // Determine receiver at runtime.
6832         args[pos++] =
6833             BuildReceiverNode(callable_node, native_context, undefined_node);
6834 
6835         auto call_descriptor = Linkage::GetStubCallDescriptor(
6836             mcgraph()->zone(), ArgumentsAdaptorDescriptor{}, 1 + wasm_count,
6837             CallDescriptor::kNoFlags, Operator::kNoProperties,
6838             StubCallMode::kCallBuiltinPointer);
6839 
6840         // Convert wasm numbers to JS values.
6841         pos = AddArgumentNodes(VectorOf(args), pos, wasm_count, sig_);
6842         args[pos++] = function_context;
6843         args[pos++] = effect();
6844         args[pos++] = control();
6845 
6846         DCHECK_EQ(pos, args.size());
6847         call = graph()->NewNode(mcgraph()->common()->Call(call_descriptor), pos,
6848                                 args.begin());
6849         break;
6850       }
6851 #endif
6852       // =======================================================================
6853       // === General case of unknown callable ==================================
6854       // =======================================================================
6855       case WasmImportCallKind::kUseCallBuiltin: {
6856         base::SmallVector<Node*, 16> args(wasm_count + 7);
6857         int pos = 0;
6858         args[pos++] = GetBuiltinPointerTarget(Builtins::kCall_ReceiverIsAny);
6859         args[pos++] = callable_node;
6860         args[pos++] = mcgraph()->Int32Constant(wasm_count);  // argument count
6861         args[pos++] = undefined_node;                        // receiver
6862 
6863         auto call_descriptor = Linkage::GetStubCallDescriptor(
6864             graph()->zone(), CallTrampolineDescriptor{}, wasm_count + 1,
6865             CallDescriptor::kNoFlags, Operator::kNoProperties,
6866             StubCallMode::kCallBuiltinPointer);
6867 
6868         // Convert wasm numbers to JS values.
6869         pos = AddArgumentNodes(VectorOf(args), pos, wasm_count, sig_);
6870 
6871         // The native_context is sufficient here, because all kinds of callables
6872         // which depend on the context provide their own context. The context
6873         // here is only needed if the target is a constructor to throw a
6874         // TypeError, if the target is a native function, or if the target is a
6875         // callable JSObject, which can only be constructed by the runtime.
6876         args[pos++] = native_context;
6877         args[pos++] = effect();
6878         args[pos++] = control();
6879 
6880         DCHECK_EQ(pos, args.size());
6881         call = graph()->NewNode(mcgraph()->common()->Call(call_descriptor), pos,
6882                                 args.begin());
6883         break;
6884       }
6885       default:
6886         UNREACHABLE();
6887     }
6888     DCHECK_NOT_NULL(call);
6889 
6890     SetEffect(call);
6891     SetSourcePosition(call, 0);
6892 
6893     // Convert the return value(s) back.
6894     if (sig_->return_count() <= 1) {
6895       Node* val = sig_->return_count() == 0
6896                       ? mcgraph()->Int32Constant(0)
6897                       : FromJS(call, native_context, sig_->GetReturn());
6898       BuildModifyThreadInWasmFlag(true);
6899       Return(val);
6900     } else {
6901       Node* fixed_array =
6902           BuildMultiReturnFixedArrayFromIterable(sig_, call, native_context);
6903       base::SmallVector<Node*, 8> wasm_values(sig_->return_count());
6904       for (unsigned i = 0; i < sig_->return_count(); ++i) {
6905         wasm_values[i] = FromJS(LOAD_FIXED_ARRAY_SLOT_ANY(fixed_array, i),
6906                                 native_context, sig_->GetReturn(i));
6907       }
6908       BuildModifyThreadInWasmFlag(true);
6909       Return(VectorOf(wasm_values));
6910     }
6911 
6912     if (ContainsInt64(sig_)) LowerInt64(kCalledFromWasm);
6913     return true;
6914   }
6915 
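  // Builds the graph for a wrapper that calls a C-API (Wasm C API) host
  // function: spills the wasm arguments to a stack slot, calls the C function
  // at {address}, rethrows if the call signals an exception, and reads the
  // results back from the stack slot.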
6916   void BuildCapiCallWrapper(Address address) {
6917     // Store arguments on our stack, then align the stack for calling to C.
6918     int param_bytes = 0;
6919     for (wasm::ValueType type : sig_->parameters()) {
6920       param_bytes += type.element_size_bytes();
6921     }
6922     int return_bytes = 0;
6923     for (wasm::ValueType type : sig_->returns()) {
6924       return_bytes += type.element_size_bytes();
6925     }
6926 
6927     int stack_slot_bytes = std::max(param_bytes, return_bytes);
6928     Node* values = stack_slot_bytes == 0
6929                        ? mcgraph()->IntPtrConstant(0)
6930                        : graph()->NewNode(mcgraph()->machine()->StackSlot(
6931                              stack_slot_bytes, kDoubleAlignment));
6932 
6933     int offset = 0;
6934     int param_count = static_cast<int>(sig_->parameter_count());
6935     for (int i = 0; i < param_count; ++i) {
6936       wasm::ValueType type = sig_->GetParam(i);
6937       // Start from the parameter with index 1 to drop the instance_node.
6938       // TODO(jkummerow): When a value is a reference type, we should pass it
6939       // in a GC-safe way, not just as a raw pointer.
6940       SetEffect(graph()->NewNode(GetSafeStoreOperator(offset, type), values,
6941                                  Int32Constant(offset), Param(i + 1), effect(),
6942                                  control()));
6943       offset += type.element_size_bytes();
6944     }
6945     // The function is passed as the last parameter, after Wasm arguments.
6946     Node* function_node = Param(param_count + 1);
6947     Node* shared = gasm_->Load(
6948         MachineType::AnyTagged(), function_node,
6949         wasm::ObjectAccess::SharedFunctionInfoOffsetInTaggedJSFunction());
6950     Node* sfi_data =
6951         gasm_->Load(MachineType::AnyTagged(), shared,
6952                     SharedFunctionInfo::kFunctionDataOffset - kHeapObjectTag);
6953     Node* host_data_foreign =
6954         gasm_->Load(MachineType::AnyTagged(), sfi_data,
6955                     WasmCapiFunctionData::kEmbedderDataOffset - kHeapObjectTag);
6956 
6957     BuildModifyThreadInWasmFlag(false);
6958     Node* isolate_root = BuildLoadIsolateRoot();
6959     Node* fp_value = graph()->NewNode(mcgraph()->machine()->LoadFramePointer());
6960     STORE_RAW(isolate_root, Isolate::c_entry_fp_offset(), fp_value,
6961               MachineType::PointerRepresentation(), kNoWriteBarrier);
6962 
6963     // TODO(jkummerow): Load the address from the {host_data}, and cache
6964     // wrappers per signature.
6965     const ExternalReference ref = ExternalReference::Create(address);
6966     Node* function =
6967         graph()->NewNode(mcgraph()->common()->ExternalConstant(ref));
6968 
6969     // Parameters: Address host_data_foreign, Address arguments.
6970     MachineType host_sig_types[] = {
6971         MachineType::Pointer(), MachineType::Pointer(), MachineType::Pointer()};
6972     MachineSignature host_sig(1, 2, host_sig_types);
6973     Node* return_value =
6974         BuildCCall(&host_sig, function, host_data_foreign, values);
6975 
6976     BuildModifyThreadInWasmFlag(true);
6977 
6978     Node* exception_branch = graph()->NewNode(
6979         mcgraph()->common()->Branch(BranchHint::kTrue),
6980         graph()->NewNode(mcgraph()->machine()->WordEqual(), return_value,
6981                          mcgraph()->IntPtrConstant(0)),
6982         control());
6983     SetControl(
6984         graph()->NewNode(mcgraph()->common()->IfFalse(), exception_branch));
6985     WasmThrowDescriptor interface_descriptor;
6986     auto call_descriptor = Linkage::GetStubCallDescriptor(
6987         mcgraph()->zone(), interface_descriptor,
6988         interface_descriptor.GetStackParameterCount(), CallDescriptor::kNoFlags,
6989         Operator::kNoProperties, StubCallMode::kCallWasmRuntimeStub);
6990     Node* call_target = mcgraph()->RelocatableIntPtrConstant(
6991         wasm::WasmCode::kWasmRethrow, RelocInfo::WASM_STUB_CALL);
6992     Node* throw_effect =
6993         graph()->NewNode(mcgraph()->common()->Call(call_descriptor),
6994                          call_target, return_value, effect(), control());
6995     TerminateThrow(throw_effect, control());
6996 
6997     SetControl(
6998         graph()->NewNode(mcgraph()->common()->IfTrue(), exception_branch));
6999     DCHECK_LT(sig_->return_count(), wasm::kV8MaxWasmFunctionMultiReturns);
7000     size_t return_count = sig_->return_count();
7001     if (return_count == 0) {
7002       Return(Int32Constant(0));
7003     } else {
7004       base::SmallVector<Node*, 8> returns(return_count);
7005       offset = 0;
7006       for (size_t i = 0; i < return_count; ++i) {
7007         wasm::ValueType type = sig_->GetReturn(i);
7008         Node* val = SetEffect(
7009             graph()->NewNode(GetSafeLoadOperator(offset, type), values,
7010                              Int32Constant(offset), effect(), control()));
7011         returns[i] = val;
7012         offset += type.element_size_bytes();
7013       }
7014       Return(VectorOf(returns));
7015     }
7016 
7017     if (ContainsInt64(sig_)) LowerInt64(kCalledFromWasm);
7018   }
7019 
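  // Builds the graph for a JS-to-JS wrapper around a WasmJSFunction: arguments
  // and results are round-tripped through the wasm type conversions so that
  // the declared wasm signature is enforced on the underlying JS callable.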
7020   void BuildJSToJSWrapper(Isolate* isolate) {
7021     int wasm_count = static_cast<int>(sig_->parameter_count());
7022 
7023     // Build the start and the parameter nodes.
7024     int param_count = 1 /* closure */ + 1 /* receiver */ + wasm_count +
7025                       1 /* new.target */ + 1 /* #arg */ + 1 /* context */;
7026     SetEffectControl(Start(param_count));
7027     Node* closure = Param(Linkage::kJSCallClosureParamIndex);
7028     Node* context = Param(Linkage::GetJSCallContextParamIndex(wasm_count + 1));
7029 
7030     // Since JS-to-JS wrappers are specific to one Isolate, it is OK to embed
7031     // values (for undefined and root) directly into the instruction stream.
7032     isolate_root_node_ = mcgraph()->IntPtrConstant(isolate->isolate_root());
7033     undefined_value_node_ = graph()->NewNode(mcgraph()->common()->HeapConstant(
7034         isolate->factory()->undefined_value()));
7035 
7036     // Throw a TypeError if the signature is incompatible with JavaScript.
7037     if (!wasm::IsJSCompatibleSignature(sig_, module_, enabled_features_)) {
7038       BuildCallToRuntimeWithContext(Runtime::kWasmThrowJSTypeError, context,
7039                                     nullptr, 0);
7040       TerminateThrow(effect(), control());
7041       return;
7042     }
7043 
7044     // Load the original callable from the closure.
7045     Node* shared = LOAD_TAGGED_ANY(
7046         closure,
7047         wasm::ObjectAccess::ToTagged(JSFunction::kSharedFunctionInfoOffset));
7048     Node* func_data = LOAD_TAGGED_ANY(
7049         shared,
7050         wasm::ObjectAccess::ToTagged(SharedFunctionInfo::kFunctionDataOffset));
7051     Node* callable = LOAD_TAGGED_ANY(
7052         func_data,
7053         wasm::ObjectAccess::ToTagged(WasmJSFunctionData::kCallableOffset));
7054 
7055     // Call the underlying closure.
7056     base::SmallVector<Node*, 16> args(wasm_count + 7);
7057     int pos = 0;
7058     args[pos++] = GetBuiltinPointerTarget(Builtins::kCall_ReceiverIsAny);
7059     args[pos++] = callable;
7060     args[pos++] = mcgraph()->Int32Constant(wasm_count);   // argument count
7061     args[pos++] = BuildLoadUndefinedValueFromInstance();  // receiver
7062 
7063     auto call_descriptor = Linkage::GetStubCallDescriptor(
7064         graph()->zone(), CallTrampolineDescriptor{}, wasm_count + 1,
7065         CallDescriptor::kNoFlags, Operator::kNoProperties,
7066         StubCallMode::kCallBuiltinPointer);
7067 
7068     // Convert parameter JS values to wasm numbers and back to JS values;
         // this round-trip applies the conversions required by the wasm signature.
7069     for (int i = 0; i < wasm_count; ++i) {
7070       Node* param = Param(i + 1);  // Start from index 1 to skip receiver.
7071       args[pos++] =
7072           ToJS(FromJS(param, context, sig_->GetParam(i)), sig_->GetParam(i));
7073     }
7074 
7075     args[pos++] = context;
7076     args[pos++] = effect();
7077     args[pos++] = control();
7078 
7079     DCHECK_EQ(pos, args.size());
7080     Node* call = SetEffect(graph()->NewNode(
7081         mcgraph()->common()->Call(call_descriptor), pos, args.begin()));
7082 
7083     // Convert return JS values to wasm numbers and back to JS values.
7084     Node* jsval;
7085     if (sig_->return_count() == 0) {
7086       jsval = BuildLoadUndefinedValueFromInstance();
7087     } else if (sig_->return_count() == 1) {
7088       jsval = ToJS(FromJS(call, context, sig_->GetReturn()), sig_->GetReturn());
7089     } else {
7090       Node* fixed_array =
7091           BuildMultiReturnFixedArrayFromIterable(sig_, call, context);
7092       int32_t return_count = static_cast<int32_t>(sig_->return_count());
7093       Node* size =
7094           graph()->NewNode(mcgraph()->common()->NumberConstant(return_count));
7095       jsval = BuildCallAllocateJSArray(size, context);
7096       Node* result_fixed_array = BuildLoadArrayBackingStorage(jsval);
7097       for (unsigned i = 0; i < sig_->return_count(); ++i) {
7098         const auto& type = sig_->GetReturn(i);
7099         Node* elem = LOAD_FIXED_ARRAY_SLOT_ANY(fixed_array, i);
7100         Node* cast = ToJS(FromJS(elem, context, type), type);
7101         STORE_FIXED_ARRAY_SLOT_ANY(result_fixed_array, i, cast);
7102       }
7103     }
7104     Return(jsval);
7105   }
7106 
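  // Builds the body of the C-wasm entry stub. It is entered from C++ with a
  // packed argument buffer: the caller's c_entry_fp is spilled into the frame,
  // the wasm arguments are loaded from the buffer, the wasm code is called,
  // and the results are written back into the same buffer. The stub returns 0
  // on success, or the thrown exception object if the call threw.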
7107   void BuildCWasmEntry() {
7108     // +1 offset for first parameter index being -1.
7109     SetEffectControl(Start(CWasmEntryParameters::kNumParameters + 1));
7110 
7111     Node* code_entry = Param(CWasmEntryParameters::kCodeEntry);
7112     Node* object_ref = Param(CWasmEntryParameters::kObjectRef);
7113     Node* arg_buffer = Param(CWasmEntryParameters::kArgumentsBuffer);
7114     Node* c_entry_fp = Param(CWasmEntryParameters::kCEntryFp);
7115 
7116     Node* fp_value = graph()->NewNode(mcgraph()->machine()->LoadFramePointer());
7117     STORE_RAW(fp_value, TypedFrameConstants::kFirstPushedFrameValueOffset,
7118               c_entry_fp, MachineType::PointerRepresentation(),
7119               kNoWriteBarrier);
7120 
7121     int wasm_arg_count = static_cast<int>(sig_->parameter_count());
7122     base::SmallVector<Node*, 16> args(wasm_arg_count + 4);
7123 
7124     int pos = 0;
7125     args[pos++] = code_entry;
7126     args[pos++] = object_ref;
7127 
7128     int offset = 0;
7129     for (wasm::ValueType type : sig_->parameters()) {
7130       Node* arg_load = SetEffect(
7131           graph()->NewNode(GetSafeLoadOperator(offset, type), arg_buffer,
7132                            Int32Constant(offset), effect(), control()));
7133       args[pos++] = arg_load;
7134       offset += type.element_size_bytes();
7135     }
7136 
7137     args[pos++] = effect();
7138     args[pos++] = control();
7139 
7140     // Call the wasm code.
7141     auto call_descriptor = GetWasmCallDescriptor(mcgraph()->zone(), sig_);
7142 
7143     DCHECK_EQ(pos, args.size());
7144     Node* call = SetEffect(graph()->NewNode(
7145         mcgraph()->common()->Call(call_descriptor), pos, args.begin()));
7146 
7147     Node* if_success = graph()->NewNode(mcgraph()->common()->IfSuccess(), call);
7148     Node* if_exception =
7149         graph()->NewNode(mcgraph()->common()->IfException(), call, call);
7150 
7151     // Handle exception: return it.
7152     SetControl(if_exception);
7153     Return(if_exception);
7154 
7155     // Handle success: store the return value(s).
7156     SetControl(if_success);
7157     pos = 0;
7158     offset = 0;
7159     for (wasm::ValueType type : sig_->returns()) {
7160       Node* value = sig_->return_count() == 1
7161                         ? call
7162                         : graph()->NewNode(mcgraph()->common()->Projection(pos),
7163                                            call, control());
7164       SetEffect(graph()->NewNode(GetSafeStoreOperator(offset, type), arg_buffer,
7165                                  Int32Constant(offset), value, effect(),
7166                                  control()));
7167       offset += type.element_size_bytes();
7168       pos++;
7169     }
7170 
7171     Return(mcgraph()->IntPtrConstant(0));
7172 
7173     if (mcgraph()->machine()->Is32() && ContainsInt64(sig_)) {
7174       // No special lowering should be requested in the C entry.
7175       DCHECK_NULL(lowering_special_case_);
7176 
7177       MachineRepresentation sig_reps[] = {
7178           MachineType::PointerRepresentation(),  // return value
7179           MachineType::PointerRepresentation(),  // target
7180           MachineRepresentation::kTagged,        // object_ref
7181           MachineType::PointerRepresentation(),  // argv
7182           MachineType::PointerRepresentation()   // c_entry_fp
7183       };
7184       Signature<MachineRepresentation> c_entry_sig(1, 4, sig_reps);
7185       Int64Lowering r(mcgraph()->graph(), mcgraph()->machine(),
7186                       mcgraph()->common(), mcgraph()->zone(), &c_entry_sig);
7187       r.LowerGraph();
7188     }
7189   }
7190 
7191  private:
7192   const wasm::WasmModule* module_;
7193   StubCallMode stub_mode_;
7194   SetOncePointer<Node> undefined_value_node_;
7195   SetOncePointer<const Operator> int32_to_heapnumber_operator_;
7196   SetOncePointer<const Operator> tagged_non_smi_to_int32_operator_;
7197   SetOncePointer<const Operator> float32_to_number_operator_;
7198   SetOncePointer<const Operator> float64_to_number_operator_;
7199   SetOncePointer<const Operator> tagged_to_float64_operator_;
7200   wasm::WasmFeatures enabled_features_;
7201   CallDescriptor* bigint_to_i64_descriptor_ = nullptr;
7202   CallDescriptor* i64_to_bigint_descriptor_ = nullptr;
7203 };
7204 
7205 }  // namespace
7206 
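// Creates (but does not run) the compilation job for a JS-to-wasm wrapper:
// the wrapper graph is built eagerly below, while actual code generation only
// happens once the returned OptimizedCompilationJob is executed and finalized.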
7207 std::unique_ptr<OptimizedCompilationJob> NewJSToWasmCompilationJob(
7208     Isolate* isolate, wasm::WasmEngine* wasm_engine,
7209     const wasm::FunctionSig* sig, const wasm::WasmModule* module,
7210     bool is_import, const wasm::WasmFeatures& enabled_features) {
7211   //----------------------------------------------------------------------------
7212   // Create the Graph.
7213   //----------------------------------------------------------------------------
7214   std::unique_ptr<Zone> zone = std::make_unique<Zone>(
7215       wasm_engine->allocator(), ZONE_NAME, kCompressGraphZone);
7216   Graph* graph = zone->New<Graph>(zone.get());
7217   CommonOperatorBuilder* common = zone->New<CommonOperatorBuilder>(zone.get());
7218   MachineOperatorBuilder* machine = zone->New<MachineOperatorBuilder>(
7219       zone.get(), MachineType::PointerRepresentation(),
7220       InstructionSelector::SupportedMachineOperatorFlags(),
7221       InstructionSelector::AlignmentRequirements());
7222   MachineGraph* mcgraph = zone->New<MachineGraph>(graph, common, machine);
7223 
7224   WasmWrapperGraphBuilder builder(zone.get(), mcgraph, sig, module, nullptr,
7225                                   StubCallMode::kCallBuiltinPointer,
7226                                   enabled_features);
7227   builder.BuildJSToWasmWrapper(is_import);
7228 
7229   //----------------------------------------------------------------------------
7230   // Create the compilation job.
7231   //----------------------------------------------------------------------------
7232   constexpr size_t kMaxNameLen = 128;
7233   constexpr size_t kNamePrefixLen = 11;
7234   auto name_buffer = std::unique_ptr<char[]>(new char[kMaxNameLen]);
7235   memcpy(name_buffer.get(), "js-to-wasm:", kNamePrefixLen);
7236   PrintSignature(VectorOf(name_buffer.get(), kMaxNameLen) + kNamePrefixLen,
7237                  sig);
7238 
7239   int params = static_cast<int>(sig->parameter_count());
7240   CallDescriptor* incoming = Linkage::GetJSCallDescriptor(
7241       zone.get(), false, params + 1, CallDescriptor::kNoFlags);
7242 
7243   return Pipeline::NewWasmHeapStubCompilationJob(
7244       isolate, wasm_engine, incoming, std::move(zone), graph,
7245       CodeKind::JS_TO_WASM_FUNCTION, std::move(name_buffer),
7246       WasmAssemblerOptions());
7247 }
7248 
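// Classifies an imported callable. Wasm-exported functions yield kWasmToWasm,
// unless they are themselves imports, in which case the shortcut is resolved
// and classification continues on the underlying callable. WasmJSFunctions
// are unwrapped likewise, C-API functions map to kWasmToCapi, plain JS
// functions are matched against math intrinsics and checked for an arity
// match, and anything else falls back to the generic Call builtin.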
7249 std::pair<WasmImportCallKind, Handle<JSReceiver>> ResolveWasmImportCall(
7250     Handle<JSReceiver> callable, const wasm::FunctionSig* expected_sig,
7251     const wasm::WasmModule* module,
7252     const wasm::WasmFeatures& enabled_features) {
7253   if (WasmExportedFunction::IsWasmExportedFunction(*callable)) {
7254     auto imported_function = Handle<WasmExportedFunction>::cast(callable);
7255     if (!imported_function->MatchesSignature(module, expected_sig)) {
7256       return std::make_pair(WasmImportCallKind::kLinkError, callable);
7257     }
7258     uint32_t func_index =
7259         static_cast<uint32_t>(imported_function->function_index());
7260     if (func_index >=
7261         imported_function->instance().module()->num_imported_functions) {
7262       return std::make_pair(WasmImportCallKind::kWasmToWasm, callable);
7263     }
7264     Isolate* isolate = callable->GetIsolate();
7265     // Resolve the shortcut to the underlying callable and continue.
7266     Handle<WasmInstanceObject> instance(imported_function->instance(), isolate);
7267     ImportedFunctionEntry entry(instance, func_index);
7268     callable = handle(entry.callable(), isolate);
7269   }
7270   if (WasmJSFunction::IsWasmJSFunction(*callable)) {
7271     auto js_function = Handle<WasmJSFunction>::cast(callable);
7272     if (!js_function->MatchesSignature(expected_sig)) {
7273       return std::make_pair(WasmImportCallKind::kLinkError, callable);
7274     }
7275     Isolate* isolate = callable->GetIsolate();
7276     // Resolve the short-cut to the underlying callable and continue.
7277     callable = handle(js_function->GetCallable(), isolate);
7278   }
7279   if (WasmCapiFunction::IsWasmCapiFunction(*callable)) {
7280     auto capi_function = Handle<WasmCapiFunction>::cast(callable);
7281     if (!capi_function->MatchesSignature(expected_sig)) {
7282       return std::make_pair(WasmImportCallKind::kLinkError, callable);
7283     }
7284     return std::make_pair(WasmImportCallKind::kWasmToCapi, callable);
7285   }
7286   // Assuming we are calling into JS, check whether this would be a runtime error.
7287   if (!wasm::IsJSCompatibleSignature(expected_sig, module, enabled_features)) {
7288     return std::make_pair(WasmImportCallKind::kRuntimeTypeError, callable);
7289   }
7290   // For JavaScript calls, determine whether the target has an arity match.
7291   if (callable->IsJSFunction()) {
7292     Handle<JSFunction> function = Handle<JSFunction>::cast(callable);
7293     Handle<SharedFunctionInfo> shared(function->shared(),
7294                                       function->GetIsolate());
7295 
7296 // Check for math intrinsics.
7297 #define COMPARE_SIG_FOR_BUILTIN(name)                                     \
7298   {                                                                       \
7299     const wasm::FunctionSig* sig =                                        \
7300         wasm::WasmOpcodes::Signature(wasm::kExpr##name);                  \
7301     if (!sig) sig = wasm::WasmOpcodes::AsmjsSignature(wasm::kExpr##name); \
7302     DCHECK_NOT_NULL(sig);                                                 \
7303     if (*expected_sig == *sig) {                                          \
7304       return std::make_pair(WasmImportCallKind::k##name, callable);       \
7305     }                                                                     \
7306   }
7307 #define COMPARE_SIG_FOR_BUILTIN_F64(name) \
7308   case Builtins::kMath##name:             \
7309     COMPARE_SIG_FOR_BUILTIN(F64##name);   \
7310     break;
7311 #define COMPARE_SIG_FOR_BUILTIN_F32_F64(name) \
7312   case Builtins::kMath##name:                 \
7313     COMPARE_SIG_FOR_BUILTIN(F64##name);       \
7314     COMPARE_SIG_FOR_BUILTIN(F32##name);       \
7315     break;
7316 
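    // For illustration, COMPARE_SIG_FOR_BUILTIN_F64(Acos) expands roughly to:
    //   case Builtins::kMathAcos:
    //     if (*expected_sig == *wasm::WasmOpcodes::Signature(wasm::kExprF64Acos))
    //       return std::make_pair(WasmImportCallKind::kF64Acos, callable);
    //     break;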
7317     if (FLAG_wasm_math_intrinsics && shared->HasBuiltinId()) {
7318       switch (shared->builtin_id()) {
7319         COMPARE_SIG_FOR_BUILTIN_F64(Acos);
7320         COMPARE_SIG_FOR_BUILTIN_F64(Asin);
7321         COMPARE_SIG_FOR_BUILTIN_F64(Atan);
7322         COMPARE_SIG_FOR_BUILTIN_F64(Cos);
7323         COMPARE_SIG_FOR_BUILTIN_F64(Sin);
7324         COMPARE_SIG_FOR_BUILTIN_F64(Tan);
7325         COMPARE_SIG_FOR_BUILTIN_F64(Exp);
7326         COMPARE_SIG_FOR_BUILTIN_F64(Log);
7327         COMPARE_SIG_FOR_BUILTIN_F64(Atan2);
7328         COMPARE_SIG_FOR_BUILTIN_F64(Pow);
7329         COMPARE_SIG_FOR_BUILTIN_F32_F64(Min);
7330         COMPARE_SIG_FOR_BUILTIN_F32_F64(Max);
7331         COMPARE_SIG_FOR_BUILTIN_F32_F64(Abs);
7332         COMPARE_SIG_FOR_BUILTIN_F32_F64(Ceil);
7333         COMPARE_SIG_FOR_BUILTIN_F32_F64(Floor);
7334         COMPARE_SIG_FOR_BUILTIN_F32_F64(Sqrt);
7335         case Builtins::kMathFround:
7336           COMPARE_SIG_FOR_BUILTIN(F32ConvertF64);
7337           break;
7338         default:
7339           break;
7340       }
7341     }
7342 
7343 #undef COMPARE_SIG_FOR_BUILTIN
7344 #undef COMPARE_SIG_FOR_BUILTIN_F64
7345 #undef COMPARE_SIG_FOR_BUILTIN_F32_F64
7346 
7347     if (IsClassConstructor(shared->kind())) {
7348       // Class constructor will throw anyway.
7349       return std::make_pair(WasmImportCallKind::kUseCallBuiltin, callable);
7350     }
7351 
7352     if (shared->internal_formal_parameter_count() ==
7353         expected_sig->parameter_count()) {
7354       return std::make_pair(WasmImportCallKind::kJSFunctionArityMatch,
7355                             callable);
7356     }
7357 
7358     // If the function isn't compiled yet, compile it now.
7359     IsCompiledScope is_compiled_scope(
7360         shared->is_compiled_scope(callable->GetIsolate()));
7361     if (!is_compiled_scope.is_compiled()) {
7362       Compiler::Compile(function, Compiler::CLEAR_EXCEPTION,
7363                         &is_compiled_scope);
7364     }
7365 
7366     return std::make_pair(WasmImportCallKind::kJSFunctionArityMismatch,
7367                           callable);
7368   }
7369   // Unknown case. Use the call builtin.
7370   return std::make_pair(WasmImportCallKind::kUseCallBuiltin, callable);
7371 }
7372 
7373 namespace {
7374 
7375 wasm::WasmOpcode GetMathIntrinsicOpcode(WasmImportCallKind kind,
7376                                         const char** name_ptr) {
7377 #define CASE(name)                          \
7378   case WasmImportCallKind::k##name:         \
7379     *name_ptr = "WasmMathIntrinsic:" #name; \
7380     return wasm::kExpr##name
7381   switch (kind) {
7382     CASE(F64Acos);
7383     CASE(F64Asin);
7384     CASE(F64Atan);
7385     CASE(F64Cos);
7386     CASE(F64Sin);
7387     CASE(F64Tan);
7388     CASE(F64Exp);
7389     CASE(F64Log);
7390     CASE(F64Atan2);
7391     CASE(F64Pow);
7392     CASE(F64Ceil);
7393     CASE(F64Floor);
7394     CASE(F64Sqrt);
7395     CASE(F64Min);
7396     CASE(F64Max);
7397     CASE(F64Abs);
7398     CASE(F32Min);
7399     CASE(F32Max);
7400     CASE(F32Abs);
7401     CASE(F32Ceil);
7402     CASE(F32Floor);
7403     CASE(F32Sqrt);
7404     CASE(F32ConvertF64);
7405     default:
7406       UNREACHABLE();
7407       return wasm::kExprUnreachable;
7408   }
7409 #undef CASE
7410 }
7411 
7412 wasm::WasmCompilationResult CompileWasmMathIntrinsic(
7413     wasm::WasmEngine* wasm_engine, WasmImportCallKind kind,
7414     const wasm::FunctionSig* sig) {
7415   DCHECK_EQ(1, sig->return_count());
7416 
7417   TRACE_EVENT0(TRACE_DISABLED_BY_DEFAULT("v8.wasm.detailed"),
7418                "wasm.CompileWasmMathIntrinsic");
7419 
7420   Zone zone(wasm_engine->allocator(), ZONE_NAME, kCompressGraphZone);
7421 
7422   // Compile a Wasm function with a single bytecode and let TurboFan
7423   // generate either inlined machine code or a call to a helper.
7424   SourcePositionTable* source_positions = nullptr;
7425   MachineGraph* mcgraph = zone.New<MachineGraph>(
7426       zone.New<Graph>(&zone), zone.New<CommonOperatorBuilder>(&zone),
7427       zone.New<MachineOperatorBuilder>(
7428           &zone, MachineType::PointerRepresentation(),
7429           InstructionSelector::SupportedMachineOperatorFlags(),
7430           InstructionSelector::AlignmentRequirements()));
7431 
7432   wasm::CompilationEnv env(
7433       nullptr, wasm::UseTrapHandler::kNoTrapHandler,
7434       wasm::RuntimeExceptionSupport::kNoRuntimeExceptionSupport,
7435       wasm::WasmFeatures::All(), wasm::LowerSimd::kNoLowerSimd);
7436 
7437   WasmGraphBuilder builder(&env, mcgraph->zone(), mcgraph, sig,
7438                            source_positions);
7439 
7440   // Set up the graph start.
7441   Node* start = builder.Start(static_cast<int>(sig->parameter_count() + 1 + 1));
7442   builder.SetEffectControl(start);
7443   builder.set_instance_node(builder.Param(wasm::kWasmInstanceParameterIndex));
7444 
7445   // Generate either a unop or a binop.
7446   Node* node = nullptr;
7447   const char* debug_name = "WasmMathIntrinsic";
7448   auto opcode = GetMathIntrinsicOpcode(kind, &debug_name);
7449   switch (sig->parameter_count()) {
7450     case 1:
7451       node = builder.Unop(opcode, builder.Param(1));
7452       break;
7453     case 2:
7454       node = builder.Binop(opcode, builder.Param(1), builder.Param(2));
7455       break;
7456     default:
7457       UNREACHABLE();
7458   }
7459 
7460   builder.Return(node);
7461 
7462   // Run the compiler pipeline to generate machine code.
7463   auto call_descriptor = GetWasmCallDescriptor(&zone, sig);
7464   if (mcgraph->machine()->Is32()) {
7465     call_descriptor = GetI32WasmCallDescriptor(&zone, call_descriptor);
7466   }
7467 
7468   wasm::WasmCompilationResult result = Pipeline::GenerateCodeForWasmNativeStub(
7469       wasm_engine, call_descriptor, mcgraph, CodeKind::WASM_FUNCTION,
7470       wasm::WasmCode::kFunction, debug_name, WasmStubAssemblerOptions(),
7471       source_positions);
7472   return result;
7473 }
7474 
7475 }  // namespace
7476 
7477 wasm::WasmCompilationResult CompileWasmImportCallWrapper(
7478     wasm::WasmEngine* wasm_engine, wasm::CompilationEnv* env,
7479     WasmImportCallKind kind, const wasm::FunctionSig* sig,
7480     bool source_positions, int expected_arity) {
7481   DCHECK_NE(WasmImportCallKind::kLinkError, kind);
7482   DCHECK_NE(WasmImportCallKind::kWasmToWasm, kind);
7483 
7484   // Check for math intrinsics first.
7485   if (FLAG_wasm_math_intrinsics &&
7486       kind >= WasmImportCallKind::kFirstMathIntrinsic &&
7487       kind <= WasmImportCallKind::kLastMathIntrinsic) {
7488     return CompileWasmMathIntrinsic(wasm_engine, kind, sig);
7489   }
7490 
7491   TRACE_EVENT0(TRACE_DISABLED_BY_DEFAULT("v8.wasm.detailed"),
7492                "wasm.CompileWasmImportCallWrapper");
7493   //----------------------------------------------------------------------------
7494   // Create the Graph
7495   //----------------------------------------------------------------------------
7496   Zone zone(wasm_engine->allocator(), ZONE_NAME, kCompressGraphZone);
7497   Graph* graph = zone.New<Graph>(&zone);
7498   CommonOperatorBuilder* common = zone.New<CommonOperatorBuilder>(&zone);
7499   MachineOperatorBuilder* machine = zone.New<MachineOperatorBuilder>(
7500       &zone, MachineType::PointerRepresentation(),
7501       InstructionSelector::SupportedMachineOperatorFlags(),
7502       InstructionSelector::AlignmentRequirements());
7503   MachineGraph* mcgraph = zone.New<MachineGraph>(graph, common, machine);
7504 
7505   SourcePositionTable* source_position_table =
7506       source_positions ? zone.New<SourcePositionTable>(graph) : nullptr;
7507 
7508   WasmWrapperGraphBuilder builder(
7509       &zone, mcgraph, sig, env->module, source_position_table,
7510       StubCallMode::kCallWasmRuntimeStub, env->enabled_features);
7511   builder.BuildWasmToJSWrapper(kind, expected_arity);
7512 
7513   // Build a name in the form "wasm-to-js-<kind>-<signature>".
7514   constexpr size_t kMaxNameLen = 128;
7515   char func_name[kMaxNameLen];
7516   int name_prefix_len = SNPrintF(VectorOf(func_name, kMaxNameLen),
7517                                  "wasm-to-js-%d-", static_cast<int>(kind));
7518   PrintSignature(VectorOf(func_name, kMaxNameLen) + name_prefix_len, sig, '-');
7519 
7520   // Schedule and compile to machine code.
7521   CallDescriptor* incoming =
7522       GetWasmCallDescriptor(&zone, sig, WasmGraphBuilder::kNoRetpoline,
7523                             WasmCallKind::kWasmImportWrapper);
7524   if (machine->Is32()) {
7525     incoming = GetI32WasmCallDescriptor(&zone, incoming);
7526   }
7527   wasm::WasmCompilationResult result = Pipeline::GenerateCodeForWasmNativeStub(
7528       wasm_engine, incoming, mcgraph, CodeKind::WASM_TO_JS_FUNCTION,
7529       wasm::WasmCode::kWasmToJsWrapper, func_name, WasmStubAssemblerOptions(),
7530       source_position_table);
7531   result.kind = wasm::WasmCompilationResult::kWasmToJsWrapper;
7532   return result;
7533 }
7534 
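// Compiles a wrapper that lets wasm call a C-API (host) function at
// {address}; the resulting code is added to {native_module} as an anonymous
// wasm-to-capi wrapper and published.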
7535 wasm::WasmCode* CompileWasmCapiCallWrapper(wasm::WasmEngine* wasm_engine,
7536                                            wasm::NativeModule* native_module,
7537                                            const wasm::FunctionSig* sig,
7538                                            Address address) {
7539   TRACE_EVENT0(TRACE_DISABLED_BY_DEFAULT("v8.wasm.detailed"),
7540                "wasm.CompileWasmCapiFunction");
7541 
7542   Zone zone(wasm_engine->allocator(), ZONE_NAME, kCompressGraphZone);
7543 
7544   // TODO(jkummerow): Extract common code into helper method.
7545   SourcePositionTable* source_positions = nullptr;
7546   MachineGraph* mcgraph = zone.New<MachineGraph>(
7547       zone.New<Graph>(&zone), zone.New<CommonOperatorBuilder>(&zone),
7548       zone.New<MachineOperatorBuilder>(
7549           &zone, MachineType::PointerRepresentation(),
7550           InstructionSelector::SupportedMachineOperatorFlags(),
7551           InstructionSelector::AlignmentRequirements()));
7552 
7553   WasmWrapperGraphBuilder builder(
7554       &zone, mcgraph, sig, native_module->module(), source_positions,
7555       StubCallMode::kCallWasmRuntimeStub, native_module->enabled_features());
7556 
7557   // Set up the graph start.
7558   int param_count = static_cast<int>(sig->parameter_count()) +
7559                     1 /* offset for first parameter index being -1 */ +
7560                     1 /* Wasm instance */ + 1 /* kExtraCallableParam */;
7561   Node* start = builder.Start(param_count);
7562   builder.SetEffectControl(start);
7563   builder.set_instance_node(builder.Param(wasm::kWasmInstanceParameterIndex));
7564   builder.BuildCapiCallWrapper(address);
7565 
7566   // Run the compiler pipeline to generate machine code.
7567   CallDescriptor* call_descriptor =
7568       GetWasmCallDescriptor(&zone, sig, WasmGraphBuilder::kNoRetpoline,
7569                             WasmCallKind::kWasmCapiFunction);
7570   if (mcgraph->machine()->Is32()) {
7571     call_descriptor = GetI32WasmCallDescriptor(&zone, call_descriptor);
7572   }
7573 
7574   const char* debug_name = "WasmCapiCall";
7575   wasm::WasmCompilationResult result = Pipeline::GenerateCodeForWasmNativeStub(
7576       wasm_engine, call_descriptor, mcgraph, CodeKind::WASM_TO_CAPI_FUNCTION,
7577       wasm::WasmCode::kWasmToCapiWrapper, debug_name,
7578       WasmStubAssemblerOptions(), source_positions);
7579   std::unique_ptr<wasm::WasmCode> wasm_code = native_module->AddCode(
7580       wasm::kAnonymousFuncIndex, result.code_desc, result.frame_slot_count,
7581       result.tagged_parameter_slots,
7582       result.protected_instructions_data.as_vector(),
7583       result.source_positions.as_vector(), wasm::WasmCode::kWasmToCapiWrapper,
7584       wasm::ExecutionTier::kNone, wasm::kNoDebugging);
7585   return native_module->PublishCode(std::move(wasm_code));
7586 }
7587 
7588 MaybeHandle<Code> CompileWasmToJSWrapper(Isolate* isolate,
7589                                          const wasm::FunctionSig* sig,
7590                                          WasmImportCallKind kind,
7591                                          int expected_arity) {
7592   std::unique_ptr<Zone> zone = std::make_unique<Zone>(
7593       isolate->allocator(), ZONE_NAME, kCompressGraphZone);
7594 
7595   // Create the Graph
7596   Graph* graph = zone->New<Graph>(zone.get());
7597   CommonOperatorBuilder* common = zone->New<CommonOperatorBuilder>(zone.get());
7598   MachineOperatorBuilder* machine = zone->New<MachineOperatorBuilder>(
7599       zone.get(), MachineType::PointerRepresentation(),
7600       InstructionSelector::SupportedMachineOperatorFlags(),
7601       InstructionSelector::AlignmentRequirements());
7602   MachineGraph* mcgraph = zone->New<MachineGraph>(graph, common, machine);
7603 
7604   WasmWrapperGraphBuilder builder(zone.get(), mcgraph, sig, nullptr, nullptr,
7605                                   StubCallMode::kCallWasmRuntimeStub,
7606                                   wasm::WasmFeatures::FromIsolate(isolate));
7607   builder.BuildWasmToJSWrapper(kind, expected_arity);
7608 
7609   // Build a name in the form "wasm-to-js-<kind>-<signature>".
7610   constexpr size_t kMaxNameLen = 128;
7611   constexpr size_t kNamePrefixLen = 11;
7612   auto name_buffer = std::unique_ptr<char[]>(new char[kMaxNameLen]);
7613   memcpy(name_buffer.get(), "wasm-to-js:", kNamePrefixLen);
7614   PrintSignature(VectorOf(name_buffer.get(), kMaxNameLen) + kNamePrefixLen,
7615                  sig);
7616 
7617   // Generate the call descriptor.
7618   CallDescriptor* incoming =
7619       GetWasmCallDescriptor(zone.get(), sig, WasmGraphBuilder::kNoRetpoline,
7620                             WasmCallKind::kWasmImportWrapper);
7621 
7622   // Run the compilation job synchronously.
7623   std::unique_ptr<OptimizedCompilationJob> job(
7624       Pipeline::NewWasmHeapStubCompilationJob(
7625           isolate, isolate->wasm_engine(), incoming, std::move(zone), graph,
7626           CodeKind::WASM_TO_JS_FUNCTION, std::move(name_buffer),
7627           AssemblerOptions::Default(isolate)));
7628 
7629   // Compile the wrapper
7630   if (job->ExecuteJob(isolate->counters()->runtime_call_stats()) ==
7631           CompilationJob::FAILED ||
7632       job->FinalizeJob(isolate) == CompilationJob::FAILED) {
7633     return Handle<Code>();
7634   }
7635   Handle<Code> code = job->compilation_info()->code();
7636   return code;
7637 }
7638 
7639 MaybeHandle<Code> CompileJSToJSWrapper(Isolate* isolate,
7640                                        const wasm::FunctionSig* sig,
7641                                        const wasm::WasmModule* module) {
7642   std::unique_ptr<Zone> zone = std::make_unique<Zone>(
7643       isolate->allocator(), ZONE_NAME, kCompressGraphZone);
7644   Graph* graph = zone->New<Graph>(zone.get());
7645   CommonOperatorBuilder* common = zone->New<CommonOperatorBuilder>(zone.get());
7646   MachineOperatorBuilder* machine = zone->New<MachineOperatorBuilder>(
7647       zone.get(), MachineType::PointerRepresentation(),
7648       InstructionSelector::SupportedMachineOperatorFlags(),
7649       InstructionSelector::AlignmentRequirements());
7650   MachineGraph* mcgraph = zone->New<MachineGraph>(graph, common, machine);
7651 
7652   WasmWrapperGraphBuilder builder(zone.get(), mcgraph, sig, module, nullptr,
7653                                   StubCallMode::kCallBuiltinPointer,
7654                                   wasm::WasmFeatures::FromIsolate(isolate));
7655   builder.BuildJSToJSWrapper(isolate);
7656 
7657   int wasm_count = static_cast<int>(sig->parameter_count());
7658   CallDescriptor* incoming = Linkage::GetJSCallDescriptor(
7659       zone.get(), false, wasm_count + 1, CallDescriptor::kNoFlags);
7660 
7661   // Build a name in the form "js-to-js:<params>:<returns>".
7662   constexpr size_t kMaxNameLen = 128;
7663   constexpr size_t kNamePrefixLen = 9;
7664   auto name_buffer = std::unique_ptr<char[]>(new char[kMaxNameLen]);
7665   memcpy(name_buffer.get(), "js-to-js:", kNamePrefixLen);
7666   PrintSignature(VectorOf(name_buffer.get(), kMaxNameLen) + kNamePrefixLen,
7667                  sig);
7668 
7669   // Run the compilation job synchronously.
7670   std::unique_ptr<OptimizedCompilationJob> job(
7671       Pipeline::NewWasmHeapStubCompilationJob(
7672           isolate, isolate->wasm_engine(), incoming, std::move(zone), graph,
7673           CodeKind::JS_TO_JS_FUNCTION, std::move(name_buffer),
7674           AssemblerOptions::Default(isolate)));
7675 
7676   if (job->ExecuteJob(isolate->counters()->runtime_call_stats()) ==
7677           CompilationJob::FAILED ||
7678       job->FinalizeJob(isolate) == CompilationJob::FAILED) {
7679     return {};
7680   }
7681   Handle<Code> code = job->compilation_info()->code();
7682 
7683   return code;
7684 }
7685 
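// Compiles the C-wasm entry stub for {sig}, which is used to call wasm code
// directly from C++ (e.g. for debug evaluation). It takes the wasm code entry,
// an object reference, a packed argument buffer and the caller's c_entry_fp;
// see BuildCWasmEntry above for the stub body.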
7686 Handle<Code> CompileCWasmEntry(Isolate* isolate, const wasm::FunctionSig* sig,
7687                                const wasm::WasmModule* module) {
7688   std::unique_ptr<Zone> zone = std::make_unique<Zone>(
7689       isolate->allocator(), ZONE_NAME, kCompressGraphZone);
7690   Graph* graph = zone->New<Graph>(zone.get());
7691   CommonOperatorBuilder* common = zone->New<CommonOperatorBuilder>(zone.get());
7692   MachineOperatorBuilder* machine = zone->New<MachineOperatorBuilder>(
7693       zone.get(), MachineType::PointerRepresentation(),
7694       InstructionSelector::SupportedMachineOperatorFlags(),
7695       InstructionSelector::AlignmentRequirements());
7696   MachineGraph* mcgraph = zone->New<MachineGraph>(graph, common, machine);
7697 
7698   WasmWrapperGraphBuilder builder(zone.get(), mcgraph, sig, module, nullptr,
7699                                   StubCallMode::kCallBuiltinPointer,
7700                                   wasm::WasmFeatures::FromIsolate(isolate));
7701   builder.BuildCWasmEntry();
7702 
7703   // Schedule and compile to machine code.
7704   MachineType sig_types[] = {MachineType::Pointer(),    // return
7705                              MachineType::Pointer(),    // target
7706                              MachineType::AnyTagged(),  // object_ref
7707                              MachineType::Pointer(),    // argv
7708                              MachineType::Pointer()};   // c_entry_fp
7709   MachineSignature incoming_sig(1, 4, sig_types);
7710   // Traps need the root register, for TailCallRuntime to call
7711   // Runtime::kThrowWasmError.
7712   CallDescriptor::Flags flags = CallDescriptor::kInitializeRootRegister;
7713   CallDescriptor* incoming =
7714       Linkage::GetSimplifiedCDescriptor(zone.get(), &incoming_sig, flags);
7715 
7716   // Build a name in the form "c-wasm-entry:<params>:<returns>".
7717   constexpr size_t kMaxNameLen = 128;
7718   constexpr size_t kNamePrefixLen = 13;
7719   auto name_buffer = std::unique_ptr<char[]>(new char[kMaxNameLen]);
7720   memcpy(name_buffer.get(), "c-wasm-entry:", kNamePrefixLen);
7721   PrintSignature(VectorOf(name_buffer.get(), kMaxNameLen) + kNamePrefixLen,
7722                  sig);
7723 
7724   // Run the compilation job synchronously.
7725   std::unique_ptr<OptimizedCompilationJob> job(
7726       Pipeline::NewWasmHeapStubCompilationJob(
7727           isolate, isolate->wasm_engine(), incoming, std::move(zone), graph,
7728           CodeKind::C_WASM_ENTRY, std::move(name_buffer),
7729           AssemblerOptions::Default(isolate)));
7730 
7731   CHECK_NE(job->ExecuteJob(isolate->counters()->runtime_call_stats(), nullptr),
7732            CompilationJob::FAILED);
7733   CHECK_NE(job->FinalizeJob(isolate), CompilationJob::FAILED);
7734 
7735   return job->compilation_info()->code();
7736 }
7737 
7738 namespace {
7739 
7740 bool BuildGraphForWasmFunction(AccountingAllocator* allocator,
7741                                wasm::CompilationEnv* env,
7742                                const wasm::FunctionBody& func_body,
7743                                int func_index, wasm::WasmFeatures* detected,
7744                                MachineGraph* mcgraph,
7745                                NodeOriginTable* node_origins,
7746                                SourcePositionTable* source_positions) {
7747   // Create a TF graph during decoding.
7748   WasmGraphBuilder builder(env, mcgraph->zone(), mcgraph, func_body.sig,
7749                            source_positions);
7750   wasm::VoidResult graph_construction_result =
7751       wasm::BuildTFGraph(allocator, env->enabled_features, env->module,
7752                          &builder, detected, func_body, node_origins);
7753   if (graph_construction_result.failed()) {
7754     if (FLAG_trace_wasm_compiler) {
7755       StdoutStream{} << "Compilation failed: "
7756                      << graph_construction_result.error().message()
7757                      << std::endl;
7758     }
7759     return false;
7760   }
7761 
7762   // Lower SIMD first: i64x2 nodes are lowered to int64 nodes, which the
7763   // subsequent int64 lowering then takes care of.
7764   auto sig = CreateMachineSignature(mcgraph->zone(), func_body.sig,
7765                                     WasmGraphBuilder::kCalledFromWasm);
7766   if (builder.has_simd() &&
7767       (!CpuFeatures::SupportsWasmSimd128() || env->lower_simd)) {
7768     SimdScalarLowering(mcgraph, sig).LowerGraph();
7769 
7770     // SimdScalarLowering replaces each v128 with four i32s, so update the
7771     // machine signature accordingly before the call to LowerInt64.
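    // For example, a wasm signature (i64, s128) -> s128 becomes a machine
    // signature with 1 + 4 = 5 parameters and 4 returns after SIMD lowering;
    // the remaining i64 is then split further by LowerInt64 below.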
7772     size_t return_count = 0;
7773     size_t param_count = 0;
7774     for (auto ret : sig->returns()) {
7775       return_count += ret == MachineRepresentation::kSimd128 ? 4 : 1;
7776     }
7777     for (auto param : sig->parameters()) {
7778       param_count += param == MachineRepresentation::kSimd128 ? 4 : 1;
7779     }
7780 
7781     Signature<MachineRepresentation>::Builder sig_builder(
7782         mcgraph->zone(), return_count, param_count);
7783     for (auto ret : sig->returns()) {
7784       if (ret == MachineRepresentation::kSimd128) {
7785         for (int i = 0; i < 4; ++i) {
7786           sig_builder.AddReturn(MachineRepresentation::kWord32);
7787         }
7788       } else {
7789         sig_builder.AddReturn(ret);
7790       }
7791     }
7792     for (auto param : sig->parameters()) {
7793       if (param == MachineRepresentation::kSimd128) {
7794         for (int i = 0; i < 4; ++i) {
7795           sig_builder.AddParam(MachineRepresentation::kWord32);
7796         }
7797       } else {
7798         sig_builder.AddParam(param);
7799       }
7800     }
7801     sig = sig_builder.Build();
7802   }
7803 
7804   builder.LowerInt64(sig);
7805 
7806   if (func_index >= FLAG_trace_wasm_ast_start &&
7807       func_index < FLAG_trace_wasm_ast_end) {
7808     PrintRawWasmCode(allocator, func_body, env->module, wasm::kPrintLocals);
7809   }
7810   return true;
7811 }
7812 
7813 Vector<const char> GetDebugName(Zone* zone, int index) {
7814   // TODO(herhut): Use name from module if available.
7815   constexpr int kBufferLength = 24;
7816 
7817   EmbeddedVector<char, kBufferLength> name_vector;
7818   int name_len = SNPrintF(name_vector, "wasm-function#%d", index);
7819   DCHECK(name_len > 0 && name_len < name_vector.length());
7820 
7821   char* index_name = zone->NewArray<char>(name_len);
7822   memcpy(index_name, name_vector.begin(), name_len);
7823   return Vector<const char>(index_name, name_len);
7824 }
7825 
7826 }  // namespace
7827 
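// Top-tier (TurboFan) compilation of a single wasm function: build the TF
// graph from the function body, lower SIMD and int64 where the target
// requires it, run the TurboFan pipeline, and return the compilation result.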
7828 wasm::WasmCompilationResult ExecuteTurbofanWasmCompilation(
7829     wasm::WasmEngine* wasm_engine, wasm::CompilationEnv* env,
7830     const wasm::FunctionBody& func_body, int func_index, Counters* counters,
7831     wasm::WasmFeatures* detected) {
7832   TRACE_EVENT2(TRACE_DISABLED_BY_DEFAULT("v8.wasm.detailed"),
7833                "wasm.CompileTopTier", "func_index", func_index, "body_size",
7834                func_body.end - func_body.start);
7835   Zone zone(wasm_engine->allocator(), ZONE_NAME, kCompressGraphZone);
7836   MachineGraph* mcgraph = zone.New<MachineGraph>(
7837       zone.New<Graph>(&zone), zone.New<CommonOperatorBuilder>(&zone),
7838       zone.New<MachineOperatorBuilder>(
7839           &zone, MachineType::PointerRepresentation(),
7840           InstructionSelector::SupportedMachineOperatorFlags(),
7841           InstructionSelector::AlignmentRequirements()));
7842 
7843   OptimizedCompilationInfo info(GetDebugName(&zone, func_index), &zone,
7844                                 CodeKind::WASM_FUNCTION);
7845   if (env->runtime_exception_support) {
7846     info.set_wasm_runtime_exception_support();
7847   }
7848 
7849   if (info.trace_turbo_json()) {
7850     TurboCfgFile tcf;
7851     tcf << AsC1VCompilation(&info);
7852   }
7853 
7854   NodeOriginTable* node_origins =
7855       info.trace_turbo_json() ? zone.New<NodeOriginTable>(mcgraph->graph())
7856                               : nullptr;
7857   SourcePositionTable* source_positions =
7858       mcgraph->zone()->New<SourcePositionTable>(mcgraph->graph());
7859   if (!BuildGraphForWasmFunction(wasm_engine->allocator(), env, func_body,
7860                                  func_index, detected, mcgraph, node_origins,
7861                                  source_positions)) {
7862     return wasm::WasmCompilationResult{};
7863   }
7864 
7865   if (node_origins) {
7866     node_origins->AddDecorator();
7867   }
7868 
7869   // Run the compiler pipeline to generate machine code.
7870   auto call_descriptor = GetWasmCallDescriptor(&zone, func_body.sig);
7871   if (mcgraph->machine()->Is32()) {
7872     call_descriptor = GetI32WasmCallDescriptor(&zone, call_descriptor);
7873   }
7874 
7875   if (ContainsSimd(func_body.sig) &&
7876       (!CpuFeatures::SupportsWasmSimd128() || env->lower_simd)) {
7877     call_descriptor = GetI32WasmCallDescriptorForSimd(&zone, call_descriptor);
7878   }
7879 
7880   Pipeline::GenerateCodeForWasmFunction(
7881       &info, wasm_engine, mcgraph, call_descriptor, source_positions,
7882       node_origins, func_body, env->module, func_index);
7883 
7884   if (counters) {
7885     counters->wasm_compile_function_peak_memory_bytes()->AddSample(
7886         static_cast<int>(mcgraph->graph()->zone()->allocation_size()));
7887   }
7888   auto result = info.ReleaseWasmCompilationResult();
7889   CHECK_NOT_NULL(result);  // Compilation expected to succeed.
7890   DCHECK_EQ(wasm::ExecutionTier::kTurbofan, result->result_tier);
7891   return std::move(*result);
7892 }
7893 
7894 namespace {
7895 // Helper for allocating either a GP or FP reg, or the next stack slot.
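// Rough usage (see GetWasmCallDescriptor below): one allocator is created for
// parameters and one for returns; Next(rep) hands out the next free GP or FP
// register for {rep} and falls back to caller frame slots once the registers
// are exhausted.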
7896 class LinkageLocationAllocator {
7897  public:
7898   template <size_t kNumGpRegs, size_t kNumFpRegs>
7899   constexpr LinkageLocationAllocator(const Register (&gp)[kNumGpRegs],
7900                                      const DoubleRegister (&fp)[kNumFpRegs])
7901       : allocator_(wasm::LinkageAllocator(gp, fp)) {}
7902 
7903   LinkageLocation Next(MachineRepresentation rep) {
7904     MachineType type = MachineType::TypeForRepresentation(rep);
7905     if (IsFloatingPoint(rep)) {
7906       if (allocator_.CanAllocateFP(rep)) {
7907         int reg_code = allocator_.NextFpReg(rep);
7908         return LinkageLocation::ForRegister(reg_code, type);
7909       }
7910     } else if (allocator_.CanAllocateGP()) {
7911       int reg_code = allocator_.NextGpReg();
7912       return LinkageLocation::ForRegister(reg_code, type);
7913     }
7914     // Cannot use register; use stack slot.
7915     int index = -1 - allocator_.NextStackSlot(rep);
7916     return LinkageLocation::ForCallerFrameSlot(index, type);
7917   }
7918 
7919   void SetStackOffset(int offset) { allocator_.SetStackOffset(offset); }
7920   int NumStackSlots() const { return allocator_.NumStackSlots(); }
7921 
7922  private:
7923   wasm::LinkageAllocator allocator_;
7924 };
7925 }  // namespace
7926 
7927 // General code uses the above configuration data.
7928 CallDescriptor* GetWasmCallDescriptor(
7929     Zone* zone, const wasm::FunctionSig* fsig,
7930     WasmGraphBuilder::UseRetpoline use_retpoline, WasmCallKind call_kind) {
7931   // The extra parameters accommodate the instance object as the first
7932   // parameter and, when specified, the additional callable.
7933   bool extra_callable_param =
7934       call_kind == kWasmImportWrapper || call_kind == kWasmCapiFunction;
7935   int extra_params = extra_callable_param ? 2 : 1;
7936   LocationSignature::Builder locations(zone, fsig->return_count(),
7937                                        fsig->parameter_count() + extra_params);
7938 
7939   // Add register and/or stack parameter(s).
7940   LinkageLocationAllocator params(wasm::kGpParamRegisters,
7941                                   wasm::kFpParamRegisters);
7942 
7943   // The instance object.
7944   locations.AddParam(params.Next(MachineRepresentation::kTaggedPointer));
7945   const size_t param_offset = 1;  // Actual params start here.
7946 
7947   // Parameters are separated into two groups (first all untagged, then all
7948   // tagged parameters). This allows for easy iteration of tagged parameters
7949   // during frame iteration.
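  // E.g. for a signature (i32, externref, f64), locations are handed out in
  // the order i32, f64, externref, while AddParamAt() keeps every location at
  // its original parameter index in the location signature.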
7950   const size_t parameter_count = fsig->parameter_count();
7951   for (size_t i = 0; i < parameter_count; i++) {
7952     MachineRepresentation param = fsig->GetParam(i).machine_representation();
7953     // Skip tagged parameters (e.g. any-ref).
7954     if (IsAnyTagged(param)) continue;
7955     auto l = params.Next(param);
7956     locations.AddParamAt(i + param_offset, l);
7957   }
7958   for (size_t i = 0; i < parameter_count; i++) {
7959     MachineRepresentation param = fsig->GetParam(i).machine_representation();
7960     // Skip untagged parameters.
7961     if (!IsAnyTagged(param)) continue;
7962     auto l = params.Next(param);
7963     locations.AddParamAt(i + param_offset, l);
7964   }
7965 
7966   // Import call wrappers have an additional (implicit) parameter, the callable.
7967   // For consistency with JS, we use the JSFunction register.
7968   if (extra_callable_param) {
7969     locations.AddParam(LinkageLocation::ForRegister(
7970         kJSFunctionRegister.code(), MachineType::TaggedPointer()));
7971   }
7972 
7973   // Add return location(s).
7974   LinkageLocationAllocator rets(wasm::kGpReturnRegisters,
7975                                 wasm::kFpReturnRegisters);
7976 
7977   int parameter_slots = params.NumStackSlots();
7978   if (ShouldPadArguments(parameter_slots)) parameter_slots++;
7979 
7980   rets.SetStackOffset(parameter_slots);
7981 
7982   const int return_count = static_cast<int>(locations.return_count_);
7983   for (int i = 0; i < return_count; i++) {
7984     MachineRepresentation ret = fsig->GetReturn(i).machine_representation();
7985     auto l = rets.Next(ret);
7986     locations.AddReturn(l);
7987   }
7988 
7989   const RegList kCalleeSaveRegisters = 0;
7990   const RegList kCalleeSaveFPRegisters = 0;
7991 
7992   // The target for wasm calls is always a code object.
7993   MachineType target_type = MachineType::Pointer();
7994   LinkageLocation target_loc = LinkageLocation::ForAnyRegister(target_type);
7995 
7996   CallDescriptor::Kind descriptor_kind;
7997   if (call_kind == kWasmFunction) {
7998     descriptor_kind = CallDescriptor::kCallWasmFunction;
7999   } else if (call_kind == kWasmImportWrapper) {
8000     descriptor_kind = CallDescriptor::kCallWasmImportWrapper;
8001   } else {
8002     DCHECK_EQ(call_kind, kWasmCapiFunction);
8003     descriptor_kind = CallDescriptor::kCallWasmCapiFunction;
8004   }
8005 
8006   CallDescriptor::Flags flags =
8007       use_retpoline ? CallDescriptor::kRetpoline : CallDescriptor::kNoFlags;
8008   return zone->New<CallDescriptor>(       // --
8009       descriptor_kind,                    // kind
8010       target_type,                        // target MachineType
8011       target_loc,                         // target location
8012       locations.Build(),                  // location_sig
8013       parameter_slots,                    // stack_parameter_count
8014       compiler::Operator::kNoProperties,  // properties
8015       kCalleeSaveRegisters,               // callee-saved registers
8016       kCalleeSaveFPRegisters,             // callee-saved fp regs
8017       flags,                              // flags
8018       "wasm-call",                        // debug name
8019       StackArgumentOrder::kDefault,       // order of the arguments in the stack
8020       0,                                  // allocatable registers
8021       rets.NumStackSlots() - parameter_slots);  // stack_return_count
8022 }
8023 
8024 namespace {
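// Rewrites {call_descriptor} so that every parameter and return of
// {input_type} is split into {num_replacements} slots of {output_type}. This
// is how i64 (2 x i32) and s128 (4 x i32) values are passed on 32-bit
// targets; see GetI32WasmCallDescriptor and GetI32WasmCallDescriptorForSimd
// below.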
8025 CallDescriptor* ReplaceTypeInCallDescriptorWith(
8026     Zone* zone, const CallDescriptor* call_descriptor, size_t num_replacements,
8027     MachineType input_type, MachineRepresentation output_type) {
8028   size_t parameter_count = call_descriptor->ParameterCount();
8029   size_t return_count = call_descriptor->ReturnCount();
8030   for (size_t i = 0; i < call_descriptor->ParameterCount(); i++) {
8031     if (call_descriptor->GetParameterType(i) == input_type) {
8032       parameter_count += num_replacements - 1;
8033     }
8034   }
8035   for (size_t i = 0; i < call_descriptor->ReturnCount(); i++) {
8036     if (call_descriptor->GetReturnType(i) == input_type) {
8037       return_count += num_replacements - 1;
8038     }
8039   }
8040   if (parameter_count == call_descriptor->ParameterCount() &&
8041       return_count == call_descriptor->ReturnCount()) {
8042     return const_cast<CallDescriptor*>(call_descriptor);
8043   }
8044 
8045   LocationSignature::Builder locations(zone, return_count, parameter_count);
8046 
8047   // The last parameter may be the special callable parameter. In that case we
8048   // have to preserve it as the last parameter, i.e. allocate it again in the
8049   // new location signature in the same register.
8050   bool has_callable_param =
8051       (call_descriptor->GetInputLocation(call_descriptor->InputCount() - 1) ==
8052        LinkageLocation::ForRegister(kJSFunctionRegister.code(),
8053                                     MachineType::TaggedPointer()));
8054   LinkageLocationAllocator params(wasm::kGpParamRegisters,
8055                                   wasm::kFpParamRegisters);
8056   for (size_t i = 0, e = call_descriptor->ParameterCount() -
8057                          (has_callable_param ? 1 : 0);
8058        i < e; i++) {
8059     if (call_descriptor->GetParameterType(i) == input_type) {
8060       for (size_t j = 0; j < num_replacements; j++) {
8061         locations.AddParam(params.Next(output_type));
8062       }
8063     } else {
8064       locations.AddParam(
8065           params.Next(call_descriptor->GetParameterType(i).representation()));
8066     }
8067   }
8068   if (has_callable_param) {
8069     locations.AddParam(LinkageLocation::ForRegister(
8070         kJSFunctionRegister.code(), MachineType::TaggedPointer()));
8071   }
8072 
8073   LinkageLocationAllocator rets(wasm::kGpReturnRegisters,
8074                                 wasm::kFpReturnRegisters);
8075   rets.SetStackOffset(params.NumStackSlots());
8076   for (size_t i = 0; i < call_descriptor->ReturnCount(); i++) {
8077     if (call_descriptor->GetReturnType(i) == input_type) {
8078       for (size_t j = 0; j < num_replacements; j++) {
8079         locations.AddReturn(rets.Next(output_type));
8080       }
8081     } else {
8082       locations.AddReturn(
8083           rets.Next(call_descriptor->GetReturnType(i).representation()));
8084     }
8085   }
8086 
8087   return zone->New<CallDescriptor>(                    // --
8088       call_descriptor->kind(),                         // kind
8089       call_descriptor->GetInputType(0),                // target MachineType
8090       call_descriptor->GetInputLocation(0),            // target location
8091       locations.Build(),                               // location_sig
8092       params.NumStackSlots(),                          // stack_parameter_count
8093       call_descriptor->properties(),                   // properties
8094       call_descriptor->CalleeSavedRegisters(),         // callee-saved registers
8095       call_descriptor->CalleeSavedFPRegisters(),       // callee-saved fp regs
8096       call_descriptor->flags(),                        // flags
8097       call_descriptor->debug_name(),                   // debug name
8098       call_descriptor->GetStackArgumentOrder(),        // stack order
8099       call_descriptor->AllocatableRegisters(),         // allocatable registers
8100       rets.NumStackSlots() - params.NumStackSlots());  // stack_return_count
8101 }
8102 }  // namespace
8103 
8104 CallDescriptor* GetI32WasmCallDescriptor(
8105     Zone* zone, const CallDescriptor* call_descriptor) {
8106   return ReplaceTypeInCallDescriptorWith(zone, call_descriptor, 2,
8107                                          MachineType::Int64(),
8108                                          MachineRepresentation::kWord32);
8109 }
8110 
8111 CallDescriptor* GetI32WasmCallDescriptorForSimd(
8112     Zone* zone, CallDescriptor* call_descriptor) {
8113   return ReplaceTypeInCallDescriptorWith(zone, call_descriptor, 4,
8114                                          MachineType::Simd128(),
8115                                          MachineRepresentation::kWord32);
8116 }
8117 
8118 AssemblerOptions WasmAssemblerOptions() {
8119   AssemblerOptions options;
8120   // Relocation info required to serialize {WasmCode} for proper functions.
8121   options.record_reloc_info_for_serialization = true;
8122   options.enable_root_array_delta_access = false;
8123   return options;
8124 }
8125 
8126 AssemblerOptions WasmStubAssemblerOptions() {
8127   AssemblerOptions options;
8128   // Relocation info not necessary because stubs are not serialized.
8129   options.record_reloc_info_for_serialization = false;
8130   options.enable_root_array_delta_access = false;
8131   return options;
8132 }
8133 
8134 #undef FATAL_UNSUPPORTED_OPCODE
8135 #undef CALL_BUILTIN
8136 #undef WASM_INSTANCE_OBJECT_SIZE
8137 #undef WASM_INSTANCE_OBJECT_OFFSET
8138 #undef LOAD_INSTANCE_FIELD
8139 #undef LOAD_TAGGED_POINTER
8140 #undef LOAD_TAGGED_ANY
8141 #undef LOAD_FIXED_ARRAY_SLOT
8142 #undef LOAD_FIXED_ARRAY_SLOT_SMI
8143 #undef LOAD_FIXED_ARRAY_SLOT_PTR
8144 #undef LOAD_FIXED_ARRAY_SLOT_ANY
8145 #undef STORE_RAW
8146 #undef STORE_RAW_NODE_OFFSET
8147 #undef STORE_FIXED_ARRAY_SLOT_SMI
8148 #undef STORE_FIXED_ARRAY_SLOT_ANY
8149 
8150 }  // namespace compiler
8151 }  // namespace internal
8152 }  // namespace v8
8153