• Home
  • Line#
  • Scopes#
  • Navigate#
  • Raw
  • Download
1 // Copyright 2016 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4 
5 #include "src/wasm/wasm-interpreter.h"
6 #include "src/wasm/ast-decoder.h"
7 #include "src/wasm/decoder.h"
8 #include "src/wasm/wasm-external-refs.h"
9 #include "src/wasm/wasm-module.h"
10 
11 #include "src/base/accounting-allocator.h"
12 #include "src/zone-containers.h"
13 
14 namespace v8 {
15 namespace internal {
16 namespace wasm {
17 
#if DEBUG
// TRACE(...) emits interpreter tracing via PrintF when the
// --trace-wasm-interpreter flag is set; in release builds the macro
// expands to nothing so tracing has zero cost.
#define TRACE(...)                                        \
  do {                                                    \
    if (FLAG_trace_wasm_interpreter) PrintF(__VA_ARGS__); \
  } while (false)
#else
#define TRACE(...)
#endif
26 
// Opcodes used internally by the interpreter only. kInternalBreakpoint
// (0xFF) replaces the original bytecode byte where a breakpoint is set.
#define FOREACH_INTERNAL_OPCODE(V) V(Breakpoint, 0xFF)

// Binary operators that map 1:1 onto a C++ infix operator, listed as
// V(wasm opcode, C++ operand type, operator token). The operand type
// encodes signedness (e.g. LtU uses uint32_t, LtS uses int32_t).
#define FOREACH_SIMPLE_BINOP(V) \
  V(I32Add, uint32_t, +)        \
  V(I32Sub, uint32_t, -)        \
  V(I32Mul, uint32_t, *)        \
  V(I32And, uint32_t, &)        \
  V(I32Ior, uint32_t, |)        \
  V(I32Xor, uint32_t, ^)        \
  V(I32Eq, uint32_t, ==)        \
  V(I32Ne, uint32_t, !=)        \
  V(I32LtU, uint32_t, <)        \
  V(I32LeU, uint32_t, <=)       \
  V(I32GtU, uint32_t, >)        \
  V(I32GeU, uint32_t, >=)       \
  V(I32LtS, int32_t, <)         \
  V(I32LeS, int32_t, <=)        \
  V(I32GtS, int32_t, >)         \
  V(I32GeS, int32_t, >=)        \
  V(I64Add, uint64_t, +)        \
  V(I64Sub, uint64_t, -)        \
  V(I64Mul, uint64_t, *)        \
  V(I64And, uint64_t, &)        \
  V(I64Ior, uint64_t, |)        \
  V(I64Xor, uint64_t, ^)        \
  V(I64Eq, uint64_t, ==)        \
  V(I64Ne, uint64_t, !=)        \
  V(I64LtU, uint64_t, <)        \
  V(I64LeU, uint64_t, <=)       \
  V(I64GtU, uint64_t, >)        \
  V(I64GeU, uint64_t, >=)       \
  V(I64LtS, int64_t, <)         \
  V(I64LeS, int64_t, <=)        \
  V(I64GtS, int64_t, >)         \
  V(I64GeS, int64_t, >=)        \
  V(F32Add, float, +)           \
  V(F32Mul, float, *)           \
  V(F32Div, float, /)           \
  V(F32Eq, float, ==)           \
  V(F32Ne, float, !=)           \
  V(F32Lt, float, <)            \
  V(F32Le, float, <=)           \
  V(F32Gt, float, >)            \
  V(F32Ge, float, >=)           \
  V(F64Add, double, +)          \
  V(F64Mul, double, *)          \
  V(F64Div, double, /)          \
  V(F64Eq, double, ==)          \
  V(F64Ne, double, !=)          \
  V(F64Lt, double, <)           \
  V(F64Le, double, <=)          \
  V(F64Gt, double, >)           \
  V(F64Ge, double, >=)
80 
// Binary operators that need a dedicated Execute<Name> helper below
// (trapping division/remainder, shifts, rotates, NaN-aware float ops,
// and the non-trapping asm.js variants). Listed as V(opcode, C++ type).
#define FOREACH_OTHER_BINOP(V) \
  V(I32DivS, int32_t)          \
  V(I32DivU, uint32_t)         \
  V(I32RemS, int32_t)          \
  V(I32RemU, uint32_t)         \
  V(I32Shl, uint32_t)          \
  V(I32ShrU, uint32_t)         \
  V(I32ShrS, int32_t)          \
  V(I64DivS, int64_t)          \
  V(I64DivU, uint64_t)         \
  V(I64RemS, int64_t)          \
  V(I64RemU, uint64_t)         \
  V(I64Shl, uint64_t)          \
  V(I64ShrU, uint64_t)         \
  V(I64ShrS, int64_t)          \
  V(I32Ror, int32_t)           \
  V(I32Rol, int32_t)           \
  V(I64Ror, int64_t)           \
  V(I64Rol, int64_t)           \
  V(F32Sub, float)             \
  V(F32Min, float)             \
  V(F32Max, float)             \
  V(F32CopySign, float)        \
  V(F64Min, double)            \
  V(F64Max, double)            \
  V(F64Sub, double)            \
  V(F64CopySign, double)       \
  V(I32AsmjsDivS, int32_t)     \
  V(I32AsmjsDivU, uint32_t)    \
  V(I32AsmjsRemS, int32_t)     \
  V(I32AsmjsRemU, uint32_t)
112 
// Unary operators with a dedicated Execute<Name> helper below. The C++
// type given is the *input* operand type; the result type is implied by
// the opcode name (e.g. I32SConvertF32 takes a float, yields int32_t).
#define FOREACH_OTHER_UNOP(V)    \
  V(I32Clz, uint32_t)            \
  V(I32Ctz, uint32_t)            \
  V(I32Popcnt, uint32_t)         \
  V(I32Eqz, uint32_t)            \
  V(I64Clz, uint64_t)            \
  V(I64Ctz, uint64_t)            \
  V(I64Popcnt, uint64_t)         \
  V(I64Eqz, uint64_t)            \
  V(F32Abs, float)               \
  V(F32Neg, float)               \
  V(F32Ceil, float)              \
  V(F32Floor, float)             \
  V(F32Trunc, float)             \
  V(F32NearestInt, float)        \
  V(F32Sqrt, float)              \
  V(F64Abs, double)              \
  V(F64Neg, double)              \
  V(F64Ceil, double)             \
  V(F64Floor, double)            \
  V(F64Trunc, double)            \
  V(F64NearestInt, double)       \
  V(F64Sqrt, double)             \
  V(I32SConvertF32, float)       \
  V(I32SConvertF64, double)      \
  V(I32UConvertF32, float)       \
  V(I32UConvertF64, double)      \
  V(I32ConvertI64, int64_t)      \
  V(I64SConvertF32, float)       \
  V(I64SConvertF64, double)      \
  V(I64UConvertF32, float)       \
  V(I64UConvertF64, double)      \
  V(I64SConvertI32, int32_t)     \
  V(I64UConvertI32, uint32_t)    \
  V(F32SConvertI32, int32_t)     \
  V(F32UConvertI32, uint32_t)    \
  V(F32SConvertI64, int64_t)     \
  V(F32UConvertI64, uint64_t)    \
  V(F32ConvertF64, double)       \
  V(F32ReinterpretI32, int32_t)  \
  V(F64SConvertI32, int32_t)     \
  V(F64UConvertI32, uint32_t)    \
  V(F64SConvertI64, int64_t)     \
  V(F64UConvertI64, uint64_t)    \
  V(F64ConvertF32, float)        \
  V(F64ReinterpretI64, int64_t)  \
  V(I32ReinterpretF32, float)    \
  V(I64ReinterpretF64, double)   \
  V(I32AsmjsSConvertF32, float)  \
  V(I32AsmjsUConvertF32, float)  \
  V(I32AsmjsSConvertF64, double) \
  V(I32AsmjsUConvertF64, double)
165 
ExecuteI32DivS(int32_t a,int32_t b,TrapReason * trap)166 static inline int32_t ExecuteI32DivS(int32_t a, int32_t b, TrapReason* trap) {
167   if (b == 0) {
168     *trap = kTrapDivByZero;
169     return 0;
170   }
171   if (b == -1 && a == std::numeric_limits<int32_t>::min()) {
172     *trap = kTrapDivUnrepresentable;
173     return 0;
174   }
175   return a / b;
176 }
177 
ExecuteI32DivU(uint32_t a,uint32_t b,TrapReason * trap)178 static inline uint32_t ExecuteI32DivU(uint32_t a, uint32_t b,
179                                       TrapReason* trap) {
180   if (b == 0) {
181     *trap = kTrapDivByZero;
182     return 0;
183   }
184   return a / b;
185 }
186 
ExecuteI32RemS(int32_t a,int32_t b,TrapReason * trap)187 static inline int32_t ExecuteI32RemS(int32_t a, int32_t b, TrapReason* trap) {
188   if (b == 0) {
189     *trap = kTrapRemByZero;
190     return 0;
191   }
192   if (b == -1) return 0;
193   return a % b;
194 }
195 
ExecuteI32RemU(uint32_t a,uint32_t b,TrapReason * trap)196 static inline uint32_t ExecuteI32RemU(uint32_t a, uint32_t b,
197                                       TrapReason* trap) {
198   if (b == 0) {
199     *trap = kTrapRemByZero;
200     return 0;
201   }
202   return a % b;
203 }
204 
// Executes wasm i32.shl. Wasm takes the shift count modulo 32, so only
// the low 5 bits of {b} are used; this also keeps the C++ shift defined.
static inline uint32_t ExecuteI32Shl(uint32_t a, uint32_t b, TrapReason* trap) {
  return a << (b & 0x1f);
}

// Executes wasm i32.shr_u (logical shift right); count taken modulo 32.
static inline uint32_t ExecuteI32ShrU(uint32_t a, uint32_t b,
                                      TrapReason* trap) {
  return a >> (b & 0x1f);
}

// Executes wasm i32.shr_s (arithmetic shift right); count taken modulo 32.
// NOTE(review): right-shifting a negative int32_t is implementation-defined
// in pre-C++20; this presumably relies on arithmetic-shift behavior of the
// supported toolchains — confirm against project compiler requirements.
static inline int32_t ExecuteI32ShrS(int32_t a, int32_t b, TrapReason* trap) {
  return a >> (b & 0x1f);
}
217 
ExecuteI64DivS(int64_t a,int64_t b,TrapReason * trap)218 static inline int64_t ExecuteI64DivS(int64_t a, int64_t b, TrapReason* trap) {
219   if (b == 0) {
220     *trap = kTrapDivByZero;
221     return 0;
222   }
223   if (b == -1 && a == std::numeric_limits<int64_t>::min()) {
224     *trap = kTrapDivUnrepresentable;
225     return 0;
226   }
227   return a / b;
228 }
229 
ExecuteI64DivU(uint64_t a,uint64_t b,TrapReason * trap)230 static inline uint64_t ExecuteI64DivU(uint64_t a, uint64_t b,
231                                       TrapReason* trap) {
232   if (b == 0) {
233     *trap = kTrapDivByZero;
234     return 0;
235   }
236   return a / b;
237 }
238 
ExecuteI64RemS(int64_t a,int64_t b,TrapReason * trap)239 static inline int64_t ExecuteI64RemS(int64_t a, int64_t b, TrapReason* trap) {
240   if (b == 0) {
241     *trap = kTrapRemByZero;
242     return 0;
243   }
244   if (b == -1) return 0;
245   return a % b;
246 }
247 
ExecuteI64RemU(uint64_t a,uint64_t b,TrapReason * trap)248 static inline uint64_t ExecuteI64RemU(uint64_t a, uint64_t b,
249                                       TrapReason* trap) {
250   if (b == 0) {
251     *trap = kTrapRemByZero;
252     return 0;
253   }
254   return a % b;
255 }
256 
// Executes wasm i64.shl. Wasm takes the shift count modulo 64, so only
// the low 6 bits of {b} are used; this also keeps the C++ shift defined.
static inline uint64_t ExecuteI64Shl(uint64_t a, uint64_t b, TrapReason* trap) {
  return a << (b & 0x3f);
}

// Executes wasm i64.shr_u (logical shift right); count taken modulo 64.
static inline uint64_t ExecuteI64ShrU(uint64_t a, uint64_t b,
                                      TrapReason* trap) {
  return a >> (b & 0x3f);
}

// Executes wasm i64.shr_s (arithmetic shift right); count taken modulo 64.
// NOTE(review): right-shifting a negative int64_t is implementation-defined
// in pre-C++20; presumably relies on arithmetic-shift toolchains — confirm.
static inline int64_t ExecuteI64ShrS(int64_t a, int64_t b, TrapReason* trap) {
  return a >> (b & 0x3f);
}
269 
// Executes wasm i32.rotr: rotates {a} right by {b} modulo 32 bits.
static inline uint32_t ExecuteI32Ror(uint32_t a, uint32_t b, TrapReason* trap) {
  uint32_t shift = (b & 0x1f);
  // Mask the complementary shift too: with shift == 0 the original
  // expression computed {a << 32}, which is undefined behavior in C++.
  // (32 - shift) & 0x1f is 0 when shift is 0, so the rotate-by-zero case
  // correctly yields {a} unchanged.
  return (a >> shift) | (a << ((32 - shift) & 0x1f));
}
274 
// Executes wasm i32.rotl: rotates {a} left by {b} modulo 32 bits.
static inline uint32_t ExecuteI32Rol(uint32_t a, uint32_t b, TrapReason* trap) {
  uint32_t shift = (b & 0x1f);
  // Mask the complementary shift too: with shift == 0 the original
  // expression computed {a >> 32}, which is undefined behavior in C++.
  // (32 - shift) & 0x1f is 0 when shift is 0, so rotate-by-zero yields {a}.
  return (a << shift) | (a >> ((32 - shift) & 0x1f));
}
279 
// Executes wasm i64.rotr: rotates {a} right by {b} modulo 64 bits.
static inline uint64_t ExecuteI64Ror(uint64_t a, uint64_t b, TrapReason* trap) {
  uint32_t shift = (b & 0x3f);
  // Mask the complementary shift too: with shift == 0 the original
  // expression computed {a << 64}, which is undefined behavior in C++.
  // (64 - shift) & 0x3f is 0 when shift is 0, so rotate-by-zero yields {a}.
  return (a >> shift) | (a << ((64 - shift) & 0x3f));
}
284 
// Executes wasm i64.rotl: rotates {a} left by {b} modulo 64 bits.
static inline uint64_t ExecuteI64Rol(uint64_t a, uint64_t b, TrapReason* trap) {
  uint32_t shift = (b & 0x3f);
  // Mask the complementary shift too: with shift == 0 the original
  // expression computed {a >> 64}, which is undefined behavior in C++.
  // (64 - shift) & 0x3f is 0 when shift is 0, so rotate-by-zero yields {a}.
  return (a << shift) | (a >> ((64 - shift) & 0x3f));
}
289 
// Converts a (possibly signaling) NaN {a} into a quiet NaN with the same
// payload otherwise. The direction of the "signaling" bit (bit 22 of the
// float representation) is platform-dependent, so it is probed at runtime
// from the standard library's quiet_NaN().
static float quiet(float a) {
  static const uint32_t kSignalingBit = 1 << 22;
  uint32_t q = bit_cast<uint32_t>(std::numeric_limits<float>::quiet_NaN());
  if ((q & kSignalingBit) != 0) {
    // On some machines, the signaling bit set indicates it's a quiet NaN.
    return bit_cast<float>(bit_cast<uint32_t>(a) | kSignalingBit);
  } else {
    // On others, the signaling bit set indicates it's a signaling NaN.
    return bit_cast<float>(bit_cast<uint32_t>(a) & ~kSignalingBit);
  }
}
301 
// Double-precision counterpart of quiet(float): normalizes {a} to a quiet
// NaN using bit 51 of the double representation, probing the platform's
// quiet-NaN convention at runtime.
static double quiet(double a) {
  static const uint64_t kSignalingBit = 1ULL << 51;
  uint64_t q = bit_cast<uint64_t>(std::numeric_limits<double>::quiet_NaN());
  if ((q & kSignalingBit) != 0) {
    // On some machines, the signaling bit set indicates it's a quiet NaN.
    return bit_cast<double>(bit_cast<uint64_t>(a) | kSignalingBit);
  } else {
    // On others, the signaling bit set indicates it's a signaling NaN.
    return bit_cast<double>(bit_cast<uint64_t>(a) & ~kSignalingBit);
  }
}
313 
// Executes wasm f32.sub, ensuring a NaN result carries a quieted version
// of the NaN operand's payload rather than a hardware-chosen value.
static inline float ExecuteF32Sub(float a, float b, TrapReason* trap) {
  float result = a - b;
  // Some architectures (e.g. MIPS) need extra checking to preserve the payload
  // of a NaN operand.
  // {result - result != 0} holds exactly when {result} is NaN or infinite,
  // which are the only cases where a NaN operand could be involved.
  if (result - result != 0) {
    if (std::isnan(a)) return quiet(a);
    if (std::isnan(b)) return quiet(b);
  }
  return result;
}
324 
// Executes wasm f32.min. A NaN operand is returned (quieted) explicitly,
// since std::min would otherwise pick an arbitrary operand.
// NOTE(review): std::min does not distinguish -0.0 from +0.0, so
// min(+0, -0) returns the first argument — confirm this matches the
// zero-sign semantics of the wasm spec version this targets.
static inline float ExecuteF32Min(float a, float b, TrapReason* trap) {
  if (std::isnan(a)) return quiet(a);
  if (std::isnan(b)) return quiet(b);
  return std::min(a, b);
}

// Executes wasm f32.max; NaN handling mirrors ExecuteF32Min, and the
// same zero-sign caveat applies.
static inline float ExecuteF32Max(float a, float b, TrapReason* trap) {
  if (std::isnan(a)) return quiet(a);
  if (std::isnan(b)) return quiet(b);
  return std::max(a, b);
}

// Executes wasm f32.copysign: magnitude of {a} with the sign of {b}.
static inline float ExecuteF32CopySign(float a, float b, TrapReason* trap) {
  return copysignf(a, b);
}
340 
// Executes wasm f64.sub, ensuring a NaN result carries a quieted version
// of the NaN operand's payload rather than a hardware-chosen value.
static inline double ExecuteF64Sub(double a, double b, TrapReason* trap) {
  double result = a - b;
  // Some architectures (e.g. MIPS) need extra checking to preserve the payload
  // of a NaN operand.
  // {result - result != 0} holds exactly when {result} is NaN or infinite.
  if (result - result != 0) {
    if (std::isnan(a)) return quiet(a);
    if (std::isnan(b)) return quiet(b);
  }
  return result;
}

// Executes wasm f64.min; NaN operands are returned quieted. Same
// zero-sign caveat as the f32 variant: std::min treats -0.0 == +0.0.
static inline double ExecuteF64Min(double a, double b, TrapReason* trap) {
  if (std::isnan(a)) return quiet(a);
  if (std::isnan(b)) return quiet(b);
  return std::min(a, b);
}

// Executes wasm f64.max; NaN operands are returned quieted.
static inline double ExecuteF64Max(double a, double b, TrapReason* trap) {
  if (std::isnan(a)) return quiet(a);
  if (std::isnan(b)) return quiet(b);
  return std::max(a, b);
}

// Executes wasm f64.copysign: magnitude of {a} with the sign of {b}.
static inline double ExecuteF64CopySign(double a, double b, TrapReason* trap) {
  return copysign(a, b);
}
367 
// asm.js integer division never traps: x / 0 evaluates to 0, and
// INT32_MIN / -1 wraps to INT32_MIN instead of being unrepresentable.
static inline int32_t ExecuteI32AsmjsDivS(int32_t a, int32_t b,
                                          TrapReason* trap) {
  if (b == 0) return 0;
  if (b == -1 && a == std::numeric_limits<int32_t>::min()) {
    return std::numeric_limits<int32_t>::min();
  }
  return a / b;
}

// asm.js unsigned division: x / 0 evaluates to 0 instead of trapping.
static inline uint32_t ExecuteI32AsmjsDivU(uint32_t a, uint32_t b,
                                           TrapReason* trap) {
  if (b == 0) return 0;
  return a / b;
}

// asm.js signed remainder: x % 0 is 0, and the -1 divisor is
// special-cased to avoid the UB of INT32_MIN % -1 (result is 0).
static inline int32_t ExecuteI32AsmjsRemS(int32_t a, int32_t b,
                                          TrapReason* trap) {
  if (b == 0) return 0;
  if (b == -1) return 0;
  return a % b;
}

// asm.js unsigned remainder: x % 0 is 0 instead of trapping.
static inline uint32_t ExecuteI32AsmjsRemU(uint32_t a, uint32_t b,
                                           TrapReason* trap) {
  if (b == 0) return 0;
  return a % b;
}
395 
// asm.js float->int conversions never trap; they use V8's
// DoubleToInt32/DoubleToUint32 helpers, which presumably implement the
// JavaScript ToInt32/ToUint32 truncation semantics — confirm in
// src/conversions.h.
static inline int32_t ExecuteI32AsmjsSConvertF32(float a, TrapReason* trap) {
  return DoubleToInt32(a);
}

static inline uint32_t ExecuteI32AsmjsUConvertF32(float a, TrapReason* trap) {
  return DoubleToUint32(a);
}

static inline int32_t ExecuteI32AsmjsSConvertF64(double a, TrapReason* trap) {
  return DoubleToInt32(a);
}

static inline uint32_t ExecuteI32AsmjsUConvertF64(double a, TrapReason* trap) {
  return DoubleToUint32(a);
}
411 
// Bit-counting and zero-test operators. The popcnt variants go through
// external-reference wrappers shared with generated code.
// NOTE(review): the return types vary (int32_t vs uint32_t, and some
// functions lack `inline`) with identical bit patterns either way;
// harmless but inconsistent.
static int32_t ExecuteI32Clz(uint32_t val, TrapReason* trap) {
  return base::bits::CountLeadingZeros32(val);
}

static uint32_t ExecuteI32Ctz(uint32_t val, TrapReason* trap) {
  return base::bits::CountTrailingZeros32(val);
}

static uint32_t ExecuteI32Popcnt(uint32_t val, TrapReason* trap) {
  return word32_popcnt_wrapper(&val);
}

// i32.eqz: 1 if {val} is zero, else 0.
static inline uint32_t ExecuteI32Eqz(uint32_t val, TrapReason* trap) {
  return val == 0 ? 1 : 0;
}

static int64_t ExecuteI64Clz(uint64_t val, TrapReason* trap) {
  return base::bits::CountLeadingZeros64(val);
}

static inline uint64_t ExecuteI64Ctz(uint64_t val, TrapReason* trap) {
  return base::bits::CountTrailingZeros64(val);
}

static inline int64_t ExecuteI64Popcnt(uint64_t val, TrapReason* trap) {
  return word64_popcnt_wrapper(&val);
}

// i64.eqz produces an i32 result: 1 if {val} is zero, else 0.
static inline int32_t ExecuteI64Eqz(uint64_t val, TrapReason* trap) {
  return val == 0 ? 1 : 0;
}
443 
// Floating-point unary operators. Abs/Neg are implemented as bit
// operations (clear / flip the sign bit) so NaN payloads pass through
// unchanged; the rest defer to the C math library.
static inline float ExecuteF32Abs(float a, TrapReason* trap) {
  return bit_cast<float>(bit_cast<uint32_t>(a) & 0x7fffffff);
}

static inline float ExecuteF32Neg(float a, TrapReason* trap) {
  return bit_cast<float>(bit_cast<uint32_t>(a) ^ 0x80000000);
}

static inline float ExecuteF32Ceil(float a, TrapReason* trap) {
  return ceilf(a);
}

static inline float ExecuteF32Floor(float a, TrapReason* trap) {
  return floorf(a);
}

static inline float ExecuteF32Trunc(float a, TrapReason* trap) {
  return truncf(a);
}

// f32.nearest: round-to-nearest, ties to even (current rounding mode).
static inline float ExecuteF32NearestInt(float a, TrapReason* trap) {
  return nearbyintf(a);
}

static inline float ExecuteF32Sqrt(float a, TrapReason* trap) {
  return sqrtf(a);
}

static inline double ExecuteF64Abs(double a, TrapReason* trap) {
  return bit_cast<double>(bit_cast<uint64_t>(a) & 0x7fffffffffffffff);
}

static inline double ExecuteF64Neg(double a, TrapReason* trap) {
  return bit_cast<double>(bit_cast<uint64_t>(a) ^ 0x8000000000000000);
}

static inline double ExecuteF64Ceil(double a, TrapReason* trap) {
  return ceil(a);
}

static inline double ExecuteF64Floor(double a, TrapReason* trap) {
  return floor(a);
}

static inline double ExecuteF64Trunc(double a, TrapReason* trap) {
  return trunc(a);
}

// f64.nearest: round-to-nearest, ties to even (current rounding mode).
static inline double ExecuteF64NearestInt(double a, TrapReason* trap) {
  return nearbyint(a);
}

static inline double ExecuteF64Sqrt(double a, TrapReason* trap) {
  return sqrt(a);
}
499 
// Executes wasm i32.trunc_s/f32; traps when the value is out of int32
// range or NaN (NaN fails both comparisons).
// The bounds are deliberately asymmetric: float(INT32_MAX) rounds up to
// 2^31 (not itself representable), so strict {<} is correct, while
// float(INT32_MIN) is exact, so inclusive {>=} is correct.
static int32_t ExecuteI32SConvertF32(float a, TrapReason* trap) {
  if (a < static_cast<float>(INT32_MAX) && a >= static_cast<float>(INT32_MIN)) {
    return static_cast<int32_t>(a);
  }
  *trap = kTrapFloatUnrepresentable;
  return 0;
}
507 
// Executes wasm i32.trunc_s/f64; traps when out of int32 range or NaN.
// Both int32 bounds are exactly representable as doubles, so the valid
// open interval is (INT32_MIN - 1, INT32_MAX + 1): every double strictly
// inside it truncates to a representable int32.
static int32_t ExecuteI32SConvertF64(double a, TrapReason* trap) {
  if (a < (static_cast<double>(INT32_MAX) + 1.0) &&
      a > (static_cast<double>(INT32_MIN) - 1.0)) {
    return static_cast<int32_t>(a);
  }
  *trap = kTrapFloatUnrepresentable;
  return 0;
}
516 
ExecuteI32UConvertF32(float a,TrapReason * trap)517 static uint32_t ExecuteI32UConvertF32(float a, TrapReason* trap) {
518   if (a < (static_cast<float>(UINT32_MAX) + 1.0) && a > -1) {
519     return static_cast<uint32_t>(a);
520   }
521   *trap = kTrapFloatUnrepresentable;
522   return 0;
523 }
524 
ExecuteI32UConvertF64(double a,TrapReason * trap)525 static uint32_t ExecuteI32UConvertF64(double a, TrapReason* trap) {
526   if (a < (static_cast<float>(UINT32_MAX) + 1.0) && a > -1) {
527     return static_cast<uint32_t>(a);
528   }
529   *trap = kTrapFloatUnrepresentable;
530   return 0;
531 }
532 
// Executes wasm i32.wrap/i64: keeps the low 32 bits of {a}.
static inline uint32_t ExecuteI32ConvertI64(int64_t a, TrapReason* trap) {
  // The narrowing cast truncates to the low 32 bits by itself; the
  // explicit 0xFFFFFFFF mask in the original formulation was redundant.
  return static_cast<uint32_t>(a);
}
536 
ExecuteI64SConvertF32(float a,TrapReason * trap)537 static int64_t ExecuteI64SConvertF32(float a, TrapReason* trap) {
538   int64_t output;
539   if (!float32_to_int64_wrapper(&a, &output)) {
540     *trap = kTrapFloatUnrepresentable;
541   }
542   return output;
543 }
544 
ExecuteI64SConvertF64(double a,TrapReason * trap)545 static int64_t ExecuteI64SConvertF64(double a, TrapReason* trap) {
546   int64_t output;
547   if (!float64_to_int64_wrapper(&a, &output)) {
548     *trap = kTrapFloatUnrepresentable;
549   }
550   return output;
551 }
552 
ExecuteI64UConvertF32(float a,TrapReason * trap)553 static uint64_t ExecuteI64UConvertF32(float a, TrapReason* trap) {
554   uint64_t output;
555   if (!float32_to_uint64_wrapper(&a, &output)) {
556     *trap = kTrapFloatUnrepresentable;
557   }
558   return output;
559 }
560 
ExecuteI64UConvertF64(double a,TrapReason * trap)561 static uint64_t ExecuteI64UConvertF64(double a, TrapReason* trap) {
562   uint64_t output;
563   if (!float64_to_uint64_wrapper(&a, &output)) {
564     *trap = kTrapFloatUnrepresentable;
565   }
566   return output;
567 }
568 
// Widening, narrowing and reinterpreting conversions that can never
// trap. The 64-bit int<->float conversions go through external-reference
// wrappers shared with generated code.

// i64.extend_s/i32: sign-extends.
static inline int64_t ExecuteI64SConvertI32(int32_t a, TrapReason* trap) {
  return static_cast<int64_t>(a);
}

// i64.extend_u/i32: zero-extends.
// NOTE(review): declared to return int64_t although the cast produces
// uint64_t; the bit pattern is identical, but the types are inconsistent.
static inline int64_t ExecuteI64UConvertI32(uint32_t a, TrapReason* trap) {
  return static_cast<uint64_t>(a);
}

static inline float ExecuteF32SConvertI32(int32_t a, TrapReason* trap) {
  return static_cast<float>(a);
}

static inline float ExecuteF32UConvertI32(uint32_t a, TrapReason* trap) {
  return static_cast<float>(a);
}

static inline float ExecuteF32SConvertI64(int64_t a, TrapReason* trap) {
  float output;
  int64_to_float32_wrapper(&a, &output);
  return output;
}

static inline float ExecuteF32UConvertI64(uint64_t a, TrapReason* trap) {
  float output;
  uint64_to_float32_wrapper(&a, &output);
  return output;
}

// f32.demote/f64: may round; never traps.
static inline float ExecuteF32ConvertF64(double a, TrapReason* trap) {
  return static_cast<float>(a);
}

static inline float ExecuteF32ReinterpretI32(int32_t a, TrapReason* trap) {
  return bit_cast<float>(a);
}

static inline double ExecuteF64SConvertI32(int32_t a, TrapReason* trap) {
  return static_cast<double>(a);
}

static inline double ExecuteF64UConvertI32(uint32_t a, TrapReason* trap) {
  return static_cast<double>(a);
}

static inline double ExecuteF64SConvertI64(int64_t a, TrapReason* trap) {
  double output;
  int64_to_float64_wrapper(&a, &output);
  return output;
}

static inline double ExecuteF64UConvertI64(uint64_t a, TrapReason* trap) {
  double output;
  uint64_to_float64_wrapper(&a, &output);
  return output;
}

// f64.promote/f32: exact; never traps.
static inline double ExecuteF64ConvertF32(float a, TrapReason* trap) {
  return static_cast<double>(a);
}

static inline double ExecuteF64ReinterpretI64(int64_t a, TrapReason* trap) {
  return bit_cast<double>(a);
}

static inline int32_t ExecuteI32ReinterpretF32(float a, TrapReason* trap) {
  return bit_cast<int32_t>(a);
}

static inline int64_t ExecuteI64ReinterpretF64(double a, TrapReason* trap) {
  return bit_cast<int64_t>(a);
}
640 
// Enum of the interpreter-internal opcodes (currently only
// kInternalBreakpoint = 0xFF), generated from FOREACH_INTERNAL_OPCODE.
enum InternalOpcode {
#define DECL_INTERNAL_ENUM(name, value) kInternal##name = value,
  FOREACH_INTERNAL_OPCODE(DECL_INTERNAL_ENUM)
#undef DECL_INTERNAL_ENUM
};
646 
// Returns a printable name for {val}: internal opcodes get an
// "Internal"-prefixed name; everything else defers to the regular wasm
// opcode table.
static const char* OpcodeName(uint32_t val) {
  switch (val) {
#define DECL_INTERNAL_CASE(name, value) \
  case kInternal##name:                 \
    return "Internal" #name;
    FOREACH_INTERNAL_OPCODE(DECL_INTERNAL_CASE)
#undef DECL_INTERNAL_CASE
  }
  return WasmOpcodes::OpcodeName(static_cast<WasmOpcode>(val));
}

// Number of interpreter steps executed per slice — presumably the budget
// before the run loop yields; confirm against the Run() implementation.
static const int kRunSteps = 1000;
659 
660 // A helper class to compute the control transfers for each bytecode offset.
661 // Control transfers allow Br, BrIf, BrTable, If, Else, and End bytecodes to
662 // be directly executed without the need to dynamically track blocks.
663 class ControlTransfers : public ZoneObject {
664  public:
665   ControlTransferMap map_;
666 
ControlTransfers(Zone * zone,size_t locals_encoded_size,const byte * start,const byte * end)667   ControlTransfers(Zone* zone, size_t locals_encoded_size, const byte* start,
668                    const byte* end)
669       : map_(zone) {
670     // A control reference including from PC, from value depth, and whether
671     // a value is explicitly passed (e.g. br/br_if/br_table with value).
672     struct CRef {
673       const byte* pc;
674       sp_t value_depth;
675       bool explicit_value;
676     };
677 
678     // Represents a control flow label.
679     struct CLabel : public ZoneObject {
680       const byte* target;
681       size_t value_depth;
682       ZoneVector<CRef> refs;
683 
684       CLabel(Zone* zone, size_t v)
685           : target(nullptr), value_depth(v), refs(zone) {}
686 
687       // Bind this label to the given PC.
688       void Bind(ControlTransferMap* map, const byte* start, const byte* pc,
689                 bool expect_value) {
690         DCHECK_NULL(target);
691         target = pc;
692         for (auto from : refs) {
693           auto pcdiff = static_cast<pcdiff_t>(target - from.pc);
694           auto spdiff = static_cast<spdiff_t>(from.value_depth - value_depth);
695           ControlTransfer::StackAction action = ControlTransfer::kNoAction;
696           if (expect_value && !from.explicit_value) {
697             action = spdiff == 0 ? ControlTransfer::kPushVoid
698                                  : ControlTransfer::kPopAndRepush;
699           }
700           pc_t offset = static_cast<size_t>(from.pc - start);
701           (*map)[offset] = {pcdiff, spdiff, action};
702         }
703       }
704 
705       // Reference this label from the given location.
706       void Ref(ControlTransferMap* map, const byte* start, CRef from) {
707         DCHECK_GE(from.value_depth, value_depth);
708         if (target) {
709           auto pcdiff = static_cast<pcdiff_t>(target - from.pc);
710           auto spdiff = static_cast<spdiff_t>(from.value_depth - value_depth);
711           pc_t offset = static_cast<size_t>(from.pc - start);
712           (*map)[offset] = {pcdiff, spdiff, ControlTransfer::kNoAction};
713         } else {
714           refs.push_back(from);
715         }
716       }
717     };
718 
719     // An entry in the control stack.
720     struct Control {
721       const byte* pc;
722       CLabel* end_label;
723       CLabel* else_label;
724 
725       void Ref(ControlTransferMap* map, const byte* start, const byte* from_pc,
726                size_t from_value_depth, bool explicit_value) {
727         end_label->Ref(map, start, {from_pc, from_value_depth, explicit_value});
728       }
729     };
730 
731     // Compute the ControlTransfer map.
732     // This works by maintaining a stack of control constructs similar to the
733     // AST decoder. The {control_stack} allows matching {br,br_if,br_table}
734     // bytecodes with their target, as well as determining whether the current
735     // bytecodes are within the true or false block of an else.
736     // The value stack depth is tracked as {value_depth} and is needed to
737     // determine how many values to pop off the stack for explicit and
738     // implicit control flow.
739 
740     std::vector<Control> control_stack;
741     size_t value_depth = 0;
742     Decoder decoder(start, end);  // for reading operands.
743     const byte* pc = start + locals_encoded_size;
744 
745     while (pc < end) {
746       WasmOpcode opcode = static_cast<WasmOpcode>(*pc);
747       TRACE("@%td: control %s (depth = %zu)\n", (pc - start),
748             WasmOpcodes::OpcodeName(opcode), value_depth);
749       switch (opcode) {
750         case kExprBlock: {
751           TRACE("control @%td $%zu: Block\n", (pc - start), value_depth);
752           CLabel* label = new (zone) CLabel(zone, value_depth);
753           control_stack.push_back({pc, label, nullptr});
754           break;
755         }
756         case kExprLoop: {
757           TRACE("control @%td $%zu: Loop\n", (pc - start), value_depth);
758           CLabel* label1 = new (zone) CLabel(zone, value_depth);
759           CLabel* label2 = new (zone) CLabel(zone, value_depth);
760           control_stack.push_back({pc, label1, nullptr});
761           control_stack.push_back({pc, label2, nullptr});
762           label2->Bind(&map_, start, pc, false);
763           break;
764         }
765         case kExprIf: {
766           TRACE("control @%td $%zu: If\n", (pc - start), value_depth);
767           value_depth--;
768           CLabel* end_label = new (zone) CLabel(zone, value_depth);
769           CLabel* else_label = new (zone) CLabel(zone, value_depth);
770           control_stack.push_back({pc, end_label, else_label});
771           else_label->Ref(&map_, start, {pc, value_depth, false});
772           break;
773         }
774         case kExprElse: {
775           Control* c = &control_stack.back();
776           TRACE("control @%td $%zu: Else\n", (pc - start), value_depth);
777           c->end_label->Ref(&map_, start, {pc, value_depth, false});
778           value_depth = c->end_label->value_depth;
779           DCHECK_NOT_NULL(c->else_label);
780           c->else_label->Bind(&map_, start, pc + 1, false);
781           c->else_label = nullptr;
782           break;
783         }
784         case kExprEnd: {
785           Control* c = &control_stack.back();
786           TRACE("control @%td $%zu: End\n", (pc - start), value_depth);
787           if (c->end_label->target) {
788             // only loops have bound labels.
789             DCHECK_EQ(kExprLoop, *c->pc);
790             control_stack.pop_back();
791             c = &control_stack.back();
792           }
793           if (c->else_label) c->else_label->Bind(&map_, start, pc + 1, true);
794           c->end_label->Ref(&map_, start, {pc, value_depth, false});
795           c->end_label->Bind(&map_, start, pc + 1, true);
796           value_depth = c->end_label->value_depth + 1;
797           control_stack.pop_back();
798           break;
799         }
800         case kExprBr: {
801           BreakDepthOperand operand(&decoder, pc);
802           TRACE("control @%td $%zu: Br[arity=%u, depth=%u]\n", (pc - start),
803                 value_depth, operand.arity, operand.depth);
804           value_depth -= operand.arity;
805           control_stack[control_stack.size() - operand.depth - 1].Ref(
806               &map_, start, pc, value_depth, operand.arity > 0);
807           value_depth++;
808           break;
809         }
810         case kExprBrIf: {
811           BreakDepthOperand operand(&decoder, pc);
812           TRACE("control @%td $%zu: BrIf[arity=%u, depth=%u]\n", (pc - start),
813                 value_depth, operand.arity, operand.depth);
814           value_depth -= (operand.arity + 1);
815           control_stack[control_stack.size() - operand.depth - 1].Ref(
816               &map_, start, pc, value_depth, operand.arity > 0);
817           value_depth++;
818           break;
819         }
820         case kExprBrTable: {
821           BranchTableOperand operand(&decoder, pc);
822           TRACE("control @%td $%zu: BrTable[arity=%u count=%u]\n", (pc - start),
823                 value_depth, operand.arity, operand.table_count);
824           value_depth -= (operand.arity + 1);
825           for (uint32_t i = 0; i < operand.table_count + 1; ++i) {
826             uint32_t target = operand.read_entry(&decoder, i);
827             control_stack[control_stack.size() - target - 1].Ref(
828                 &map_, start, pc + i, value_depth, operand.arity > 0);
829           }
830           value_depth++;
831           break;
832         }
833         default: {
834           value_depth = value_depth - OpcodeArity(pc, end) + 1;
835           break;
836         }
837       }
838 
839       pc += OpcodeLength(pc, end);
840     }
841   }
842 
Lookup(pc_t from)843   ControlTransfer Lookup(pc_t from) {
844     auto result = map_.find(from);
845     if (result == map_.end()) {
846       V8_Fatal(__FILE__, __LINE__, "no control target for pc %zu", from);
847     }
848     return result->second;
849   }
850 };
851 
// Code and metadata needed to execute a function.
struct InterpreterCode {
  const WasmFunction* function;  // wasm function
  AstLocalDecls locals;          // local declarations
  const byte* orig_start;        // start of original code
  const byte* orig_end;          // end of original code
  byte* start;                   // start of (maybe altered) code
  byte* end;                     // end of (maybe altered) code
  ControlTransfers* targets;     // helper for control flow.

  // Returns the address of the instruction at offset {pc} within the
  // (possibly altered, e.g. breakpoint-patched) code.
  const byte* at(pc_t pc) { return start + pc; }
};
864 
865 // The main storage for interpreter code. It maps {WasmFunction} to the
866 // metadata needed to execute each function.
867 class CodeMap {
868  public:
869   Zone* zone_;
870   const WasmModule* module_;
871   ZoneVector<InterpreterCode> interpreter_code_;
872 
CodeMap(const WasmModule * module,Zone * zone)873   CodeMap(const WasmModule* module, Zone* zone)
874       : zone_(zone), module_(module), interpreter_code_(zone) {
875     if (module == nullptr) return;
876     for (size_t i = 0; i < module->functions.size(); ++i) {
877       const WasmFunction* function = &module->functions[i];
878       const byte* code_start =
879           module->module_start + function->code_start_offset;
880       const byte* code_end = module->module_start + function->code_end_offset;
881       AddFunction(function, code_start, code_end);
882     }
883   }
884 
FindCode(const WasmFunction * function)885   InterpreterCode* FindCode(const WasmFunction* function) {
886     if (function->func_index < interpreter_code_.size()) {
887       InterpreterCode* code = &interpreter_code_[function->func_index];
888       DCHECK_EQ(function, code->function);
889       return code;
890     }
891     return nullptr;
892   }
893 
GetCode(uint32_t function_index)894   InterpreterCode* GetCode(uint32_t function_index) {
895     CHECK_LT(function_index, interpreter_code_.size());
896     return Preprocess(&interpreter_code_[function_index]);
897   }
898 
GetIndirectCode(uint32_t indirect_index)899   InterpreterCode* GetIndirectCode(uint32_t indirect_index) {
900     if (indirect_index >= module_->function_table.size()) return nullptr;
901     uint32_t index = module_->function_table[indirect_index];
902     if (index >= interpreter_code_.size()) return nullptr;
903     return GetCode(index);
904   }
905 
Preprocess(InterpreterCode * code)906   InterpreterCode* Preprocess(InterpreterCode* code) {
907     if (code->targets == nullptr && code->start) {
908       // Compute the control targets map and the local declarations.
909       CHECK(DecodeLocalDecls(code->locals, code->start, code->end));
910       code->targets =
911           new (zone_) ControlTransfers(zone_, code->locals.decls_encoded_size,
912                                        code->orig_start, code->orig_end);
913     }
914     return code;
915   }
916 
AddFunction(const WasmFunction * function,const byte * code_start,const byte * code_end)917   int AddFunction(const WasmFunction* function, const byte* code_start,
918                   const byte* code_end) {
919     InterpreterCode code = {
920         function, AstLocalDecls(zone_),          code_start,
921         code_end, const_cast<byte*>(code_start), const_cast<byte*>(code_end),
922         nullptr};
923 
924     DCHECK_EQ(interpreter_code_.size(), function->func_index);
925     interpreter_code_.push_back(code);
926     return static_cast<int>(interpreter_code_.size()) - 1;
927   }
928 
SetFunctionCode(const WasmFunction * function,const byte * start,const byte * end)929   bool SetFunctionCode(const WasmFunction* function, const byte* start,
930                        const byte* end) {
931     InterpreterCode* code = FindCode(function);
932     if (code == nullptr) return false;
933     code->targets = nullptr;
934     code->orig_start = start;
935     code->orig_end = end;
936     code->start = const_cast<byte*>(start);
937     code->end = const_cast<byte*>(end);
938     Preprocess(code);
939     return true;
940   }
941 };
942 
943 // Responsible for executing code directly.
944 class ThreadImpl : public WasmInterpreter::Thread {
945  public:
  // A thread starts out STOPPED, with no pending breakpoint and no trap
  // ({kTrapCount} serves as the "no trap" value, mirroring Reset()).
  // {zone} backs the value stack and the frame stack.
  ThreadImpl(Zone* zone, CodeMap* codemap, WasmModuleInstance* instance)
      : codemap_(codemap),
        instance_(instance),
        stack_(zone),
        frames_(zone),
        state_(WasmInterpreter::STOPPED),
        break_pc_(kInvalidPc),
        trap_reason_(kTrapCount) {}
954 
  virtual ~ThreadImpl() {}

  //==========================================================================
  // Implementation of public interface for WasmInterpreter::Thread.
  //==========================================================================

  // Returns the current execution state of this thread.
  virtual WasmInterpreter::State state() { return state_; }
962 
  // Pushes a new activation for {function}, copying {args} onto the value
  // stack and pushing zero-initialized locals. {args} must hold one value
  // per declared parameter of {function}.
  virtual void PushFrame(const WasmFunction* function, WasmVal* args) {
    InterpreterCode* code = codemap()->FindCode(function);
    CHECK_NOT_NULL(code);
    frames_.push_back({code, 0, 0, stack_.size()});
    for (size_t i = 0; i < function->sig->parameter_count(); ++i) {
      stack_.push_back(args[i]);
    }
    // Execution starts just past the encoded local declarations.
    frames_.back().ret_pc = InitLocals(code);
    TRACE("  => PushFrame(#%u @%zu)\n", code->function->func_index,
          frames_.back().ret_pc);
  }
974 
Run()975   virtual WasmInterpreter::State Run() {
976     do {
977       TRACE("  => Run()\n");
978       if (state_ == WasmInterpreter::STOPPED ||
979           state_ == WasmInterpreter::PAUSED) {
980         state_ = WasmInterpreter::RUNNING;
981         Execute(frames_.back().code, frames_.back().ret_pc, kRunSteps);
982       }
983     } while (state_ == WasmInterpreter::STOPPED);
984     return state_;
985   }
986 
Step()987   virtual WasmInterpreter::State Step() {
988     TRACE("  => Step()\n");
989     if (state_ == WasmInterpreter::STOPPED ||
990         state_ == WasmInterpreter::PAUSED) {
991       state_ = WasmInterpreter::RUNNING;
992       Execute(frames_.back().code, frames_.back().ret_pc, 1);
993     }
994     return state_;
995   }
996 
  virtual void Pause() { UNIMPLEMENTED(); }

  // Discards all frames and stack values and returns the thread to the
  // STOPPED state with no pending trap.
  virtual void Reset() {
    TRACE("----- RESET -----\n");
    stack_.clear();
    frames_.clear();
    state_ = WasmInterpreter::STOPPED;
    trap_reason_ = kTrapCount;  // kTrapCount == no pending trap.
  }

  virtual int GetFrameCount() { return static_cast<int>(frames_.size()); }
1008 
  virtual const WasmFrame* GetFrame(int index) {
    UNIMPLEMENTED();
    return nullptr;
  }

  virtual WasmFrame* GetMutableFrame(int index) {
    UNIMPLEMENTED();
    return nullptr;
  }

  // Returns the single value left on the stack after successful completion.
  // If the thread trapped, a dummy value (0xdeadbeef) is returned instead.
  virtual WasmVal GetReturnValue() {
    if (state_ == WasmInterpreter::TRAPPED) return WasmVal(0xdeadbeef);
    CHECK_EQ(WasmInterpreter::FINISHED, state_);
    CHECK_EQ(1, stack_.size());
    return stack_[0];
  }

  // Returns the pc of the breakpoint that paused execution, if any.
  virtual pc_t GetBreakpointPc() { return break_pc_; }

  // True once execution cannot proceed further (trap or normal finish).
  bool Terminated() {
    return state_ == WasmInterpreter::TRAPPED ||
           state_ == WasmInterpreter::FINISHED;
  }
1032 
1033  private:
  // Entries on the stack of functions being evaluated.
  struct Frame {
    InterpreterCode* code;  // code of the function in this activation.
    pc_t call_pc;           // pc of the call instruction in the caller.
    pc_t ret_pc;            // pc at which to (re)start execution.
    sp_t sp;                // base of this frame on the value stack.

    // Limit of parameters.
    sp_t plimit() { return sp + code->function->sig->parameter_count(); }
    // Limit of locals.
    sp_t llimit() { return plimit() + code->locals.total_local_count; }
  };
1046 
  CodeMap* codemap_;              // lookup of interpreter code per function.
  WasmModuleInstance* instance_;  // instance (globals and linear memory).
  ZoneVector<WasmVal> stack_;     // value stack, shared by all frames.
  ZoneVector<Frame> frames_;      // stack of function activations.
  WasmInterpreter::State state_;  // current execution state.
  pc_t break_pc_;                 // breakpoint pc to skip, or kInvalidPc.
  TrapReason trap_reason_;        // valid only when state_ is TRAPPED.

  CodeMap* codemap() { return codemap_; }
  WasmModuleInstance* instance() { return instance_; }
  const WasmModule* module() { return instance_->module; }
1058 
  // Transitions the thread into the TRAPPED state, recording the trap
  // reason and committing the pc at which the trap occurred.
  void DoTrap(TrapReason trap, pc_t pc) {
    state_ = WasmInterpreter::TRAPPED;
    trap_reason_ = trap;
    CommitPc(pc);
  }
1064 
  // Push a frame with arguments already on the stack.
  void PushFrame(InterpreterCode* code, pc_t call_pc, pc_t ret_pc) {
    CHECK_NOT_NULL(code);
    DCHECK(!frames_.empty());
    // Remember where the caller resumes once this call returns.
    frames_.back().call_pc = call_pc;
    frames_.back().ret_pc = ret_pc;
    size_t arity = code->function->sig->parameter_count();
    DCHECK_GE(stack_.size(), arity);
    // The parameters will overlap the arguments already on the stack.
    frames_.push_back({code, 0, 0, stack_.size() - arity});
    frames_.back().ret_pc = InitLocals(code);
    TRACE("  => push func#%u @%zu\n", code->function->func_index,
          frames_.back().ret_pc);
  }
1079 
  // Pushes a zero value of the appropriate type for every declared local
  // of {code} and returns the pc just past the local declarations.
  pc_t InitLocals(InterpreterCode* code) {
    // Each entry {p} describes a run of locals: (type, count).
    for (auto p : code->locals.local_types) {
      WasmVal val;
      switch (p.first) {
        case kAstI32:
          val = WasmVal(static_cast<int32_t>(0));
          break;
        case kAstI64:
          val = WasmVal(static_cast<int64_t>(0));
          break;
        case kAstF32:
          val = WasmVal(static_cast<float>(0));
          break;
        case kAstF64:
          val = WasmVal(static_cast<double>(0));
          break;
        default:
          UNREACHABLE();
          break;
      }
      stack_.insert(stack_.end(), p.second, val);
    }
    return code->locals.decls_encoded_size;
  }
1104 
CommitPc(pc_t pc)1105   void CommitPc(pc_t pc) {
1106     if (!frames_.empty()) {
1107       frames_.back().ret_pc = pc;
1108     }
1109   }
1110 
SkipBreakpoint(InterpreterCode * code,pc_t pc)1111   bool SkipBreakpoint(InterpreterCode* code, pc_t pc) {
1112     if (pc == break_pc_) {
1113       break_pc_ = kInvalidPc;
1114       return true;
1115     }
1116     return false;
1117   }
1118 
  // Pops the current frame, pushing return value {val}. Returns false when
  // the last frame was popped (execution finished); otherwise resumes the
  // caller, updating {code}, {pc} and {limit} to the caller's code.
  bool DoReturn(InterpreterCode** code, pc_t* pc, pc_t* limit, WasmVal val) {
    DCHECK_GT(frames_.size(), 0u);
    // Discard all stack values belonging to the returning frame.
    stack_.resize(frames_.back().sp);
    frames_.pop_back();
    if (frames_.size() == 0) {
      // A return from the top frame terminates the execution.
      state_ = WasmInterpreter::FINISHED;
      stack_.clear();
      stack_.push_back(val);
      TRACE("  => finish\n");
      return false;
    } else {
      // Return to caller frame.
      Frame* top = &frames_.back();
      *code = top->code;
      *pc = top->ret_pc;
      *limit = top->code->end - top->code->start;
      if (top->code->start[top->call_pc] == kExprCallIndirect ||
          (top->code->orig_start &&
           top->code->orig_start[top->call_pc] == kExprCallIndirect)) {
        // UGLY: An indirect call has the additional function index on the
        // stack.
        stack_.pop_back();
      }
      TRACE("  => pop func#%u @%zu\n", (*code)->function->func_index, *pc);

      stack_.push_back(val);
      return true;
    }
  }
1149 
  // Enters {target}: pushes a new frame (with the caller resuming at
  // {ret_pc}) and redirects {pc} and {limit} to the callee's code.
  void DoCall(InterpreterCode* target, pc_t* pc, pc_t ret_pc, pc_t* limit) {
    PushFrame(target, *pc, ret_pc);
    *pc = frames_.back().ret_pc;
    *limit = target->end - target->start;
  }
1155 
  // Adjust the program counter {pc} and the stack contents according to the
  // code's precomputed control transfer map. Returns the difference between
  // the new pc and the old pc.
  int DoControlTransfer(InterpreterCode* code, pc_t pc) {
    auto target = code->targets->Lookup(pc);
    switch (target.action) {
      case ControlTransfer::kNoAction:
        // Just discard {spdiff} values from the stack.
        TRACE("  action [sp-%u]\n", target.spdiff);
        PopN(target.spdiff);
        break;
      case ControlTransfer::kPopAndRepush: {
        // Preserve the top-of-stack value across the stack adjustment.
        WasmVal val = Pop();
        TRACE("  action [pop x, sp-%u, push x]\n", target.spdiff - 1);
        DCHECK_GE(target.spdiff, 1u);
        PopN(target.spdiff - 1);
        Push(pc, val);
        break;
      }
      case ControlTransfer::kPushVoid:
        // Discard {spdiff} values, then push a void placeholder value.
        TRACE("  action [sp-%u, push void]\n", target.spdiff);
        PopN(target.spdiff);
        Push(pc, WasmVal());
        break;
    }
    return target.pcdiff;
  }
1182 
Execute(InterpreterCode * code,pc_t pc,int max)1183   void Execute(InterpreterCode* code, pc_t pc, int max) {
1184     Decoder decoder(code->start, code->end);
1185     pc_t limit = code->end - code->start;
1186     while (true) {
1187       if (max-- <= 0) {
1188         // Maximum number of instructions reached.
1189         state_ = WasmInterpreter::PAUSED;
1190         return CommitPc(pc);
1191       }
1192 
1193       if (pc >= limit) {
1194         // Fell off end of code; do an implicit return.
1195         TRACE("@%-3zu: ImplicitReturn\n", pc);
1196         WasmVal val = PopArity(code->function->sig->return_count());
1197         if (!DoReturn(&code, &pc, &limit, val)) return;
1198         decoder.Reset(code->start, code->end);
1199         continue;
1200       }
1201 
1202       const char* skip = "        ";
1203       int len = 1;
1204       byte opcode = code->start[pc];
1205       byte orig = opcode;
1206       if (opcode == kInternalBreakpoint) {
1207         orig = code->orig_start[pc];
1208         if (SkipBreakpoint(code, pc)) {
1209           // skip breakpoint by switching on original code.
1210           skip = "[skip]  ";
1211         } else {
1212           state_ = WasmInterpreter::PAUSED;
1213           TRACE("@%-3zu: [break] %-24s:", pc,
1214                 WasmOpcodes::OpcodeName(static_cast<WasmOpcode>(orig)));
1215           TraceValueStack();
1216           TRACE("\n");
1217           break_pc_ = pc;
1218           return CommitPc(pc);
1219         }
1220       }
1221 
1222       USE(skip);
1223       TRACE("@%-3zu: %s%-24s:", pc, skip,
1224             WasmOpcodes::OpcodeName(static_cast<WasmOpcode>(orig)));
1225       TraceValueStack();
1226       TRACE("\n");
1227 
1228       switch (orig) {
1229         case kExprNop:
1230           Push(pc, WasmVal());
1231           break;
1232         case kExprBlock:
1233         case kExprLoop: {
1234           // Do nothing.
1235           break;
1236         }
1237         case kExprIf: {
1238           WasmVal cond = Pop();
1239           bool is_true = cond.to<uint32_t>() != 0;
1240           if (is_true) {
1241             // fall through to the true block.
1242             TRACE("  true => fallthrough\n");
1243           } else {
1244             len = DoControlTransfer(code, pc);
1245             TRACE("  false => @%zu\n", pc + len);
1246           }
1247           break;
1248         }
1249         case kExprElse: {
1250           len = DoControlTransfer(code, pc);
1251           TRACE("  end => @%zu\n", pc + len);
1252           break;
1253         }
1254         case kExprSelect: {
1255           WasmVal cond = Pop();
1256           WasmVal fval = Pop();
1257           WasmVal tval = Pop();
1258           Push(pc, cond.to<int32_t>() != 0 ? tval : fval);
1259           break;
1260         }
1261         case kExprBr: {
1262           BreakDepthOperand operand(&decoder, code->at(pc));
1263           WasmVal val = PopArity(operand.arity);
1264           len = DoControlTransfer(code, pc);
1265           TRACE("  br => @%zu\n", pc + len);
1266           if (operand.arity > 0) Push(pc, val);
1267           break;
1268         }
1269         case kExprBrIf: {
1270           BreakDepthOperand operand(&decoder, code->at(pc));
1271           WasmVal cond = Pop();
1272           WasmVal val = PopArity(operand.arity);
1273           bool is_true = cond.to<uint32_t>() != 0;
1274           if (is_true) {
1275             len = DoControlTransfer(code, pc);
1276             TRACE("  br_if => @%zu\n", pc + len);
1277             if (operand.arity > 0) Push(pc, val);
1278           } else {
1279             TRACE("  false => fallthrough\n");
1280             len = 1 + operand.length;
1281             Push(pc, WasmVal());
1282           }
1283           break;
1284         }
1285         case kExprBrTable: {
1286           BranchTableOperand operand(&decoder, code->at(pc));
1287           uint32_t key = Pop().to<uint32_t>();
1288           WasmVal val = PopArity(operand.arity);
1289           if (key >= operand.table_count) key = operand.table_count;
1290           len = DoControlTransfer(code, pc + key) + key;
1291           TRACE("  br[%u] => @%zu\n", key, pc + len);
1292           if (operand.arity > 0) Push(pc, val);
1293           break;
1294         }
1295         case kExprReturn: {
1296           ReturnArityOperand operand(&decoder, code->at(pc));
1297           WasmVal val = PopArity(operand.arity);
1298           if (!DoReturn(&code, &pc, &limit, val)) return;
1299           decoder.Reset(code->start, code->end);
1300           continue;
1301         }
1302         case kExprUnreachable: {
1303           DoTrap(kTrapUnreachable, pc);
1304           return CommitPc(pc);
1305         }
1306         case kExprEnd: {
1307           len = DoControlTransfer(code, pc);
1308           DCHECK_EQ(1, len);
1309           break;
1310         }
1311         case kExprI8Const: {
1312           ImmI8Operand operand(&decoder, code->at(pc));
1313           Push(pc, WasmVal(operand.value));
1314           len = 1 + operand.length;
1315           break;
1316         }
1317         case kExprI32Const: {
1318           ImmI32Operand operand(&decoder, code->at(pc));
1319           Push(pc, WasmVal(operand.value));
1320           len = 1 + operand.length;
1321           break;
1322         }
1323         case kExprI64Const: {
1324           ImmI64Operand operand(&decoder, code->at(pc));
1325           Push(pc, WasmVal(operand.value));
1326           len = 1 + operand.length;
1327           break;
1328         }
1329         case kExprF32Const: {
1330           ImmF32Operand operand(&decoder, code->at(pc));
1331           Push(pc, WasmVal(operand.value));
1332           len = 1 + operand.length;
1333           break;
1334         }
1335         case kExprF64Const: {
1336           ImmF64Operand operand(&decoder, code->at(pc));
1337           Push(pc, WasmVal(operand.value));
1338           len = 1 + operand.length;
1339           break;
1340         }
1341         case kExprGetLocal: {
1342           LocalIndexOperand operand(&decoder, code->at(pc));
1343           Push(pc, stack_[frames_.back().sp + operand.index]);
1344           len = 1 + operand.length;
1345           break;
1346         }
1347         case kExprSetLocal: {
1348           LocalIndexOperand operand(&decoder, code->at(pc));
1349           WasmVal val = Pop();
1350           stack_[frames_.back().sp + operand.index] = val;
1351           Push(pc, val);
1352           len = 1 + operand.length;
1353           break;
1354         }
1355         case kExprCallFunction: {
1356           CallFunctionOperand operand(&decoder, code->at(pc));
1357           InterpreterCode* target = codemap()->GetCode(operand.index);
1358           DoCall(target, &pc, pc + 1 + operand.length, &limit);
1359           code = target;
1360           decoder.Reset(code->start, code->end);
1361           continue;
1362         }
1363         case kExprCallIndirect: {
1364           CallIndirectOperand operand(&decoder, code->at(pc));
1365           size_t index = stack_.size() - operand.arity - 1;
1366           DCHECK_LT(index, stack_.size());
1367           uint32_t table_index = stack_[index].to<uint32_t>();
1368           if (table_index >= module()->function_table.size()) {
1369             return DoTrap(kTrapFuncInvalid, pc);
1370           }
1371           uint16_t function_index = module()->function_table[table_index];
1372           InterpreterCode* target = codemap()->GetCode(function_index);
1373           DCHECK(target);
1374           if (target->function->sig_index != operand.index) {
1375             return DoTrap(kTrapFuncSigMismatch, pc);
1376           }
1377 
1378           DoCall(target, &pc, pc + 1 + operand.length, &limit);
1379           code = target;
1380           decoder.Reset(code->start, code->end);
1381           continue;
1382         }
1383         case kExprCallImport: {
1384           UNIMPLEMENTED();
1385           break;
1386         }
1387         case kExprLoadGlobal: {
1388           GlobalIndexOperand operand(&decoder, code->at(pc));
1389           const WasmGlobal* global = &module()->globals[operand.index];
1390           byte* ptr = instance()->globals_start + global->offset;
1391           MachineType type = global->type;
1392           WasmVal val;
1393           if (type == MachineType::Int8()) {
1394             val =
1395                 WasmVal(static_cast<int32_t>(*reinterpret_cast<int8_t*>(ptr)));
1396           } else if (type == MachineType::Uint8()) {
1397             val =
1398                 WasmVal(static_cast<int32_t>(*reinterpret_cast<uint8_t*>(ptr)));
1399           } else if (type == MachineType::Int16()) {
1400             val =
1401                 WasmVal(static_cast<int32_t>(*reinterpret_cast<int16_t*>(ptr)));
1402           } else if (type == MachineType::Uint16()) {
1403             val = WasmVal(
1404                 static_cast<int32_t>(*reinterpret_cast<uint16_t*>(ptr)));
1405           } else if (type == MachineType::Int32()) {
1406             val = WasmVal(*reinterpret_cast<int32_t*>(ptr));
1407           } else if (type == MachineType::Uint32()) {
1408             val = WasmVal(*reinterpret_cast<uint32_t*>(ptr));
1409           } else if (type == MachineType::Int64()) {
1410             val = WasmVal(*reinterpret_cast<int64_t*>(ptr));
1411           } else if (type == MachineType::Uint64()) {
1412             val = WasmVal(*reinterpret_cast<uint64_t*>(ptr));
1413           } else if (type == MachineType::Float32()) {
1414             val = WasmVal(*reinterpret_cast<float*>(ptr));
1415           } else if (type == MachineType::Float64()) {
1416             val = WasmVal(*reinterpret_cast<double*>(ptr));
1417           } else {
1418             UNREACHABLE();
1419           }
1420           Push(pc, val);
1421           len = 1 + operand.length;
1422           break;
1423         }
1424         case kExprStoreGlobal: {
1425           GlobalIndexOperand operand(&decoder, code->at(pc));
1426           const WasmGlobal* global = &module()->globals[operand.index];
1427           byte* ptr = instance()->globals_start + global->offset;
1428           MachineType type = global->type;
1429           WasmVal val = Pop();
1430           if (type == MachineType::Int8()) {
1431             *reinterpret_cast<int8_t*>(ptr) =
1432                 static_cast<int8_t>(val.to<int32_t>());
1433           } else if (type == MachineType::Uint8()) {
1434             *reinterpret_cast<uint8_t*>(ptr) =
1435                 static_cast<uint8_t>(val.to<uint32_t>());
1436           } else if (type == MachineType::Int16()) {
1437             *reinterpret_cast<int16_t*>(ptr) =
1438                 static_cast<int16_t>(val.to<int32_t>());
1439           } else if (type == MachineType::Uint16()) {
1440             *reinterpret_cast<uint16_t*>(ptr) =
1441                 static_cast<uint16_t>(val.to<uint32_t>());
1442           } else if (type == MachineType::Int32()) {
1443             *reinterpret_cast<int32_t*>(ptr) = val.to<int32_t>();
1444           } else if (type == MachineType::Uint32()) {
1445             *reinterpret_cast<uint32_t*>(ptr) = val.to<uint32_t>();
1446           } else if (type == MachineType::Int64()) {
1447             *reinterpret_cast<int64_t*>(ptr) = val.to<int64_t>();
1448           } else if (type == MachineType::Uint64()) {
1449             *reinterpret_cast<uint64_t*>(ptr) = val.to<uint64_t>();
1450           } else if (type == MachineType::Float32()) {
1451             *reinterpret_cast<float*>(ptr) = val.to<float>();
1452           } else if (type == MachineType::Float64()) {
1453             *reinterpret_cast<double*>(ptr) = val.to<double>();
1454           } else {
1455             UNREACHABLE();
1456           }
1457           Push(pc, val);
1458           len = 1 + operand.length;
1459           break;
1460         }
1461 
1462 #define LOAD_CASE(name, ctype, mtype)                                    \
1463   case kExpr##name: {                                                    \
1464     MemoryAccessOperand operand(&decoder, code->at(pc));                 \
1465     uint32_t index = Pop().to<uint32_t>();                               \
1466     size_t effective_mem_size = instance()->mem_size - sizeof(mtype);    \
1467     if (operand.offset > effective_mem_size ||                           \
1468         index > (effective_mem_size - operand.offset)) {                 \
1469       return DoTrap(kTrapMemOutOfBounds, pc);                            \
1470     }                                                                    \
1471     byte* addr = instance()->mem_start + operand.offset + index;         \
1472     WasmVal result(static_cast<ctype>(ReadUnalignedValue<mtype>(addr))); \
1473     Push(pc, result);                                                    \
1474     len = 1 + operand.length;                                            \
1475     break;                                                               \
1476   }
1477 
1478           LOAD_CASE(I32LoadMem8S, int32_t, int8_t);
1479           LOAD_CASE(I32LoadMem8U, int32_t, uint8_t);
1480           LOAD_CASE(I32LoadMem16S, int32_t, int16_t);
1481           LOAD_CASE(I32LoadMem16U, int32_t, uint16_t);
1482           LOAD_CASE(I64LoadMem8S, int64_t, int8_t);
1483           LOAD_CASE(I64LoadMem8U, int64_t, uint8_t);
1484           LOAD_CASE(I64LoadMem16S, int64_t, int16_t);
1485           LOAD_CASE(I64LoadMem16U, int64_t, uint16_t);
1486           LOAD_CASE(I64LoadMem32S, int64_t, int32_t);
1487           LOAD_CASE(I64LoadMem32U, int64_t, uint32_t);
1488           LOAD_CASE(I32LoadMem, int32_t, int32_t);
1489           LOAD_CASE(I64LoadMem, int64_t, int64_t);
1490           LOAD_CASE(F32LoadMem, float, float);
1491           LOAD_CASE(F64LoadMem, double, double);
1492 #undef LOAD_CASE
1493 
1494 #define STORE_CASE(name, ctype, mtype)                                     \
1495   case kExpr##name: {                                                      \
1496     MemoryAccessOperand operand(&decoder, code->at(pc));                   \
1497     WasmVal val = Pop();                                                   \
1498     uint32_t index = Pop().to<uint32_t>();                                 \
1499     size_t effective_mem_size = instance()->mem_size - sizeof(mtype);      \
1500     if (operand.offset > effective_mem_size ||                             \
1501         index > (effective_mem_size - operand.offset)) {                   \
1502       return DoTrap(kTrapMemOutOfBounds, pc);                              \
1503     }                                                                      \
1504     byte* addr = instance()->mem_start + operand.offset + index;           \
1505     WriteUnalignedValue<mtype>(addr, static_cast<mtype>(val.to<ctype>())); \
1506     Push(pc, val);                                                         \
1507     len = 1 + operand.length;                                              \
1508     break;                                                                 \
1509   }
1510 
1511           STORE_CASE(I32StoreMem8, int32_t, int8_t);
1512           STORE_CASE(I32StoreMem16, int32_t, int16_t);
1513           STORE_CASE(I64StoreMem8, int64_t, int8_t);
1514           STORE_CASE(I64StoreMem16, int64_t, int16_t);
1515           STORE_CASE(I64StoreMem32, int64_t, int32_t);
1516           STORE_CASE(I32StoreMem, int32_t, int32_t);
1517           STORE_CASE(I64StoreMem, int64_t, int64_t);
1518           STORE_CASE(F32StoreMem, float, float);
1519           STORE_CASE(F64StoreMem, double, double);
1520 #undef STORE_CASE
1521 
1522 #define ASMJS_LOAD_CASE(name, ctype, mtype, defval)                 \
1523   case kExpr##name: {                                               \
1524     uint32_t index = Pop().to<uint32_t>();                          \
1525     ctype result;                                                   \
1526     if (index >= (instance()->mem_size - sizeof(mtype))) {          \
1527       result = defval;                                              \
1528     } else {                                                        \
1529       byte* addr = instance()->mem_start + index;                   \
1530       /* TODO(titzer): alignment for asmjs load mem? */             \
1531       result = static_cast<ctype>(*reinterpret_cast<mtype*>(addr)); \
1532     }                                                               \
1533     Push(pc, WasmVal(result));                                      \
1534     break;                                                          \
1535   }
1536           ASMJS_LOAD_CASE(I32AsmjsLoadMem8S, int32_t, int8_t, 0);
1537           ASMJS_LOAD_CASE(I32AsmjsLoadMem8U, int32_t, uint8_t, 0);
1538           ASMJS_LOAD_CASE(I32AsmjsLoadMem16S, int32_t, int16_t, 0);
1539           ASMJS_LOAD_CASE(I32AsmjsLoadMem16U, int32_t, uint16_t, 0);
1540           ASMJS_LOAD_CASE(I32AsmjsLoadMem, int32_t, int32_t, 0);
1541           ASMJS_LOAD_CASE(F32AsmjsLoadMem, float, float,
1542                           std::numeric_limits<float>::quiet_NaN());
1543           ASMJS_LOAD_CASE(F64AsmjsLoadMem, double, double,
1544                           std::numeric_limits<double>::quiet_NaN());
1545 #undef ASMJS_LOAD_CASE
1546 
1547 #define ASMJS_STORE_CASE(name, ctype, mtype)                                   \
1548   case kExpr##name: {                                                          \
1549     WasmVal val = Pop();                                                       \
1550     uint32_t index = Pop().to<uint32_t>();                                     \
1551     if (index < (instance()->mem_size - sizeof(mtype))) {                      \
1552       byte* addr = instance()->mem_start + index;                              \
1553       /* TODO(titzer): alignment for asmjs store mem? */                       \
1554       *(reinterpret_cast<mtype*>(addr)) = static_cast<mtype>(val.to<ctype>()); \
1555     }                                                                          \
1556     Push(pc, val);                                                             \
1557     break;                                                                     \
1558   }
1559 
1560           ASMJS_STORE_CASE(I32AsmjsStoreMem8, int32_t, int8_t);
1561           ASMJS_STORE_CASE(I32AsmjsStoreMem16, int32_t, int16_t);
1562           ASMJS_STORE_CASE(I32AsmjsStoreMem, int32_t, int32_t);
1563           ASMJS_STORE_CASE(F32AsmjsStoreMem, float, float);
1564           ASMJS_STORE_CASE(F64AsmjsStoreMem, double, double);
1565 #undef ASMJS_STORE_CASE
1566 
1567         case kExprMemorySize: {
1568           Push(pc, WasmVal(static_cast<uint32_t>(instance()->mem_size)));
1569           break;
1570         }
1571 #define EXECUTE_SIMPLE_BINOP(name, ctype, op)             \
1572   case kExpr##name: {                                     \
1573     WasmVal rval = Pop();                                 \
1574     WasmVal lval = Pop();                                 \
1575     WasmVal result(lval.to<ctype>() op rval.to<ctype>()); \
1576     Push(pc, result);                                     \
1577     break;                                                \
1578   }
1579           FOREACH_SIMPLE_BINOP(EXECUTE_SIMPLE_BINOP)
1580 #undef EXECUTE_SIMPLE_BINOP
1581 
1582 #define EXECUTE_OTHER_BINOP(name, ctype)              \
1583   case kExpr##name: {                                 \
1584     TrapReason trap = kTrapCount;                     \
1585     volatile ctype rval = Pop().to<ctype>();          \
1586     volatile ctype lval = Pop().to<ctype>();          \
1587     WasmVal result(Execute##name(lval, rval, &trap)); \
1588     if (trap != kTrapCount) return DoTrap(trap, pc);  \
1589     Push(pc, result);                                 \
1590     break;                                            \
1591   }
1592           FOREACH_OTHER_BINOP(EXECUTE_OTHER_BINOP)
1593 #undef EXECUTE_OTHER_BINOP
1594 
1595 #define EXECUTE_OTHER_UNOP(name, ctype)              \
1596   case kExpr##name: {                                \
1597     TrapReason trap = kTrapCount;                    \
1598     volatile ctype val = Pop().to<ctype>();          \
1599     WasmVal result(Execute##name(val, &trap));       \
1600     if (trap != kTrapCount) return DoTrap(trap, pc); \
1601     Push(pc, result);                                \
1602     break;                                           \
1603   }
1604           FOREACH_OTHER_UNOP(EXECUTE_OTHER_UNOP)
1605 #undef EXECUTE_OTHER_UNOP
1606 
1607         default:
1608           V8_Fatal(__FILE__, __LINE__, "Unknown or unimplemented opcode #%d:%s",
1609                    code->start[pc], OpcodeName(code->start[pc]));
1610           UNREACHABLE();
1611       }
1612 
1613       pc += len;
1614     }
1615     UNREACHABLE();  // above decoding loop should run forever.
1616   }
1617 
Pop()1618   WasmVal Pop() {
1619     DCHECK_GT(stack_.size(), 0u);
1620     DCHECK_GT(frames_.size(), 0u);
1621     DCHECK_GT(stack_.size(), frames_.back().llimit());  // can't pop into locals
1622     WasmVal val = stack_.back();
1623     stack_.pop_back();
1624     return val;
1625   }
1626 
PopN(int n)1627   void PopN(int n) {
1628     DCHECK_GE(stack_.size(), static_cast<size_t>(n));
1629     DCHECK_GT(frames_.size(), 0u);
1630     size_t nsize = stack_.size() - n;
1631     DCHECK_GE(nsize, frames_.back().llimit());  // can't pop into locals
1632     stack_.resize(nsize);
1633   }
1634 
PopArity(size_t arity)1635   WasmVal PopArity(size_t arity) {
1636     if (arity == 0) return WasmVal();
1637     CHECK_EQ(1, arity);
1638     return Pop();
1639   }
1640 
Push(pc_t pc,WasmVal val)1641   void Push(pc_t pc, WasmVal val) {
1642     // TODO(titzer): store PC as well?
1643     stack_.push_back(val);
1644   }
1645 
TraceStack(const char * phase,pc_t pc)1646   void TraceStack(const char* phase, pc_t pc) {
1647     if (FLAG_trace_wasm_interpreter) {
1648       PrintF("%s @%zu", phase, pc);
1649       UNIMPLEMENTED();
1650       PrintF("\n");
1651     }
1652   }
1653 
TraceValueStack()1654   void TraceValueStack() {
1655     Frame* top = frames_.size() > 0 ? &frames_.back() : nullptr;
1656     sp_t sp = top ? top->sp : 0;
1657     sp_t plimit = top ? top->plimit() : 0;
1658     sp_t llimit = top ? top->llimit() : 0;
1659     if (FLAG_trace_wasm_interpreter) {
1660       for (size_t i = sp; i < stack_.size(); ++i) {
1661         if (i < plimit)
1662           PrintF(" p%zu:", i);
1663         else if (i < llimit)
1664           PrintF(" l%zu:", i);
1665         else
1666           PrintF(" s%zu:", i);
1667         WasmVal val = stack_[i];
1668         switch (val.type) {
1669           case kAstI32:
1670             PrintF("i32:%d", val.to<int32_t>());
1671             break;
1672           case kAstI64:
1673             PrintF("i64:%" PRId64 "", val.to<int64_t>());
1674             break;
1675           case kAstF32:
1676             PrintF("f32:%f", val.to<float>());
1677             break;
1678           case kAstF64:
1679             PrintF("f64:%lf", val.to<double>());
1680             break;
1681           case kAstStmt:
1682             PrintF("void");
1683             break;
1684           default:
1685             UNREACHABLE();
1686             break;
1687         }
1688       }
1689     }
1690   }
1691 };
1692 
1693 //============================================================================
1694 // The implementation details of the interpreter.
1695 //============================================================================
1696 class WasmInterpreterInternals : public ZoneObject {
1697  public:
1698   WasmModuleInstance* instance_;
1699   CodeMap codemap_;
1700   ZoneVector<ThreadImpl*> threads_;
1701 
WasmInterpreterInternals(Zone * zone,WasmModuleInstance * instance)1702   WasmInterpreterInternals(Zone* zone, WasmModuleInstance* instance)
1703       : instance_(instance),
1704         codemap_(instance_ ? instance_->module : nullptr, zone),
1705         threads_(zone) {
1706     threads_.push_back(new ThreadImpl(zone, &codemap_, instance));
1707   }
1708 
Delete()1709   void Delete() {
1710     // TODO(titzer): CFI doesn't like threads in the ZoneVector.
1711     for (auto t : threads_) delete t;
1712     threads_.resize(0);
1713   }
1714 };
1715 
1716 //============================================================================
1717 // Implementation of the public interface of the interpreter.
1718 //============================================================================
// Allocates the interpreter internals inside the interpreter's own zone;
// the zone's allocator backs all interpreter state.
WasmInterpreter::WasmInterpreter(WasmModuleInstance* instance,
                                 base::AccountingAllocator* allocator)
    : zone_(allocator),
      internals_(new (&zone_) WasmInterpreterInternals(&zone_, instance)) {}
1723 
~WasmInterpreter()1724 WasmInterpreter::~WasmInterpreter() { internals_->Delete(); }
1725 
Run()1726 void WasmInterpreter::Run() { internals_->threads_[0]->Run(); }
1727 
Pause()1728 void WasmInterpreter::Pause() { internals_->threads_[0]->Pause(); }
1729 
SetBreakpoint(const WasmFunction * function,pc_t pc,bool enabled)1730 bool WasmInterpreter::SetBreakpoint(const WasmFunction* function, pc_t pc,
1731                                     bool enabled) {
1732   InterpreterCode* code = internals_->codemap_.FindCode(function);
1733   if (!code) return false;
1734   size_t size = static_cast<size_t>(code->end - code->start);
1735   // Check bounds for {pc}.
1736   if (pc < code->locals.decls_encoded_size || pc >= size) return false;
1737   // Make a copy of the code before enabling a breakpoint.
1738   if (enabled && code->orig_start == code->start) {
1739     code->start = reinterpret_cast<byte*>(zone_.New(size));
1740     memcpy(code->start, code->orig_start, size);
1741     code->end = code->start + size;
1742   }
1743   bool prev = code->start[pc] == kInternalBreakpoint;
1744   if (enabled) {
1745     code->start[pc] = kInternalBreakpoint;
1746   } else {
1747     code->start[pc] = code->orig_start[pc];
1748   }
1749   return prev;
1750 }
1751 
GetBreakpoint(const WasmFunction * function,pc_t pc)1752 bool WasmInterpreter::GetBreakpoint(const WasmFunction* function, pc_t pc) {
1753   InterpreterCode* code = internals_->codemap_.FindCode(function);
1754   if (!code) return false;
1755   size_t size = static_cast<size_t>(code->end - code->start);
1756   // Check bounds for {pc}.
1757   if (pc < code->locals.decls_encoded_size || pc >= size) return false;
1758   // Check if a breakpoint is present at that place in the code.
1759   return code->start[pc] == kInternalBreakpoint;
1760 }
1761 
// Per-function tracing is not implemented yet. UNIMPLEMENTED() aborts, so
// the return statement is never reached.
bool WasmInterpreter::SetTracing(const WasmFunction* function, bool enabled) {
  UNIMPLEMENTED();
  return false;
}
1766 
// Returns the number of interpreter threads.
int WasmInterpreter::GetThreadCount() {
  return 1;  // only one thread for now.
}
1770 
// Returns the thread with the given {id}; only id 0 is valid for now.
WasmInterpreter::Thread* WasmInterpreter::GetThread(int id) {
  CHECK_EQ(0, id);  // only one thread for now.
  return internals_->threads_[id];
}
1775 
// Reads local {index} of {frame}. Not implemented yet: UNIMPLEMENTED()
// aborts before the placeholder void value is returned.
WasmVal WasmInterpreter::GetLocalVal(const WasmFrame* frame, int index) {
  CHECK_GE(index, 0);
  UNIMPLEMENTED();
  WasmVal none;
  none.type = kAstStmt;
  return none;
}
1783 
// Reads the value produced at {pc} in {frame}. Not implemented yet:
// UNIMPLEMENTED() aborts before the placeholder void value is returned.
WasmVal WasmInterpreter::GetExprVal(const WasmFrame* frame, int pc) {
  UNIMPLEMENTED();
  WasmVal none;
  none.type = kAstStmt;
  return none;
}
1790 
// Writes local {index} of {frame}. Not implemented yet; aborts.
void WasmInterpreter::SetLocalVal(WasmFrame* frame, int index, WasmVal val) {
  UNIMPLEMENTED();
}
1794 
// Overwrites the value produced at {pc} in {frame}. Not implemented; aborts.
void WasmInterpreter::SetExprVal(WasmFrame* frame, int pc, WasmVal val) {
  UNIMPLEMENTED();
}
1798 
// Returns the size in bytes of the instance's linear memory.
size_t WasmInterpreter::GetMemorySize() {
  return internals_->instance_->mem_size;
}
1802 
// Reads a value from linear memory at {offset}. Not implemented yet:
// UNIMPLEMENTED() aborts before the placeholder value is returned.
WasmVal WasmInterpreter::ReadMemory(size_t offset) {
  UNIMPLEMENTED();
  return WasmVal();
}
1807 
// Writes {val} to linear memory at {offset}. Not implemented; aborts.
void WasmInterpreter::WriteMemory(size_t offset, WasmVal val) {
  UNIMPLEMENTED();
}
1811 
// Registers {function} in the code map without code; returns its index.
int WasmInterpreter::AddFunctionForTesting(const WasmFunction* function) {
  return internals_->codemap_.AddFunction(function, nullptr, nullptr);
}
1815 
// Installs the byte range [start, end) as the code of {function}.
// Returns whether the code map accepted the update.
bool WasmInterpreter::SetFunctionCodeForTesting(const WasmFunction* function,
                                                const byte* start,
                                                const byte* end) {
  return internals_->codemap_.SetFunctionCode(function, start, end);
}
1821 
// Computes the control-transfer map for the bytecode in [start, end),
// exposed for tests. The map is built eagerly by the ControlTransfers
// constructor and returned by value.
ControlTransferMap WasmInterpreter::ComputeControlTransfersForTesting(
    Zone* zone, const byte* start, const byte* end) {
  ControlTransfers targets(zone, 0, start, end);
  return targets.map_;
}
1827 
1828 }  // namespace wasm
1829 }  // namespace internal
1830 }  // namespace v8
1831